gcc/tree-ssa-scopedtables.c

/* Scoped hash tables for SSA dominator optimizations.
   Copyright (C) 2013-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "function.h"
#include "basic-block.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-pretty-print.h"
#include "tree-ssa-scopedtables.h"
#include "tree-ssa-threadedge.h"
#include "stor-layout.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "internal-fn.h"
#include "tree-dfa.h"
#include "options.h"
#include "params.h"

static bool hashable_expr_equal_p (const struct hashable_expr *,
                                   const struct hashable_expr *);

/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

/* Pop items off the unwinding stack, removing each from the hash table
   until a marker is encountered.  */
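
/* Each entry pushed by record_expr is a <new, shadowed> pair: the first
   member is the element that was entered into the hash table and must be
   removed on unwind, and the second, when non-NULL, is the element the
   insertion displaced and which must be reinstalled.  A pair whose first
   member is NULL acts as the marker.  */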

void
avail_exprs_stack::pop_to_marker ()
{
  /* Remove all the expressions made available in this block.  */
  while (m_stack.length () > 0)
    {
      std::pair<expr_hash_elt_t, expr_hash_elt_t> victim = m_stack.pop ();
      expr_hash_elt **slot;

      if (victim.first == NULL)
        break;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          victim.first->print (dump_file);
        }

      slot = m_avail_exprs->find_slot (victim.first, NO_INSERT);
      gcc_assert (slot && *slot == victim.first);
      if (victim.second != NULL)
        {
          delete *slot;
          *slot = victim.second;
        }
      else
        m_avail_exprs->clear_slot (slot);
    }
}

/* Add <ELT1,ELT2> to the unwinding stack so they can be later removed
   from the hash table.  */

void
avail_exprs_stack::record_expr (class expr_hash_elt *elt1,
                                class expr_hash_elt *elt2,
                                char type)
{
  if (elt1 && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "%c>>> ", type);
      elt1->print (dump_file);
    }

  m_stack.safe_push (std::pair<expr_hash_elt_t, expr_hash_elt_t> (elt1, elt2));
}

/* Helper for walk_non_aliased_vuses.  Determine if we arrived at
   the desired memory state.  */
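
/* Callback contract (as used by walk_non_aliased_vuses): return DATA
   when the VUSE we were looking for has been reached, NULL to continue
   walking, or (void *)-1 to abort the walk, in which case the walker
   itself returns NULL.  */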

static void *
vuse_eq (ao_ref *, tree vuse1, unsigned int cnt, void *data)
{
  tree vuse2 = (tree) data;
  if (vuse1 == vuse2)
    return data;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores leading to a candidate.  We re-use the SCCVN param
     for this as it is basically the same complexity.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  return NULL;
}

/* Search for an existing instance of STMT in the AVAIL_EXPRS_STACK table.
   If found, return its LHS.  Otherwise insert STMT in the table and
   return NULL_TREE.

   Also, when an expression is first inserted in the table, it is
   added to AVAIL_EXPRS_STACK, so that it can be removed when we
   finish processing this block and its children.  */
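
/* For example, given

       x_1 = a_2 + b_3;
       ...
       y_4 = a_2 + b_3;

   the lookup for the second statement finds the entry recorded for the
   first and returns x_1, allowing the dominator optimizer to replace
   uses of y_4 with x_1.  */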

tree
avail_exprs_stack::lookup_avail_expr (gimple *stmt, bool insert, bool tbaa_p)
{
  expr_hash_elt **slot;
  tree lhs;

  /* Get LHS of phi, assignment, or call; else NULL_TREE.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else
    lhs = gimple_get_lhs (stmt);

  class expr_hash_elt element (stmt, lhs);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "LKUP ");
      element.print (dump_file);
    }

  /* Don't bother remembering constant assignments and copy operations.
     Constants and copy operations are handled by the constant/copy propagator
     in optimize_stmt.  */
  if (element.expr()->kind == EXPR_SINGLE
      && (TREE_CODE (element.expr()->ops.single.rhs) == SSA_NAME
          || is_gimple_min_invariant (element.expr()->ops.single.rhs)))
    return NULL_TREE;

  /* Finally try to find the expression in the main expression hash table.  */
  slot = m_avail_exprs->find_slot (&element, (insert ? INSERT : NO_INSERT));
  if (slot == NULL)
    {
      return NULL_TREE;
    }
  else if (*slot == NULL)
    {
      class expr_hash_elt *element2 = new expr_hash_elt (element);
      *slot = element2;

      record_expr (element2, NULL, '2');
      return NULL_TREE;
    }

  /* If we found a redundant memory operation, do an alias walk to
     check whether we can re-use it.  */
  if (gimple_vuse (stmt) != (*slot)->vop ())
    {
      tree vuse1 = (*slot)->vop ();
      tree vuse2 = gimple_vuse (stmt);
      /* If we have a load of a register and a candidate in the
         hash with vuse1, then try to reach its stmt by walking
         up the virtual use-def chain using walk_non_aliased_vuses.
         But don't do this when removing expressions from the hash.  */
      ao_ref ref;
      if (!(vuse1 && vuse2
            && gimple_assign_single_p (stmt)
            && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
            && (ao_ref_init (&ref, gimple_assign_rhs1 (stmt)),
                ref.base_alias_set = ref.ref_alias_set = tbaa_p ? -1 : 0, true)
            && walk_non_aliased_vuses (&ref, vuse2,
                                       vuse_eq, NULL, NULL, vuse1) != NULL))
        {
          if (insert)
            {
              class expr_hash_elt *element2 = new expr_hash_elt (element);

              /* Insert the expr into the hash by replacing the current
                 entry and recording the value to restore in the
                 avail_exprs_stack.  */
              record_expr (element2, *slot, '2');
              *slot = element2;
            }
          return NULL_TREE;
        }
    }

  /* Extract the LHS of the assignment so that it can be used as the current
     definition of another variable.  */
  lhs = (*slot)->lhs ();

  /* Valueize the result.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      tree tem = SSA_NAME_VALUE (lhs);
      if (tem)
        lhs = tem;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "FIND: ");
      print_generic_expr (dump_file, lhs);
      fprintf (dump_file, "\n");
    }

  return lhs;
}

/* Enter condition equivalence P into the hash table.

   This indicates that a conditional expression has a known
   boolean value.  */
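
/* For example, after recording that a_1 < b_2 is true on the edge being
   traversed, a later lookup of the expression a_1 < b_2 in this scope
   will yield boolean_true_node.  */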

void
avail_exprs_stack::record_cond (cond_equivalence *p)
{
  class expr_hash_elt *element = new expr_hash_elt (&p->cond, p->value);
  expr_hash_elt **slot;

  slot = m_avail_exprs->find_slot_with_hash (element, element->hash (), INSERT);
  if (*slot == NULL)
    {
      *slot = element;
      record_expr (element, NULL, '1');
    }
  else
    delete element;
}

/* Generate a hash value for a pair of expressions.  This can be used
   iteratively by passing a previous result in HSTATE.

   The same hash value is always returned for a given pair of expressions,
   regardless of the order in which they are presented.  This is useful in
   hashing the operands of commutative functions.  */
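
/* E.g. the hash computed for the operand pair (a_1, b_2) equals the hash
   for (b_2, a_1), so a_1 + b_2 and b_2 + a_1 land in the same bucket.  */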

namespace inchash
{

static void
add_expr_commutative (const_tree t1, const_tree t2, hash &hstate)
{
  hash one, two;

  inchash::add_expr (t1, one);
  inchash::add_expr (t2, two);
  hstate.add_commutative (one, two);
}

/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow inchash::add_expr in tree.c.  */

static void
add_hashable_expr (const struct hashable_expr *expr, hash &hstate)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      inchash::add_expr (expr->ops.single.rhs, hstate);
      break;

    case EXPR_UNARY:
      hstate.add_object (expr->ops.unary.op);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        hstate.add_int (TYPE_UNSIGNED (expr->type));

      inchash::add_expr (expr->ops.unary.opnd, hstate);
      break;

    case EXPR_BINARY:
      hstate.add_object (expr->ops.binary.op);
      if (commutative_tree_code (expr->ops.binary.op))
        inchash::add_expr_commutative (expr->ops.binary.opnd0,
                                       expr->ops.binary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.binary.opnd0, hstate);
          inchash::add_expr (expr->ops.binary.opnd1, hstate);
        }
      break;

    case EXPR_TERNARY:
      hstate.add_object (expr->ops.ternary.op);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
        inchash::add_expr_commutative (expr->ops.ternary.opnd0,
                                       expr->ops.ternary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.ternary.opnd0, hstate);
          inchash::add_expr (expr->ops.ternary.opnd1, hstate);
        }
      inchash::add_expr (expr->ops.ternary.opnd2, hstate);
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;
        gcall *fn_from;

        hstate.add_object (code);
        fn_from = expr->ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          hstate.merge_hash ((hashval_t) gimple_call_internal_fn (fn_from));
        else
          inchash::add_expr (gimple_call_fn (fn_from), hstate);
        for (i = 0; i < expr->ops.call.nargs; i++)
          inchash::add_expr (expr->ops.call.args[i], hstate);
      }
      break;

    case EXPR_PHI:
      {
        size_t i;

        for (i = 0; i < expr->ops.phi.nargs; i++)
          inchash::add_expr (expr->ops.phi.args[i], hstate);
      }
      break;

    default:
      gcc_unreachable ();
    }
}

}

/* Hashing and equality functions.  We compute a value number for expressions
   using the code of the expression and the SSA numbers of its operands.  */
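
/* As a special case (below), loads that can be expressed as a base,
   a constant offset, and a constant size are hashed on those three
   values, so that a MEM_REF and a handled component reference reading
   the same bytes hash identically; equal_mem_array_ref_p is the
   matching equality test.  */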

static hashval_t
avail_expr_hash (class expr_hash_elt *p)
{
  const struct hashable_expr *expr = p->expr ();
  inchash::hash hstate;

  if (expr->kind == EXPR_SINGLE)
    {
      /* T could potentially be a switch index or a goto dest.  */
      tree t = expr->ops.single.rhs;
      if (TREE_CODE (t) == MEM_REF || handled_component_p (t))
        {
          /* Make equivalent statements of both these kinds hash together.
             Dealing with both MEM_REF and ARRAY_REF allows us not to care
             about equivalence with other statements not considered here.  */
          bool reverse;
          HOST_WIDE_INT offset, size, max_size;
          tree base = get_ref_base_and_extent (t, &offset, &size, &max_size,
                                               &reverse);
          /* Strictly, we could try to normalize variable-sized accesses too,
             but here we just deal with the common case.  */
          if (size != -1
              && size == max_size)
            {
              enum tree_code code = MEM_REF;
              hstate.add_object (code);
              inchash::add_expr (base, hstate);
              hstate.add_object (offset);
              hstate.add_object (size);
              return hstate.end ();
            }
        }
    }

  inchash::add_hashable_expr (expr, hstate);

  return hstate.end ();
}

/* Compares trees T0 and T1 to see if they are MEM_REF or ARRAY_REFs equivalent
   to each other.  (That is, they return the value of the same bits of memory.)

   Return TRUE if the two are so equivalent; FALSE if not (which could still
   mean the two are equivalent by other means).  */
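
/* For example, given int a[2] and a 32-bit int, the ARRAY_REF a[1] and
   the MEM_REF MEM[(int *)&a + 4B] both decompose to base A, bit offset
   32 and size 32, so they compare equal here even though their tree
   forms differ.  */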

static bool
equal_mem_array_ref_p (tree t0, tree t1)
{
  if (TREE_CODE (t0) != MEM_REF && ! handled_component_p (t0))
    return false;
  if (TREE_CODE (t1) != MEM_REF && ! handled_component_p (t1))
    return false;

  if (!types_compatible_p (TREE_TYPE (t0), TREE_TYPE (t1)))
    return false;

  bool rev0;
  HOST_WIDE_INT off0, sz0, max0;
  tree base0 = get_ref_base_and_extent (t0, &off0, &sz0, &max0, &rev0);
  if (sz0 == -1
      || sz0 != max0)
    return false;

  bool rev1;
  HOST_WIDE_INT off1, sz1, max1;
  tree base1 = get_ref_base_and_extent (t1, &off1, &sz1, &max1, &rev1);
  if (sz1 == -1
      || sz1 != max1)
    return false;

  if (rev0 != rev1)
    return false;

  /* Types were compatible, so this is a sanity check.  */
  gcc_assert (sz0 == sz1);

  return (off0 == off1) && operand_equal_p (base0, base1, 0);
}

/* Compare two hashable_expr structures for equivalence.  They are
   considered equivalent when the expressions they denote must
   necessarily be equal.  The logic is intended to follow that of
   operand_equal_p in fold-const.c.  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If one type is NULL and the other is not, the expressions
     cannot be equal.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If the types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return equal_mem_array_ref_p (expr0->ops.single.rhs,
                                    expr1->ops.single.rhs)
             || operand_equal_p (expr0->ops.single.rhs,
                                 expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
          || !operand_equal_p (expr0->ops.ternary.opnd2,
                               expr1->ops.ternary.opnd2, 0))
        return false;

      /* BIT_INSERT_EXPR has an implicit operand as the type precision
         of op1.  Need to check to make sure they are the same.  */
      if (expr0->ops.ternary.op == BIT_INSERT_EXPR
          && TREE_CODE (expr0->ops.ternary.opnd1) == INTEGER_CST
          && TREE_CODE (expr1->ops.ternary.opnd1) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (expr0->ops.ternary.opnd1))
             != TYPE_PRECISION (TREE_TYPE (expr1->ops.ternary.opnd1)))
        return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
                           expr1->ops.ternary.opnd0, 0)
          && operand_equal_p (expr0->ops.ternary.opnd1,
                              expr1->ops.ternary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
              && operand_equal_p (expr0->ops.ternary.opnd0,
                                  expr1->ops.ternary.opnd1, 0)
              && operand_equal_p (expr0->ops.ternary.opnd1,
                                  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
                                        expr1->ops.call.fn_from))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        if (stmt_could_throw_p (expr0->ops.call.fn_from))
          {
            int lp0 = lookup_stmt_eh_lp (expr0->ops.call.fn_from);
            int lp1 = lookup_stmt_eh_lp (expr1->ops.call.fn_from);
            if ((lp0 > 0 || lp1 > 0) && lp0 != lp1)
              return false;
          }

        return true;
      }

    case EXPR_PHI:
      {
        size_t i;

        if (expr0->ops.phi.nargs != expr1->ops.phi.nargs)
          return false;

        for (i = 0; i < expr0->ops.phi.nargs; i++)
          if (! operand_equal_p (expr0->ops.phi.args[i],
                                 expr1->ops.phi.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}

/* Given a statement STMT, construct a hash table element.  */

expr_hash_elt::expr_hash_elt (gimple *stmt, tree orig_lhs)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = this->expr ();

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          if (CONVERT_EXPR_CODE_P (subcode))
            subcode = NOP_EXPR;
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        case GIMPLE_TERNARY_RHS:
          expr->kind = EXPR_TERNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.ternary.op = subcode;
          expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
          expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      size_t nargs = gimple_call_num_args (call_stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (call_stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (call_stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = call_stmt;

      if (gimple_call_flags (call_stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (call_stmt, i);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    {
      expr->type = TREE_TYPE (gimple_switch_index (swtch_stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (swtch_stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else if (code == GIMPLE_PHI)
    {
      size_t nargs = gimple_phi_num_args (stmt);
      size_t i;

      expr->type = TREE_TYPE (gimple_phi_result (stmt));
      expr->kind = EXPR_PHI;
      expr->ops.phi.nargs = nargs;
      expr->ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
    }
  else
    gcc_unreachable ();

  m_lhs = orig_lhs;
  m_vop = gimple_vuse (stmt);
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}

/* Given a hashable_expr expression ORIG and an ORIG_LHS,
   construct a hash table element.  */

expr_hash_elt::expr_hash_elt (struct hashable_expr *orig, tree orig_lhs)
{
  m_expr = *orig;
  m_lhs = orig_lhs;
  m_vop = NULL_TREE;
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}

/* Copy constructor for a hash table element.  */

expr_hash_elt::expr_hash_elt (class expr_hash_elt &old_elt)
{
  m_expr = old_elt.m_expr;
  m_lhs = old_elt.m_lhs;
  m_vop = old_elt.m_vop;
  m_hash = old_elt.m_hash;
  m_stamp = this;

  /* Now deep copy the malloc'd space for CALL and PHI args.  */
  if (old_elt.m_expr.kind == EXPR_CALL)
    {
      size_t nargs = old_elt.m_expr.ops.call.nargs;
      size_t i;

      m_expr.ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.call.args[i] = old_elt.m_expr.ops.call.args[i];
    }
  else if (old_elt.m_expr.kind == EXPR_PHI)
    {
      size_t nargs = old_elt.m_expr.ops.phi.nargs;
      size_t i;

      m_expr.ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.phi.args[i] = old_elt.m_expr.ops.phi.args[i];
    }
}

/* Calls and PHIs have a variable number of arguments that are allocated
   on the heap.  Thus we need a special dtor to release them.  */

expr_hash_elt::~expr_hash_elt ()
{
  if (m_expr.kind == EXPR_CALL)
    free (m_expr.ops.call.args);
  else if (m_expr.kind == EXPR_PHI)
    free (m_expr.ops.phi.args);
}

/* Print a diagnostic dump of an expression hash table entry.  */

void
expr_hash_elt::print (FILE *stream)
{
  fprintf (stream, "STMT ");

  if (m_lhs)
    {
      print_generic_expr (stream, m_lhs);
      fprintf (stream, " = ");
    }

  switch (m_expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, m_expr.ops.single.rhs);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", get_tree_code_name (m_expr.ops.unary.op));
      print_generic_expr (stream, m_expr.ops.unary.opnd);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, m_expr.ops.binary.opnd0);
      fprintf (stream, " %s ", get_tree_code_name (m_expr.ops.binary.op));
      print_generic_expr (stream, m_expr.ops.binary.opnd1);
      break;

    case EXPR_TERNARY:
      fprintf (stream, " %s <", get_tree_code_name (m_expr.ops.ternary.op));
      print_generic_expr (stream, m_expr.ops.ternary.opnd0);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd1);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd2);
      fputs (">", stream);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = m_expr.ops.call.nargs;
        gcall *fn_from;

        fn_from = m_expr.ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          fputs (internal_fn_name (gimple_call_internal_fn (fn_from)),
                 stream);
        else
          print_generic_expr (stream, gimple_call_fn (fn_from));
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, m_expr.ops.call.args[i]);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;

    case EXPR_PHI:
      {
        size_t i;
        size_t nargs = m_expr.ops.phi.nargs;

        fprintf (stream, "PHI <");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, m_expr.ops.phi.args[i]);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ">");
      }
      break;
    }

  if (m_vop)
    {
      fprintf (stream, " with ");
      print_generic_expr (stream, m_vop);
    }

  fprintf (stream, "\n");
}

/* Pop entries off the stack until we hit the NULL marker.
   For each <PREV_VALUE, DEST> pair popped, restore DEST
   to its prior value PREV_VALUE.  */
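
/* The stack layout mirrors record_const_or_copy_raw below: entries are
   pushed as PREV_VALUE first, then DEST, so DEST comes off first here.
   A NULL in the DEST position is the unwind marker.  */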

void
const_and_copies::pop_to_marker (void)
{
  while (m_stack.length () > 0)
    {
      tree prev_value, dest;

      dest = m_stack.pop ();

      /* A NULL value indicates we should stop unwinding, otherwise
         pop off the next entry as they're recorded in pairs.  */
      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest));
          fprintf (dump_file, "\n");
        }

      prev_value = m_stack.pop ();
      set_ssa_name_value (dest, prev_value);
    }
}

/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant does not follow the value chain for Y.  */

void
const_and_copies::record_const_or_copy_raw (tree x, tree y, tree prev_x)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y);
      fprintf (dump_file, "\n");
    }

  set_ssa_name_value (x, y);
  m_stack.reserve (2);
  m_stack.quick_push (prev_x);
  m_stack.quick_push (x);
}

/* Record that X has the value Y.  */

void
const_and_copies::record_const_or_copy (tree x, tree y)
{
  record_const_or_copy (x, y, SSA_NAME_VALUE (x));
}

/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant follows Y's value chain.  */

void
const_and_copies::record_const_or_copy (tree x, tree y, tree prev_x)
{
  /* Y may be NULL if we are invalidating entries in the table.  */
  if (y && TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      y = tmp ? tmp : y;
    }

  record_const_or_copy_raw (x, y, prev_x);
}

bool
expr_elt_hasher::equal (const value_type &p1, const compare_type &p2)
{
  const struct hashable_expr *expr1 = p1->expr ();
  const struct expr_hash_elt *stamp1 = p1->stamp ();
  const struct hashable_expr *expr2 = p2->expr ();
  const struct expr_hash_elt *stamp2 = p2->stamp ();

  /* This case should apply only when removing entries from the table.  */
  if (stamp1 == stamp2)
    return true;

  if (p1->hash () != p2->hash ())
    return false;

  /* In case of a collision, both RHS have to be identical and have the
     same VUSE operands.  */
  if (hashable_expr_equal_p (expr1, expr2)
      && types_compatible_p (expr1->type, expr2->type))
    return true;

  return false;
}

/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}

/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1 and push it to *P.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           vec<cond_equivalence> *p,
                           bool val = true)
{
  cond_equivalence c;
  struct hashable_expr *cond = &c.cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  c.value = val ? boolean_true_node : boolean_false_node;
  p->safe_push (c);
}

/* Record into *P that COND is true and INVERTED is false.  Also record
   that any conditions implied by COND are true as well.

   For example, if a < b is true, then a <= b must also be true.  */
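
/* Concretely, for an integer comparison a_1 < b_2 known to be true, the
   switch below pushes a_1 <= b_2 as true, a_1 != b_2 as true and
   a_1 == b_2 as false; the original condition and its inverse are then
   pushed afterwards.  */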

void
record_conditions (vec<cond_equivalence> *p, tree cond, tree inverted)
{
  tree op0, op1;
  cond_equivalence c;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
          build_and_record_new_cond (LTGT_EXPR, op0, op1, p);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (EQ_EXPR, op0, op1, p, false);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
        }
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
        }
      build_and_record_new_cond (LE_EXPR, op0, op1, p);
      build_and_record_new_cond (GE_EXPR, op0, op1, p);
      break;

    case UNORDERED_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLT_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGT_EXPR, op0, op1, p);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      break;

    case UNEQ_EXPR:
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      break;

    case LTGT_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
      break;

    default:
      break;
    }

  /* Now push the original true and false conditions.  */
  initialize_expr_from_cond (cond, &c.cond);
  c.value = boolean_true_node;
  p->safe_push (c);

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &c.cond);
  c.value = boolean_false_node;
  p->safe_push (c);
}