/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "real.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code,
   it is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores of the same value
   to the same address from the same starting memory state are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
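
/* A minimal illustration of the iteration scheme above (hypothetical
   GIMPLE, not code used by this pass).  For the SSA cycle

     i_1 = PHI <0 (entry), i_2 (latch)>
     i_2 = i_1 + 0;

   the optimistic walk assumes the back edge agrees with the entry
   value, numbers i_2 to i_1's value and both to 0, and a second
   iteration confirms the assumption.  If the statement were
   i_2 = i_1 + 1 instead, the assumption would fail, the optimistic
   table would be discarded, and both names would end up value
   numbered to themselves.  */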

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

static bool may_insert;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}


/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if (gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
          || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
          || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
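
/* Illustration (hypothetical input): given

     a_2 = b_1 + 1;
     c_3 = a_2;

   vn_get_expr_for (c_3) follows c_3's value number to a_2, finds no
   cached expression, rebuilds b_1 + 1 from a_2's defining statement,
   and caches it in the vn_ssa_aux entry for later calls.  */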


/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist, return 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT; if none exists yet, create a new
   one.  Return the value id in either case.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  vn_constant_t vc = XNEW (struct vn_constant_s);

  vc->hashcode = vn_hash_constant_with_type (constant);
  vc->constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, vc,
                                   vc->hashcode, INSERT);
  if (*slot)
    {
      free (vc);
      return ((vn_constant_t)*slot)->value_id;
    }
  vc->value_id = get_next_value_id ();
  *slot = vc;
  bitmap_set_bit (constant_value_ids, vc->value_id);
  return vc->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
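
/* Hedged usage sketch (hypothetical helper, not part of the pass):
   constants that compare equal under vn_constant_eq_with_type map to
   one and the same value id.  */

static bool ATTRIBUTE_UNUSED
constants_share_value_id_p (tree c1, tree c2)
{
  /* Both calls hash the constant together with its type, so e.g. an
     int 1 and a long 1 get different ids.  */
  return (get_or_alloc_constant_value_id (c1)
          == get_or_alloc_constant_value_id (c2));
}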

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          && types_compatible_p (vro1->type, vro2->type)
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1)
{
  hashval_t result = 0;
  if (vro1->op0)
    result += iterative_hash_expr (vro1->op0, vro1->opcode);
  if (vro1->op1)
    result += iterative_hash_expr (vro1->op1, vro1->opcode);
  if (vro1->op2)
    result += iterative_hash_expr (vro1->op2, vro1->opcode);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result;
  int i;
  vn_reference_op_t vro;

  result = iterative_hash_expr (vr1->vuse, 0);
  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    result += vn_reference_op_compute_hash (vro);

  return result;
}

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  int i;
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))
    return false;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
                             vro))
      return false;

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      temp.op1 = TMR_INDEX (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_STEP (ref);
      temp.op1 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
        {
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          /* The only operand is the address, which gets its own
             vn_reference_op_s structure.  */
          break;
        case MISALIGNED_INDIRECT_REF:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          /* If this is a reference to a union member, record the union
             member size as operand.  Do so only if we are doing
             expression insertion (during FRE), as PRE currently gets
             confused with this.  */
          if (may_insert
              && TREE_OPERAND (ref, 2) == NULL_TREE
              && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
              && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
              && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
            temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
          else
            {
              /* Record field as operand.  */
              temp.op0 = TREE_OPERAND (ref, 1);
              temp.op1 = TREE_OPERAND (ref, 2);
            }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          temp.op2 = TREE_OPERAND (ref, 3);
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
        case EXC_PTR_EXPR:
        case FILTER_EXPR:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (i.e. they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
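
/* Illustration (hypothetical reference tree): a.b[i_1] is flattened
   outermost-first into roughly

     { opcode = ARRAY_REF,     op0 = i_1 }   <- the [i_1] access
     { opcode = COMPONENT_REF, op0 = b   }   <- the .b field decl
     { opcode = VAR_DECL,      op0 = a   }   <- the base object

   with op1/op2 filled in only where the tree carries the optional
   operands.  */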

/* Re-create a reference tree from the reference ops OPS.
   Returns NULL_TREE if the ops were not handled.
   This routine needs to be kept in sync with copy_reference_ops_from_ref.  */

tree
get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree ref, *op0_p = &ref;

  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
        {
        case CALL_EXPR:
          return NULL_TREE;

        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case MISALIGNED_INDIRECT_REF:
          *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case BIT_FIELD_REF:
          *op0_p = build3 (BIT_FIELD_REF, op->type, NULL_TREE,
                           op->op0, op->op1);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case COMPONENT_REF:
          *op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE,
                           op->op0, op->op1);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          *op0_p = build4 (op->opcode, op->type, NULL_TREE,
                           op->op0, op->op1, op->op2);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
        case FILTER_EXPR:
        case EXC_PTR_EXPR:
          *op0_p = op->op0;
          break;

        case ADDR_EXPR:
          if (op->op0 != NULL_TREE)
            {
              gcc_assert (is_gimple_min_invariant (op->op0));
              *op0_p = op->op0;
              break;
            }
          /* Fallthrough.  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        default:
          return NULL_TREE;
        }
    }

  return ref;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  VEC(vn_reference_op_s, heap) *mem = NULL;
  vn_reference_op_t op;
  unsigned int i = *i_p;
  unsigned int j;

  /* Get ops for the addressed object.  */
  op = VEC_index (vn_reference_op_s, *ops, i);
  /* ??? If this is our usual &ARRAY vs. &ARRAY[0] type mismatch, work
     around it to avoid later ICEs.  */
  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
    {
      vn_reference_op_s aref;
      tree dom;
      aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
      aref.opcode = ARRAY_REF;
      aref.op0 = integer_zero_node;
      if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
          && TYPE_MIN_VALUE (dom))
        aref.op0 = TYPE_MIN_VALUE (dom);
      aref.op1 = NULL_TREE;
      aref.op2 = NULL_TREE;
      VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
    }
  copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);

  /* Do the replacement - we should have at least one op in mem now.  */
  if (VEC_length (vn_reference_op_s, mem) == 1)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_ordered_remove (vn_reference_op_s, *ops, i);
      i--;
    }
  else if (VEC_length (vn_reference_op_s, mem) == 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
    }
  else if (VEC_length (vn_reference_op_s, mem) > 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
      /* ??? There is no VEC_splice.  */
      for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
        VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
    }
  else
    gcc_unreachable ();

  VEC_free (vn_reference_op_s, heap, mem);
  *i_p = i;
}
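
/* Illustration of the folding above (hypothetical ops): with p_1
   valueized to &a, where a is an array, the adjacent pair

     { INDIRECT_REF } { ADDR_EXPR &a }

   is replaced by the ops for the addressed object, here

     { ARRAY_REF, op0 = 0 } { VAR_DECL a }

   where the ARRAY_REF entry comes from the &ARRAY vs. &ARRAY[0]
   workaround.  */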

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
          /* If it transforms from an SSA_NAME to an address, fold with
             a preceding indirect reference.  */
          if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
              && VEC_index (vn_reference_op_s,
                            orig, i - 1)->opcode == INDIRECT_REF)
            vn_reference_fold_indirect (&orig, &i);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
    }

  return orig;
}
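
/* For instance (hypothetical op): if i_2 currently has value number 4,
   { opcode = SSA_NAME, op0 = i_2 } becomes
   { opcode = INTEGER_CST, op0 = 4 }, so the references a[i_2] and a[4]
   end up with identical operand vectors and therefore equal hashes.  */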

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (tree op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  /* Fixup vuse and hash.  */
  vr->hashcode = vr->hashcode - iterative_hash_expr (vr->vuse, 0);
  vr->vuse = SSA_VAL (vuse);
  vr->hashcode = vr->hashcode + iterative_hash_expr (vr->vuse, 0);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_refs (operands);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  vn_reference_lookup_1 (&vr1, vnresult);

  if (!*vnresult
      && maywalk
      && vr1.vuse)
    {
      tree ref = get_ref_from_reference_ops (operands);
      if (!ref)
        return NULL_TREE;
      *vnresult =
        (vn_reference_t)walk_non_aliased_vuses (ref, vr1.vuse,
                                                vn_reference_lookup_2, &vr1);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, bool maywalk,
                     vn_reference_t *vnresult)
{
  struct vn_reference_s vr1;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_shared_reference_ops_from_ref (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);

  if (maywalk
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (op, vr1.vuse,
                                                vn_reference_lookup_2, &vr1);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
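
/* Hedged usage sketch (hypothetical helper, not called by the pass):
   value number the memory reference OP as seen by STMT, walking
   non-aliased vuses, and fall back to OP itself on a miss.  */

static tree ATTRIBUTE_UNUSED
vn_value_of_load (tree op, gimple stmt)
{
  tree val = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
  return val ? val : op;
}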


/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Compute and return the hash value for nary operation VNO1.  */

inline hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash = 0;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  for (i = 0; i < vno1->length; ++i)
    hash += iterative_hash_expr (vno1->op[i], vno1->opcode);

  return hash;
}
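
/* E.g. (hypothetical operands): a_1 + b_2 and b_2 + a_1 are
   canonicalized to one fixed operand order above, so the two forms
   hash identically, and vn_nary_op_eq then sees equal operand
   lists.  */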

/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}

/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = TREE_TYPE (gimple_assign_lhs (stmt));
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = TREE_TYPE (gimple_assign_lhs (stmt));
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result = 0;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
             + (INTEGRAL_TYPE_P (type)
                ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
        continue;
      result += iterative_hash_expr (phi1op, result);
    }

  return result;
}
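
/* E.g., PHI <1, 2> of type int and PHI <1, 2> of type long in the same
   block carry no SSA names that could tell them apart, so the type's
   precision and signedness are mixed into the hash (and vn_phi_eq
   re-checks type compatibility) to keep their value numbers
   distinct.  */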

/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
                               TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
        return false;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
        {
          tree phi2op = VEC_index (tree, vp2->phiargs, i);
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}

static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}

/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
                                   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}


/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
              && (to == VN_TOP
                  || TREE_CODE (to) == SSA_NAME
                  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  currval = SSA_VAL (from);

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
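
/* Informal lattice illustration: a name typically moves from VN_TOP to
   a constant or SSA value as iteration proceeds.  Setting x_1 from
   VN_TOP to 5 returns true (changed); setting it to 5 again returns
   false, and that "no change" result is what lets the SCC iteration
   reach a fixpoint.  */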
1558
1559 /* Set all definitions in STMT to value number to themselves.
1560 Return true if a value number changed. */
1561
1562 static bool
1563 defs_to_varying (gimple stmt)
1564 {
1565 bool changed = false;
1566 ssa_op_iter iter;
1567 def_operand_p defp;
1568
1569 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1570 {
1571 tree def = DEF_FROM_PTR (defp);
1572
1573 VN_INFO (def)->use_processed = true;
1574 changed |= set_ssa_val_to (def, def);
1575 }
1576 return changed;
1577 }
1578
1579 static bool expr_has_constants (tree expr);
1580 static tree valueize_expr (tree expr);
1581
1582 /* Visit a copy between LHS and RHS, return true if the value number
1583 changed. */
1584
1585 static bool
1586 visit_copy (tree lhs, tree rhs)
1587 {
1588 /* Follow chains of copies to their destination. */
1589 while (TREE_CODE (rhs) == SSA_NAME
1590 && SSA_VAL (rhs) != rhs)
1591 rhs = SSA_VAL (rhs);
1592
1593 /* The copy may have a more interesting constant filled expression
1594 (we don't, since we know our RHS is just an SSA name). */
1595 if (TREE_CODE (rhs) == SSA_NAME)
1596 {
1597 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1598 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1599 }
1600
1601 return set_ssa_val_to (lhs, rhs);
1602 }
1603
1604 /* Visit a unary operator RHS, value number it, and return true if the
1605 value number of LHS has changed as a result. */
1606
1607 static bool
1608 visit_unary_op (tree lhs, gimple stmt)
1609 {
1610 bool changed = false;
1611 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1612
1613 if (result)
1614 {
1615 changed = set_ssa_val_to (lhs, result);
1616 }
1617 else
1618 {
1619 changed = set_ssa_val_to (lhs, lhs);
1620 vn_nary_op_insert_stmt (stmt, lhs);
1621 }
1622
1623 return changed;
1624 }
1625
1626 /* Visit a binary operator RHS, value number it, and return true if the
1627 value number of LHS has changed as a result. */
1628
1629 static bool
1630 visit_binary_op (tree lhs, gimple stmt)
1631 {
1632 bool changed = false;
1633 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1634
1635 if (result)
1636 {
1637 changed = set_ssa_val_to (lhs, result);
1638 }
1639 else
1640 {
1641 changed = set_ssa_val_to (lhs, lhs);
1642 vn_nary_op_insert_stmt (stmt, lhs);
1643 }
1644
1645 return changed;
1646 }
1647
1648 /* Visit a call STMT storing into LHS. Return true if the value number
1649 of the LHS has changed as a result. */
1650
1651 static bool
1652 visit_reference_op_call (tree lhs, gimple stmt)
1653 {
1654 bool changed = false;
1655 struct vn_reference_s vr1;
1656 tree result;
1657 tree vuse = gimple_vuse (stmt);
1658
1659 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1660 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
1661 vr1.hashcode = vn_reference_compute_hash (&vr1);
1662 result = vn_reference_lookup_1 (&vr1, NULL);
1663 if (result)
1664 {
1665 changed = set_ssa_val_to (lhs, result);
1666 if (TREE_CODE (result) == SSA_NAME
1667 && VN_INFO (result)->has_constants)
1668 VN_INFO (lhs)->has_constants = true;
1669 }
1670 else
1671 {
1672 void **slot;
1673 vn_reference_t vr2;
1674 changed = set_ssa_val_to (lhs, lhs);
1675 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
1676 vr2->vuse = vr1.vuse;
1677 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
1678 vr2->hashcode = vr1.hashcode;
1679 vr2->result = lhs;
1680 slot = htab_find_slot_with_hash (current_info->references,
1681 vr2, vr2->hashcode, INSERT);
1682 if (*slot)
1683 free_reference (*slot);
1684 *slot = vr2;
1685 }
1686
1687 return changed;
1688 }
1689
1690 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1691 and return true if the value number of the LHS has changed as a result. */
1692
1693 static bool
1694 visit_reference_op_load (tree lhs, tree op, gimple stmt)
1695 {
1696 bool changed = false;
1697 tree result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
1698
1699 /* We handle type-punning through unions by value-numbering based
1700 on offset and size of the access. Be prepared to handle a
1701 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
1702 if (result
1703 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
1704 {
1705 /* We will be setting the value number of lhs to the value number
1706 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
1707 So first simplify and lookup this expression to see if it
1708 is already available. */
1709 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
1710 if ((CONVERT_EXPR_P (val)
1711 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
1712 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
1713 {
1714 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
1715 if ((CONVERT_EXPR_P (tem)
1716 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
1717 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
1718 TREE_TYPE (val), tem)))
1719 val = tem;
1720 }
1721 result = val;
1722 if (!is_gimple_min_invariant (val)
1723 && TREE_CODE (val) != SSA_NAME)
1724 result = vn_nary_op_lookup (val, NULL);
1725 /* If the expression is not yet available, value-number lhs to
1726 a new SSA_NAME we create. */
1727 if (!result && may_insert)
1728 {
1729 result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
1730 /* Initialize value-number information properly. */
1731 VN_INFO_GET (result)->valnum = result;
1732 VN_INFO (result)->value_id = get_next_value_id ();
1733 VN_INFO (result)->expr = val;
1734 VN_INFO (result)->has_constants = expr_has_constants (val);
1735 VN_INFO (result)->needs_insertion = true;
1736 /* As all "inserted" statements are singleton SCCs, insert
1737 to the valid table. This is strictly needed to
1738 avoid re-generating new value SSA_NAMEs for the same
1739 expression during SCC iteration over and over (the
1740 optimistic table gets cleared after each iteration).
1741 We do not need to insert into the optimistic table, as
1742 lookups there will fall back to the valid table. */
1743 if (current_info == optimistic_info)
1744 {
1745 current_info = valid_info;
1746 vn_nary_op_insert (val, result);
1747 current_info = optimistic_info;
1748 }
1749 else
1750 vn_nary_op_insert (val, result);
1751 if (dump_file && (dump_flags & TDF_DETAILS))
1752 {
1753 fprintf (dump_file, "Inserting name ");
1754 print_generic_expr (dump_file, result, 0);
1755 fprintf (dump_file, " for expression ");
1756 print_generic_expr (dump_file, val, 0);
1757 fprintf (dump_file, "\n");
1758 }
1759 }
1760 }
1761
1762 if (result)
1763 {
1764 changed = set_ssa_val_to (lhs, result);
1765 if (TREE_CODE (result) == SSA_NAME
1766 && VN_INFO (result)->has_constants)
1767 {
1768 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
1769 VN_INFO (lhs)->has_constants = true;
1770 }
1771 }
1772 else
1773 {
1774 changed = set_ssa_val_to (lhs, lhs);
1775 vn_reference_insert (op, lhs, gimple_vuse (stmt));
1776 }
1777
1778 return changed;
1779 }
1780
1781
1782 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1783 and return true if the value number of the LHS has changed as a result. */
1784
1785 static bool
1786 visit_reference_op_store (tree lhs, tree op, gimple stmt)
1787 {
1788 bool changed = false;
1789 tree result;
1790 bool resultsame = false;
1791
1792 /* First we want to lookup using the *vuses* from the store and see
1793 if there the last store to this location with the same address
1794 had the same value.
1795
1796 The vuses represent the memory state before the store. If the
1797 memory state, address, and value of the store is the same as the
1798 last store to this location, then this store will produce the
1799 same memory state as that store.
1800
1801 In this case the vdef versions for this store are value numbered to those
1802 vuse versions, since they represent the same memory state after
1803 this store.
1804
1805 Otherwise, the vdefs for the store are used when inserting into
1806 the table, since the store generates a new memory state. */
1807
1808 result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);
1809
1810 if (result)
1811 {
1812 if (TREE_CODE (result) == SSA_NAME)
1813 result = SSA_VAL (result);
1814 if (TREE_CODE (op) == SSA_NAME)
1815 op = SSA_VAL (op);
1816 resultsame = expressions_equal_p (result, op);
1817 }
1818
1819 if (!result || !resultsame)
1820 {
1821 tree vdef;
1822
1823 if (dump_file && (dump_flags & TDF_DETAILS))
1824 {
1825 fprintf (dump_file, "No store match\n");
1826 fprintf (dump_file, "Value numbering store ");
1827 print_generic_expr (dump_file, lhs, 0);
1828 fprintf (dump_file, " to ");
1829 print_generic_expr (dump_file, op, 0);
1830 fprintf (dump_file, "\n");
1831 }
1832 /* Have to set value numbers before insert, since insert is
1833 going to valueize the references in-place. */
1834 if ((vdef = gimple_vdef (stmt)))
1835 {
1836 VN_INFO (vdef)->use_processed = true;
1837 changed |= set_ssa_val_to (vdef, vdef);
1838 }
1839
1840 /* Do not insert structure copies into the tables. */
1841 if (is_gimple_min_invariant (op)
1842 || is_gimple_reg (op))
1843 vn_reference_insert (lhs, op, vdef);
1844 }
1845 else
1846 {
1847 /* We had a match, so value number the vdef to have the value
1848 number of the vuse it came from. */
1849 tree def, use;
1850
1851 if (dump_file && (dump_flags & TDF_DETAILS))
1852 fprintf (dump_file, "Store matched earlier value, "
1853 "value numbering store vdefs to matching vuses.\n");
1854
1855 def = gimple_vdef (stmt);
1856 use = gimple_vuse (stmt);
1857
1858 VN_INFO (def)->use_processed = true;
1859 changed |= set_ssa_val_to (def, SSA_VAL (use));
1860 }
1861
1862 return changed;
1863 }
1864
1865 /* Visit and value number PHI, return true if the value number
1866 changed. */
1867
1868 static bool
1869 visit_phi (gimple phi)
1870 {
1871 bool changed = false;
1872 tree result;
1873 tree sameval = VN_TOP;
1874 bool allsame = true;
1875 unsigned i;
1876
1877 /* TODO: We could check for this in init_sccvn, and replace this
1878 with a gcc_assert. */
1879 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
1880 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1881
1882 /* See if all non-TOP arguments have the same value. TOP is
1883 equivalent to everything, so we can ignore it. */
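     For instance (made-up names), when optimistically iterating over

	i_1 = PHI <0(entry), i_3(latch)>

     the latch argument i_3 may still be VN_TOP on the first visit, so
     only the value 0 is seen and i_1 is optimistically numbered 0; a
     later iteration confirms or revokes that choice.  */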
1884 for (i = 0; i < gimple_phi_num_args (phi); i++)
1885 {
1886 tree def = PHI_ARG_DEF (phi, i);
1887
1888 if (TREE_CODE (def) == SSA_NAME)
1889 def = SSA_VAL (def);
1890 if (def == VN_TOP)
1891 continue;
1892 if (sameval == VN_TOP)
1893 {
1894 sameval = def;
1895 }
1896 else
1897 {
1898 if (!expressions_equal_p (def, sameval))
1899 {
1900 allsame = false;
1901 break;
1902 }
1903 }
1904 }
1905
1906 /* If all arguments value numbered to the same value, the PHI node
1907 has that value. */
1908 if (allsame)
1909 {
1910 if (is_gimple_min_invariant (sameval))
1911 {
1912 VN_INFO (PHI_RESULT (phi))->has_constants = true;
1913 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1914 }
1915 else
1916 {
1917 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1918 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1919 }
1920
1921 if (TREE_CODE (sameval) == SSA_NAME)
1922 return visit_copy (PHI_RESULT (phi), sameval);
1923
1924 return set_ssa_val_to (PHI_RESULT (phi), sameval);
1925 }
1926
1927 /* Otherwise, see if it is equivalent to a phi node in this block. */
1928 result = vn_phi_lookup (phi);
1929 if (result)
1930 {
1931 if (TREE_CODE (result) == SSA_NAME)
1932 changed = visit_copy (PHI_RESULT (phi), result);
1933 else
1934 changed = set_ssa_val_to (PHI_RESULT (phi), result);
1935 }
1936 else
1937 {
1938 vn_phi_insert (phi, PHI_RESULT (phi));
1939 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1940 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
1941 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1942 }
1943
1944 return changed;
1945 }
1946
1947 /* Return true if EXPR contains constants. */
1948
1949 static bool
1950 expr_has_constants (tree expr)
1951 {
1952 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1953 {
1954 case tcc_unary:
1955 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
1956
1957 case tcc_binary:
1958 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1959 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1960 /* Constants inside reference ops are rarely interesting, but
1961 it can take a lot of looking to find them. */
1962 case tcc_reference:
1963 case tcc_declaration:
1964 return false;
1965 default:
1966 return is_gimple_min_invariant (expr);
1967 }
1968 return false;
1969 }
1970
1971 /* Return true if STMT contains constants. */
1972
1973 static bool
1974 stmt_has_constants (gimple stmt)
1975 {
1976 if (gimple_code (stmt) != GIMPLE_ASSIGN)
1977 return false;
1978
1979 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
1980 {
1981 case GIMPLE_UNARY_RHS:
1982 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
1983
1984 case GIMPLE_BINARY_RHS:
1985 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
1986 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
1987 case GIMPLE_SINGLE_RHS:
1988 /* Constants inside reference ops are rarely interesting, but
1989 it can take a lot of looking to find them. */
1990 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
1991 default:
1992 gcc_unreachable ();
1993 }
1994 return false;
1995 }
1996
1997 /* Replace SSA_NAMEs in EXPR with their value numbers, and return the
1998 result.
1999 This is performed in place. */
2000
2001 static tree
2002 valueize_expr (tree expr)
2003 {
2004 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2005 {
2006 case tcc_unary:
2007 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2008 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2009 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2010 break;
2011 case tcc_binary:
2012 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2013 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2014 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2015 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2016 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2017 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2018 break;
2019 default:
2020 break;
2021 }
2022 return expr;
2023 }
2024
2025 /* Simplify the binary expression on the RHS of STMT, and return the
2026 result if simplified. */
2027
2028 static tree
2029 simplify_binary_expression (gimple stmt)
2030 {
2031 tree result = NULL_TREE;
2032 tree op0 = gimple_assign_rhs1 (stmt);
2033 tree op1 = gimple_assign_rhs2 (stmt);
2034
2035 /* This will not catch every single case we could combine, but will
2036 catch those with constants. The goal here is to combine
2037 constants between expressions while avoiding infinite
2038 expansion of expressions during simplification. */
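  /* A worked example under these rules (hypothetical names): given

	x_1 = y_2 + 1;
	z_3 = x_1 + 2;

     x_1 has constants, so its expression y_2 + 1 is substituted for
     op0 and fold_binary sees (y_2 + 1) + 2, which it can combine to
     y_2 + 3 -- a valid GIMPLE rhs, so it is returned.  */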
2039 if (TREE_CODE (op0) == SSA_NAME)
2040 {
2041 if (VN_INFO (op0)->has_constants
2042 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2043 op0 = valueize_expr (vn_get_expr_for (op0));
2044 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2045 op0 = SSA_VAL (op0);
2046 }
2047
2048 if (TREE_CODE (op1) == SSA_NAME)
2049 {
2050 if (VN_INFO (op1)->has_constants)
2051 op1 = valueize_expr (vn_get_expr_for (op1));
2052 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2053 op1 = SSA_VAL (op1);
2054 }
2055
2056 /* Avoid folding if nothing changed. */
2057 if (op0 == gimple_assign_rhs1 (stmt)
2058 && op1 == gimple_assign_rhs2 (stmt))
2059 return NULL_TREE;
2060
2061 fold_defer_overflow_warnings ();
2062
2063 result = fold_binary (gimple_assign_rhs_code (stmt),
2064 TREE_TYPE (gimple_get_lhs (stmt)), op0, op1);
2065 if (result)
2066 STRIP_USELESS_TYPE_CONVERSION (result);
2067
2068 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2069 stmt, 0);
2070
2071 /* Make sure result is not a complex expression consisting
2072 of operators of operators (i.e. (a + b) + (a + c)).
2073 Otherwise, we will end up with unbounded expressions if
2074 fold does anything at all. */
2075 if (result && valid_gimple_rhs_p (result))
2076 return result;
2077
2078 return NULL_TREE;
2079 }
2080
2081 /* Simplify the unary expression on the RHS of STMT, and return the
2082 result if simplified. */
2083
2084 static tree
2085 simplify_unary_expression (gimple stmt)
2086 {
2087 tree result = NULL_TREE;
2088 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2089
2090 /* We handle some tcc_reference codes here that are all
2091 GIMPLE_ASSIGN_SINGLE codes. */
2092 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2093 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2094 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2095 op0 = TREE_OPERAND (op0, 0);
2096
2097 if (TREE_CODE (op0) != SSA_NAME)
2098 return NULL_TREE;
2099
2100 orig_op0 = op0;
2101 if (VN_INFO (op0)->has_constants)
2102 op0 = valueize_expr (vn_get_expr_for (op0));
2103 else if (gimple_assign_cast_p (stmt)
2104 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2105 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2106 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2107 {
2108 /* We want to do tree-combining on conversion-like expressions.
2109 Make sure we feed only SSA_NAMEs or constants to fold though. */
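	 E.g. (made-up names) for

	    i_1 = (int) s_2;
	    l_3 = (long) i_1;

	 the expression (int) s_2 is substituted for op0 so that fold
	 sees (long)(int) s_2, which it can collapse to (long) s_2
	 because the intermediate widening does not change the value.  */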
2110 tree tem = valueize_expr (vn_get_expr_for (op0));
2111 if (UNARY_CLASS_P (tem)
2112 || BINARY_CLASS_P (tem)
2113 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2114 || TREE_CODE (tem) == SSA_NAME
2115 || is_gimple_min_invariant (tem))
2116 op0 = tem;
2117 }
2118
2119 /* Avoid folding if nothing changed, but remember the expression. */
2120 if (op0 == orig_op0)
2121 return NULL_TREE;
2122
2123 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2124 gimple_expr_type (stmt), op0);
2125 if (result)
2126 {
2127 STRIP_USELESS_TYPE_CONVERSION (result);
2128 if (valid_gimple_rhs_p (result))
2129 return result;
2130 }
2131
2132 return NULL_TREE;
2133 }
2134
2135 /* Try to simplify RHS using equivalences and constant folding. */
2136
2137 static tree
2138 try_to_simplify (gimple stmt)
2139 {
2140 tree tem;
2141
2142 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
2143 in this case; there is no point in doing extra work. */
2144 if (gimple_assign_copy_p (stmt)
2145 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2146 return NULL_TREE;
2147
2148 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2149 {
2150 case tcc_declaration:
2151 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2152 if (tem)
2153 return tem;
2154 break;
2155
2156 case tcc_reference:
2157 /* Do not do full-blown reference lookup here, but simplify
2158 reads from constant aggregates. */
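	 E.g. (illustrative names) a load x_1 = c[1] from

	    static const int c[2] = { 17, 42 };

	 folds to the constant 42 here, without consulting the tables.  */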
2159 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2160 if (tem)
2161 return tem;
2162
2163 /* Fallthrough for some codes that can operate on registers. */
2164 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2165 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2166 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2167 break;
2168 /* We could do a little more with unary ops, if they expand
2169 into binary ops, but it's debatable whether it is worth it. */
2170 case tcc_unary:
2171 return simplify_unary_expression (stmt);
2172 break;
2173 case tcc_comparison:
2174 case tcc_binary:
2175 return simplify_binary_expression (stmt);
2176 break;
2177 default:
2178 break;
2179 }
2180
2181 return NULL_TREE;
2182 }
2183
2184 /* Visit and value number USE, return true if the value number
2185 changed. */
2186
2187 static bool
2188 visit_use (tree use)
2189 {
2190 bool changed = false;
2191 gimple stmt = SSA_NAME_DEF_STMT (use);
2192
2193 VN_INFO (use)->use_processed = true;
2194
2195 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2196 if (dump_file && (dump_flags & TDF_DETAILS)
2197 && !SSA_NAME_IS_DEFAULT_DEF (use))
2198 {
2199 fprintf (dump_file, "Value numbering ");
2200 print_generic_expr (dump_file, use, 0);
2201 fprintf (dump_file, " stmt = ");
2202 print_gimple_stmt (dump_file, stmt, 0, 0);
2203 }
2204
2205 /* Handle uninitialized uses. */
2206 if (SSA_NAME_IS_DEFAULT_DEF (use))
2207 changed = set_ssa_val_to (use, use);
2208 else
2209 {
2210 if (gimple_code (stmt) == GIMPLE_PHI)
2211 changed = visit_phi (stmt);
2212 else if (!gimple_has_lhs (stmt)
2213 || gimple_has_volatile_ops (stmt)
2214 || stmt_could_throw_p (stmt))
2215 changed = defs_to_varying (stmt);
2216 else if (is_gimple_assign (stmt))
2217 {
2218 tree lhs = gimple_assign_lhs (stmt);
2219 tree simplified;
2220
2221 /* Shortcut for copies. Simplifying copies is pointless,
2222 since we copy the expression and value they represent. */
2223 if (gimple_assign_copy_p (stmt)
2224 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2225 && TREE_CODE (lhs) == SSA_NAME)
2226 {
2227 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2228 goto done;
2229 }
2230 simplified = try_to_simplify (stmt);
2231 if (simplified)
2232 {
2233 if (dump_file && (dump_flags & TDF_DETAILS))
2234 {
2235 fprintf (dump_file, "RHS ");
2236 print_gimple_expr (dump_file, stmt, 0, 0);
2237 fprintf (dump_file, " simplified to ");
2238 print_generic_expr (dump_file, simplified, 0);
2239 if (TREE_CODE (lhs) == SSA_NAME)
2240 fprintf (dump_file, " has constants %d\n",
2241 expr_has_constants (simplified));
2242 else
2243 fprintf (dump_file, "\n");
2244 }
2245 }
2246 /* Setting value numbers to constants will occasionally
2247 screw up phi congruence because constants are not
2248 uniquely associated with a single ssa name that can be
2249 looked up. */
2250 if (simplified
2251 && is_gimple_min_invariant (simplified)
2252 && TREE_CODE (lhs) == SSA_NAME)
2253 {
2254 VN_INFO (lhs)->expr = simplified;
2255 VN_INFO (lhs)->has_constants = true;
2256 changed = set_ssa_val_to (lhs, simplified);
2257 goto done;
2258 }
2259 else if (simplified
2260 && TREE_CODE (simplified) == SSA_NAME
2261 && TREE_CODE (lhs) == SSA_NAME)
2262 {
2263 changed = visit_copy (lhs, simplified);
2264 goto done;
2265 }
2266 else if (simplified)
2267 {
2268 if (TREE_CODE (lhs) == SSA_NAME)
2269 {
2270 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2271 /* We have to unshare the expression or else
2272 valueizing may change the IL stream. */
2273 VN_INFO (lhs)->expr = unshare_expr (simplified);
2274 }
2275 }
2276 else if (stmt_has_constants (stmt)
2277 && TREE_CODE (lhs) == SSA_NAME)
2278 VN_INFO (lhs)->has_constants = true;
2279 else if (TREE_CODE (lhs) == SSA_NAME)
2280 {
2281 /* We reset expr and constantness here because we may
2282 have been value numbering optimistically, and
2283 iterating. They may become non-constant in this case,
2284 even if they were optimistically constant. */
2285
2286 VN_INFO (lhs)->has_constants = false;
2287 VN_INFO (lhs)->expr = NULL_TREE;
2288 }
2289
2290 if ((TREE_CODE (lhs) == SSA_NAME
2291 /* We can substitute SSA_NAMEs that are live over
2292 abnormal edges with their constant value. */
2293 && !(gimple_assign_copy_p (stmt)
2294 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2295 && !(simplified
2296 && is_gimple_min_invariant (simplified))
2297 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2298 /* Stores or copies from SSA_NAMEs that are live over
2299 abnormal edges are a problem. */
2300 || (gimple_assign_single_p (stmt)
2301 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2302 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
2303 changed = defs_to_varying (stmt);
2304 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2305 {
2306 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2307 }
2308 else if (TREE_CODE (lhs) == SSA_NAME)
2309 {
2310 if ((gimple_assign_copy_p (stmt)
2311 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2312 || (simplified
2313 && is_gimple_min_invariant (simplified)))
2314 {
2315 VN_INFO (lhs)->has_constants = true;
2316 if (simplified)
2317 changed = set_ssa_val_to (lhs, simplified);
2318 else
2319 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
2320 }
2321 else
2322 {
2323 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2324 {
2325 case GIMPLE_UNARY_RHS:
2326 changed = visit_unary_op (lhs, stmt);
2327 break;
2328 case GIMPLE_BINARY_RHS:
2329 changed = visit_binary_op (lhs, stmt);
2330 break;
2331 case GIMPLE_SINGLE_RHS:
2332 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2333 {
2334 case tcc_reference:
2335 /* VOP-less references can go through unary case. */
2336 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2337 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2338 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR )
2339 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2340 {
2341 changed = visit_unary_op (lhs, stmt);
2342 break;
2343 }
2344 /* Fallthrough. */
2345 case tcc_declaration:
2346 changed = visit_reference_op_load
2347 (lhs, gimple_assign_rhs1 (stmt), stmt);
2348 break;
2349 case tcc_expression:
2350 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2351 {
2352 changed = visit_unary_op (lhs, stmt);
2353 break;
2354 }
2355 /* Fallthrough. */
2356 default:
2357 changed = defs_to_varying (stmt);
2358 }
2359 break;
2360 default:
2361 changed = defs_to_varying (stmt);
2362 break;
2363 }
2364 }
2365 }
2366 else
2367 changed = defs_to_varying (stmt);
2368 }
2369 else if (is_gimple_call (stmt))
2370 {
2371 tree lhs = gimple_call_lhs (stmt);
2372
2373 /* ??? We could try to simplify calls. */
2374
2375 if (stmt_has_constants (stmt)
2376 && TREE_CODE (lhs) == SSA_NAME)
2377 VN_INFO (lhs)->has_constants = true;
2378 else if (TREE_CODE (lhs) == SSA_NAME)
2379 {
2380 /* We reset expr and constantness here because we may
2381 have been value numbering optimistically, and
2382 iterating. They may become non-constant in this case,
2383 even if they were optimistically constant. */
2384 VN_INFO (lhs)->has_constants = false;
2385 VN_INFO (lhs)->expr = NULL_TREE;
2386 }
2387
2388 if (TREE_CODE (lhs) == SSA_NAME
2389 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2390 changed = defs_to_varying (stmt);
2391 /* ??? We should handle stores from calls. */
2392 else if (TREE_CODE (lhs) == SSA_NAME)
2393 {
2394 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2395 changed = visit_reference_op_call (lhs, stmt);
2396 else
2397 changed = defs_to_varying (stmt);
2398 }
2399 else
2400 changed = defs_to_varying (stmt);
2401 }
2402 }
2403 done:
2404 return changed;
2405 }
2406
2407 /* Compare two operands by reverse postorder index. */
2408
2409 static int
2410 compare_ops (const void *pa, const void *pb)
2411 {
2412 const tree opa = *((const tree *)pa);
2413 const tree opb = *((const tree *)pb);
2414 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2415 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2416 basic_block bba;
2417 basic_block bbb;
2418
2419 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2420 return 0;
2421 else if (gimple_nop_p (opstmta))
2422 return -1;
2423 else if (gimple_nop_p (opstmtb))
2424 return 1;
2425
2426 bba = gimple_bb (opstmta);
2427 bbb = gimple_bb (opstmtb);
2428
2429 if (!bba && !bbb)
2430 return 0;
2431 else if (!bba)
2432 return -1;
2433 else if (!bbb)
2434 return 1;
2435
2436 if (bba == bbb)
2437 {
2438 if (gimple_code (opstmta) == GIMPLE_PHI
2439 && gimple_code (opstmtb) == GIMPLE_PHI)
2440 return 0;
2441 else if (gimple_code (opstmta) == GIMPLE_PHI)
2442 return -1;
2443 else if (gimple_code (opstmtb) == GIMPLE_PHI)
2444 return 1;
2445 return gimple_uid (opstmta) - gimple_uid (opstmtb);
2446 }
2447 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
2448 }
2449
2450 /* Sort an array containing members of a strongly connected component
2451 SCC so that the members are ordered by RPO number.
2452 This means that when the sort is complete, iterating through the
2453 array will give you the members in RPO order. */
2454
2455 static void
2456 sort_scc (VEC (tree, heap) *scc)
2457 {
2458 qsort (VEC_address (tree, scc),
2459 VEC_length (tree, scc),
2460 sizeof (tree),
2461 compare_ops);
2462 }
2463
2464 /* Process a strongly connected component in the SSA graph. */
2465
2466 static void
2467 process_scc (VEC (tree, heap) *scc)
2468 {
2469 /* If the SCC has a single member, just visit it. */
2470
2471 if (VEC_length (tree, scc) == 1)
2472 {
2473 tree use = VEC_index (tree, scc, 0);
2474 if (!VN_INFO (use)->use_processed)
2475 visit_use (use);
2476 }
2477 else
2478 {
2479 tree var;
2480 unsigned int i;
2481 unsigned int iterations = 0;
2482 bool changed = true;
2483
2484 /* Iterate over the SCC with the optimistic table until it stops
2485 changing. */
2486 current_info = optimistic_info;
2487 while (changed)
2488 {
2489 changed = false;
2490 iterations++;
2491 /* As we are value-numbering optimistically we have to
2492 clear the expression tables and the simplified expressions
2493 in each iteration until we converge. */
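	  /* E.g. (made-up names) for the cycle

		i_1 = PHI <0, i_3>
		i_3 = i_1 + 1;

	     iteration 1 optimistically records i_1 == 0 and i_3 == 1;
	     once the back edge value is seen those entries are wrong,
	     so none of them may survive into the next iteration.  */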
2494 htab_empty (optimistic_info->nary);
2495 htab_empty (optimistic_info->phis);
2496 htab_empty (optimistic_info->references);
2497 obstack_free (&optimistic_info->nary_obstack, NULL);
2498 gcc_obstack_init (&optimistic_info->nary_obstack);
2499 empty_alloc_pool (optimistic_info->phis_pool);
2500 empty_alloc_pool (optimistic_info->references_pool);
2501 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2502 VN_INFO (var)->expr = NULL_TREE;
2503 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2504 changed |= visit_use (var);
2505 }
2506
2507 statistics_histogram_event (cfun, "SCC iterations", iterations);
2508
2509 /* Finally, visit the SCC once using the valid table. */
2510 current_info = valid_info;
2511 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2512 visit_use (var);
2513 }
2514 }
2515
2516 DEF_VEC_O(ssa_op_iter);
2517 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
2518
2519 /* Pop the components of the found SCC for NAME off the SCC stack
2520 and process them. Returns true if all went well, false if
2521 we run into resource limits. */
2522
2523 static bool
2524 extract_and_process_scc_for_name (tree name)
2525 {
2526 VEC (tree, heap) *scc = NULL;
2527 tree x;
2528
2529 /* Found an SCC, pop the components off the SCC stack and
2530 process them. */
2531 do
2532 {
2533 x = VEC_pop (tree, sccstack);
2534
2535 VN_INFO (x)->on_sccstack = false;
2536 VEC_safe_push (tree, heap, scc, x);
2537 } while (x != name);
2538
2539 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
2540 if (VEC_length (tree, scc)
2541 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
2542 {
2543 if (dump_file)
2544 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
2545 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
2546 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
2547 return false;
2548 }
2549
2550 if (VEC_length (tree, scc) > 1)
2551 sort_scc (scc);
2552
2553 if (dump_file && (dump_flags & TDF_DETAILS))
2554 print_scc (dump_file, scc);
2555
2556 process_scc (scc);
2557
2558 VEC_free (tree, heap, scc);
2559
2560 return true;
2561 }
2562
2563 /* Depth-first search on NAME to discover and process SCCs in the SSA
2564 graph.
2565 Execution of this algorithm relies on the fact that the SCCs are
2566 popped off the stack in topological order.
2567 Returns true if successful, false if we stopped processing SCCs due
2568 to resource constraints. */
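/* A worked illustration (hypothetical SSA names): for

     i_1 = PHI <0, i_2>
     i_2 = i_1 + 1;

   DFS (i_1) recurses into the PHI argument i_2, whose definition uses
   i_1 while i_1 is still on the SCC stack, so i_2's low value drops to
   i_1's dfs number; back at i_1, low == dfsnum and {i_1, i_2} is
   popped and processed as one SCC.  */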
2569
2570 static bool
2571 DFS (tree name)
2572 {
2573 VEC(ssa_op_iter, heap) *itervec = NULL;
2574 VEC(tree, heap) *namevec = NULL;
2575 use_operand_p usep = NULL;
2576 gimple defstmt;
2577 tree use;
2578 ssa_op_iter iter;
2579
2580 start_over:
2581 /* SCC info */
2582 VN_INFO (name)->dfsnum = next_dfs_num++;
2583 VN_INFO (name)->visited = true;
2584 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
2585
2586 VEC_safe_push (tree, heap, sccstack, name);
2587 VN_INFO (name)->on_sccstack = true;
2588 defstmt = SSA_NAME_DEF_STMT (name);
2589
2590 /* Recursively DFS on our operands, looking for SCCs. */
2591 if (!gimple_nop_p (defstmt))
2592 {
2593 /* Push a new iterator. */
2594 if (gimple_code (defstmt) == GIMPLE_PHI)
2595 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
2596 else
2597 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
2598 }
2599 else
2600 clear_and_done_ssa_iter (&iter);
2601
2602 while (1)
2603 {
2604 /* If we are done processing uses of a name, go up the stack
2605 of iterators and process SCCs as we find them. */
2606 if (op_iter_done (&iter))
2607 {
2608 /* See if we found an SCC. */
2609 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
2610 if (!extract_and_process_scc_for_name (name))
2611 {
2612 VEC_free (tree, heap, namevec);
2613 VEC_free (ssa_op_iter, heap, itervec);
2614 return false;
2615 }
2616
2617 /* Check if we are done. */
2618 if (VEC_empty (tree, namevec))
2619 {
2620 VEC_free (tree, heap, namevec);
2621 VEC_free (ssa_op_iter, heap, itervec);
2622 return true;
2623 }
2624
2625 /* Restore the last use walker and continue walking there. */
2626 use = name;
2627 name = VEC_pop (tree, namevec);
2628 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
2629 sizeof (ssa_op_iter));
2630 VEC_pop (ssa_op_iter, itervec);
2631 goto continue_walking;
2632 }
2633
2634 use = USE_FROM_PTR (usep);
2635
2636 /* Since we handle phi nodes, we will sometimes get
2637 invariants in the use expression. */
2638 if (TREE_CODE (use) == SSA_NAME)
2639 {
2640 if (! (VN_INFO (use)->visited))
2641 {
2642 /* Recurse by pushing the current use walking state on
2643 the stack and starting over. */
2644 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
2645 VEC_safe_push(tree, heap, namevec, name);
2646 name = use;
2647 goto start_over;
2648
2649 continue_walking:
2650 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
2651 VN_INFO (use)->low);
2652 }
2653 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
2654 && VN_INFO (use)->on_sccstack)
2655 {
2656 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
2657 VN_INFO (name)->low);
2658 }
2659 }
2660
2661 usep = op_iter_next_use (&iter);
2662 }
2663 }
2664
2665 /* Allocate a value number table. */
2666
2667 static void
2668 allocate_vn_table (vn_tables_t table)
2669 {
2670 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
2671 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
2672 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
2673 free_reference);
2674
2675 gcc_obstack_init (&table->nary_obstack);
2676 table->phis_pool = create_alloc_pool ("VN phis",
2677 sizeof (struct vn_phi_s),
2678 30);
2679 table->references_pool = create_alloc_pool ("VN references",
2680 sizeof (struct vn_reference_s),
2681 30);
2682 }
2683
2684 /* Free a value number table. */
2685
2686 static void
2687 free_vn_table (vn_tables_t table)
2688 {
2689 htab_delete (table->phis);
2690 htab_delete (table->nary);
2691 htab_delete (table->references);
2692 obstack_free (&table->nary_obstack, NULL);
2693 free_alloc_pool (table->phis_pool);
2694 free_alloc_pool (table->references_pool);
2695 }
2696
2697 static void
2698 init_scc_vn (void)
2699 {
2700 size_t i;
2701 int j;
2702 int *rpo_numbers_temp;
2703
2704 calculate_dominance_info (CDI_DOMINATORS);
2705 sccstack = NULL;
2706 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
2707 free);
2708
2709 constant_value_ids = BITMAP_ALLOC (NULL);
2710
2711 next_dfs_num = 1;
2712 next_value_id = 1;
2713
2714 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
2715 /* VEC_alloc doesn't actually grow it to the right size; it just
2716 preallocates the space to do so. */
2717 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
2718 gcc_obstack_init (&vn_ssa_aux_obstack);
2719
2720 shared_lookup_phiargs = NULL;
2721 shared_lookup_references = NULL;
2722 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2723 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2724 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
2725
2726 /* rpo_numbers_temp holds the blocks in RPO order: rpo[i] = bb means
2727 that the i'th block in RPO order is bb. We want the inverse map
2728 from block index to RPO number, so we need to rearrange this array. */
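/* E.g. if rpo_numbers_temp is {4, 2, 3}, block 4 comes first in RPO,
   so afterwards rpo_numbers[4] == 0, rpo_numbers[2] == 1 and
   rpo_numbers[3] == 2.  */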
2729 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
2730 rpo_numbers[rpo_numbers_temp[j]] = j;
2731
2732 XDELETE (rpo_numbers_temp);
2733
2734 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
2735
2736 /* Create the VN_INFO structures, and initialize value numbers to
2737 TOP. */
2738 for (i = 0; i < num_ssa_names; i++)
2739 {
2740 tree name = ssa_name (i);
2741 if (name)
2742 {
2743 VN_INFO_GET (name)->valnum = VN_TOP;
2744 VN_INFO (name)->expr = NULL_TREE;
2745 VN_INFO (name)->value_id = 0;
2746 }
2747 }
2748
2749 renumber_gimple_stmt_uids ();
2750
2751 /* Create the valid and optimistic value numbering tables. */
2752 valid_info = XCNEW (struct vn_tables_s);
2753 allocate_vn_table (valid_info);
2754 optimistic_info = XCNEW (struct vn_tables_s);
2755 allocate_vn_table (optimistic_info);
2756 }
2757
2758 void
2759 free_scc_vn (void)
2760 {
2761 size_t i;
2762
2763 htab_delete (constant_to_value_id);
2764 BITMAP_FREE (constant_value_ids);
2765 VEC_free (tree, heap, shared_lookup_phiargs);
2766 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
2767 XDELETEVEC (rpo_numbers);
2768
2769 for (i = 0; i < num_ssa_names; i++)
2770 {
2771 tree name = ssa_name (i);
2772 if (name
2773 && VN_INFO (name)->needs_insertion)
2774 release_ssa_name (name);
2775 }
2776 obstack_free (&vn_ssa_aux_obstack, NULL);
2777 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
2778
2779 VEC_free (tree, heap, sccstack);
2780 free_vn_table (valid_info);
2781 XDELETE (valid_info);
2782 free_vn_table (optimistic_info);
2783 XDELETE (optimistic_info);
2784 }
2785
2786 /* Set the value ids in the valid hash tables. */
2787
2788 static void
2789 set_hashtable_value_ids (void)
2790 {
2791 htab_iterator hi;
2792 vn_nary_op_t vno;
2793 vn_reference_t vr;
2794 vn_phi_t vp;
2795
2796 /* Now set the value ids of the things we had put in the hash
2797 table. */
2798
2799 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
2800 vno, vn_nary_op_t, hi)
2801 {
2802 if (vno->result)
2803 {
2804 if (TREE_CODE (vno->result) == SSA_NAME)
2805 vno->value_id = VN_INFO (vno->result)->value_id;
2806 else if (is_gimple_min_invariant (vno->result))
2807 vno->value_id = get_or_alloc_constant_value_id (vno->result);
2808 }
2809 }
2810
2811 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
2812 vp, vn_phi_t, hi)
2813 {
2814 if (vp->result)
2815 {
2816 if (TREE_CODE (vp->result) == SSA_NAME)
2817 vp->value_id = VN_INFO (vp->result)->value_id;
2818 else if (is_gimple_min_invariant (vp->result))
2819 vp->value_id = get_or_alloc_constant_value_id (vp->result);
2820 }
2821 }
2822
2823 FOR_EACH_HTAB_ELEMENT (valid_info->references,
2824 vr, vn_reference_t, hi)
2825 {
2826 if (vr->result)
2827 {
2828 if (TREE_CODE (vr->result) == SSA_NAME)
2829 vr->value_id = VN_INFO (vr->result)->value_id;
2830 else if (is_gimple_min_invariant (vr->result))
2831 vr->value_id = get_or_alloc_constant_value_id (vr->result);
2832 }
2833 }
2834 }
2835
2836 /* Do SCCVN. Returns true if it finished, false if we bailed out
2837 due to resource constraints. */
2838
2839 bool
2840 run_scc_vn (bool may_insert_arg)
2841 {
2842 size_t i;
2843 tree param;
2844 bool changed = true;
2845
2846 may_insert = may_insert_arg;
2847
2848 init_scc_vn ();
2849 current_info = valid_info;
2850
2851 for (param = DECL_ARGUMENTS (current_function_decl);
2852 param;
2853 param = TREE_CHAIN (param))
2854 {
2855 if (gimple_default_def (cfun, param) != NULL)
2856 {
2857 tree def = gimple_default_def (cfun, param);
2858 VN_INFO (def)->valnum = def;
2859 }
2860 }
2861
2862 for (i = 1; i < num_ssa_names; ++i)
2863 {
2864 tree name = ssa_name (i);
2865 if (name
2866 && VN_INFO (name)->visited == false
2867 && !has_zero_uses (name))
2868 if (!DFS (name))
2869 {
2870 free_scc_vn ();
2871 may_insert = false;
2872 return false;
2873 }
2874 }
2875
2876 /* Initialize the value ids. */
2877
2878 for (i = 1; i < num_ssa_names; ++i)
2879 {
2880 tree name = ssa_name (i);
2881 vn_ssa_aux_t info;
2882 if (!name)
2883 continue;
2884 info = VN_INFO (name);
2885 if (info->valnum == name)
2886 info->value_id = get_next_value_id ();
2887 else if (is_gimple_min_invariant (info->valnum))
2888 info->value_id = get_or_alloc_constant_value_id (info->valnum);
2889 }
2890
2891 /* Propagate value ids along the valnum chains until they stop changing. */
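/* E.g. (made-up names) if a_1's valnum is b_2 and b_2's valnum is
   c_3, a_1 may copy b_2's value id before b_2 has copied c_3's, so a
   second pass is needed before everything agrees.  */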
2892 while (changed)
2893 {
2894 changed = false;
2895 for (i = 1; i < num_ssa_names; ++i)
2896 {
2897 tree name = ssa_name (i);
2898 vn_ssa_aux_t info;
2899 if (!name)
2900 continue;
2901 info = VN_INFO (name);
2902 if (TREE_CODE (info->valnum) == SSA_NAME
2903 && info->valnum != name
2904 && info->value_id != VN_INFO (info->valnum)->value_id)
2905 {
2906 changed = true;
2907 info->value_id = VN_INFO (info->valnum)->value_id;
2908 }
2909 }
2910 }
2911
2912 set_hashtable_value_ids ();
2913
2914 if (dump_file && (dump_flags & TDF_DETAILS))
2915 {
2916 fprintf (dump_file, "Value numbers:\n");
2917 for (i = 0; i < num_ssa_names; i++)
2918 {
2919 tree name = ssa_name (i);
2920 if (name
2921 && VN_INFO (name)->visited
2922 && SSA_VAL (name) != name)
2923 {
2924 print_generic_expr (dump_file, name, 0);
2925 fprintf (dump_file, " = ");
2926 print_generic_expr (dump_file, SSA_VAL (name), 0);
2927 fprintf (dump_file, "\n");
2928 }
2929 }
2930 }
2931
2932 may_insert = false;
2933 return true;
2934 }
2935
2936 /* Return the maximum value id we have ever seen. */
2937
2938 unsigned int
2939 get_max_value_id (void)
2940 {
2941 return next_value_id;
2942 }
2943
2944 /* Return the next unique value id. */
2945
2946 unsigned int
2947 get_next_value_id (void)
2948 {
2949 return next_value_id++;
2950 }
2951
2952
2953 /* Compare two expressions E1 and E2 and return true if they are equal. */
2954
2955 bool
2956 expressions_equal_p (tree e1, tree e2)
2957 {
2958 /* The obvious case. */
2959 if (e1 == e2)
2960 return true;
2961
2962 /* If only one of them is null, they cannot be equal. */
2963 if (!e1 || !e2)
2964 return false;
2965
2966 /* Recurse on elements of lists. */
2967 if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
2968 {
2969 tree lop1;
2970 tree lop2;
2971 for (lop1 = e1, lop2 = e2;
2972 lop1 || lop2;
2973 lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
2974 {
2975 if (!lop1 || !lop2)
2976 return false;
2977 if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
2978 return false;
2979 }
2980 return true;
2981 }
2982
2983 /* Now perform the actual comparison. */
2984 if (TREE_CODE (e1) == TREE_CODE (e2)
2985 && operand_equal_p (e1, e2, OEP_PURE_SAME))
2986 return true;
2987
2988 return false;
2989 }
2990
2991
2992 /* Return true if the nary operation NARY may trap. This is a copy
2993 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
2994
2995 bool
2996 vn_nary_may_trap (vn_nary_op_t nary)
2997 {
2998 tree type;
2999 tree rhs2;
3000 bool honor_nans = false;
3001 bool honor_snans = false;
3002 bool fp_operation = false;
3003 bool honor_trapv = false;
3004 bool handled, ret;
3005 unsigned i;
3006
3007 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3008 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3009 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3010 {
3011 type = nary->type;
3012 fp_operation = FLOAT_TYPE_P (type);
3013 if (fp_operation)
3014 {
3015 honor_nans = flag_trapping_math && !flag_finite_math_only;
3016 honor_snans = flag_signaling_nans != 0;
3017 }
3018 else if (INTEGRAL_TYPE_P (type)
3019 && TYPE_OVERFLOW_TRAPS (type))
3020 honor_trapv = true;
3021 }
3022 rhs2 = nary->op[1];
3023 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3024 honor_trapv,
3025 honor_nans, honor_snans, rhs2,
3026 &handled);
3027 if (handled
3028 && ret)
3029 return true;
3030
3031 for (i = 0; i < nary->length; ++i)
3032 if (tree_could_trap_p (nary->op[i]))
3033 return true;
3034
3035 return false;
3036 }