re PR tree-optimization/37145 (XFAILs from PRE rewrite, SCCVN union optimization...
[gcc.git] / gcc / tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "tree.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "fibheap.h"
36 #include "hashtab.h"
37 #include "tree-iterator.h"
38 #include "real.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
41 #include "flags.h"
42 #include "bitmap.h"
43 #include "langhooks.h"
44 #include "cfgloop.h"
45 #include "params.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
48
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
54
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
58
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
63
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
67 operands).
68
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
71 some nice properties.
72
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
77
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
81
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
84
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
89 identities.
90
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
94 equivalent.
95 TODO:
96
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
103 structure copies.
104 */
105
/* The set of hashtables and alloc_pool's for their items.  One such
   set exists for the proven ("valid") information and one for the
   optimistic information used while iterating SCCs.  */

typedef struct vn_tables_s
{
  htab_t nary;			/* n-ary operation -> value.  */
  htab_t phis;			/* PHI node -> value.  */
  htab_t references;		/* memory reference -> value.  */
  struct obstack nary_obstack;	/* Storage backing the nary entries.  */
  alloc_pool phis_pool;		/* Allocator for phi table entries.  */
  alloc_pool references_pool;	/* Allocator for reference table entries.  */
} *vn_tables_t;
117
/* Map from constant trees to their value ids, and the set of value
   ids that are known to stand for constants.  */

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

/* Shorthand for the current value number of SSA name X.  */
#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

/* Whether expression insertion is allowed; see the union-member
   handling in copy_reference_ops_from_ref, which is only done during
   FRE.  */

static bool may_insert;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
172
173 /* Return the value numbering information for a given SSA name. */
174
175 vn_ssa_aux_t
176 VN_INFO (tree name)
177 {
178 vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
179 SSA_NAME_VERSION (name));
180 gcc_assert (res);
181 return res;
182 }
183
184 /* Set the value numbering info for a given SSA name to a given
185 value. */
186
187 static inline void
188 VN_INFO_SET (tree name, vn_ssa_aux_t value)
189 {
190 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
191 SSA_NAME_VERSION (name), value);
192 }
193
194 /* Initialize the value numbering info for a given SSA name.
195 This should be called just once for every SSA name. */
196
197 vn_ssa_aux_t
198 VN_INFO_GET (tree name)
199 {
200 vn_ssa_aux_t newinfo;
201
202 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
203 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
204 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
205 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
206 SSA_NAME_VERSION (name) + 1);
207 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
208 SSA_NAME_VERSION (name), newinfo);
209 return newinfo;
210 }
211
212
213 /* Get the representative expression for the SSA_NAME NAME. Returns
214 the representative SSA_NAME if there is no expression associated with it. */
215
216 tree
217 vn_get_expr_for (tree name)
218 {
219 vn_ssa_aux_t vn = VN_INFO (name);
220 gimple def_stmt;
221 tree expr = NULL_TREE;
222
223 if (vn->valnum == VN_TOP)
224 return name;
225
226 /* If the value-number is a constant it is the representative
227 expression. */
228 if (TREE_CODE (vn->valnum) != SSA_NAME)
229 return vn->valnum;
230
231 /* Get to the information of the value of this SSA_NAME. */
232 vn = VN_INFO (vn->valnum);
233
234 /* If the value-number is a constant it is the representative
235 expression. */
236 if (TREE_CODE (vn->valnum) != SSA_NAME)
237 return vn->valnum;
238
239 /* Else if we have an expression, return it. */
240 if (vn->expr != NULL_TREE)
241 return vn->expr;
242
243 /* Otherwise use the defining statement to build the expression. */
244 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
245
246 /* If the value number is a default-definition or a PHI result
247 use it directly. */
248 if (gimple_nop_p (def_stmt)
249 || gimple_code (def_stmt) == GIMPLE_PHI)
250 return vn->valnum;
251
252 if (!is_gimple_assign (def_stmt))
253 return vn->valnum;
254
255 /* FIXME tuples. This is incomplete and likely will miss some
256 simplifications. */
257 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
258 {
259 case tcc_reference:
260 if (gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
261 && gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
262 && gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
263 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
264 gimple_expr_type (def_stmt),
265 TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
266 break;
267
268 case tcc_unary:
269 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
270 gimple_expr_type (def_stmt),
271 gimple_assign_rhs1 (def_stmt));
272 break;
273
274 case tcc_binary:
275 expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
276 gimple_expr_type (def_stmt),
277 gimple_assign_rhs1 (def_stmt),
278 gimple_assign_rhs2 (def_stmt));
279 break;
280
281 default:;
282 }
283 if (expr == NULL_TREE)
284 return vn->valnum;
285
286 /* Cache the expression. */
287 vn->expr = expr;
288
289 return expr;
290 }
291
292
293 /* Free a phi operation structure VP. */
294
295 static void
296 free_phi (void *vp)
297 {
298 vn_phi_t phi = (vn_phi_t) vp;
299 VEC_free (tree, heap, phi->phiargs);
300 }
301
302 /* Free a reference operation structure VP. */
303
304 static void
305 free_reference (void *vp)
306 {
307 vn_reference_t vr = (vn_reference_t) vp;
308 VEC_free (vn_reference_op_s, heap, vr->operands);
309 }
310
311 /* Hash table equality function for vn_constant_t. */
312
313 static int
314 vn_constant_eq (const void *p1, const void *p2)
315 {
316 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
317 const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
318
319 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
320 }
321
322 /* Hash table hash function for vn_constant_t. */
323
324 static hashval_t
325 vn_constant_hash (const void *p1)
326 {
327 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
328 return vc1->hashcode;
329 }
330
331 /* Lookup a value id for CONSTANT and return it. If it does not
332 exist returns 0. */
333
334 unsigned int
335 get_constant_value_id (tree constant)
336 {
337 void **slot;
338 struct vn_constant_s vc;
339
340 vc.hashcode = vn_hash_constant_with_type (constant);
341 vc.constant = constant;
342 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
343 vc.hashcode, NO_INSERT);
344 if (slot)
345 return ((vn_constant_t)*slot)->value_id;
346 return 0;
347 }
348
349 /* Lookup a value id for CONSTANT, and if it does not exist, create a
350 new one and return it. If it does exist, return it. */
351
352 unsigned int
353 get_or_alloc_constant_value_id (tree constant)
354 {
355 void **slot;
356 vn_constant_t vc = XNEW (struct vn_constant_s);
357
358 vc->hashcode = vn_hash_constant_with_type (constant);
359 vc->constant = constant;
360 slot = htab_find_slot_with_hash (constant_to_value_id, vc,
361 vc->hashcode, INSERT);
362 if (*slot)
363 {
364 free (vc);
365 return ((vn_constant_t)*slot)->value_id;
366 }
367 vc->value_id = get_next_value_id ();
368 *slot = vc;
369 bitmap_set_bit (constant_value_ids, vc->value_id);
370 return vc->value_id;
371 }
372
373 /* Return true if V is a value id for a constant. */
374
375 bool
376 value_id_constant_p (unsigned int v)
377 {
378 return bitmap_bit_p (constant_value_ids, v);
379 }
380
381 /* Compare two reference operands P1 and P2 for equality. Return true if
382 they are equal, and false otherwise. */
383
384 static int
385 vn_reference_op_eq (const void *p1, const void *p2)
386 {
387 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
388 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
389 return vro1->opcode == vro2->opcode
390 && vro1->type == vro2->type
391 && expressions_equal_p (vro1->op0, vro2->op0)
392 && expressions_equal_p (vro1->op1, vro2->op1)
393 && expressions_equal_p (vro1->op2, vro2->op2);
394 }
395
396 /* Compute the hash for a reference operand VRO1. */
397
398 static hashval_t
399 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
400 {
401 return iterative_hash_expr (vro1->op0, vro1->opcode)
402 + iterative_hash_expr (vro1->op1, vro1->opcode)
403 + iterative_hash_expr (vro1->op2, vro1->opcode);
404 }
405
406 /* Return the hashcode for a given reference operation P1. */
407
408 static hashval_t
409 vn_reference_hash (const void *p1)
410 {
411 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
412 return vr1->hashcode;
413 }
414
415 /* Compute a hash for the reference operation VR1 and return it. */
416
417 hashval_t
418 vn_reference_compute_hash (const vn_reference_t vr1)
419 {
420 hashval_t result = 0;
421 tree v;
422 int i;
423 vn_reference_op_t vro;
424
425 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
426 result += iterative_hash_expr (v, 0);
427 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
428 result += vn_reference_op_compute_hash (vro);
429
430 return result;
431 }
432
433 /* Return true if reference operations P1 and P2 are equivalent. This
434 means they have the same set of operands and vuses. */
435
436 int
437 vn_reference_eq (const void *p1, const void *p2)
438 {
439 tree v;
440 int i;
441 vn_reference_op_t vro;
442
443 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
444 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
445
446 if (vr1->vuses == vr2->vuses
447 && vr1->operands == vr2->operands)
448 return true;
449
450 /* Impossible for them to be equivalent if they have different
451 number of vuses. */
452 if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
453 return false;
454
455 /* We require that address operands be canonicalized in a way that
456 two memory references will have the same operands if they are
457 equivalent. */
458 if (VEC_length (vn_reference_op_s, vr1->operands)
459 != VEC_length (vn_reference_op_s, vr2->operands))
460 return false;
461
462 /* The memory state is more often different than the address of the
463 store/load, so check it first. */
464 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
465 {
466 if (VEC_index (tree, vr2->vuses, i) != v)
467 return false;
468 }
469
470 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
471 {
472 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
473 vro))
474 return false;
475 }
476 return true;
477 }
478
479 /* Place the vuses from STMT into *result. */
480
481 static inline void
482 vuses_to_vec (gimple stmt, VEC (tree, gc) **result)
483 {
484 ssa_op_iter iter;
485 tree vuse;
486
487 if (!stmt)
488 return;
489
490 VEC_reserve_exact (tree, gc, *result,
491 num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));
492
493 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
494 VEC_quick_push (tree, *result, vuse);
495 }
496
497
498 /* Copy the VUSE names in STMT into a vector, and return
499 the vector. */
500
501 VEC (tree, gc) *
502 copy_vuses_from_stmt (gimple stmt)
503 {
504 VEC (tree, gc) *vuses = NULL;
505
506 vuses_to_vec (stmt, &vuses);
507
508 return vuses;
509 }
510
511 /* Place the vdefs from STMT into *result. */
512
513 static inline void
514 vdefs_to_vec (gimple stmt, VEC (tree, gc) **result)
515 {
516 ssa_op_iter iter;
517 tree vdef;
518
519 if (!stmt)
520 return;
521
522 *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));
523
524 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
525 VEC_quick_push (tree, *result, vdef);
526 }
527
528 /* Copy the names of vdef results in STMT into a vector, and return
529 the vector. */
530
531 static VEC (tree, gc) *
532 copy_vdefs_from_stmt (gimple stmt)
533 {
534 VEC (tree, gc) *vdefs = NULL;
535
536 vdefs_to_vec (stmt, &vdefs);
537
538 return vdefs;
539 }
540
/* Scratch vector for shared_vuses_from_stmt, reused across calls to
   avoid allocating a new vector per lookup.  */
static VEC (tree, gc) *shared_lookup_vops;
543
544 /* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
545 This function will overwrite the current SHARED_LOOKUP_VOPS
546 variable. */
547
548 VEC (tree, gc) *
549 shared_vuses_from_stmt (gimple stmt)
550 {
551 VEC_truncate (tree, shared_lookup_vops, 0);
552 vuses_to_vec (stmt, &shared_lookup_vops);
553
554 return shared_lookup_vops;
555 }
556
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  Operands are recorded from the outermost
   reference expression down to the base object, one vn_reference_op_s
   per level.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  /* TARGET_MEM_REFs are encoded specially as two ops: one for the
     symbol/base and index, one for step and offset.  */
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      temp.op1 = TMR_INDEX (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_STEP (ref);
      temp.op1 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
	{
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  /* The only operand is the address, which gets its own
	     vn_reference_op_s structure.  */
	  break;
	case MISALIGNED_INDIRECT_REF:
	  /* Record the extra (second) operand as well.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  /* If this is a reference to a union member, record the union
	     member size as operand.  Do so only if we are doing
	     expression insertion (during FRE), as PRE currently gets
	     confused with this.  */
	  if (may_insert
	      && TREE_OPERAND (ref, 2) == NULL_TREE
	      && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
	      && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
	      && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
	    temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
	  else
	    {
	      /* Record field as operand.  */
	      temp.op0 = TREE_OPERAND (ref, 1);
	      temp.op1 = TREE_OPERAND (ref, 2);
	    }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  temp.op2 = TREE_OPERAND (ref, 3);
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case VAR_DECL:
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  /* Constants and bases terminate the chain; they carry
	     themselves as the sole operand.  */
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  /* An invariant address also terminates the chain.  */
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthrough.  */
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration  */
	case IMAGPART_EXPR:
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  break;
	default:
	  gcc_unreachable ();
	}
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      /* Continue with the inner reference, if there is one.  */
      if (REFERENCE_CLASS_P (ref)
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
681
/* Re-create a reference tree from the reference ops OPS.
   Returns NULL_TREE if the ops were not handled.
   This routine needs to be kept in sync with copy_reference_ops_from_ref.  */

static tree
get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  /* OP0_P always points at the slot where the next (inner) level of
     the reference has to be stored; initially the result itself.  */
  tree ref, *op0_p = &ref;

  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
	{
	case CALL_EXPR:
	  /* Calls cannot be represented as a reference tree.  */
	  return NULL_TREE;

	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  *op0_p = build1 (op->opcode, op->type, NULL_TREE);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case MISALIGNED_INDIRECT_REF:
	  *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case BIT_FIELD_REF:
	  *op0_p = build3 (BIT_FIELD_REF, op->type, NULL_TREE,
			   op->op0, op->op1);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case COMPONENT_REF:
	  /* The op records no type for COMPONENT_REFs; use the field
	     decl's type instead.  */
	  *op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE,
			   op->op0, op->op1);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  *op0_p = build4 (op->opcode, op->type, NULL_TREE,
			   op->op0, op->op1, op->op2);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case VAR_DECL:
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  /* Bases and constants fill the innermost slot directly.  */
	  *op0_p = op->op0;
	  break;

	case ADDR_EXPR:
	  if (op->op0 != NULL_TREE)
	    {
	      gcc_assert (is_gimple_min_invariant (op->op0));
	      *op0_p = op->op0;
	      break;
	    }
	  /* Fallthrough.  */
	case IMAGPART_EXPR:
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  *op0_p = build1 (op->opcode, op->type, NULL_TREE);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	default:
	  return NULL_TREE;
	}
    }

  return ref;
}
767
768 /* Copy the operations present in load/store/call REF into RESULT, a vector of
769 vn_reference_op_s's. */
770
771 void
772 copy_reference_ops_from_call (gimple call,
773 VEC(vn_reference_op_s, heap) **result)
774 {
775 vn_reference_op_s temp;
776 unsigned i;
777
778 /* Copy the type, opcode, function being called and static chain. */
779 memset (&temp, 0, sizeof (temp));
780 temp.type = gimple_call_return_type (call);
781 temp.opcode = CALL_EXPR;
782 temp.op0 = gimple_call_fn (call);
783 temp.op1 = gimple_call_chain (call);
784 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
785
786 /* Copy the call arguments. As they can be references as well,
787 just chain them together. */
788 for (i = 0; i < gimple_call_num_args (call); ++i)
789 {
790 tree callarg = gimple_call_arg (call, i);
791 copy_reference_ops_from_ref (callarg, result);
792 }
793 }
794
795 /* Create a vector of vn_reference_op_s structures from REF, a
796 REFERENCE_CLASS_P tree. The vector is not shared. */
797
798 static VEC(vn_reference_op_s, heap) *
799 create_reference_ops_from_ref (tree ref)
800 {
801 VEC (vn_reference_op_s, heap) *result = NULL;
802
803 copy_reference_ops_from_ref (ref, &result);
804 return result;
805 }
806
807 /* Create a vector of vn_reference_op_s structures from CALL, a
808 call statement. The vector is not shared. */
809
810 static VEC(vn_reference_op_s, heap) *
811 create_reference_ops_from_call (gimple call)
812 {
813 VEC (vn_reference_op_s, heap) *result = NULL;
814
815 copy_reference_ops_from_call (call, &result);
816 return result;
817 }
818
/* Scratch operand vector reused by shared_reference_ops_from_ref and
   shared_reference_ops_from_call.  */
static VEC(vn_reference_op_s, heap) *shared_lookup_references;
820
821 /* Create a vector of vn_reference_op_s structures from REF, a
822 REFERENCE_CLASS_P tree. The vector is shared among all callers of
823 this function. */
824
825 static VEC(vn_reference_op_s, heap) *
826 shared_reference_ops_from_ref (tree ref)
827 {
828 if (!ref)
829 return NULL;
830 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
831 copy_reference_ops_from_ref (ref, &shared_lookup_references);
832 return shared_lookup_references;
833 }
834
835 /* Create a vector of vn_reference_op_s structures from CALL, a
836 call statement. The vector is shared among all callers of
837 this function. */
838
839 static VEC(vn_reference_op_s, heap) *
840 shared_reference_ops_from_call (gimple call)
841 {
842 if (!call)
843 return NULL;
844 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
845 copy_reference_ops_from_call (call, &shared_lookup_references);
846 return shared_lookup_references;
847 }
848
849
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      /* An op holds an SSA name either as the whole op (opcode
	 SSA_NAME) or as one of its operands (op0).  */
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  vro->op0 = SSA_VAL (vro->op0);
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      /* TODO: Do we want to valueize op2 and op1 of
	 ARRAY_REF/COMPONENT_REF for Ada */

    }

  return orig;
}
878
879 /* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
880 their value numbers. This is done in-place, and the vector passed
881 in is returned. */
882
883 static VEC (tree, gc) *
884 valueize_vuses (VEC (tree, gc) *orig)
885 {
886 bool made_replacement = false;
887 tree vuse;
888 int i;
889
890 for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
891 {
892 if (vuse != SSA_VAL (vuse))
893 {
894 made_replacement = true;
895 VEC_replace (tree, orig, i, SSA_VAL (vuse));
896 }
897 }
898
899 if (made_replacement && VEC_length (tree, orig) > 1)
900 sort_vuses (orig);
901
902 return orig;
903 }
904
/* Return the single reference statement defining all virtual uses
   in VUSES or NULL, if there are multiple defining statements.
   Take into account only definitions that alias REF if following
   back-edges.  */

static gimple
get_def_ref_stmt_vuses (tree ref, VEC (tree, gc) *vuses)
{
  gimple def_stmt;
  tree vuse;
  unsigned int i;

  gcc_assert (VEC_length (tree, vuses) >= 1);

  def_stmt = SSA_NAME_DEF_STMT (VEC_index (tree, vuses, 0));
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      /* We can only handle lookups over PHI nodes for a single
	 virtual operand.  */
      if (VEC_length (tree, vuses) == 1)
	{
	  def_stmt = get_single_def_stmt_from_phi (ref, def_stmt);
	  goto cont;
	}
      else
	return NULL;
    }

  /* Verify each VUSE reaches the same defining stmt.  */
  for (i = 1; VEC_iterate (tree, vuses, i, vuse); ++i)
    {
      gimple tmp = SSA_NAME_DEF_STMT (vuse);
      if (tmp != def_stmt)
	return NULL;
    }

  /* Now see if the definition aliases ref, and loop until it does.
     Non-aliasing stores are skipped by following the virtual use-def
     chain further back.  */
cont:
  while (def_stmt
	 && is_gimple_assign (def_stmt)
	 && !refs_may_alias_p (ref, gimple_get_lhs (def_stmt)))
    def_stmt = get_single_def_stmt_with_phi (ref, def_stmt);

  return def_stmt;
}
950
951 /* Lookup a SCCVN reference operation VR in the current hash table.
952 Returns the resulting value number if it exists in the hash table,
953 NULL_TREE otherwise. VNRESULT will be filled in with the actual
954 vn_reference_t stored in the hashtable if something is found. */
955
956 static tree
957 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
958 {
959 void **slot;
960 hashval_t hash;
961
962 hash = vr->hashcode;
963 slot = htab_find_slot_with_hash (current_info->references, vr,
964 hash, NO_INSERT);
965 if (!slot && current_info == optimistic_info)
966 slot = htab_find_slot_with_hash (valid_info->references, vr,
967 hash, NO_INSERT);
968 if (slot)
969 {
970 if (vnresult)
971 *vnresult = (vn_reference_t)*slot;
972 return ((vn_reference_t)*slot)->result;
973 }
974
975 return NULL_TREE;
976 }
977
978
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.
   If MAYWALK is true and the plain lookup fails, follow the virtual
   use-def chain to an aliasing store and retry the lookup with that
   statement's vdefs.  */

tree
vn_reference_lookup_pieces (VEC (tree, gc) *vuses,
			    VEC (vn_reference_op_s, heap) *operands,
			    vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  tree result;
  if (vnresult)
    *vnresult = NULL;

  vr1.vuses = valueize_vuses (vuses);
  vr1.operands = valueize_refs (operands);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, vnresult);

  /* If there is a single defining statement for all virtual uses, we can
     use that, following virtual use-def chains.  */
  if (!result
      && maywalk
      && vr1.vuses
      && VEC_length (tree, vr1.vuses) >= 1)
    {
      /* Rebuild a tree from the ops so the alias machinery can be
	 queried against it.  */
      tree ref = get_ref_from_reference_ops (operands);
      gimple def_stmt;
      if (ref
	  && (def_stmt = get_def_ref_stmt_vuses (ref, vr1.vuses))
	  && is_gimple_assign (def_stmt))
	{
	  /* We are now at an aliasing definition for the vuses we want to
	     look up.  Re-do the lookup with the vdefs for this stmt.  */
	  vdefs_to_vec (def_stmt, &vuses);
	  vr1.vuses = valueize_vuses (vuses);
	  vr1.hashcode = vn_reference_compute_hash (&vr1);
	  result = vn_reference_lookup_1 (&vr1, vnresult);
	}
    }

  return result;
}
1023
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  If MAYWALK is true, failing
   lookups follow the virtual use-def chain to an aliasing store and
   retry with that statement's vdefs.  */

tree
vn_reference_lookup (tree op, VEC (tree, gc) *vuses, bool maywalk,
		     vn_reference_t *vnresult)
{
  struct vn_reference_s vr1;
  tree result;
  gimple def_stmt;
  if (vnresult)
    *vnresult = NULL;

  vr1.vuses = valueize_vuses (vuses);
  vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, vnresult);

  /* If there is a single defining statement for all virtual uses, we can
     use that, following virtual use-def chains.  */
  if (!result
      && maywalk
      && vr1.vuses
      && VEC_length (tree, vr1.vuses) >= 1
      && (def_stmt = get_def_ref_stmt_vuses (op, vr1.vuses))
      && is_gimple_assign (def_stmt))
    {
      /* We are now at an aliasing definition for the vuses we want to
	 look up.  Re-do the lookup with the vdefs for this stmt.  */
      vdefs_to_vec (def_stmt, &vuses);
      vr1.vuses = valueize_vuses (vuses);
      vr1.hashcode = vn_reference_compute_hash (&vr1);
      result = vn_reference_lookup_1 (&vr1, vnresult);
    }

  return result;
}
1064
1065
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.
   The structure is allocated from the current tables' reference
   pool.  */

vn_reference_t
vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  /* SSA name results carry their own value id; constants get (or
     allocate) a constant value id.  */
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuses = valueize_vuses (vuses);
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
				   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
1103
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */
1107
vn_reference_t
vn_reference_insert_pieces (VEC (tree, gc) *vuses,
			    VEC (vn_reference_op_s, heap) *operands,
			    tree result, unsigned int value_id)

{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuses = valueize_vuses (vuses);
  vr1->operands = valueize_refs (operands);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  /* Store the value number of RESULT rather than the name itself.  */
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
				   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  /* The free below is unreachable when assert checking is enabled;
     it is a defensive fallback for builds with checking disabled,
     where gcc_assert compiles away.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
1139
1140 /* Compute and return the hash value for nary operation VBO1. */
1141
inline hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash = 0;
  unsigned i;

  /* NOTE: this canonicalizes VNO1 in place as a side effect: SSA
     operands are replaced by their value numbers, and the operands of
     a commutative binary operation are swapped into canonical order,
     so that equal-valued operations hash (and later compare) equal.  */
  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  for (i = 0; i < vno1->length; ++i)
    hash += iterative_hash_expr (vno1->op[i], vno1->opcode);

  return hash;
}
1166
1167 /* Return the computed hashcode for nary operation P1. */
1168
1169 static hashval_t
1170 vn_nary_op_hash (const void *p1)
1171 {
1172 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1173 return vno1->hashcode;
1174 }
1175
1176 /* Compare nary operations P1 and P2 and return true if they are
1177 equivalent. */
1178
1179 int
1180 vn_nary_op_eq (const void *p1, const void *p2)
1181 {
1182 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1183 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
1184 unsigned i;
1185
1186 if (vno1->opcode != vno2->opcode
1187 || vno1->type != vno2->type)
1188 return false;
1189
1190 for (i = 0; i < vno1->length; ++i)
1191 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
1192 return false;
1193
1194 return true;
1195 }
1196
1197 /* Lookup a n-ary operation by its pieces and return the resulting value
1198 number if it exists in the hash table. Return NULL_TREE if it does
1199 not exist in the hash table or if the result field of the operation
1200 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1201 if it exists. */
1202
tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree op0, tree op1, tree op2,
			  tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  if (vnresult)
    *vnresult = NULL;
  /* Build a stack key; unused trailing ops are ignored by hashing
     and comparison, which only look at LENGTH operands.  */
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
				   NO_INSERT);
  /* During optimistic iteration, a miss falls back to the valid
     (conservative) table.  */
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
1231
1232 /* Lookup OP in the current hash table, and return the resulting value
1233 number if it exists in the hash table. Return NULL_TREE if it does
1234 not exist in the hash table or if the result field of the operation
1235 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1236 if it exists. */
1237
tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  /* Build the key directly from the tree expression's code, type and
     operands.  */
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
				   NO_INSERT);
  /* During optimistic iteration, a miss falls back to the valid
     (conservative) table.  */
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
1264
1265 /* Lookup the rhs of STMT in the current hash table, and return the resulting
1266 value number if it exists in the hash table. Return NULL_TREE if
1267 it does not exist in the hash table. VNRESULT will contain the
1268 vn_nary_op_t from the hashtable if it exists. */
1269
tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  /* Build the key from the statement's rhs: op 0 is the lhs, so the
     rhs operands are ops 1 .. num_ops-1.  */
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = TREE_TYPE (gimple_assign_lhs (stmt));
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
				   NO_INSERT);
  /* During optimistic iteration, a miss falls back to the valid
     (conservative) table.  */
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
1296
/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */
1300
vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
			  tree type, tree op0,
			  tree op1, tree op2, tree op3,
			  tree result,
			  unsigned int value_id)

{
  void **slot;
  vn_nary_op_t vno1;

  /* The op[] array is declared with 4 slots; allocate only enough of
     the structure to hold LENGTH operands.  */
  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
				       (sizeof (struct vn_nary_op_s)
					- sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  /* Only store operands that fit within the trimmed allocation.  */
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  /* Computing the hash also canonicalizes the stored operands.  */
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
				   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;

}
1336
1337 /* Insert OP into the current hash table with a value number of
1338 RESULT. Return the vn_nary_op_t structure we created and put in
1339 the hashtable. */
1340
vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  /* Allocate only enough of the structure to hold LENGTH operands
     (op[] is declared with 4 slots).  */
  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
				       (sizeof (struct vn_nary_op_s)
					- sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  /* Computing the hash also canonicalizes the stored operands.  */
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
				   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
1367
1368 /* Insert the rhs of STMT into the current hash table with a value number of
1369 RESULT. */
1370
vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  /* Allocate only enough of the structure to hold LENGTH operands
     (op[] is declared with 4 slots).  */
  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
				       (sizeof (struct vn_nary_op_s)
					- sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = TREE_TYPE (gimple_assign_lhs (stmt));
  /* Op 0 of the statement is the lhs; the rhs operands follow.  */
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  vno1->result = result;
  /* Computing the hash also canonicalizes the stored operands.  */
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
				   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
1397
1398 /* Compute a hashcode for PHI operation VP1 and return it. */
1399
static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result = 0;
  int i;
  tree phi1op;
  tree type;

  /* Seed the hash with the basic-block index, since PHI equivalence
     requires the same block (see vn_phi_eq).  */
  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
	     + (INTEGRAL_TYPE_P (type)
		? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  /* Skip VN_TOP arguments so hashing stays consistent with vn_phi_eq,
     which ignores them as well.  */
  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
	continue;
      result += iterative_hash_expr (phi1op, result);
    }

  return result;
}
1426
1427 /* Return the computed hashcode for phi operation P1. */
1428
1429 static hashval_t
1430 vn_phi_hash (const void *p1)
1431 {
1432 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1433 return vp1->hashcode;
1434 }
1435
1436 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
1437
static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
	 they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
			       TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
	return false;

      /* Any phi in the same block will have its arguments in the
	 same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
	{
	  tree phi2op = VEC_index (tree, vp2->phiargs, i);
	  /* VN_TOP is equivalent to anything, so skip it on either
	     side (matches vn_phi_compute_hash).  */
	  if (phi1op == VN_TOP || phi2op == VN_TOP)
	    continue;
	  if (!expressions_equal_p (phi1op, phi2op))
	    return false;
	}
      return true;
    }
  return false;
}
1469
/* Scratch argument vector reused by every vn_phi_lookup call, to avoid
   allocating a fresh vector per lookup.  */
static VEC(tree, heap) *shared_lookup_phiargs;
1471
1472 /* Lookup PHI in the current hash table, and return the resulting
1473 value number if it exists in the hash table. Return NULL_TREE if
1474 it does not exist in the hash table. */
1475
tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  /* Reuse the shared scratch vector for the lookup key.  */
  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
				   NO_INSERT);
  /* During optimistic iteration, a miss falls back to the valid
     (conservative) table.  */
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}
1504
1505 /* Insert PHI into the current hash table with a value number of
1506 RESULT. */
1507
static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  /* Unlike vn_phi_lookup, build a private argument vector: the entry
     outlives this call, so it cannot use the shared scratch vector.  */
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
				   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}
1537
1538
1539 /* Print set of components in strongly connected component SCC to OUT. */
1540
1541 static void
1542 print_scc (FILE *out, VEC (tree, heap) *scc)
1543 {
1544 tree var;
1545 unsigned int i;
1546
1547 fprintf (out, "SCC consists of: ");
1548 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1549 {
1550 print_generic_expr (out, var, 0);
1551 fprintf (out, " ");
1552 }
1553 fprintf (out, "\n");
1554 }
1555
1556 /* Set the value number of FROM to TO, return true if it has changed
1557 as a result. */
1558
static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  /* Never value-number to an SSA name occurring in an abnormal PHI;
     such names cannot be propagated, so FROM stays its own value.  */
  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
	      && (to == VN_TOP
		  || TREE_CODE (to) == SSA_NAME
		  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
      fprintf (dump_file, "\n");
    }

  currval = SSA_VAL (from);

  /* Report a change only if the new value differs both by identity
     and structurally (OEP_PURE_SAME treats equal constants alike).  */
  if (currval != to  && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      SSA_VAL (from) = to;
      return true;
    }
  return false;
}
1594
1595 /* Set all definitions in STMT to value number to themselves.
1596 Return true if a value number changed. */
1597
static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  /* A def that is its own value number means "varying"; mark each
     def of STMT processed and value-number it to itself.  */
  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}
1614
1615 static bool expr_has_constants (tree expr);
1616 static tree valueize_expr (tree expr);
1617
1618 /* Visit a copy between LHS and RHS, return true if the value number
1619 changed. */
1620
static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
	 && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}
1639
1640 /* Visit a unary operator RHS, value number it, and return true if the
1641 value number of LHS has changed as a result. */
1642
1643 static bool
1644 visit_unary_op (tree lhs, gimple stmt)
1645 {
1646 bool changed = false;
1647 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1648
1649 if (result)
1650 {
1651 changed = set_ssa_val_to (lhs, result);
1652 }
1653 else
1654 {
1655 changed = set_ssa_val_to (lhs, lhs);
1656 vn_nary_op_insert_stmt (stmt, lhs);
1657 }
1658
1659 return changed;
1660 }
1661
1662 /* Visit a binary operator RHS, value number it, and return true if the
1663 value number of LHS has changed as a result. */
1664
1665 static bool
1666 visit_binary_op (tree lhs, gimple stmt)
1667 {
1668 bool changed = false;
1669 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1670
1671 if (result)
1672 {
1673 changed = set_ssa_val_to (lhs, result);
1674 }
1675 else
1676 {
1677 changed = set_ssa_val_to (lhs, lhs);
1678 vn_nary_op_insert_stmt (stmt, lhs);
1679 }
1680
1681 return changed;
1682 }
1683
1684 /* Visit a call STMT storing into LHS. Return true if the value number
1685 of the LHS has changed as a result. */
1686
static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;

  /* Key the call on its (valueized) vuses and call operands.  */
  vr1.vuses = valueize_vuses (shared_vuses_from_stmt (stmt));
  vr1.operands = valueize_refs (shared_reference_ops_from_call (stmt));
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      changed = set_ssa_val_to (lhs, lhs);
      /* Insert a copy of the key (the shared vectors above cannot be
	 stored; copy_vuses/create_reference make private ones).  */
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuses = valueize_vuses (copy_vuses_from_stmt (stmt));
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
				       vr2, vr2->hashcode, INSERT);
      if (*slot)
	free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}
1724
1725 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1726 and return true if the value number of the LHS has changed as a result. */
1727
static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt), true,
				     NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	 So first simplify and lookup this expression to see if it
	 is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
	   || TREE_CODE (val) == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
	{
	  /* Try combining with the defining expression of the
	     converted name, folding away nested conversions.  */
	  tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
	  if ((CONVERT_EXPR_P (tem)
	       || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
	      && (tem = fold_unary (TREE_CODE (val), TREE_TYPE (val), tem)))
	    val = tem;
	}
      result = val;
      if (!is_gimple_min_invariant (val)
	  && TREE_CODE (val) != SSA_NAME)
	result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
	 a new SSA_NAME we create.  */
      if (!result && may_insert)
	{
	  result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
	  /* Initialize value-number information properly.  */
	  VN_INFO_GET (result)->valnum = result;
	  VN_INFO (result)->value_id = get_next_value_id ();
	  VN_INFO (result)->expr = val;
	  VN_INFO (result)->has_constants = expr_has_constants (val);
	  VN_INFO (result)->needs_insertion = true;
	  /* As all "inserted" statements are singleton SCCs, insert
	     to the valid table.  This is strictly needed to
	     avoid re-generating new value SSA_NAMEs for the same
	     expression during SCC iteration over and over (the
	     optimistic table gets cleared after each iteration).
	     We do not need to insert into the optimistic table, as
	     lookups there will fall back to the valid table.  */
	  if (current_info == optimistic_info)
	    {
	      current_info = valid_info;
	      vn_nary_op_insert (val, result);
	      current_info = optimistic_info;
	    }
	  else
	    vn_nary_op_insert (val, result);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Inserting name ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, " for expression ");
	      print_generic_expr (dump_file, val, 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	{
	  VN_INFO (lhs)->expr = VN_INFO (result)->expr;
	  VN_INFO (lhs)->has_constants = true;
	}
    }
  else
    {
      /* Not available: LHS is its own value number, record the load.  */
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
    }

  return changed;
}
1815
1816
1817 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1818 and return true if the value number of the LHS has changed as a result. */
1819
1820 static bool
1821 visit_reference_op_store (tree lhs, tree op, gimple stmt)
1822 {
1823 bool changed = false;
1824 tree result;
1825 bool resultsame = false;
1826
1827 /* First we want to lookup using the *vuses* from the store and see
1828 if there the last store to this location with the same address
1829 had the same value.
1830
1831 The vuses represent the memory state before the store. If the
1832 memory state, address, and value of the store is the same as the
1833 last store to this location, then this store will produce the
1834 same memory state as that store.
1835
1836 In this case the vdef versions for this store are value numbered to those
1837 vuse versions, since they represent the same memory state after
1838 this store.
1839
1840 Otherwise, the vdefs for the store are used when inserting into
1841 the table, since the store generates a new memory state. */
1842
1843 result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt), false,
1844 NULL);
1845
1846 if (result)
1847 {
1848 if (TREE_CODE (result) == SSA_NAME)
1849 result = SSA_VAL (result);
1850 if (TREE_CODE (op) == SSA_NAME)
1851 op = SSA_VAL (op);
1852 resultsame = expressions_equal_p (result, op);
1853 }
1854
1855 if (!result || !resultsame)
1856 {
1857 VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
1858 int i;
1859 tree vdef;
1860
1861 if (dump_file && (dump_flags & TDF_DETAILS))
1862 {
1863 fprintf (dump_file, "No store match\n");
1864 fprintf (dump_file, "Value numbering store ");
1865 print_generic_expr (dump_file, lhs, 0);
1866 fprintf (dump_file, " to ");
1867 print_generic_expr (dump_file, op, 0);
1868 fprintf (dump_file, "\n");
1869 }
1870 /* Have to set value numbers before insert, since insert is
1871 going to valueize the references in-place. */
1872 for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
1873 {
1874 VN_INFO (vdef)->use_processed = true;
1875 changed |= set_ssa_val_to (vdef, vdef);
1876 }
1877
1878 /* Do not insert structure copies into the tables. */
1879 if (is_gimple_min_invariant (op)
1880 || is_gimple_reg (op))
1881 vn_reference_insert (lhs, op, vdefs);
1882 }
1883 else
1884 {
1885 /* We had a match, so value number the vdefs to have the value
1886 number of the vuses they came from. */
1887 ssa_op_iter op_iter;
1888 def_operand_p var;
1889 vuse_vec_p vv;
1890
1891 if (dump_file && (dump_flags & TDF_DETAILS))
1892 fprintf (dump_file, "Store matched earlier value,"
1893 "value numbering store vdefs to matching vuses.\n");
1894
1895 FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
1896 {
1897 tree def = DEF_FROM_PTR (var);
1898 tree use;
1899
1900 /* Uh, if the vuse is a multiuse, we can't really do much
1901 here, sadly, since we don't know which value number of
1902 which vuse to use. */
1903 if (VUSE_VECT_NUM_ELEM (*vv) != 1)
1904 use = def;
1905 else
1906 use = VUSE_ELEMENT_VAR (*vv, 0);
1907
1908 VN_INFO (def)->use_processed = true;
1909 changed |= set_ssa_val_to (def, SSA_VAL (use));
1910 }
1911 }
1912
1913 return changed;
1914 }
1915
1916 /* Visit and value number PHI, return true if the value number
1917 changed. */
1918
static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
	def = SSA_VAL (def);
      if (def == VN_TOP)
	continue;
      if (sameval == VN_TOP)
	{
	  /* First non-TOP argument seen; remember it.  */
	  sameval = def;
	}
      else
	{
	  if (!expressions_equal_p (def, sameval))
	    {
	      allsame = false;
	      break;
	    }
	}
    }

  /* If all value numbered to the same value, the phi node has that
     value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = true;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}
      else
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = false;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}

      if (TREE_CODE (sameval) == SSA_NAME)
	return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	changed = visit_copy (PHI_RESULT (phi), result);
      else
	changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      /* No equivalent PHI known: record this one, and value-number
	 the result to itself.  */
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}
1997
1998 /* Return true if EXPR contains constants. */
1999
static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
	|| is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
  /* Not reached -- every switch case above returns.  */
  return false;
}
2021
2022 /* Return true if STMT contains constants. */
2023
static bool
stmt_has_constants (gimple stmt)
{
  /* Only assignments carry an rhs expression to inspect.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_UNARY_RHS:
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
	      || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
    default:
      gcc_unreachable ();
    }
  /* Not reached -- every switch case above returns or aborts.  */
  return false;
}
2047
2048 /* Replace SSA_NAMES in expr with their value numbers, and return the
2049 result.
2050 This is performed in place. */
2051
2052 static tree
2053 valueize_expr (tree expr)
2054 {
2055 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2056 {
2057 case tcc_unary:
2058 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2059 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2060 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2061 break;
2062 case tcc_binary:
2063 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2064 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2065 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2066 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2067 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2068 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2069 break;
2070 default:
2071 break;
2072 }
2073 return expr;
2074 }
2075
2076 /* Simplify the binary expression RHS, and return the result if
2077 simplified. */
2078
static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants
	  || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
	op0 = valueize_expr (vn_get_expr_for (op0));
      else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
	op0 = SSA_VAL (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants)
	op1 = valueize_expr (vn_get_expr_for (op1));
      else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
	op1 = SSA_VAL (op1);
    }

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  /* Suppress overflow warnings unless folding yields something we
     will actually use (see fold_undefer_overflow_warnings below).  */
  fold_defer_overflow_warnings ();

  result = fold_binary (gimple_assign_rhs_code (stmt),
			TREE_TYPE (gimple_get_lhs (stmt)), op0, op1);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
				  stmt, 0);

  /* Make sure result is not a complex expression consisting
     of operators of operators (IE (a + b) + (a + c))
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}
2129
/* Simplify the unary expression RHS, and return the result if
   simplified.

   STMT is a GIMPLE_ASSIGN with a unary rhs code, or one of the
   reference codes REALPART_EXPR, IMAGPART_EXPR or VIEW_CONVERT_EXPR
   (which are GIMPLE_SINGLE_RHS but handled here as unary operations
   on their operand).  Returns the folded tree if it is a valid gimple
   rhs, or NULL_TREE otherwise.  */

static tree
simplify_unary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  For those the SSA operand is one
     level down, inside the reference tree.  */
  if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
      || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
      || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    op0 = TREE_OPERAND (op0, 0);

  /* Only SSA names can be valueized; anything else cannot be
     simplified further here.  */
  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  orig_op0 = op0;
  if (VN_INFO (op0)->has_constants)
    op0 = valueize_expr (vn_get_expr_for (op0));
  else if (gimple_assign_cast_p (stmt)
	   || gimple_assign_rhs_code (stmt) == REALPART_EXPR
	   || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
	   || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    {
      /* We want to do tree-combining on conversion-like expressions.
	 Make sure we feed only SSA_NAMEs or constants to fold though.  */
      tree tem = valueize_expr (vn_get_expr_for (op0));
      if (UNARY_CLASS_P (tem)
	  || BINARY_CLASS_P (tem)
	  || TREE_CODE (tem) == VIEW_CONVERT_EXPR
	  || TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem))
	op0 = tem;
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  result = fold_unary (gimple_assign_rhs_code (stmt),
		       gimple_expr_type (stmt), op0);
  if (result)
    {
      /* Strip conversions that do not change semantics before
	 validating the result as a gimple rhs.  */
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
	return result;
    }

  return NULL_TREE;
}
2183
/* Try to simplify RHS using equivalences and constant folding.

   STMT is a GIMPLE_ASSIGN.  Dispatches on the tree code class of the
   rhs: declarations are looked up as symbol constants, references are
   folded as reads from constant aggregates (with REALPART_EXPR,
   IMAGPART_EXPR and VIEW_CONVERT_EXPR falling through to the unary
   path), unary and binary/comparison codes go to the respective
   simplifiers.  Returns the simplified tree or NULL_TREE.  */

static tree
try_to_simplify (gimple stmt)
{
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (gimple_assign_copy_p (stmt)
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    return NULL_TREE;

  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
    {
    case tcc_declaration:
      tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
      if (tem)
	return tem;
      break;

    case tcc_reference:
      /* Do not do full-blown reference lookup here, but simplify
	 reads from constant aggregates.  */
      tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
      if (tem)
	return tem;

      /* Fallthrough for some codes that can operate on registers.  */
      if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
	    || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
	    || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
	break;
      /* We could do a little more with unary ops, if they expand
	 into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);
      break;
    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);
      break;
    default:
      break;
    }

  return NULL_TREE;
}
2232
/* Visit and value number USE, return true if the value number
   changed.

   USE is an SSA name.  Its defining statement is inspected and
   dispatched: default definitions value-number to themselves, PHIs go
   through visit_phi, statements without an lhs / with volatile
   operands / that may throw make all defs VARYING, assignments are
   simplified and classified, and calls are handled last.  Any other
   statement kind also makes its defs VARYING.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  VN_INFO (use)->use_processed = true;

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
	changed = visit_phi (stmt);
      else if (!gimple_has_lhs (stmt)
	       || gimple_has_volatile_ops (stmt)
	       || stmt_could_throw_p (stmt))
	changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree simplified;

	  /* Shortcut for copies. Simplifying copies is pointless,
	     since we copy the expression and value they represent.  */
	  if (gimple_assign_copy_p (stmt)
	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	      && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
	      goto done;
	    }
	  simplified = try_to_simplify (stmt);
	  if (simplified)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "RHS ");
		  print_gimple_expr (dump_file, stmt, 0, 0);
		  fprintf (dump_file, " simplified to ");
		  print_generic_expr (dump_file, simplified, 0);
		  if (TREE_CODE (lhs) == SSA_NAME)
		    fprintf (dump_file, " has constants %d\n",
			     expr_has_constants (simplified));
		  else
		    fprintf (dump_file, "\n");
		}
	    }
	  /* Setting value numbers to constants will occasionally
	     screw up phi congruence because constants are not
	     uniquely associated with a single ssa name that can be
	     looked up.  */
	  if (simplified
	      && is_gimple_min_invariant (simplified)
	      && TREE_CODE (lhs) == SSA_NAME)
	    {
	      VN_INFO (lhs)->expr = simplified;
	      VN_INFO (lhs)->has_constants = true;
	      changed = set_ssa_val_to (lhs, simplified);
	      goto done;
	    }
	  else if (simplified
		   && TREE_CODE (simplified) == SSA_NAME
		   && TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* Simplified to another SSA name: treat as a copy.  */
	      changed = visit_copy (lhs, simplified);
	      goto done;
	    }
	  else if (simplified)
	    {
	      if (TREE_CODE (lhs) == SSA_NAME)
		{
		  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
		  /* We have to unshare the expression or else
		     valuizing may change the IL stream.  */
		  VN_INFO (lhs)->expr = unshare_expr (simplified);
		}
	    }
	  else if (stmt_has_constants (stmt)
		   && TREE_CODE (lhs) == SSA_NAME)
	    VN_INFO (lhs)->has_constants = true;
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* We reset expr and constantness here because we may
		 have been value numbering optimistically, and
		 iterating. They may become non-constant in this case,
		 even if they were optimistically constant.  */

	      VN_INFO (lhs)->has_constants = false;
	      VN_INFO (lhs)->expr = NULL_TREE;
	    }

	  if (TREE_CODE (lhs) == SSA_NAME
	      /* We can substitute SSA_NAMEs that are live over
		 abnormal edges with their constant value.  */
	      && !(gimple_assign_copy_p (stmt)
		   && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
	      && !(simplified
		   && is_gimple_min_invariant (simplified))
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    changed = defs_to_varying (stmt);
	  else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
	    {
	      /* Store to memory: value-number the store itself.  */
	      changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
	    }
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      if ((gimple_assign_copy_p (stmt)
		   && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
		  || (simplified
		      && is_gimple_min_invariant (simplified)))
		{
		  VN_INFO (lhs)->has_constants = true;
		  if (simplified)
		    changed = set_ssa_val_to (lhs, simplified);
		  else
		    changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
		}
	      else
		{
		  /* Dispatch on the shape of the rhs.  */
		  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
		    {
		    case GIMPLE_UNARY_RHS:
		      changed = visit_unary_op (lhs, stmt);
		      break;
		    case GIMPLE_BINARY_RHS:
		      changed = visit_binary_op (lhs, stmt);
		      break;
		    case GIMPLE_SINGLE_RHS:
		      switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
			{
			case tcc_declaration:
			case tcc_reference:
			  changed = visit_reference_op_load
			      (lhs, gimple_assign_rhs1 (stmt), stmt);
			  break;
			case tcc_expression:
			  if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
			    {
			      changed = visit_unary_op (lhs, stmt);
			      break;
			    }
			  /* Fallthrough.  */
			default:
			  changed = defs_to_varying (stmt);
			}
		      break;
		    default:
		      changed = defs_to_varying (stmt);
		      break;
		    }
		}
	    }
	  else
	    changed = defs_to_varying (stmt);
	}
      else if (is_gimple_call (stmt))
	{
	  tree lhs = gimple_call_lhs (stmt);

	  /* ??? We could try to simplify calls.  */

	  if (stmt_has_constants (stmt)
	      && TREE_CODE (lhs) == SSA_NAME)
	    VN_INFO (lhs)->has_constants = true;
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* We reset expr and constantness here because we may
		 have been value numbering optimistically, and
		 iterating. They may become non-constant in this case,
		 even if they were optimistically constant.  */
	      VN_INFO (lhs)->has_constants = false;
	      VN_INFO (lhs)->expr = NULL_TREE;
	    }

	  if (TREE_CODE (lhs) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    changed = defs_to_varying (stmt);
	  /* ??? We should handle stores from calls.  */
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* Only calls without side effects can be value-numbered
		 as references.  */
	      if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
		changed = visit_reference_op_call (lhs, stmt);
	      else
		changed = defs_to_varying (stmt);
	    }
	  else
	    changed = defs_to_varying (stmt);
	}
    }
 done:
  return changed;
}
2440
2441 /* Compare two operands by reverse postorder index */
2442
2443 static int
2444 compare_ops (const void *pa, const void *pb)
2445 {
2446 const tree opa = *((const tree *)pa);
2447 const tree opb = *((const tree *)pb);
2448 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2449 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2450 basic_block bba;
2451 basic_block bbb;
2452
2453 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2454 return 0;
2455 else if (gimple_nop_p (opstmta))
2456 return -1;
2457 else if (gimple_nop_p (opstmtb))
2458 return 1;
2459
2460 bba = gimple_bb (opstmta);
2461 bbb = gimple_bb (opstmtb);
2462
2463 if (!bba && !bbb)
2464 return 0;
2465 else if (!bba)
2466 return -1;
2467 else if (!bbb)
2468 return 1;
2469
2470 if (bba == bbb)
2471 {
2472 if (gimple_code (opstmta) == GIMPLE_PHI
2473 && gimple_code (opstmtb) == GIMPLE_PHI)
2474 return 0;
2475 else if (gimple_code (opstmta) == GIMPLE_PHI)
2476 return -1;
2477 else if (gimple_code (opstmtb) == GIMPLE_PHI)
2478 return 1;
2479 return gimple_uid (opstmta) - gimple_uid (opstmtb);
2480 }
2481 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
2482 }
2483
2484 /* Sort an array containing members of a strongly connected component
2485 SCC so that the members are ordered by RPO number.
2486 This means that when the sort is complete, iterating through the
2487 array will give you the members in RPO order. */
2488
2489 static void
2490 sort_scc (VEC (tree, heap) *scc)
2491 {
2492 qsort (VEC_address (tree, scc),
2493 VEC_length (tree, scc),
2494 sizeof (tree),
2495 compare_ops);
2496 }
2497
/* Process a strongly connected component in the SSA graph.

   A singleton SCC is simply visited once.  A multi-member SCC is
   iterated with the optimistic table until no member's value number
   changes, then visited one final time with the valid table to record
   the proven results.  */

static void
process_scc (VEC (tree, heap) *scc)
{
  /* If the SCC has a single member, just visit it.  */

  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (!VN_INFO (use)->use_processed)
	visit_use (use);
    }
  else
    {
      tree var;
      unsigned int i;
      unsigned int iterations = 0;
      bool changed = true;

      /* Iterate over the SCC with the optimistic table until it stops
	 changing.  */
      current_info = optimistic_info;
      while (changed)
	{
	  changed = false;
	  iterations++;
	  /* As we are value-numbering optimistically we have to
	     clear the expression tables and the simplified expressions
	     in each iteration until we converge.  */
	  htab_empty (optimistic_info->nary);
	  htab_empty (optimistic_info->phis);
	  htab_empty (optimistic_info->references);
	  /* The obstack has no per-object free; release it wholesale
	     and re-initialize.  */
	  obstack_free (&optimistic_info->nary_obstack, NULL);
	  gcc_obstack_init (&optimistic_info->nary_obstack);
	  empty_alloc_pool (optimistic_info->phis_pool);
	  empty_alloc_pool (optimistic_info->references_pool);
	  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	    VN_INFO (var)->expr = NULL_TREE;
	  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	    changed |= visit_use (var);
	}

      statistics_histogram_event (cfun, "SCC iterations", iterations);

      /* Finally, visit the SCC once using the valid table.  */
      current_info = valid_info;
      for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	visit_use (var);
    }
}
2549
2550 DEF_VEC_O(ssa_op_iter);
2551 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
2552
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.

   NAME is the SCC root; everything above it on sccstack (inclusive)
   belongs to the SCC.  SCCs larger than PARAM_SCCVN_MAX_SCC_SIZE make
   the whole pass bail out.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = VEC_pop (tree, sccstack);

      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    } while (x != name);

  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
		 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  /* Multi-member SCCs are sorted into RPO order before processing.  */
  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}
2596
/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.

   This is an iterative version of Tarjan's SCC algorithm: recursion
   is simulated with explicit stacks (namevec for the names being
   walked, itervec for the saved use iterators) and gotos
   (start_over = recursive call, continue_walking = return from the
   call).  */

static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info: assign the DFS number and initialize the low-link to
     it.  */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
	usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    /* Default definitions have no uses to walk.  */
    iter.done = true;

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
	 of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
	{
	  /* See if we found an SCC.  NAME is a root exactly when its
	     low-link still equals its own DFS number.  */
	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	    if (!extract_and_process_scc_for_name (name))
	      {
		VEC_free (tree, heap, namevec);
		VEC_free (ssa_op_iter, heap, itervec);
		return false;
	      }

	  /* Check if we are done.  */
	  if (VEC_empty (tree, namevec))
	    {
	      VEC_free (tree, heap, namevec);
	      VEC_free (ssa_op_iter, heap, itervec);
	      return true;
	    }

	  /* Restore the last use walker and continue walking there.  */
	  use = name;
	  name = VEC_pop (tree, namevec);
	  memcpy (&iter, VEC_last (ssa_op_iter, itervec),
		  sizeof (ssa_op_iter));
	  VEC_pop (ssa_op_iter, itervec);
	  goto continue_walking;
	}

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
	 invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
	{
	  if (! (VN_INFO (use)->visited))
	    {
	      /* Recurse by pushing the current use walking state on
		 the stack and starting over.  */
	      VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
	      VEC_safe_push(tree, heap, namevec, name);
	      name = use;
	      goto start_over;

continue_walking:
	      /* Returned from the simulated recursion: propagate the
		 callee's low-link.  */
	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
					 VN_INFO (use)->low);
	    }
	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
	      && VN_INFO (use)->on_sccstack)
	    {
	      /* Back edge to an ancestor still on the SCC stack.  */
	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
					 VN_INFO (name)->low);
	    }
	}

      usep = op_iter_next_use (&iter);
    }
}
2698
2699 /* Allocate a value number table. */
2700
2701 static void
2702 allocate_vn_table (vn_tables_t table)
2703 {
2704 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
2705 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
2706 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
2707 free_reference);
2708
2709 gcc_obstack_init (&table->nary_obstack);
2710 table->phis_pool = create_alloc_pool ("VN phis",
2711 sizeof (struct vn_phi_s),
2712 30);
2713 table->references_pool = create_alloc_pool ("VN references",
2714 sizeof (struct vn_reference_s),
2715 30);
2716 }
2717
2718 /* Free a value number table. */
2719
2720 static void
2721 free_vn_table (vn_tables_t table)
2722 {
2723 htab_delete (table->phis);
2724 htab_delete (table->nary);
2725 htab_delete (table->references);
2726 obstack_free (&table->nary_obstack, NULL);
2727 free_alloc_pool (table->phis_pool);
2728 free_alloc_pool (table->references_pool);
2729 }
2730
/* Initialize all global state used by SCC value numbering: dominance
   info, the constant-to-value-id table, the per-SSA-name aux table,
   the block-index -> RPO-number mapping, VN_TOP, and the valid and
   optimistic value tables.  */

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
				      free);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs = NULL;
  shared_lookup_vops = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  /* VN_TOP is a sentinel tree that can never appear in real code.  */
  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  VN_INFO_GET (name)->valnum = VN_TOP;
	  VN_INFO (name)->expr = NULL_TREE;
	  VN_INFO (name)->value_id = 0;
	}
    }

  /* Statement uids are used by compare_ops for intra-block ordering.  */
  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
2792
/* Tear down all global state created by init_scc_vn, releasing any
   SSA names the pass inserted along the way.  */

void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (tree, gc, shared_lookup_vops);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  /* Release SSA names that were created for insertion but are no
     longer needed once VN state is gone.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  /* All vn_ssa_aux structures live on this obstack; free them in one
     go, then the table of pointers to them.  */
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}
2821
/* Set the value ids in the valid hash tables.

   Walks the n-ary, PHI and reference tables of valid_info and fills
   in each entry's value_id from its result: the VN_INFO value id for
   SSA names, or a (possibly fresh) constant value id for
   invariants.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
			 vno, vn_nary_op_t, hi)
    {
      if (vno->result)
	{
	  if (TREE_CODE (vno->result) == SSA_NAME)
	    vno->value_id = VN_INFO (vno->result)->value_id;
	  else if (is_gimple_min_invariant (vno->result))
	    vno->value_id = get_or_alloc_constant_value_id (vno->result);
	}
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
			 vp, vn_phi_t, hi)
    {
      if (vp->result)
	{
	  if (TREE_CODE (vp->result) == SSA_NAME)
	    vp->value_id = VN_INFO (vp->result)->value_id;
	  else if (is_gimple_min_invariant (vp->result))
	    vp->value_id = get_or_alloc_constant_value_id (vp->result);
	}
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
			 vr, vn_reference_t, hi)
    {
      if (vr->result)
	{
	  if (TREE_CODE (vr->result) == SSA_NAME)
	    vr->value_id = VN_INFO (vr->result)->value_id;
	  else if (is_gimple_min_invariant (vr->result))
	    vr->value_id = get_or_alloc_constant_value_id (vr->result);
	}
    }
}
2871
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.

   MAY_INSERT_ARG controls whether the pass may insert new SSA names
   (stored into the global may_insert for the duration of the run).
   The driver: initializes state, seeds default defs of parameters
   with themselves, DFS-walks every used SSA name to find and process
   SCCs, then assigns value ids and propagates them to a fixpoint.  */

bool
run_scc_vn (bool may_insert_arg)
{
  size_t i;
  tree param;
  bool changed = true;

  may_insert = may_insert_arg;

  init_scc_vn ();
  current_info = valid_info;

  /* Parameters' default definitions are their own value: they are
     known-initialized but have no defining statement to visit.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
	{
	  tree def = gimple_default_def (cfun, param);
	  SSA_VAL (def) = def;
	}
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    /* Resource limit hit (see extract_and_process_scc_for_name);
	       tear everything down and report failure.  */
	    free_scc_vn ();
	    may_insert = false;
	    return false;
	  }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (info->valnum == name)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate until they stop changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
	{
	  tree name = ssa_name (i);
	  vn_ssa_aux_t info;
	  if (!name)
	    continue;
	  info = VN_INFO (name);
	  if (TREE_CODE (info->valnum) == SSA_NAME
	      && info->valnum != name
	      && info->value_id != VN_INFO (info->valnum)->value_id)
	    {
	      changed = true;
	      info->value_id = VN_INFO (info->valnum)->value_id;
	    }
	}
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  may_insert = false;
  return true;
}
2971
2972 /* Return the maximum value id we have ever seen. */
2973
2974 unsigned int
2975 get_max_value_id (void)
2976 {
2977 return next_value_id;
2978 }
2979
2980 /* Return the next unique value id. */
2981
2982 unsigned int
2983 get_next_value_id (void)
2984 {
2985 return next_value_id++;
2986 }
2987
2988
2989 /* Compare two expressions E1 and E2 and return true if they are equal. */
2990
2991 bool
2992 expressions_equal_p (tree e1, tree e2)
2993 {
2994 /* The obvious case. */
2995 if (e1 == e2)
2996 return true;
2997
2998 /* If only one of them is null, they cannot be equal. */
2999 if (!e1 || !e2)
3000 return false;
3001
3002 /* Recurse on elements of lists. */
3003 if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
3004 {
3005 tree lop1 = e1;
3006 tree lop2 = e2;
3007 for (lop1 = e1, lop2 = e2;
3008 lop1 || lop2;
3009 lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
3010 {
3011 if (!lop1 || !lop2)
3012 return false;
3013 if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
3014 return false;
3015 }
3016 return true;
3017 }
3018
3019 /* Now perform the actual comparison. */
3020 if (TREE_CODE (e1) == TREE_CODE (e2)
3021 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3022 return true;
3023
3024 return false;
3025 }
3026
3027 /* Sort the VUSE array so that we can do equality comparisons
3028 quicker on two vuse vecs. */
3029
3030 void
3031 sort_vuses (VEC (tree,gc) *vuses)
3032 {
3033 if (VEC_length (tree, vuses) > 1)
3034 qsort (VEC_address (tree, vuses),
3035 VEC_length (tree, vuses),
3036 sizeof (tree),
3037 operand_build_cmp);
3038 }
3039
3040 /* Sort the VUSE array so that we can do equality comparisons
3041 quicker on two vuse vecs. */
3042
3043 void
3044 sort_vuses_heap (VEC (tree,heap) *vuses)
3045 {
3046 if (VEC_length (tree, vuses) > 1)
3047 qsort (VEC_address (tree, vuses),
3048 VEC_length (tree, vuses),
3049 sizeof (tree),
3050 operand_build_cmp);
3051 }