/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "real.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code,
   it is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
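
/* As a small illustration of the optimistic iteration described above
   (not part of the implementation), consider the SSA cycle

       i_1 = PHI <0 (entry), i_2 (latch)>
       i_2 = i_1 + 0;

   Optimistically assuming the PHI result is 0 lets i_2 value number
   to 0 as well, which in turn confirms the assumption for the PHI, so
   the whole SCC stabilizes with value number 0.  */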

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

static bool may_insert;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}


/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if (gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
          || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
          || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
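
/* For illustration: if x_3 has value number y_2 and y_2 is defined by
   y_2 = a_1 + b_1, vn_get_expr_for (x_3) builds and caches a_1 + b_1
   as the representative expression for x_3's value.  */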

/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  vn_constant_t vc = XNEW (struct vn_constant_s);

  vc->hashcode = vn_hash_constant_with_type (constant);
  vc->constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, vc,
                                   vc->hashcode, INSERT);
  if (*slot)
    {
      free (vc);
      return ((vn_constant_t)*slot)->value_id;
    }
  vc->value_id = get_next_value_id ();
  *slot = vc;
  bitmap_set_bit (constant_value_ids, vc->value_id);
  return vc->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
  return vro1->opcode == vro2->opcode
    && vro1->type == vro2->type
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1)
{
  return iterative_hash_expr (vro1->op0, vro1->opcode)
    + iterative_hash_expr (vro1->op1, vro1->opcode)
    + iterative_hash_expr (vro1->op2, vro1->opcode);
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  tree v;
  int i;
  vn_reference_op_t vro;

  for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
    result += iterative_hash_expr (v, 0);
  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    result += vn_reference_op_compute_hash (vro);

  return result;
}

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  tree v;
  int i;
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  if (vr1->vuses == vr2->vuses
      && vr1->operands == vr2->operands)
    return true;

  /* Impossible for them to be equivalent if they have a different
     number of vuses.  */
  if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
    return false;

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))
    return false;

  /* The memory state is more often different than the address of the
     store/load, so check it first.  */
  for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
    {
      if (VEC_index (tree, vr2->vuses, i) != v)
        return false;
    }

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    {
      if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
                               vro))
        return false;
    }
  return true;
}

/* Place the vuses from STMT into *result.  */

static inline void
vuses_to_vec (gimple stmt, VEC (tree, gc) **result)
{
  ssa_op_iter iter;
  tree vuse;

  if (!stmt)
    return;

  VEC_reserve_exact (tree, gc, *result,
                     num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));

  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
    VEC_quick_push (tree, *result, vuse);
}


/* Copy the VUSE names in STMT into a vector, and return
   the vector.  */

VEC (tree, gc) *
copy_vuses_from_stmt (gimple stmt)
{
  VEC (tree, gc) *vuses = NULL;

  vuses_to_vec (stmt, &vuses);

  return vuses;
}

/* Place the vdefs from STMT into *result.  */

static inline void
vdefs_to_vec (gimple stmt, VEC (tree, gc) **result)
{
  ssa_op_iter iter;
  tree vdef;

  if (!stmt)
    return;

  *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));

  FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
    VEC_quick_push (tree, *result, vdef);
}

/* Copy the names of vdef results in STMT into a vector, and return
   the vector.  */

static VEC (tree, gc) *
copy_vdefs_from_stmt (gimple stmt)
{
  VEC (tree, gc) *vdefs = NULL;

  vdefs_to_vec (stmt, &vdefs);

  return vdefs;
}

/* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs.  */
static VEC (tree, gc) *shared_lookup_vops;

/* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
   This function will overwrite the current SHARED_LOOKUP_VOPS
   variable.  */

VEC (tree, gc) *
shared_vuses_from_stmt (gimple stmt)
{
  VEC_truncate (tree, shared_lookup_vops, 0);
  vuses_to_vec (stmt, &shared_lookup_vops);

  return shared_lookup_vops;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      temp.op1 = TMR_INDEX (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_STEP (ref);
      temp.op1 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
        {
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          /* The only operand is the address, which gets its own
             vn_reference_op_s structure.  */
          break;
        case MISALIGNED_INDIRECT_REF:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
#if FIXME
          /* If this is a reference to a union member, record the union
             member size as operand.  Do so only if we are doing
             expression insertion (during FRE), as PRE currently gets
             confused with this.  */
          if (may_insert
              && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
              && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
              && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
            temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
          else
#endif
            /* Record field as operand.  */
            temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          temp.op2 = TREE_OPERAND (ref, 3);
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (i.e., they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
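
/* For illustration (not part of the implementation): a reference like
   x.f[i_1] decomposes outermost-first into the operand vector
   { ARRAY_REF <op0 i_1>, COMPONENT_REF <op0 f>, VAR_DECL <op0 x> },
   which is the form the hashing and equality routines operate on.  */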

/* Re-create a reference tree from the reference ops OPS.
   Returns NULL_TREE if the ops were not handled.
   This routine needs to be kept in sync with copy_reference_ops_from_ref.  */

static tree
get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree ref, *op0_p = &ref;

  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
        {
        case CALL_EXPR:
          return NULL_TREE;

        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case MISALIGNED_INDIRECT_REF:
          *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case BIT_FIELD_REF:
          *op0_p = build3 (BIT_FIELD_REF, op->type, NULL_TREE,
                           op->op0, op->op1);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case COMPONENT_REF:
          *op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE,
                           op->op0, op->op1);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          *op0_p = build4 (op->opcode, op->type, NULL_TREE,
                           op->op0, op->op1, op->op2);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          break;

        case ADDR_EXPR:
          if (op->op0 != NULL_TREE)
            {
              gcc_assert (is_gimple_min_invariant (op->op0));
              *op0_p = op->op0;
              break;
            }
          /* Fallthrough.  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        default:
          return NULL_TREE;
        }
    }

  return ref;
}

/* Copy the operations present in the call statement CALL into RESULT,
   a vector of vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  return shared_lookup_references;
}


/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      /* TODO: Do we want to valueize op2 and op1 of
         ARRAY_REF/COMPONENT_REF for Ada?  */
    }

  return orig;
}

/* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
   their value numbers.  This is done in-place, and the vector passed
   in is returned.  */

static VEC (tree, gc) *
valueize_vuses (VEC (tree, gc) *orig)
{
  bool made_replacement = false;
  tree vuse;
  int i;

  for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
    {
      if (vuse != SSA_VAL (vuse))
        {
          made_replacement = true;
          VEC_replace (tree, orig, i, SSA_VAL (vuse));
        }
    }

  if (made_replacement && VEC_length (tree, orig) > 1)
    sort_vuses (orig);

  return orig;
}

/* Return the single reference statement defining all virtual uses
   in VUSES, or NULL if there are multiple defining statements.
   Take into account only definitions that alias REF if following
   back-edges.  */

static gimple
get_def_ref_stmt_vuses (tree ref, VEC (tree, gc) *vuses)
{
  gimple def_stmt;
  tree vuse;
  unsigned int i;

  gcc_assert (VEC_length (tree, vuses) >= 1);

  def_stmt = SSA_NAME_DEF_STMT (VEC_index (tree, vuses, 0));
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      /* We can only handle lookups over PHI nodes for a single
         virtual operand.  */
      if (VEC_length (tree, vuses) == 1)
        {
          def_stmt = get_single_def_stmt_from_phi (ref, def_stmt);
          goto cont;
        }
      else
        return NULL;
    }

  /* Verify each VUSE reaches the same defining stmt.  */
  for (i = 1; VEC_iterate (tree, vuses, i, vuse); ++i)
    {
      gimple tmp = SSA_NAME_DEF_STMT (vuse);
      if (tmp != def_stmt)
        return NULL;
    }

  /* Now see if the definition aliases ref, and loop until it does.  */
cont:
  while (def_stmt
         && is_gimple_assign (def_stmt)
         && !refs_may_alias_p (ref, gimple_get_lhs (def_stmt)))
    def_stmt = get_single_def_stmt_with_phi (ref, def_stmt);

  return def_stmt;
}
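
/* For illustration: when walking the use-def chain above, a definition
   such as *q_1 = y_2 whose left-hand side does not alias REF is simply
   stepped over, so a load can look through unrelated stores to an
   earlier defining statement.  */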

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (VEC (tree, gc) *vuses,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  tree result;
  if (vnresult)
    *vnresult = NULL;

  vr1.vuses = valueize_vuses (vuses);
  vr1.operands = valueize_refs (operands);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, vnresult);

  /* If there is a single defining statement for all virtual uses, we can
     use that, following virtual use-def chains.  */
  if (!result
      && maywalk
      && vr1.vuses
      && VEC_length (tree, vr1.vuses) >= 1)
    {
      tree ref = get_ref_from_reference_ops (operands);
      gimple def_stmt;
      if (ref
          && (def_stmt = get_def_ref_stmt_vuses (ref, vr1.vuses))
          && is_gimple_assign (def_stmt))
        {
          /* We are now at an aliasing definition for the vuses we want to
             look up.  Re-do the lookup with the vdefs for this stmt.  */
          vdefs_to_vec (def_stmt, &vuses);
          vr1.vuses = valueize_vuses (vuses);
          vr1.hashcode = vn_reference_compute_hash (&vr1);
          result = vn_reference_lookup_1 (&vr1, vnresult);
        }
    }

  return result;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, VEC (tree, gc) *vuses, bool maywalk,
                     vn_reference_t *vnresult)
{
  struct vn_reference_s vr1;
  tree result;
  gimple def_stmt;
  if (vnresult)
    *vnresult = NULL;

  vr1.vuses = valueize_vuses (vuses);
  vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, vnresult);

  /* If there is a single defining statement for all virtual uses, we can
     use that, following virtual use-def chains.  */
  if (!result
      && maywalk
      && vr1.vuses
      && VEC_length (tree, vr1.vuses) >= 1
      && (def_stmt = get_def_ref_stmt_vuses (op, vr1.vuses))
      && is_gimple_assign (def_stmt))
    {
      /* We are now at an aliasing definition for the vuses we want to
         look up.  Re-do the lookup with the vdefs for this stmt.  */
      vdefs_to_vec (def_stmt, &vuses);
      vr1.vuses = valueize_vuses (vuses);
      vr1.hashcode = vn_reference_compute_hash (&vr1);
      result = vn_reference_lookup_1 (&vr1, vnresult);
    }

  return result;
}


/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuses = valueize_vuses (vuses);
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (VEC (tree, gc) *vuses,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuses = valueize_vuses (vuses);
  vr1->operands = valueize_refs (operands);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Compute and return the hash value for nary operation VNO1.  */

inline hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash = 0;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  for (i = 0; i < vno1->length; ++i)
    hash += iterative_hash_expr (vno1->op[i], vno1->opcode);

  return hash;
}
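
/* For example, because commutative operands are swapped into a
   canonical order above, a_1 + b_2 and b_2 + a_1 compute the same
   hash and are later matched by vn_nary_op_eq.  */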

/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->opcode != vno2->opcode
      || vno1->type != vno2->type)
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}

/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = TREE_TYPE (gimple_assign_lhs (stmt));
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = TREE_TYPE (gimple_assign_lhs (stmt));
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result = 0;
  int i;
  tree phi1op;

  result = vp1->block->index;

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
        continue;
      result += iterative_hash_expr (phi1op, result);
    }

  return result;
}

/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
        {
          tree phi2op = VEC_index (tree, vp2->phiargs, i);
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}
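
/* For illustration: within one block, PHI <x_1, VN_TOP> and
   PHI <x_1, x_1> compare equal here, because a VN_TOP argument is
   equivalent to anything.  */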

static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}

/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
                                   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}


/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only things we allow as value numbers are VN_TOP, SSA names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
              && (to == VN_TOP
                  || TREE_CODE (to) == SSA_NAME
                  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
      fprintf (dump_file, "\n");
    }

  currval = SSA_VAL (from);

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      SSA_VAL (from) = to;
      return true;
    }
  return false;
}

/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);
static tree try_to_simplify (gimple stmt);

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
  VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;

  return set_ssa_val_to (lhs, rhs);
}

/* Visit a unary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_unary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a binary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_binary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;

  vr1.vuses = valueize_vuses (shared_vuses_from_stmt (stmt));
  vr1.operands = valueize_refs (shared_reference_ops_from_call (stmt));
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      changed = set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuses = valueize_vuses (copy_vuses_from_stmt (stmt));
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
                                       vr2, vr2->hashcode, INSERT);
      if (*slot)
        free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}

/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt), true,
                                     NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
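  /* For example (purely illustrative): for union { int i; float f; } u,
     a load of u.f whose reaching store wrote u.i gets the stored value
     with a VIEW_CONVERT_EXPR <float> wrapped around it.  */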
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
         of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
         So first simplify and lookup this expression to see if it
         is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if (stmt
          && !is_gimple_min_invariant (val)
          && TREE_CODE (val) != SSA_NAME)
        {
          tree tem = try_to_simplify (stmt);
          if (tem)
            val = tem;
        }
      result = val;
      if (!is_gimple_min_invariant (val)
          && TREE_CODE (val) != SSA_NAME)
        result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
         a new SSA_NAME we create.  */
      if (!result && may_insert)
        {
          result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
          /* Initialize value-number information properly.  */
          VN_INFO_GET (result)->valnum = result;
          VN_INFO (result)->value_id = get_next_value_id ();
          VN_INFO (result)->expr = val;
          VN_INFO (result)->has_constants = expr_has_constants (val);
          VN_INFO (result)->needs_insertion = true;
          /* As all "inserted" statements are singleton SCCs, insert
             to the valid table.  This is strictly needed to
             avoid re-generating new value SSA_NAMEs for the same
             expression during SCC iteration over and over (the
             optimistic table gets cleared after each iteration).
             We do not need to insert into the optimistic table, as
             lookups there will fall back to the valid table.  */
          if (current_info == optimistic_info)
            {
              current_info = valid_info;
              vn_nary_op_insert (val, result);
              current_info = optimistic_info;
            }
          else
            vn_nary_op_insert (val, result);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Inserting name ");
              print_generic_expr (dump_file, result, 0);
              fprintf (dump_file, " for expression ");
              print_generic_expr (dump_file, val, 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        {
          VN_INFO (lhs)->expr = VN_INFO (result)->expr;
          VN_INFO (lhs)->has_constants = true;
        }
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
    }

  return changed;
}


/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */
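
  /* Illustration (not part of the code): given *p_1 = x_2; ... ;
     *p_1 = x_2; with no intervening aliasing store, the second store
     finds the first one's value, so its vdefs are value numbered to
     its vuses and the store is exposed as redundant.  */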
1821
1822 result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt), false,
1823 NULL);
1824
1825 if (result)
1826 {
1827 if (TREE_CODE (result) == SSA_NAME)
1828 result = SSA_VAL (result);
1829 if (TREE_CODE (op) == SSA_NAME)
1830 op = SSA_VAL (op);
1831 resultsame = expressions_equal_p (result, op);
1832 }
1833
1834 if (!result || !resultsame)
1835 {
1836 VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
1837 int i;
1838 tree vdef;
1839
1840 if (dump_file && (dump_flags & TDF_DETAILS))
1841 {
1842 fprintf (dump_file, "No store match\n");
1843 fprintf (dump_file, "Value numbering store ");
1844 print_generic_expr (dump_file, lhs, 0);
1845 fprintf (dump_file, " to ");
1846 print_generic_expr (dump_file, op, 0);
1847 fprintf (dump_file, "\n");
1848 }
1849 /* Have to set value numbers before insert, since insert is
1850 going to valueize the references in-place. */
1851 for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
1852 {
1853 VN_INFO (vdef)->use_processed = true;
1854 changed |= set_ssa_val_to (vdef, vdef);
1855 }
1856
1857 /* Do not insert structure copies into the tables. */
1858 if (is_gimple_min_invariant (op)
1859 || is_gimple_reg (op))
1860 vn_reference_insert (lhs, op, vdefs);
1861 }
1862 else
1863 {
1864 /* We had a match, so value number the vdefs to have the value
1865 number of the vuses they came from. */
1866 ssa_op_iter op_iter;
1867 def_operand_p var;
1868 vuse_vec_p vv;
1869
1870 if (dump_file && (dump_flags & TDF_DETAILS))
1871 fprintf (dump_file, "Store matched earlier value,"
1872 "value numbering store vdefs to matching vuses.\n");
1873
1874 FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
1875 {
1876 tree def = DEF_FROM_PTR (var);
1877 tree use;
1878
1879 /* If the vuse is a multiuse, we can't really do much here,
1880 since we don't know which of the vuse versions' value
1881 numbers to use. */
1882 if (VUSE_VECT_NUM_ELEM (*vv) != 1)
1883 use = def;
1884 else
1885 use = VUSE_ELEMENT_VAR (*vv, 0);
1886
1887 VN_INFO (def)->use_processed = true;
1888 changed |= set_ssa_val_to (def, SSA_VAL (use));
1889 }
1890 }
1891
1892 return changed;
1893 }
1894
1895 /* Visit and value number PHI, return true if the value number
1896 changed. */
1897
1898 static bool
1899 visit_phi (gimple phi)
1900 {
1901 bool changed = false;
1902 tree result;
1903 tree sameval = VN_TOP;
1904 bool allsame = true;
1905 unsigned i;
1906
1907 /* TODO: We could check for this in init_scc_vn and replace this
1908 with a gcc_assert. */
1909 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
1910 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1911
1912 /* See if all non-TOP arguments have the same value. TOP is
1913 equivalent to everything, so we can ignore it. */
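/* For example (hypothetical names): for x_4 = PHI <x_2(latch), 5(entry)>
where x_2 is still VN_TOP, the only non-TOP argument is 5, so the
PHI result is optimistically given the value 5. */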
1914 for (i = 0; i < gimple_phi_num_args (phi); i++)
1915 {
1916 tree def = PHI_ARG_DEF (phi, i);
1917
1918 if (TREE_CODE (def) == SSA_NAME)
1919 def = SSA_VAL (def);
1920 if (def == VN_TOP)
1921 continue;
1922 if (sameval == VN_TOP)
1923 {
1924 sameval = def;
1925 }
1926 else
1927 {
1928 if (!expressions_equal_p (def, sameval))
1929 {
1930 allsame = false;
1931 break;
1932 }
1933 }
1934 }
1935
1936 /* If all arguments value numbered to the same value, the phi node
1937 has that value. */
1938 if (allsame)
1939 {
1940 VN_INFO (PHI_RESULT (phi))->has_constants
1941 = is_gimple_min_invariant (sameval);
1942 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1950
1951 if (TREE_CODE (sameval) == SSA_NAME)
1952 return visit_copy (PHI_RESULT (phi), sameval);
1953
1954 return set_ssa_val_to (PHI_RESULT (phi), sameval);
1955 }
1956
1957 /* Otherwise, see if it is equivalent to a phi node in this block. */
1958 result = vn_phi_lookup (phi);
1959 if (result)
1960 {
1961 if (TREE_CODE (result) == SSA_NAME)
1962 changed = visit_copy (PHI_RESULT (phi), result);
1963 else
1964 changed = set_ssa_val_to (PHI_RESULT (phi), result);
1965 }
1966 else
1967 {
1968 vn_phi_insert (phi, PHI_RESULT (phi));
1969 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1970 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
1971 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1972 }
1973
1974 return changed;
1975 }
1976
1977 /* Return true if EXPR contains constants. */
1978
1979 static bool
1980 expr_has_constants (tree expr)
1981 {
1982 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1983 {
1984 case tcc_unary:
1985 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
1986
1987 case tcc_binary:
1988 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1989 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1990 /* Constants inside reference ops are rarely interesting, but
1991 it can take a lot of looking to find them. */
1992 case tcc_reference:
1993 case tcc_declaration:
1994 return false;
1995 default:
1996 return is_gimple_min_invariant (expr);
1997 }
1998 return false;
1999 }
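/* For example, a_1 + 4 counts as having constants, while a_1 + b_2 and
plain reference trees do not. */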
2000
2001 /* Return true if STMT contains constants. */
2002
2003 static bool
2004 stmt_has_constants (gimple stmt)
2005 {
2006 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2007 return false;
2008
2009 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2010 {
2011 case GIMPLE_UNARY_RHS:
2012 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2013
2014 case GIMPLE_BINARY_RHS:
2015 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2016 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2017 case GIMPLE_SINGLE_RHS:
2018 /* Constants inside reference ops are rarely interesting, but
2019 it can take a lot of looking to find them. */
2020 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2021 default:
2022 gcc_unreachable ();
2023 }
2024 return false;
2025 }
2026
2027 /* Replace SSA_NAMEs in EXPR with their value numbers, and return the
2028 result.
2029 This is performed in place. */
2030
2031 static tree
2032 valueize_expr (tree expr)
2033 {
2034 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2035 {
2036 case tcc_unary:
2037 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2038 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2039 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2040 break;
2041 case tcc_binary:
2042 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2043 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2044 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2045 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2046 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2047 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2048 break;
2049 default:
2050 break;
2051 }
2052 return expr;
2053 }
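/* For example (hypothetical names): if SSA_VAL (a_1) is the constant 3,
the binary expression a_1 + b_2 is rewritten in place to 3 + b_2; an
operand whose value number is still VN_TOP is left untouched. */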
2054
2055 /* Simplify the binary expression on the rhs of STMT, and return the
2056 result if simplified. */
2057
2058 static tree
2059 simplify_binary_expression (gimple stmt)
2060 {
2061 tree result = NULL_TREE;
2062 tree op0 = gimple_assign_rhs1 (stmt);
2063 tree op1 = gimple_assign_rhs2 (stmt);
2064
2065 /* This will not catch every single case we could combine, but will
2066 catch those with constants. The goal here is to simultaneously
2067 combine constants between expressions, but avoid infinite
2068 expansion of expressions during simplification. */
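/* For instance (hypothetical names): if the recorded expression for op0
is the constant 4 and op1 is the constant 5 under a PLUS_EXPR,
fold_binary below returns 9, which is a valid GIMPLE rhs. */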
2069 if (TREE_CODE (op0) == SSA_NAME)
2070 {
2071 if (VN_INFO (op0)->has_constants
2072 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2073 op0 = valueize_expr (vn_get_expr_for (op0));
2074 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2075 op0 = SSA_VAL (op0);
2076 }
2077
2078 if (TREE_CODE (op1) == SSA_NAME)
2079 {
2080 if (VN_INFO (op1)->has_constants)
2081 op1 = valueize_expr (vn_get_expr_for (op1));
2082 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2083 op1 = SSA_VAL (op1);
2084 }
2085
2086 /* Avoid folding if nothing changed. */
2087 if (op0 == gimple_assign_rhs1 (stmt)
2088 && op1 == gimple_assign_rhs2 (stmt))
2089 return NULL_TREE;
2090
2091 fold_defer_overflow_warnings ();
2092
2093 result = fold_binary (gimple_assign_rhs_code (stmt),
2094 TREE_TYPE (gimple_get_lhs (stmt)), op0, op1);
2095
2096 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2097 stmt, 0);
2098
2099 /* Make sure the result is not a complex expression consisting
2100 of operators of operators (i.e. (a + b) + (a + c)).
2101 Otherwise, we will end up with unbounded expressions if
2102 fold does anything at all. */
2103 if (result && valid_gimple_rhs_p (result))
2104 return result;
2105
2106 return NULL_TREE;
2107 }
2108
2109 /* Simplify the unary expression on the rhs of STMT, and return the
2110 result if simplified. */
2111
2112 static tree
2113 simplify_unary_expression (gimple stmt)
2114 {
2115 tree result = NULL_TREE;
2116 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2117
2118 /* We handle some tcc_reference codes here that are all
2119 GIMPLE_SINGLE_RHS codes. */
2120 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2121 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2122 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2123 op0 = TREE_OPERAND (op0, 0);
2124
2125 if (TREE_CODE (op0) != SSA_NAME)
2126 return NULL_TREE;
2127
2128 orig_op0 = op0;
2129 if (VN_INFO (op0)->has_constants)
2130 op0 = valueize_expr (vn_get_expr_for (op0));
2131 else if (gimple_assign_cast_p (stmt)
2132 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2133 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2134 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2135 {
2136 /* We want to do tree-combining on conversion-like expressions.
2137 Make sure we feed only SSA_NAMEs or constants to fold though. */
2138 tree tem = valueize_expr (vn_get_expr_for (op0));
2139 if (UNARY_CLASS_P (tem)
2140 || BINARY_CLASS_P (tem)
2141 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2142 || TREE_CODE (tem) == SSA_NAME
2143 || is_gimple_min_invariant (tem))
2144 op0 = tem;
2145 }
2146
2147 /* Avoid folding if nothing changed. */
2148 if (op0 == orig_op0)
2149 return NULL_TREE;
2150
2151 result = fold_unary (gimple_assign_rhs_code (stmt),
2152 gimple_expr_type (stmt), op0);
2153 if (result)
2154 {
2155 STRIP_USELESS_TYPE_CONVERSION (result);
2156 if (valid_gimple_rhs_p (result))
2157 return result;
2158 }
2159
2160 return NULL_TREE;
2161 }
2162
2163 /* Try to simplify the rhs of STMT using equivalences and constant folding. */
2164
2165 static tree
2166 try_to_simplify (gimple stmt)
2167 {
2168 tree tem;
2169
2170 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
2171 in this case; there is no point in doing extra work. */
2172 if (gimple_assign_copy_p (stmt)
2173 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2174 return NULL_TREE;
2175
2176 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2177 {
2178 case tcc_declaration:
2179 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2180 if (tem)
2181 return tem;
2182 break;
2183
2184 case tcc_reference:
2185 /* Do not do full-blown reference lookup here, but simplify
2186 reads from constant aggregates. */
2187 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2188 if (tem)
2189 return tem;
2190
2191 /* Fallthrough for some codes that can operate on registers. */
2192 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2193 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2194 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2195 break;
2196 /* We could do a little more with unary ops, if they expand
2197 into binary ops, but it's debatable whether it is worth it. */
2198 case tcc_unary:
2199 return simplify_unary_expression (stmt);
2201 case tcc_comparison:
2202 case tcc_binary:
2203 return simplify_binary_expression (stmt);
2205 default:
2206 break;
2207 }
2208
2209 return NULL_TREE;
2210 }
2211
2212 /* Visit and value number USE, return true if the value number
2213 changed. */
2214
2215 static bool
2216 visit_use (tree use)
2217 {
2218 bool changed = false;
2219 gimple stmt = SSA_NAME_DEF_STMT (use);
2220
2221 VN_INFO (use)->use_processed = true;
2222
2223 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2224 if (dump_file && (dump_flags & TDF_DETAILS)
2225 && !SSA_NAME_IS_DEFAULT_DEF (use))
2226 {
2227 fprintf (dump_file, "Value numbering ");
2228 print_generic_expr (dump_file, use, 0);
2229 fprintf (dump_file, " stmt = ");
2230 print_gimple_stmt (dump_file, stmt, 0, 0);
2231 }
2232
2233 /* Handle uninitialized uses. */
2234 if (SSA_NAME_IS_DEFAULT_DEF (use))
2235 changed = set_ssa_val_to (use, use);
2236 else
2237 {
2238 if (gimple_code (stmt) == GIMPLE_PHI)
2239 changed = visit_phi (stmt);
2240 else if (!gimple_has_lhs (stmt)
2241 || gimple_has_volatile_ops (stmt)
2242 || stmt_could_throw_p (stmt))
2243 changed = defs_to_varying (stmt);
2244 else if (is_gimple_assign (stmt))
2245 {
2246 tree lhs = gimple_assign_lhs (stmt);
2247 tree simplified;
2248
2249 /* Shortcut for copies. Simplifying copies is pointless,
2250 since we copy the expression and value they represent. */
2251 if (gimple_assign_copy_p (stmt)
2252 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2253 && TREE_CODE (lhs) == SSA_NAME)
2254 {
2255 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2256 goto done;
2257 }
2258 simplified = try_to_simplify (stmt);
2259 if (simplified)
2260 {
2261 if (dump_file && (dump_flags & TDF_DETAILS))
2262 {
2263 fprintf (dump_file, "RHS ");
2264 print_gimple_expr (dump_file, stmt, 0, 0);
2265 fprintf (dump_file, " simplified to ");
2266 print_generic_expr (dump_file, simplified, 0);
2267 if (TREE_CODE (lhs) == SSA_NAME)
2268 fprintf (dump_file, " has constants %d\n",
2269 expr_has_constants (simplified));
2270 else
2271 fprintf (dump_file, "\n");
2272 }
2273 }
2274 /* Setting value numbers to constants will occasionally
2275 screw up phi congruence because constants are not
2276 uniquely associated with a single ssa name that can be
2277 looked up. */
2278 if (simplified
2279 && is_gimple_min_invariant (simplified)
2280 && TREE_CODE (lhs) == SSA_NAME)
2281 {
2282 VN_INFO (lhs)->expr = simplified;
2283 VN_INFO (lhs)->has_constants = true;
2284 changed = set_ssa_val_to (lhs, simplified);
2285 goto done;
2286 }
2287 else if (simplified
2288 && TREE_CODE (simplified) == SSA_NAME
2289 && TREE_CODE (lhs) == SSA_NAME)
2290 {
2291 changed = visit_copy (lhs, simplified);
2292 goto done;
2293 }
2294 else if (simplified)
2295 {
2296 if (TREE_CODE (lhs) == SSA_NAME)
2297 {
2298 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2299 /* We have to unshare the expression or else
2300 valueizing may change the IL stream. */
2301 VN_INFO (lhs)->expr = unshare_expr (simplified);
2302 }
2303 }
2304 else if (stmt_has_constants (stmt)
2305 && TREE_CODE (lhs) == SSA_NAME)
2306 VN_INFO (lhs)->has_constants = true;
2307 else if (TREE_CODE (lhs) == SSA_NAME)
2308 {
2309 /* We reset expr and constantness here because we may
2310 have been value numbering optimistically, and
2311 iterating. They may become non-constant in this case,
2312 even if they were optimistically constant. */
2313
2314 VN_INFO (lhs)->has_constants = false;
2315 VN_INFO (lhs)->expr = NULL_TREE;
2316 }
2317
2318 if (TREE_CODE (lhs) == SSA_NAME
2319 /* We can substitute SSA_NAMEs that are live over
2320 abnormal edges with their constant value. */
2321 && !(gimple_assign_copy_p (stmt)
2322 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2323 && !(simplified
2324 && is_gimple_min_invariant (simplified))
2325 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2326 changed = defs_to_varying (stmt);
2327 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2328 {
2329 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2330 }
2331 else if (TREE_CODE (lhs) == SSA_NAME)
2332 {
2333 if ((gimple_assign_copy_p (stmt)
2334 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2335 || (simplified
2336 && is_gimple_min_invariant (simplified)))
2337 {
2338 VN_INFO (lhs)->has_constants = true;
2339 if (simplified)
2340 changed = set_ssa_val_to (lhs, simplified);
2341 else
2342 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
2343 }
2344 else
2345 {
2346 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2347 {
2348 case GIMPLE_UNARY_RHS:
2349 changed = visit_unary_op (lhs, stmt);
2350 break;
2351 case GIMPLE_BINARY_RHS:
2352 changed = visit_binary_op (lhs, stmt);
2353 break;
2354 case GIMPLE_SINGLE_RHS:
2355 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2356 {
2357 case tcc_declaration:
2358 case tcc_reference:
2359 changed = visit_reference_op_load
2360 (lhs, gimple_assign_rhs1 (stmt), stmt);
2361 break;
2362 case tcc_expression:
2363 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2364 {
2365 changed = visit_unary_op (lhs, stmt);
2366 break;
2367 }
2368 /* Fallthrough. */
2369 default:
2370 changed = defs_to_varying (stmt);
2371 }
2372 break;
2373 default:
2374 changed = defs_to_varying (stmt);
2375 break;
2376 }
2377 }
2378 }
2379 else
2380 changed = defs_to_varying (stmt);
2381 }
2382 else if (is_gimple_call (stmt))
2383 {
2384 tree lhs = gimple_call_lhs (stmt);
2385
2386 /* ??? We could try to simplify calls. */
2387
2388 if (stmt_has_constants (stmt)
2389 && TREE_CODE (lhs) == SSA_NAME)
2390 VN_INFO (lhs)->has_constants = true;
2391 else if (TREE_CODE (lhs) == SSA_NAME)
2392 {
2393 /* We reset expr and constantness here because we may
2394 have been value numbering optimistically, and
2395 iterating. They may become non-constant in this case,
2396 even if they were optimistically constant. */
2397 VN_INFO (lhs)->has_constants = false;
2398 VN_INFO (lhs)->expr = NULL_TREE;
2399 }
2400
2401 if (TREE_CODE (lhs) == SSA_NAME
2402 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2403 changed = defs_to_varying (stmt);
2404 /* ??? We should handle stores from calls. */
2405 else if (TREE_CODE (lhs) == SSA_NAME)
2406 {
2407 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2408 changed = visit_reference_op_call (lhs, stmt);
2409 else
2410 changed = defs_to_varying (stmt);
2411 }
2412 else
2413 changed = defs_to_varying (stmt);
2414 }
2415 }
2416 done:
2417 return changed;
2418 }
2419
2420 /* Compare two operands by reverse postorder index. */
2421
2422 static int
2423 compare_ops (const void *pa, const void *pb)
2424 {
2425 const tree opa = *((const tree *)pa);
2426 const tree opb = *((const tree *)pb);
2427 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2428 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2429 basic_block bba;
2430 basic_block bbb;
2431
2432 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2433 return 0;
2434 else if (gimple_nop_p (opstmta))
2435 return -1;
2436 else if (gimple_nop_p (opstmtb))
2437 return 1;
2438
2439 bba = gimple_bb (opstmta);
2440 bbb = gimple_bb (opstmtb);
2441
2442 if (!bba && !bbb)
2443 return 0;
2444 else if (!bba)
2445 return -1;
2446 else if (!bbb)
2447 return 1;
2448
2449 if (bba == bbb)
2450 {
2451 if (gimple_code (opstmta) == GIMPLE_PHI
2452 && gimple_code (opstmtb) == GIMPLE_PHI)
2453 return 0;
2454 else if (gimple_code (opstmta) == GIMPLE_PHI)
2455 return -1;
2456 else if (gimple_code (opstmtb) == GIMPLE_PHI)
2457 return 1;
2458 return gimple_uid (opstmta) - gimple_uid (opstmtb);
2459 }
2460 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
2461 }
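/* For example (hypothetical ordering): a default definition sorts before
any block, PHIs sort before ordinary statements within the same block,
and statements in different blocks sort by their block's RPO number. */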
2462
2463 /* Sort an array containing members of a strongly connected component
2464 SCC so that the members are ordered by RPO number.
2465 This means that when the sort is complete, iterating through the
2466 array will give you the members in RPO order. */
2467
2468 static void
2469 sort_scc (VEC (tree, heap) *scc)
2470 {
2471 qsort (VEC_address (tree, scc),
2472 VEC_length (tree, scc),
2473 sizeof (tree),
2474 compare_ops);
2475 }
2476
2477 /* Process a strongly connected component in the SSA graph. */
2478
2479 static void
2480 process_scc (VEC (tree, heap) *scc)
2481 {
2482 /* If the SCC has a single member, just visit it. */
2483
2484 if (VEC_length (tree, scc) == 1)
2485 {
2486 tree use = VEC_index (tree, scc, 0);
2487 if (!VN_INFO (use)->use_processed)
2488 visit_use (use);
2489 }
2490 else
2491 {
2492 tree var;
2493 unsigned int i;
2494 unsigned int iterations = 0;
2495 bool changed = true;
2496
2497 /* Iterate over the SCC with the optimistic table until it stops
2498 changing. */
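/* A classic cycle this loop converges on (hypothetical names):

i_1 = PHI <0(preheader), i_2(latch)>
i_2 = i_1 + 0;

Starting from VN_TOP, the first iteration gives i_1 the value 0 (its
only non-TOP argument) and folds i_2 to 0; the second iteration changes
nothing, so both names leave the SCC value numbered to 0. */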
2499 current_info = optimistic_info;
2500 while (changed)
2501 {
2502 changed = false;
2503 iterations++;
2504 /* As we are value-numbering optimistically we have to
2505 clear the expression tables and the simplified expressions
2506 in each iteration until we converge. */
2507 htab_empty (optimistic_info->nary);
2508 htab_empty (optimistic_info->phis);
2509 htab_empty (optimistic_info->references);
2510 obstack_free (&optimistic_info->nary_obstack, NULL);
2511 gcc_obstack_init (&optimistic_info->nary_obstack);
2512 empty_alloc_pool (optimistic_info->phis_pool);
2513 empty_alloc_pool (optimistic_info->references_pool);
2514 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2515 VN_INFO (var)->expr = NULL_TREE;
2516 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2517 changed |= visit_use (var);
2518 }
2519
2520 statistics_histogram_event (cfun, "SCC iterations", iterations);
2521
2522 /* Finally, visit the SCC once using the valid table. */
2523 current_info = valid_info;
2524 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2525 visit_use (var);
2526 }
2527 }
2528
2529 DEF_VEC_O(ssa_op_iter);
2530 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
2531
2532 /* Pop the components of the found SCC for NAME off the SCC stack
2533 and process them. Returns true if all went well, false if
2534 we ran into resource limits. */
2535
2536 static bool
2537 extract_and_process_scc_for_name (tree name)
2538 {
2539 VEC (tree, heap) *scc = NULL;
2540 tree x;
2541
2542 /* Found an SCC, pop the components off the SCC stack and
2543 process them. */
2544 do
2545 {
2546 x = VEC_pop (tree, sccstack);
2547
2548 VN_INFO (x)->on_sccstack = false;
2549 VEC_safe_push (tree, heap, scc, x);
2550 } while (x != name);
2551
2552 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
2553 if (VEC_length (tree, scc)
2554 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
2555 {
2556 if (dump_file)
2557 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
2558 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
2559 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
2560 return false;
2561 }
2562
2563 if (VEC_length (tree, scc) > 1)
2564 sort_scc (scc);
2565
2566 if (dump_file && (dump_flags & TDF_DETAILS))
2567 print_scc (dump_file, scc);
2568
2569 process_scc (scc);
2570
2571 VEC_free (tree, heap, scc);
2572
2573 return true;
2574 }
2575
2576 /* Depth first search on NAME to discover and process SCCs in the SSA
2577 graph.
2578 Execution of this algorithm relies on the fact that the SCCs are
2579 popped off the stack in topological order.
2580 Returns true if successful, false if we stopped processing SCCs due
2581 to resource constraints. */
2582
2583 static bool
2584 DFS (tree name)
2585 {
2586 VEC(ssa_op_iter, heap) *itervec = NULL;
2587 VEC(tree, heap) *namevec = NULL;
2588 use_operand_p usep = NULL;
2589 gimple defstmt;
2590 tree use;
2591 ssa_op_iter iter;
2592
2593 start_over:
2594 /* SCC info */
2595 VN_INFO (name)->dfsnum = next_dfs_num++;
2596 VN_INFO (name)->visited = true;
2597 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
2598
2599 VEC_safe_push (tree, heap, sccstack, name);
2600 VN_INFO (name)->on_sccstack = true;
2601 defstmt = SSA_NAME_DEF_STMT (name);
2602
2603 /* Recursively DFS on our operands, looking for SCCs. */
2604 if (!gimple_nop_p (defstmt))
2605 {
2606 /* Push a new iterator. */
2607 if (gimple_code (defstmt) == GIMPLE_PHI)
2608 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
2609 else
2610 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
2611 }
2612 else
2613 iter.done = true;
2614
2615 while (1)
2616 {
2617 /* If we are done processing uses of a name, go up the stack
2618 of iterators and process SCCs as we found them. */
2619 if (op_iter_done (&iter))
2620 {
2621 /* See if we found an SCC. */
2622 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
2623 if (!extract_and_process_scc_for_name (name))
2624 {
2625 VEC_free (tree, heap, namevec);
2626 VEC_free (ssa_op_iter, heap, itervec);
2627 return false;
2628 }
2629
2630 /* Check if we are done. */
2631 if (VEC_empty (tree, namevec))
2632 {
2633 VEC_free (tree, heap, namevec);
2634 VEC_free (ssa_op_iter, heap, itervec);
2635 return true;
2636 }
2637
2638 /* Restore the last use walker and continue walking there. */
2639 use = name;
2640 name = VEC_pop (tree, namevec);
2641 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
2642 sizeof (ssa_op_iter));
2643 VEC_pop (ssa_op_iter, itervec);
2644 goto continue_walking;
2645 }
2646
2647 use = USE_FROM_PTR (usep);
2648
2649 /* Since we handle phi nodes, we will sometimes get
2650 invariants in the use expression. */
2651 if (TREE_CODE (use) == SSA_NAME)
2652 {
2653 if (! (VN_INFO (use)->visited))
2654 {
2655 /* Recurse by pushing the current use walking state on
2656 the stack and starting over. */
2657 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
2658 VEC_safe_push(tree, heap, namevec, name);
2659 name = use;
2660 goto start_over;
2661
2662 continue_walking:
2663 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
2664 VN_INFO (use)->low);
2665 }
2666 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
2667 && VN_INFO (use)->on_sccstack)
2668 {
2669 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
2670 VN_INFO (name)->low);
2671 }
2672 }
2673
2674 usep = op_iter_next_use (&iter);
2675 }
2676 }
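/* The walker above is an iterative form of Tarjan's SCC algorithm with
the recursion stack kept explicitly in NAMEVEC and ITERVEC. Below is a
minimal recursive sketch of the same low-link computation on a plain
adjacency list, kept under #if 0 and purely illustrative; the graph
representation and all identifiers are invented for the example. */
#if 0
#define NNODES 8

static int succ[NNODES][NNODES];  /* succ[v][i] = i'th successor of v.  */
static int nsucc[NNODES];         /* Number of successors of v.  */
static int dfsnum[NNODES];        /* 0 means not yet visited.  */
static int low[NNODES];
static int on_stack[NNODES];
static int stack[NNODES];
static int sp;
static int next_dfs = 1;

static void
tarjan_visit (int v)
{
  int i;

  dfsnum[v] = low[v] = next_dfs++;
  stack[sp++] = v;
  on_stack[v] = 1;

  for (i = 0; i < nsucc[v]; i++)
    {
      int w = succ[v][i];

      if (dfsnum[w] == 0)
	{
	  /* Tree edge: recurse, then merge the child's low-link.  */
	  tarjan_visit (w);
	  if (low[w] < low[v])
	    low[v] = low[w];
	}
      else if (on_stack[w] && dfsnum[w] < low[v])
	/* Back or cross edge into the SCC still on the stack.  */
	low[v] = dfsnum[w];
    }

  /* V is the root of an SCC; pop its members off the stack, as
     extract_and_process_scc_for_name does for SSA names.  */
  if (low[v] == dfsnum[v])
    {
      int x;
      do
	{
	  x = stack[--sp];
	  on_stack[x] = 0;
	}
      while (x != v);
    }
}
#endif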
2677
2678 /* Allocate a value number table. */
2679
2680 static void
2681 allocate_vn_table (vn_tables_t table)
2682 {
2683 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
2684 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
2685 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
2686 free_reference);
2687
2688 gcc_obstack_init (&table->nary_obstack);
2689 table->phis_pool = create_alloc_pool ("VN phis",
2690 sizeof (struct vn_phi_s),
2691 30);
2692 table->references_pool = create_alloc_pool ("VN references",
2693 sizeof (struct vn_reference_s),
2694 30);
2695 }
2696
2697 /* Free a value number table. */
2698
2699 static void
2700 free_vn_table (vn_tables_t table)
2701 {
2702 htab_delete (table->phis);
2703 htab_delete (table->nary);
2704 htab_delete (table->references);
2705 obstack_free (&table->nary_obstack, NULL);
2706 free_alloc_pool (table->phis_pool);
2707 free_alloc_pool (table->references_pool);
2708 }
2709
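/* Initialize the SCC value numbering data structures: the aux info
table, RPO numbers, and the valid and optimistic hash tables. */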
2710 static void
2711 init_scc_vn (void)
2712 {
2713 size_t i;
2714 int j;
2715 int *rpo_numbers_temp;
2716
2717 calculate_dominance_info (CDI_DOMINATORS);
2718 sccstack = NULL;
2719 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
2720 free);
2721
2722 constant_value_ids = BITMAP_ALLOC (NULL);
2723
2724 next_dfs_num = 1;
2725 next_value_id = 1;
2726
2727 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
2728 /* VEC_alloc doesn't actually grow it to the right size; it just
2729 preallocates the space to do so. */
2730 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
2731 gcc_obstack_init (&vn_ssa_aux_obstack);
2732
2733 shared_lookup_phiargs = NULL;
2734 shared_lookup_vops = NULL;
2735 shared_lookup_references = NULL;
2736 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2737 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2738 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
2739
2740 /* RPO_NUMBERS_TEMP[i] is the index of the i'th block in RPO order.
2741 We want to map block indices to RPO numbers, so we need to invert
2742 this array. */
2743 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
2744 rpo_numbers[rpo_numbers_temp[j]] = j;
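/* For example (hypothetical): rpo_numbers_temp = { 4, 2, 3 } yields
rpo_numbers[4] = 0, rpo_numbers[2] = 1 and rpo_numbers[3] = 2. */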
2745
2746 XDELETE (rpo_numbers_temp);
2747
2748 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
2749
2750 /* Create the VN_INFO structures, and initialize value numbers to
2751 TOP. */
2752 for (i = 0; i < num_ssa_names; i++)
2753 {
2754 tree name = ssa_name (i);
2755 if (name)
2756 {
2757 VN_INFO_GET (name)->valnum = VN_TOP;
2758 VN_INFO (name)->expr = NULL_TREE;
2759 VN_INFO (name)->value_id = 0;
2760 }
2761 }
2762
2763 renumber_gimple_stmt_uids ();
2764
2765 /* Create the valid and optimistic value numbering tables. */
2766 valid_info = XCNEW (struct vn_tables_s);
2767 allocate_vn_table (valid_info);
2768 optimistic_info = XCNEW (struct vn_tables_s);
2769 allocate_vn_table (optimistic_info);
2770 }
2771
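/* Free all structures used by SCC value numbering and release any
SSA names we inserted. */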
2772 void
2773 free_scc_vn (void)
2774 {
2775 size_t i;
2776
2777 htab_delete (constant_to_value_id);
2778 BITMAP_FREE (constant_value_ids);
2779 VEC_free (tree, heap, shared_lookup_phiargs);
2780 VEC_free (tree, gc, shared_lookup_vops);
2781 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
2782 XDELETEVEC (rpo_numbers);
2783
2784 for (i = 0; i < num_ssa_names; i++)
2785 {
2786 tree name = ssa_name (i);
2787 if (name
2788 && VN_INFO (name)->needs_insertion)
2789 release_ssa_name (name);
2790 }
2791 obstack_free (&vn_ssa_aux_obstack, NULL);
2792 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
2793
2794 VEC_free (tree, heap, sccstack);
2795 free_vn_table (valid_info);
2796 XDELETE (valid_info);
2797 free_vn_table (optimistic_info);
2798 XDELETE (optimistic_info);
2799 }
2800
2801 /* Set the value ids in the valid hash tables. */
2802
2803 static void
2804 set_hashtable_value_ids (void)
2805 {
2806 htab_iterator hi;
2807 vn_nary_op_t vno;
2808 vn_reference_t vr;
2809 vn_phi_t vp;
2810
2811 /* Now set the value ids of the things we had put in the hash
2812 tables. */
2813
2814 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
2815 vno, vn_nary_op_t, hi)
2816 {
2817 if (vno->result)
2818 {
2819 if (TREE_CODE (vno->result) == SSA_NAME)
2820 vno->value_id = VN_INFO (vno->result)->value_id;
2821 else if (is_gimple_min_invariant (vno->result))
2822 vno->value_id = get_or_alloc_constant_value_id (vno->result);
2823 }
2824 }
2825
2826 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
2827 vp, vn_phi_t, hi)
2828 {
2829 if (vp->result)
2830 {
2831 if (TREE_CODE (vp->result) == SSA_NAME)
2832 vp->value_id = VN_INFO (vp->result)->value_id;
2833 else if (is_gimple_min_invariant (vp->result))
2834 vp->value_id = get_or_alloc_constant_value_id (vp->result);
2835 }
2836 }
2837
2838 FOR_EACH_HTAB_ELEMENT (valid_info->references,
2839 vr, vn_reference_t, hi)
2840 {
2841 if (vr->result)
2842 {
2843 if (TREE_CODE (vr->result) == SSA_NAME)
2844 vr->value_id = VN_INFO (vr->result)->value_id;
2845 else if (is_gimple_min_invariant (vr->result))
2846 vr->value_id = get_or_alloc_constant_value_id (vr->result);
2847 }
2848 }
2849 }
2850
2851 /* Do SCCVN. Returns true if it finished, false if we bailed out
2852 due to resource constraints. */
2853
2854 bool
2855 run_scc_vn (bool may_insert_arg)
2856 {
2857 size_t i;
2858 tree param;
2859 bool changed = true;
2860
2861 may_insert = may_insert_arg;
2862
2863 init_scc_vn ();
2864 current_info = valid_info;
2865
2866 for (param = DECL_ARGUMENTS (current_function_decl);
2867 param;
2868 param = TREE_CHAIN (param))
2869 {
2870 if (gimple_default_def (cfun, param) != NULL)
2871 {
2872 tree def = gimple_default_def (cfun, param);
2873 SSA_VAL (def) = def;
2874 }
2875 }
2876
2877 for (i = 1; i < num_ssa_names; ++i)
2878 {
2879 tree name = ssa_name (i);
2880 if (name
2881 && VN_INFO (name)->visited == false
2882 && !has_zero_uses (name))
2883 if (!DFS (name))
2884 {
2885 free_scc_vn ();
2886 may_insert = false;
2887 return false;
2888 }
2889 }
2890
2891 /* Initialize the value ids. */
2892
2893 for (i = 1; i < num_ssa_names; ++i)
2894 {
2895 tree name = ssa_name (i);
2896 vn_ssa_aux_t info;
2897 if (!name)
2898 continue;
2899 info = VN_INFO (name);
2900 if (info->valnum == name)
2901 info->value_id = get_next_value_id ();
2902 else if (is_gimple_min_invariant (info->valnum))
2903 info->value_id = get_or_alloc_constant_value_id (info->valnum);
2904 }
2905
2906 /* Propagate the value ids until they stop changing. */
2907 while (changed)
2908 {
2909 changed = false;
2910 for (i = 1; i < num_ssa_names; ++i)
2911 {
2912 tree name = ssa_name (i);
2913 vn_ssa_aux_t info;
2914 if (!name)
2915 continue;
2916 info = VN_INFO (name);
2917 if (TREE_CODE (info->valnum) == SSA_NAME
2918 && info->valnum != name
2919 && info->value_id != VN_INFO (info->valnum)->value_id)
2920 {
2921 changed = true;
2922 info->value_id = VN_INFO (info->valnum)->value_id;
2923 }
2924 }
2925 }
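/* For example (hypothetical names): for the valnum chain
a_1 -> b_2 -> c_3 where only c_3 received a fresh value id above, the
first pass copies c_3's id to b_2 and the second pass copies it on
to a_1. */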
2926
2927 set_hashtable_value_ids ();
2928
2929 if (dump_file && (dump_flags & TDF_DETAILS))
2930 {
2931 fprintf (dump_file, "Value numbers:\n");
2932 for (i = 0; i < num_ssa_names; i++)
2933 {
2934 tree name = ssa_name (i);
2935 if (name
2936 && VN_INFO (name)->visited
2937 && SSA_VAL (name) != name)
2938 {
2939 print_generic_expr (dump_file, name, 0);
2940 fprintf (dump_file, " = ");
2941 print_generic_expr (dump_file, SSA_VAL (name), 0);
2942 fprintf (dump_file, "\n");
2943 }
2944 }
2945 }
2946
2947 may_insert = false;
2948 return true;
2949 }
2950
2951 /* Return the maximum value id we have ever seen. */
2952
2953 unsigned int
2954 get_max_value_id (void)
2955 {
2956 return next_value_id;
2957 }
2958
2959 /* Return the next unique value id. */
2960
2961 unsigned int
2962 get_next_value_id (void)
2963 {
2964 return next_value_id++;
2965 }
2966
2967
2968 /* Compare two expressions E1 and E2 and return true if they are equal. */
2969
2970 bool
2971 expressions_equal_p (tree e1, tree e2)
2972 {
2973 /* The obvious case. */
2974 if (e1 == e2)
2975 return true;
2976
2977 /* If only one of them is null, they cannot be equal. */
2978 if (!e1 || !e2)
2979 return false;
2980
2981 /* Recurse on elements of lists. */
2982 if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
2983 {
2984 tree lop1 = e1;
2985 tree lop2 = e2;
2986 for (lop1 = e1, lop2 = e2;
2987 lop1 || lop2;
2988 lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
2989 {
2990 if (!lop1 || !lop2)
2991 return false;
2992 if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
2993 return false;
2994 }
2995 return true;
2996 }
2997
2998 /* Now perform the actual comparison. */
2999 if (TREE_CODE (e1) == TREE_CODE (e2)
3000 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3001 return true;
3002
3003 return false;
3004 }
3005
3006 /* Sort the VUSE array so that we can do equality comparisons
3007 quicker on two vuse vecs. */
3008
3009 void
3010 sort_vuses (VEC (tree,gc) *vuses)
3011 {
3012 if (VEC_length (tree, vuses) > 1)
3013 qsort (VEC_address (tree, vuses),
3014 VEC_length (tree, vuses),
3015 sizeof (tree),
3016 operand_build_cmp);
3017 }
3018
3019 /* Sort the heap-allocated VUSE array so that we can do equality
3020 comparisons quicker on two vuse vecs. */
3021
3022 void
3023 sort_vuses_heap (VEC (tree,heap) *vuses)
3024 {
3025 if (VEC_length (tree, vuses) > 1)
3026 qsort (VEC_address (tree, vuses),
3027 VEC_length (tree, vuses),
3028 sizeof (tree),
3029 operand_build_cmp);
3030 }