gcc/tree-ssa-sccvn.c
/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "real.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight-line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores of the same value
   to the same address from the same starting memory state are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly whole
   structure copies.
*/
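
/* As a hedged illustration (this example is not part of this file;
   the SSA names and values are hypothetical), consider value
   numbering the loop-carried cycle

     i_1 = PHI <0 (preheader), i_3 (latch)>
     i_3 = i_1 + 0;

   The optimistic iteration starts with i_1 and i_3 at VN_TOP.  The
   PHI arguments are {0, VN_TOP}; ignoring VN_TOP, all arguments
   agree, so i_1 is optimistically given the value 0.  Then i_1 + 0
   simplifies to 0, so i_3 also gets value 0.  A second iteration
   reproduces the same values, the SCC has stabilized, and both names
   are proven to be 0 without enumerating any loop iterations.  */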

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

static bool may_insert;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}


/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
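
/* For example (illustrative only, names hypothetical): if x_5 has
   value number n_2 and n_2 is defined by n_2 = a_3 + 1, then
   vn_get_expr_for (x_5) builds and caches the expression a_3 + 1 as
   the representative for x_5; if n_2 were a PHI result or a default
   definition, n_2 itself would be returned instead.  */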

/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist, return 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT.  If none exists, create a new one
   and return it.  Otherwise return the existing one.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  vn_constant_t vc = XNEW (struct vn_constant_s);

  vc->hashcode = vn_hash_constant_with_type (constant);
  vc->constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, vc,
                                   vc->hashcode, INSERT);
  if (*slot)
    {
      free (vc);
      return ((vn_constant_t)*slot)->value_id;
    }
  vc->value_id = get_next_value_id ();
  *slot = vc;
  bitmap_set_bit (constant_value_ids, vc->value_id);
  return vc->value_id;
}
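
/* Illustrative usage sketch (not code from this file; the constant
   tree below is hypothetical):

     tree zero = build_int_cst (integer_type_node, 0);
     unsigned int id = get_or_alloc_constant_value_id (zero);
     gcc_assert (value_id_constant_p (id));

   The first call allocates a fresh value id and records it in
   constant_value_ids; a later call with an equal constant returns
   the same id via the constant_to_value_id hash table.  */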

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    result = vn_reference_op_compute_hash (vro, result);
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
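
/* Note the VUSE contributes to the hash only via the final addition
   of its SSA name version.  As a worked example (values
   hypothetical): if the operand vector hashes to H and the VUSE is
   .MEM_7, the reference hashes to H + 7.  vn_reference_lookup_2
   below relies on exactly this shape, re-targeting a reference to a
   new VUSE by subtracting the old version and adding the new one
   instead of rehashing the whole operand vector.  */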

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  int i;
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))
    return false;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
                             vro))
      return false;

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;
      tree base;

      base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      if (!base)
        base = build_int_cst (ptr_type_node, 0);

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (base);
      temp.op0 = base;
      temp.op1 = TMR_ORIGINAL (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
        {
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          /* The only operand is the address, which gets its own
             vn_reference_op_s structure.  */
          break;
        case MISALIGNED_INDIRECT_REF:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          /* If this is a reference to a union member, record the union
             member size as operand.  Do so only if we are doing
             expression insertion (during FRE), as PRE currently gets
             confused with this.  */
          if (may_insert
              && temp.op1 == NULL_TREE
              && TREE_CODE (DECL_CONTEXT (temp.op0)) == UNION_TYPE
              && integer_zerop (DECL_FIELD_OFFSET (temp.op0))
              && integer_zerop (DECL_FIELD_BIT_OFFSET (temp.op0))
              && host_integerp (DECL_SIZE (temp.op0), 0))
            temp.op0 = DECL_SIZE (temp.op0);
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
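
/* As a worked example (illustrative, not from this file): for the
   reference a.b[i_1] the loop above pushes, outermost first,

     { opcode = ARRAY_REF,     op0 = i_1, op1 = low bound, op2 = size }
     { opcode = COMPONENT_REF, op0 = FIELD_DECL b }
     { opcode = VAR_DECL,      op0 = a }

   i.e. each vn_reference_op_s describes one level of the access
   path, and the base object ends up as the last operand.  */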

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    {
      if (TREE_CODE (op->op0) == INTEGER_CST)
        size_tree = op->op0;
      else
        size_tree = DECL_SIZE (op->op0);
    }
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case CALL_EXPR:
        case ADDR_EXPR:
          return false;

        /* Record the base objects.  */
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case MISALIGNED_INDIRECT_REF:
          *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            /* Our union trick, done for offset zero only.  */
            if (TREE_CODE (field) == INTEGER_CST)
              ;
            else if (op->op1
                     || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  ref->base_alias_set = -1;

  return true;
}
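
/* Worked arithmetic example (hypothetical layout, assuming 8-bit
   units and a 4-byte int): for the ops of s.f[3], where field f is
   an int array placed at byte offset 4 and the array's lower bound
   is 0, the loop accumulates

     offset = 4 * BITS_PER_UNIT             (COMPONENT_REF f)
            + (3 - 0) * 4 * BITS_PER_UNIT   (ARRAY_REF, index 3)
            = 32 + 96 = 128 bits

   with size = max_size = 32 bits, describing a 4-byte access 16
   bytes into s.  */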

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  VEC(vn_reference_op_s, heap) *mem = NULL;
  vn_reference_op_t op;
  unsigned int i = *i_p;
  unsigned int j;

  /* Get ops for the addressed object.  */
  op = VEC_index (vn_reference_op_s, *ops, i);
  /* ??? If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
     around it to avoid later ICEs.  */
  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
    {
      vn_reference_op_s aref;
      tree dom;
      aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
      aref.opcode = ARRAY_REF;
      aref.op0 = integer_zero_node;
      if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
          && TYPE_MIN_VALUE (dom))
        aref.op0 = TYPE_MIN_VALUE (dom);
      aref.op1 = aref.op0;
      aref.op2 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op->op0)));
      VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
    }
  copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);

  /* Do the replacement - we should have at least one op in mem now.  */
  if (VEC_length (vn_reference_op_s, mem) == 1)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_ordered_remove (vn_reference_op_s, *ops, i);
      i--;
    }
  else if (VEC_length (vn_reference_op_s, mem) == 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
    }
  else if (VEC_length (vn_reference_op_s, mem) > 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
      /* ??? There is no VEC_splice.  */
      for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
        VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
    }
  else
    gcc_unreachable ();

  VEC_free (vn_reference_op_s, heap, mem);
  *i_p = i;
}

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
          /* If it transforms from an SSA_NAME to an address, fold with
             a preceding indirect reference.  */
          if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
              && VEC_index (vn_reference_op_s,
                            orig, i - 1)->opcode == INDIRECT_REF)
            {
              vn_reference_fold_indirect (&orig, &i);
              continue;
            }
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
    }

  return orig;
}
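
/* For instance (illustrative SSA names): if p_1 has been value
   numbered to &a, the ops for *p_1, namely

     { INDIRECT_REF } { SSA_NAME p_1 }

   are first valueized to { INDIRECT_REF } { ADDR_EXPR &a } and then
   folded by vn_reference_fold_indirect into the ops of plain a, so
   *p_1 and a receive the same hash and are unified.  */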

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REF and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree fndecl;
  tree base;
  HOST_WIDE_INT offset, maxsize;

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && is_gimple_call (def_stmt)
      && (fndecl = gimple_call_fndecl (def_stmt))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || INDIRECT_REF_P (gimple_assign_rhs1 (def_stmt))
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      /* See if the assignment kills REF.  */
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (!operand_equal_p (base, base2, 0)
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  */
      copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs, j)))
        {
          i--;
          j--;
        }

      VEC_free (vn_reference_op_s, heap, lhs);
      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
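
/* Example of the aggregate-copy translation above (illustrative
   GIMPLE, hypothetical SSA names):

     b = a;           <-- def_stmt, kills the looked-up reference
     x_1 = b.f;       <-- lookup of b.f arrives here via its VUSE

   The ops of b.f are { COMPONENT_REF f } { VAR_DECL b }; the LHS ops
   { VAR_DECL b } match them from the back, leaving the prefix
   { COMPONENT_REF f }, which is then pre-pended to the RHS ops
   { VAR_DECL a }.  The lookup continues for a.f, so x_1 can reuse a
   previously computed value of a.f.  */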

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  vn_reference_lookup_1 (&vr1, vnresult);

  if (!*vnresult
      && maywalk
      && vr1.vuse)
    {
      ao_ref r;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, bool maywalk,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);

  if (maywalk
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}


/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}
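
/* The operand swap above canonicalizes commutative operations, so
   that (illustrative SSA names) a_1 + b_2 and b_2 + a_1 are hashed
   with their operands in the same order, receive the same hashcode,
   and therefore fall into the same hash table slot and get the same
   value number.  */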

/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}

/* Lookup an n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = gimple_expr_type (stmt);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Insert an n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = gimple_expr_type (stmt);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
             + (INTEGRAL_TYPE_P (type)
                ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
        continue;
      result = iterative_hash_expr (phi1op, result);
    }

  return result;
}

/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
                               TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
        return false;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
        {
          tree phi2op = VEC_index (tree, vp2->phiargs, i);
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}
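
/* For example (illustrative SSA names), two PHIs in the same block

     x_1 = PHI <a_2 (e1), c_5 (e2)>
     y_3 = PHI <a_2 (e1), c_5 (e2)>

   hash and compare equal, so y_3 is value numbered to x_1.  Argument
   positions holding VN_TOP are skipped: during the optimistic
   iteration VN_TOP stands for a not-yet-computed value and is
   treated as compatible with anything, provided the hashcodes still
   match.  */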

static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}

/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
                                   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}


/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
              && (to == VN_TOP
                  || TREE_CODE (to) == SSA_NAME
                  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  currval = SSA_VAL (from);

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
1833
1834 /* Set all definitions in STMT to value number themselves.
1835 Return true if a value number changed. */
1836
1837 static bool
1838 defs_to_varying (gimple stmt)
1839 {
1840 bool changed = false;
1841 ssa_op_iter iter;
1842 def_operand_p defp;
1843
1844 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1845 {
1846 tree def = DEF_FROM_PTR (defp);
1847
1848 VN_INFO (def)->use_processed = true;
1849 changed |= set_ssa_val_to (def, def);
1850 }
1851 return changed;
1852 }
1853
1854 static bool expr_has_constants (tree expr);
1855 static tree valueize_expr (tree expr);
1856
1857 /* Visit a copy between LHS and RHS, return true if the value number
1858 changed. */
1859
1860 static bool
1861 visit_copy (tree lhs, tree rhs)
1862 {
1863 /* Follow chains of copies to their destination. */
1864 while (TREE_CODE (rhs) == SSA_NAME
1865 && SSA_VAL (rhs) != rhs)
1866 rhs = SSA_VAL (rhs);
1867
1868 /* The copy may have a more interesting constant-filled expression
1869 (we don't, since we know our RHS is just an SSA name). */
1870 if (TREE_CODE (rhs) == SSA_NAME)
1871 {
1872 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1873 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1874 }
1875
1876 return set_ssa_val_to (lhs, rhs);
1877 }
1878
1879 /* Visit a unary operator RHS, value number it, and return true if the
1880 value number of LHS has changed as a result. */
1881
1882 static bool
1883 visit_unary_op (tree lhs, gimple stmt)
1884 {
1885 bool changed = false;
1886 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1887
1888 if (result)
1889 {
1890 changed = set_ssa_val_to (lhs, result);
1891 }
1892 else
1893 {
1894 changed = set_ssa_val_to (lhs, lhs);
1895 vn_nary_op_insert_stmt (stmt, lhs);
1896 }
1897
1898 return changed;
1899 }
1900
1901 /* Visit a binary operator RHS, value number it, and return true if the
1902 value number of LHS has changed as a result. */
1903
1904 static bool
1905 visit_binary_op (tree lhs, gimple stmt)
1906 {
1907 bool changed = false;
1908 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1909
1910 if (result)
1911 {
1912 changed = set_ssa_val_to (lhs, result);
1913 }
1914 else
1915 {
1916 changed = set_ssa_val_to (lhs, lhs);
1917 vn_nary_op_insert_stmt (stmt, lhs);
1918 }
1919
1920 return changed;
1921 }
1922
1923 /* Visit a call STMT storing into LHS. Return true if the value number
1924 of the LHS has changed as a result. */
1925
1926 static bool
1927 visit_reference_op_call (tree lhs, gimple stmt)
1928 {
1929 bool changed = false;
1930 struct vn_reference_s vr1;
1931 tree result;
1932 tree vuse = gimple_vuse (stmt);
1933
1934 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1935 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
1936 vr1.type = gimple_expr_type (stmt);
1937 vr1.set = 0;
1938 vr1.hashcode = vn_reference_compute_hash (&vr1);
1939 result = vn_reference_lookup_1 (&vr1, NULL);
1940 if (result)
1941 {
1942 changed = set_ssa_val_to (lhs, result);
1943 if (TREE_CODE (result) == SSA_NAME
1944 && VN_INFO (result)->has_constants)
1945 VN_INFO (lhs)->has_constants = true;
1946 }
1947 else
1948 {
1949 void **slot;
1950 vn_reference_t vr2;
1951 changed = set_ssa_val_to (lhs, lhs);
1952 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
1953 vr2->vuse = vr1.vuse;
1954 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
1955 vr2->type = vr1.type;
1956 vr2->set = vr1.set;
1957 vr2->hashcode = vr1.hashcode;
1958 vr2->result = lhs;
1959 slot = htab_find_slot_with_hash (current_info->references,
1960 vr2, vr2->hashcode, INSERT);
1961 if (*slot)
1962 free_reference (*slot);
1963 *slot = vr2;
1964 }
1965
1966 return changed;
1967 }
1968
1969 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1970 and return true if the value number of the LHS has changed as a result. */
1971
1972 static bool
1973 visit_reference_op_load (tree lhs, tree op, gimple stmt)
1974 {
1975 bool changed = false;
1976 tree last_vuse;
1977 tree result;
1978
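/* Track the vuse the lookup below ends up using, so the insertion at
   the end can record the oldest memory state that still provides this
   value.  */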
1979 last_vuse = gimple_vuse (stmt);
1980 last_vuse_ptr = &last_vuse;
1981 result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
1982 last_vuse_ptr = NULL;
1983
1984 /* If we have a VIEW_CONVERT_EXPR, try looking up its operand as it
1985 might be stored in a different type. */
1986 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
1987 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
1988 true, NULL);
1989
1990 /* We handle type-punning through unions by value-numbering based
1991 on the offset and size of the access. Be prepared to handle a
1992 type mismatch here by creating a VIEW_CONVERT_EXPR. */
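/* For example (a sketch): with union { int i; float f; } u, a load of
   u.f after a store to u.i finds the stored value at the same offset
   and size; the result is then wrapped in VIEW_CONVERT_EXPR <float>
   below to give it the type of the load.  */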
1993 if (result
1994 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
1995 {
1996 /* We will be setting the value number of lhs to the value number
1997 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
1998 So first simplify and lookup this expression to see if it
1999 is already available. */
2000 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2001 if ((CONVERT_EXPR_P (val)
2002 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2003 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2004 {
2005 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2006 if ((CONVERT_EXPR_P (tem)
2007 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2008 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2009 TREE_TYPE (val), tem)))
2010 val = tem;
2011 }
2012 result = val;
2013 if (!is_gimple_min_invariant (val)
2014 && TREE_CODE (val) != SSA_NAME)
2015 result = vn_nary_op_lookup (val, NULL);
2016 /* If the expression is not yet available, value-number lhs to
2017 a new SSA_NAME we create. */
2018 if (!result && may_insert)
2019 {
2020 result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
2021 /* Initialize value-number information properly. */
2022 VN_INFO_GET (result)->valnum = result;
2023 VN_INFO (result)->value_id = get_next_value_id ();
2024 VN_INFO (result)->expr = val;
2025 VN_INFO (result)->has_constants = expr_has_constants (val);
2026 VN_INFO (result)->needs_insertion = true;
2027 /* As all "inserted" statements are singleton SCCs, insert
2028 into the valid table. This is strictly needed to
2029 avoid re-generating new value SSA_NAMEs for the same
2030 expression over and over during SCC iteration (the
2031 optimistic table gets cleared after each iteration).
2032 We do not need to insert into the optimistic table, as
2033 lookups there will fall back to the valid table. */
2034 if (current_info == optimistic_info)
2035 {
2036 current_info = valid_info;
2037 vn_nary_op_insert (val, result);
2038 current_info = optimistic_info;
2039 }
2040 else
2041 vn_nary_op_insert (val, result);
2042 if (dump_file && (dump_flags & TDF_DETAILS))
2043 {
2044 fprintf (dump_file, "Inserting name ");
2045 print_generic_expr (dump_file, result, 0);
2046 fprintf (dump_file, " for expression ");
2047 print_generic_expr (dump_file, val, 0);
2048 fprintf (dump_file, "\n");
2049 }
2050 }
2051 }
2052
2053 if (result)
2054 {
2055 changed = set_ssa_val_to (lhs, result);
2056 if (TREE_CODE (result) == SSA_NAME
2057 && VN_INFO (result)->has_constants)
2058 {
2059 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2060 VN_INFO (lhs)->has_constants = true;
2061 }
2062 }
2063 else
2064 {
2065 changed = set_ssa_val_to (lhs, lhs);
2066 vn_reference_insert (op, lhs, last_vuse);
2067 }
2068
2069 return changed;
2070 }
2071
2072
2073 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2074 and return true if the value number of the LHS has changed as a result. */
2075
2076 static bool
2077 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2078 {
2079 bool changed = false;
2080 tree result;
2081 bool resultsame = false;
2082
2083 /* First we want to look up using the *vuses* from the store and see
2084 if the last store to this location with the same address had the
2085 same value.
2086
2087 The vuses represent the memory state before the store. If the
2088 memory state, address, and value of the store are the same as
2089 those of the last store to this location, then this store will
2090 produce the same memory state as that store.
2091
2092 In this case the vdef versions for this store are value numbered to those
2093 vuse versions, since they represent the same memory state after
2094 this store.
2095
2096 Otherwise, the vdefs for the store are used when inserting into
2097 the table, since the store generates a new memory state. */
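/* A sketch of the redundant-store case in GIMPLE form:

     # .MEM_3 = VDEF <.MEM_2>
     *p_1 = x_4;
     ...
     # .MEM_5 = VDEF <.MEM_3>
     *p_1 = x_4;

   The lookup of *p_1 with vuse .MEM_3 finds x_4, so the second store
   changes nothing and .MEM_5 is value numbered to the value of
   .MEM_3.  */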
2098
2099 result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);
2100
2101 if (result)
2102 {
2103 if (TREE_CODE (result) == SSA_NAME)
2104 result = SSA_VAL (result);
2105 if (TREE_CODE (op) == SSA_NAME)
2106 op = SSA_VAL (op);
2107 resultsame = expressions_equal_p (result, op);
2108 }
2109
2110 if (!result || !resultsame)
2111 {
2112 tree vdef;
2113
2114 if (dump_file && (dump_flags & TDF_DETAILS))
2115 {
2116 fprintf (dump_file, "No store match\n");
2117 fprintf (dump_file, "Value numbering store ");
2118 print_generic_expr (dump_file, lhs, 0);
2119 fprintf (dump_file, " to ");
2120 print_generic_expr (dump_file, op, 0);
2121 fprintf (dump_file, "\n");
2122 }
2123 /* Have to set value numbers before insert, since insert is
2124 going to valueize the references in-place. */
2125 if ((vdef = gimple_vdef (stmt)))
2126 {
2127 VN_INFO (vdef)->use_processed = true;
2128 changed |= set_ssa_val_to (vdef, vdef);
2129 }
2130
2131 /* Do not insert structure copies into the tables. */
2132 if (is_gimple_min_invariant (op)
2133 || is_gimple_reg (op))
2134 vn_reference_insert (lhs, op, vdef);
2135 }
2136 else
2137 {
2138 /* We had a match, so value number the vdef to have the value
2139 number of the vuse it came from. */
2140 tree def, use;
2141
2142 if (dump_file && (dump_flags & TDF_DETAILS))
2143 fprintf (dump_file, "Store matched earlier value, "
2144 "value numbering store vdefs to matching vuses.\n");
2145
2146 def = gimple_vdef (stmt);
2147 use = gimple_vuse (stmt);
2148
2149 VN_INFO (def)->use_processed = true;
2150 changed |= set_ssa_val_to (def, SSA_VAL (use));
2151 }
2152
2153 return changed;
2154 }
2155
2156 /* Visit and value number PHI, return true if the value number
2157 changed. */
2158
2159 static bool
2160 visit_phi (gimple phi)
2161 {
2162 bool changed = false;
2163 tree result;
2164 tree sameval = VN_TOP;
2165 bool allsame = true;
2166 unsigned i;
2167
2168 /* TODO: We could check for this in init_scc_vn, and replace this
2169 with a gcc_assert. */
2170 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2171 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2172
2173 /* See if all non-TOP arguments have the same value. TOP is
2174 equivalent to everything, so we can ignore it. */
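/* E.g. x_3 = PHI <y_2(3), y_2(4)> value numbers to y_2; so does
   x_3 = PHI <y_2(3), x_3(4)> while x_3 is still optimistically
   VN_TOP.  */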
2175 for (i = 0; i < gimple_phi_num_args (phi); i++)
2176 {
2177 tree def = PHI_ARG_DEF (phi, i);
2178
2179 if (TREE_CODE (def) == SSA_NAME)
2180 def = SSA_VAL (def);
2181 if (def == VN_TOP)
2182 continue;
2183 if (sameval == VN_TOP)
2184 {
2185 sameval = def;
2186 }
2187 else
2188 {
2189 if (!expressions_equal_p (def, sameval))
2190 {
2191 allsame = false;
2192 break;
2193 }
2194 }
2195 }
2196
2197 /* If all arguments value numbered to the same value, the PHI node
2198 has that value. */
2199 if (allsame)
2200 {
2201 if (is_gimple_min_invariant (sameval))
2202 {
2203 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2204 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2205 }
2206 else
2207 {
2208 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2209 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2210 }
2211
2212 if (TREE_CODE (sameval) == SSA_NAME)
2213 return visit_copy (PHI_RESULT (phi), sameval);
2214
2215 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2216 }
2217
2218 /* Otherwise, see if it is equivalent to a phi node in this block. */
2219 result = vn_phi_lookup (phi);
2220 if (result)
2221 {
2222 if (TREE_CODE (result) == SSA_NAME)
2223 changed = visit_copy (PHI_RESULT (phi), result);
2224 else
2225 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2226 }
2227 else
2228 {
2229 vn_phi_insert (phi, PHI_RESULT (phi));
2230 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2231 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2232 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2233 }
2234
2235 return changed;
2236 }
2237
2238 /* Return true if EXPR contains constants. */
2239
2240 static bool
2241 expr_has_constants (tree expr)
2242 {
2243 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2244 {
2245 case tcc_unary:
2246 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2247
2248 case tcc_binary:
2249 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2250 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2251 /* Constants inside reference ops are rarely interesting, but
2252 it can take a lot of looking to find them. */
2253 case tcc_reference:
2254 case tcc_declaration:
2255 return false;
2256 default:
2257 return is_gimple_min_invariant (expr);
2258 }
2259 return false;
2260 }
2261
2262 /* Return true if STMT contains constants. */
2263
2264 static bool
2265 stmt_has_constants (gimple stmt)
2266 {
2267 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2268 return false;
2269
2270 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2271 {
2272 case GIMPLE_UNARY_RHS:
2273 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2274
2275 case GIMPLE_BINARY_RHS:
2276 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2277 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2278 case GIMPLE_SINGLE_RHS:
2279 /* Constants inside reference ops are rarely interesting, but
2280 it can take a lot of looking to find them. */
2281 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2282 default:
2283 gcc_unreachable ();
2284 }
2285 return false;
2286 }
2287
2288 /* Replace SSA_NAMEs in EXPR with their value numbers, and return the
2289 result.
2290 This is performed in place. */
2291
2292 static tree
2293 valueize_expr (tree expr)
2294 {
2295 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2296 {
2297 case tcc_unary:
2298 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2299 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2300 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2301 break;
2302 case tcc_binary:
2303 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2304 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2305 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2306 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2307 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2308 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2309 break;
2310 default:
2311 break;
2312 }
2313 return expr;
2314 }
2315
2316 /* Simplify the binary expression on the RHS of STMT, and return the
2317 result if simplified. */
2318
2319 static tree
2320 simplify_binary_expression (gimple stmt)
2321 {
2322 tree result = NULL_TREE;
2323 tree op0 = gimple_assign_rhs1 (stmt);
2324 tree op1 = gimple_assign_rhs2 (stmt);
2325
2326 /* This will not catch every single case we could combine, but will
2327 catch those with constants. The goal here is to combine
2328 constants between expressions while avoiding infinite
2329 expansion of expressions during simplification. */
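/* A sketch of the intended combining: given

     a_1 = b_2 + 1;
     c_3 = a_1 + 1;

   a_1 has a constant-containing expression, so op0 becomes b_2 + 1
   and the fold_binary call below can reassociate the whole RHS of
   c_3 to b_2 + 2.  */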
2330 if (TREE_CODE (op0) == SSA_NAME)
2331 {
2332 if (VN_INFO (op0)->has_constants
2333 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2334 op0 = valueize_expr (vn_get_expr_for (op0));
2335 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2336 op0 = SSA_VAL (op0);
2337 }
2338
2339 if (TREE_CODE (op1) == SSA_NAME)
2340 {
2341 if (VN_INFO (op1)->has_constants)
2342 op1 = valueize_expr (vn_get_expr_for (op1));
2343 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2344 op1 = SSA_VAL (op1);
2345 }
2346
2347 /* Avoid folding if nothing changed. */
2348 if (op0 == gimple_assign_rhs1 (stmt)
2349 && op1 == gimple_assign_rhs2 (stmt))
2350 return NULL_TREE;
2351
2352 fold_defer_overflow_warnings ();
2353
2354 result = fold_binary (gimple_assign_rhs_code (stmt),
2355 gimple_expr_type (stmt), op0, op1);
2356 if (result)
2357 STRIP_USELESS_TYPE_CONVERSION (result);
2358
2359 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2360 stmt, 0);
2361
2362 /* Make sure the result is not a complex expression consisting
2363 of operators of operators (i.e. (a + b) + (a + c)).
2364 Otherwise, we will end up with unbounded expressions if
2365 fold does anything at all. */
2366 if (result && valid_gimple_rhs_p (result))
2367 return result;
2368
2369 return NULL_TREE;
2370 }
2371
2372 /* Simplify the unary expression on the RHS of STMT, and return the
2373 result if simplified. */
2374
2375 static tree
2376 simplify_unary_expression (gimple stmt)
2377 {
2378 tree result = NULL_TREE;
2379 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2380
2381 /* We handle some tcc_reference codes here that are all
2382 GIMPLE_ASSIGN_SINGLE codes. */
2383 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2384 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2385 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2386 op0 = TREE_OPERAND (op0, 0);
2387
2388 if (TREE_CODE (op0) != SSA_NAME)
2389 return NULL_TREE;
2390
2391 orig_op0 = op0;
2392 if (VN_INFO (op0)->has_constants)
2393 op0 = valueize_expr (vn_get_expr_for (op0));
2394 else if (gimple_assign_cast_p (stmt)
2395 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2396 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2397 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2398 {
2399 /* We want to do tree-combining on conversion-like expressions.
2400 Make sure we feed only SSA_NAMEs or constants to fold though. */
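/* E.g. if the expression for op0 is (short) i_2 and STMT is a cast
   to int, handing (short) i_2 to fold_unary below may let it combine
   the two conversions into one.  */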
2401 tree tem = valueize_expr (vn_get_expr_for (op0));
2402 if (UNARY_CLASS_P (tem)
2403 || BINARY_CLASS_P (tem)
2404 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2405 || TREE_CODE (tem) == SSA_NAME
2406 || is_gimple_min_invariant (tem))
2407 op0 = tem;
2408 }
2409
2410 /* Avoid folding if nothing changed. */
2411 if (op0 == orig_op0)
2412 return NULL_TREE;
2413
2414 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2415 gimple_expr_type (stmt), op0);
2416 if (result)
2417 {
2418 STRIP_USELESS_TYPE_CONVERSION (result);
2419 if (valid_gimple_rhs_p (result))
2420 return result;
2421 }
2422
2423 return NULL_TREE;
2424 }
2425
2426 /* Try to simplify RHS using equivalences and constant folding. */
2427
2428 static tree
2429 try_to_simplify (gimple stmt)
2430 {
2431 tree tem;
2432
2433 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
2434 in this case; there is no point in doing extra work. */
2435 if (gimple_assign_copy_p (stmt)
2436 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2437 return NULL_TREE;
2438
2439 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2440 {
2441 case tcc_declaration:
2442 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2443 if (tem)
2444 return tem;
2445 break;
2446
2447 case tcc_reference:
2448 /* Do not do full-blown reference lookup here, but simplify
2449 reads from constant aggregates. */
2450 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2451 if (tem)
2452 return tem;
2453
2454 /* Fallthrough for some codes that can operate on registers. */
2455 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2456 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2457 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2458 break;
2459 /* We could do a little more with unary ops, if they expand
2460 into binary ops, but it's debatable whether it is worth it. */
2461 case tcc_unary:
2462 return simplify_unary_expression (stmt);
2464 case tcc_comparison:
2465 case tcc_binary:
2466 return simplify_binary_expression (stmt);
2468 default:
2469 break;
2470 }
2471
2472 return NULL_TREE;
2473 }
2474
2475 /* Visit and value number USE, return true if the value number
2476 changed. */
2477
2478 static bool
2479 visit_use (tree use)
2480 {
2481 bool changed = false;
2482 gimple stmt = SSA_NAME_DEF_STMT (use);
2483
2484 VN_INFO (use)->use_processed = true;
2485
2486 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2487 if (dump_file && (dump_flags & TDF_DETAILS)
2488 && !SSA_NAME_IS_DEFAULT_DEF (use))
2489 {
2490 fprintf (dump_file, "Value numbering ");
2491 print_generic_expr (dump_file, use, 0);
2492 fprintf (dump_file, " stmt = ");
2493 print_gimple_stmt (dump_file, stmt, 0, 0);
2494 }
2495
2496 /* Handle uninitialized uses. */
2497 if (SSA_NAME_IS_DEFAULT_DEF (use))
2498 changed = set_ssa_val_to (use, use);
2499 else
2500 {
2501 if (gimple_code (stmt) == GIMPLE_PHI)
2502 changed = visit_phi (stmt);
2503 else if (!gimple_has_lhs (stmt)
2504 || gimple_has_volatile_ops (stmt)
2505 || stmt_could_throw_p (stmt))
2506 changed = defs_to_varying (stmt);
2507 else if (is_gimple_assign (stmt))
2508 {
2509 tree lhs = gimple_assign_lhs (stmt);
2510 tree simplified;
2511
2512 /* Shortcut for copies. Simplifying copies is pointless,
2513 since we copy the expression and value they represent. */
2514 if (gimple_assign_copy_p (stmt)
2515 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2516 && TREE_CODE (lhs) == SSA_NAME)
2517 {
2518 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2519 goto done;
2520 }
2521 simplified = try_to_simplify (stmt);
2522 if (simplified)
2523 {
2524 if (dump_file && (dump_flags & TDF_DETAILS))
2525 {
2526 fprintf (dump_file, "RHS ");
2527 print_gimple_expr (dump_file, stmt, 0, 0);
2528 fprintf (dump_file, " simplified to ");
2529 print_generic_expr (dump_file, simplified, 0);
2530 if (TREE_CODE (lhs) == SSA_NAME)
2531 fprintf (dump_file, " has constants %d\n",
2532 expr_has_constants (simplified));
2533 else
2534 fprintf (dump_file, "\n");
2535 }
2536 }
2537 /* Setting value numbers to constants will occasionally
2538 screw up phi congruence because constants are not
2539 uniquely associated with a single ssa name that can be
2540 looked up. */
2541 if (simplified
2542 && is_gimple_min_invariant (simplified)
2543 && TREE_CODE (lhs) == SSA_NAME)
2544 {
2545 VN_INFO (lhs)->expr = simplified;
2546 VN_INFO (lhs)->has_constants = true;
2547 changed = set_ssa_val_to (lhs, simplified);
2548 goto done;
2549 }
2550 else if (simplified
2551 && TREE_CODE (simplified) == SSA_NAME
2552 && TREE_CODE (lhs) == SSA_NAME)
2553 {
2554 changed = visit_copy (lhs, simplified);
2555 goto done;
2556 }
2557 else if (simplified)
2558 {
2559 if (TREE_CODE (lhs) == SSA_NAME)
2560 {
2561 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2562 /* We have to unshare the expression or else
2563 valueizing may change the IL stream. */
2564 VN_INFO (lhs)->expr = unshare_expr (simplified);
2565 }
2566 }
2567 else if (stmt_has_constants (stmt)
2568 && TREE_CODE (lhs) == SSA_NAME)
2569 VN_INFO (lhs)->has_constants = true;
2570 else if (TREE_CODE (lhs) == SSA_NAME)
2571 {
2572 /* We reset expr and constantness here because we may
2573 have been value numbering optimistically, and
2574 iterating. They may become non-constant in this case,
2575 even if they were optimistically constant. */
2576
2577 VN_INFO (lhs)->has_constants = false;
2578 VN_INFO (lhs)->expr = NULL_TREE;
2579 }
2580
2581 if ((TREE_CODE (lhs) == SSA_NAME
2582 /* We can substitute SSA_NAMEs that are live over
2583 abnormal edges with their constant value. */
2584 && !(gimple_assign_copy_p (stmt)
2585 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2586 && !(simplified
2587 && is_gimple_min_invariant (simplified))
2588 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2589 /* Stores or copies from SSA_NAMEs that are live over
2590 abnormal edges are a problem. */
2591 || (gimple_assign_single_p (stmt)
2592 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2593 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
2594 changed = defs_to_varying (stmt);
2595 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2596 {
2597 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2598 }
2599 else if (TREE_CODE (lhs) == SSA_NAME)
2600 {
2601 if ((gimple_assign_copy_p (stmt)
2602 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2603 || (simplified
2604 && is_gimple_min_invariant (simplified)))
2605 {
2606 VN_INFO (lhs)->has_constants = true;
2607 if (simplified)
2608 changed = set_ssa_val_to (lhs, simplified);
2609 else
2610 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
2611 }
2612 else
2613 {
2614 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2615 {
2616 case GIMPLE_UNARY_RHS:
2617 changed = visit_unary_op (lhs, stmt);
2618 break;
2619 case GIMPLE_BINARY_RHS:
2620 changed = visit_binary_op (lhs, stmt);
2621 break;
2622 case GIMPLE_SINGLE_RHS:
2623 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2624 {
2625 case tcc_reference:
2626 /* VOP-less references can go through unary case. */
2627 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2628 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2629 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2630 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2631 {
2632 changed = visit_unary_op (lhs, stmt);
2633 break;
2634 }
2635 /* Fallthrough. */
2636 case tcc_declaration:
2637 changed = visit_reference_op_load
2638 (lhs, gimple_assign_rhs1 (stmt), stmt);
2639 break;
2640 case tcc_expression:
2641 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2642 {
2643 changed = visit_unary_op (lhs, stmt);
2644 break;
2645 }
2646 /* Fallthrough. */
2647 default:
2648 changed = defs_to_varying (stmt);
2649 }
2650 break;
2651 default:
2652 changed = defs_to_varying (stmt);
2653 break;
2654 }
2655 }
2656 }
2657 else
2658 changed = defs_to_varying (stmt);
2659 }
2660 else if (is_gimple_call (stmt))
2661 {
2662 tree lhs = gimple_call_lhs (stmt);
2663
2664 /* ??? We could try to simplify calls. */
2665
2666 if (stmt_has_constants (stmt)
2667 && TREE_CODE (lhs) == SSA_NAME)
2668 VN_INFO (lhs)->has_constants = true;
2669 else if (TREE_CODE (lhs) == SSA_NAME)
2670 {
2671 /* We reset expr and constantness here because we may
2672 have been value numbering optimistically, and
2673 iterating. They may become non-constant in this case,
2674 even if they were optimistically constant. */
2675 VN_INFO (lhs)->has_constants = false;
2676 VN_INFO (lhs)->expr = NULL_TREE;
2677 }
2678
2679 if (TREE_CODE (lhs) == SSA_NAME
2680 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2681 changed = defs_to_varying (stmt);
2682 /* ??? We should handle stores from calls. */
2683 else if (TREE_CODE (lhs) == SSA_NAME)
2684 {
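/* Only const and pure calls compute their result from the arguments
   and (for pure) the memory state alone, so only those are worth
   value numbering; other calls are varying.  */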
2685 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2686 changed = visit_reference_op_call (lhs, stmt);
2687 else
2688 changed = defs_to_varying (stmt);
2689 }
2690 else
2691 changed = defs_to_varying (stmt);
2692 }
2693 }
2694 done:
2695 return changed;
2696 }
2697
2698 /* Compare two operands by reverse postorder index. Ties within a
block are broken by placing PHIs first, then ordering by statement
UID, then by SSA name version. */
2699
2700 static int
2701 compare_ops (const void *pa, const void *pb)
2702 {
2703 const tree opa = *((const tree *)pa);
2704 const tree opb = *((const tree *)pb);
2705 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2706 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2707 basic_block bba;
2708 basic_block bbb;
2709
2710 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2711 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2712 else if (gimple_nop_p (opstmta))
2713 return -1;
2714 else if (gimple_nop_p (opstmtb))
2715 return 1;
2716
2717 bba = gimple_bb (opstmta);
2718 bbb = gimple_bb (opstmtb);
2719
2720 if (!bba && !bbb)
2721 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2722 else if (!bba)
2723 return -1;
2724 else if (!bbb)
2725 return 1;
2726
2727 if (bba == bbb)
2728 {
2729 if (gimple_code (opstmta) == GIMPLE_PHI
2730 && gimple_code (opstmtb) == GIMPLE_PHI)
2731 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2732 else if (gimple_code (opstmta) == GIMPLE_PHI)
2733 return -1;
2734 else if (gimple_code (opstmtb) == GIMPLE_PHI)
2735 return 1;
2736 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
2737 return gimple_uid (opstmta) - gimple_uid (opstmtb);
2738 else
2739 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2740 }
2741 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
2742 }
2743
2744 /* Sort an array containing members of a strongly connected component
2745 SCC so that the members are ordered by RPO number.
2746 This means that when the sort is complete, iterating through the
2747 array will give you the members in RPO order. */
2748
2749 static void
2750 sort_scc (VEC (tree, heap) *scc)
2751 {
2752 qsort (VEC_address (tree, scc),
2753 VEC_length (tree, scc),
2754 sizeof (tree),
2755 compare_ops);
2756 }
2757
2758 /* Insert the no-longer-used nary *ENTRY into the current hash table. */
2759
2760 static int
2761 copy_nary (void **entry, void *data ATTRIBUTE_UNUSED)
2762 {
2763 vn_nary_op_t onary = (vn_nary_op_t) *entry;
2764 size_t size = (sizeof (struct vn_nary_op_s)
2765 - sizeof (tree) * (4 - onary->length));
2766 vn_nary_op_t nary = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
2767 size);
2768 void **slot;
2769 memcpy (nary, onary, size);
2770 slot = htab_find_slot_with_hash (current_info->nary, nary, nary->hashcode,
2771 INSERT);
2772 gcc_assert (!*slot);
2773 *slot = nary;
2774 return 1;
2775 }
2776
2777 /* Insert the no-longer-used phi *ENTRY into the current hash table. */
2778
2779 static int
2780 copy_phis (void **entry, void *data ATTRIBUTE_UNUSED)
2781 {
2782 vn_phi_t ophi = (vn_phi_t) *entry;
2783 vn_phi_t phi = (vn_phi_t) pool_alloc (current_info->phis_pool);
2784 void **slot;
2785 memcpy (phi, ophi, sizeof (*phi));
2786 ophi->phiargs = NULL;
2787 slot = htab_find_slot_with_hash (current_info->phis, phi, phi->hashcode,
2788 INSERT);
2789 *slot = phi;
2790 return 1;
2791 }
2792
2793 /* Insert the no-longer-used reference *ENTRY into the current hash
table. */
2794
2795 static int
2796 copy_references (void **entry, void *data ATTRIBUTE_UNUSED)
2797 {
2798 vn_reference_t oref = (vn_reference_t) *entry;
2799 vn_reference_t ref;
2800 void **slot;
2801 ref = (vn_reference_t) pool_alloc (current_info->references_pool);
2802 memcpy (ref, oref, sizeof (*ref));
2803 oref->operands = NULL;
2804 slot = htab_find_slot_with_hash (current_info->references, ref, ref->hashcode,
2805 INSERT);
2806 if (*slot)
2807 free_reference (*slot);
2808 *slot = ref;
2809 return 1;
2810 }
2811
2812 /* Process a strongly connected component in the SSA graph. */
2813
2814 static void
2815 process_scc (VEC (tree, heap) *scc)
2816 {
2817 /* If the SCC has a single member, just visit it. */
2818
2819 if (VEC_length (tree, scc) == 1)
2820 {
2821 tree use = VEC_index (tree, scc, 0);
2822 if (!VN_INFO (use)->use_processed)
2823 visit_use (use);
2824 }
2825 else
2826 {
2827 tree var;
2828 unsigned int i;
2829 unsigned int iterations = 0;
2830 bool changed = true;
2831
2832 /* Iterate over the SCC with the optimistic table until it stops
2833 changing. */
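/* E.g. for the cycle i_1 = PHI <0(2), i_2(3)>; i_2 = i_1 + 0; the
   optimistic assumption i_1 == 0 values i_2 to 0 as well, which the
   next iteration confirms, so both converge to the value 0.  */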
2834 current_info = optimistic_info;
2835 while (changed)
2836 {
2837 changed = false;
2838 iterations++;
2839 /* As we are value-numbering optimistically we have to
2840 clear the expression tables and the simplified expressions
2841 in each iteration until we converge. */
2842 htab_empty (optimistic_info->nary);
2843 htab_empty (optimistic_info->phis);
2844 htab_empty (optimistic_info->references);
2845 obstack_free (&optimistic_info->nary_obstack, NULL);
2846 gcc_obstack_init (&optimistic_info->nary_obstack);
2847 empty_alloc_pool (optimistic_info->phis_pool);
2848 empty_alloc_pool (optimistic_info->references_pool);
2849 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2850 VN_INFO (var)->expr = NULL_TREE;
2851 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2852 changed |= visit_use (var);
2853 }
2854
2855 statistics_histogram_event (cfun, "SCC iterations", iterations);
2856
2857 /* Finally, copy the contents of the no longer used optimistic
2858 table to the valid table. */
2859 current_info = valid_info;
2860 htab_traverse (optimistic_info->nary, copy_nary, NULL);
2861 htab_traverse (optimistic_info->phis, copy_phis, NULL);
2862 htab_traverse (optimistic_info->references, copy_references, NULL);
2863 }
2864 }
2865
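/* Vector types for ssa_op_iter, used by the iterative DFS below to
   save and restore in-progress operand walks.  */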
2866 DEF_VEC_O(ssa_op_iter);
2867 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
2868
2869 /* Pop the components of the found SCC for NAME off the SCC stack
2870 and process them. Returns true if all went well, false if
2871 we run into resource limits. */
2872
2873 static bool
2874 extract_and_process_scc_for_name (tree name)
2875 {
2876 VEC (tree, heap) *scc = NULL;
2877 tree x;
2878
2879 /* Found an SCC, pop the components off the SCC stack and
2880 process them. */
2881 do
2882 {
2883 x = VEC_pop (tree, sccstack);
2884
2885 VN_INFO (x)->on_sccstack = false;
2886 VEC_safe_push (tree, heap, scc, x);
2887 } while (x != name);
2888
2889 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
2890 if (VEC_length (tree, scc)
2891 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
2892 {
2893 if (dump_file)
2894 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
2895 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
2896 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
2897 return false;
2898 }
2899
2900 if (VEC_length (tree, scc) > 1)
2901 sort_scc (scc);
2902
2903 if (dump_file && (dump_flags & TDF_DETAILS))
2904 print_scc (dump_file, scc);
2905
2906 process_scc (scc);
2907
2908 VEC_free (tree, heap, scc);
2909
2910 return true;
2911 }
2912
2913 /* Depth-first search on NAME to discover and process SCCs in the SSA
2914 graph. This is Tarjan's SCC algorithm, implemented iteratively
2915 with an explicit stack of names and operand iterators.
2916 Execution of this algorithm relies on the fact that the SCCs are
2917 popped off the stack in topological order.
2918 Returns true if successful, false if we stopped processing SCCs due
2919 to resource constraints. */
2919
2920 static bool
2921 DFS (tree name)
2922 {
2923 VEC(ssa_op_iter, heap) *itervec = NULL;
2924 VEC(tree, heap) *namevec = NULL;
2925 use_operand_p usep = NULL;
2926 gimple defstmt;
2927 tree use;
2928 ssa_op_iter iter;
2929
2930 start_over:
2931 /* SCC info */
2932 VN_INFO (name)->dfsnum = next_dfs_num++;
2933 VN_INFO (name)->visited = true;
2934 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
2935
2936 VEC_safe_push (tree, heap, sccstack, name);
2937 VN_INFO (name)->on_sccstack = true;
2938 defstmt = SSA_NAME_DEF_STMT (name);
2939
2940 /* Recursively DFS on our operands, looking for SCCs. */
2941 if (!gimple_nop_p (defstmt))
2942 {
2943 /* Push a new iterator. */
2944 if (gimple_code (defstmt) == GIMPLE_PHI)
2945 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
2946 else
2947 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
2948 }
2949 else
2950 clear_and_done_ssa_iter (&iter);
2951
2952 while (1)
2953 {
2954 /* If we are done processing uses of a name, go up the stack
2955 of iterators and process SCCs as we found them. */
2956 if (op_iter_done (&iter))
2957 {
2958 /* See if we found an SCC. */
2959 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
2960 if (!extract_and_process_scc_for_name (name))
2961 {
2962 VEC_free (tree, heap, namevec);
2963 VEC_free (ssa_op_iter, heap, itervec);
2964 return false;
2965 }
2966
2967 /* Check if we are done. */
2968 if (VEC_empty (tree, namevec))
2969 {
2970 VEC_free (tree, heap, namevec);
2971 VEC_free (ssa_op_iter, heap, itervec);
2972 return true;
2973 }
2974
2975 /* Restore the last use walker and continue walking there. */
2976 use = name;
2977 name = VEC_pop (tree, namevec);
2978 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
2979 sizeof (ssa_op_iter));
2980 VEC_pop (ssa_op_iter, itervec);
2981 goto continue_walking;
2982 }
2983
2984 use = USE_FROM_PTR (usep);
2985
2986 /* Since we handle phi nodes, we will sometimes get
2987 invariants in the use expression. */
2988 if (TREE_CODE (use) == SSA_NAME)
2989 {
2990 if (! (VN_INFO (use)->visited))
2991 {
2992 /* Recurse by pushing the current use walking state on
2993 the stack and starting over. */
2994 VEC_safe_push (ssa_op_iter, heap, itervec, &iter);
2995 VEC_safe_push (tree, heap, namevec, name);
2996 name = use;
2997 goto start_over;
2998
2999 continue_walking:
3000 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3001 VN_INFO (use)->low);
3002 }
3003 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3004 && VN_INFO (use)->on_sccstack)
3005 {
3006 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3007 VN_INFO (name)->low);
3008 }
3009 }
3010
3011 usep = op_iter_next_use (&iter);
3012 }
3013 }
3014
3015 /* Allocate a value number table. */
3016
3017 static void
3018 allocate_vn_table (vn_tables_t table)
3019 {
3020 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3021 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3022 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3023 free_reference);
3024
3025 gcc_obstack_init (&table->nary_obstack);
3026 table->phis_pool = create_alloc_pool ("VN phis",
3027 sizeof (struct vn_phi_s),
3028 30);
3029 table->references_pool = create_alloc_pool ("VN references",
3030 sizeof (struct vn_reference_s),
3031 30);
3032 }
3033
3034 /* Free a value number table. */
3035
3036 static void
3037 free_vn_table (vn_tables_t table)
3038 {
3039 htab_delete (table->phis);
3040 htab_delete (table->nary);
3041 htab_delete (table->references);
3042 obstack_free (&table->nary_obstack, NULL);
3043 free_alloc_pool (table->phis_pool);
3044 free_alloc_pool (table->references_pool);
3045 }
3046
3047 static void
3048 init_scc_vn (void)
3049 {
3050 size_t i;
3051 int j;
3052 int *rpo_numbers_temp;
3053
3054 calculate_dominance_info (CDI_DOMINATORS);
3055 sccstack = NULL;
3056 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3057 free);
3058
3059 constant_value_ids = BITMAP_ALLOC (NULL);
3060
3061 next_dfs_num = 1;
3062 next_value_id = 1;
3063
3064 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3065 /* VEC_alloc doesn't actually grow it to the right size; it just
3066 preallocates the space to do so. */
3067 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3068 gcc_obstack_init (&vn_ssa_aux_obstack);
3069
3070 shared_lookup_phiargs = NULL;
3071 shared_lookup_references = NULL;
3072 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3073 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3074 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3075
3076 /* rpo_numbers_temp holds the RPO ordering: rpo[i] = bb means that
3077 the i'th block in RPO order is bb. We want to map bbs to RPO
3078 numbers, so we need to invert this array. */
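/* For example, if the RPO visits the blocks with indices 2, 3 and 5
   in that order, rpo_numbers_temp is {2, 3, 5} and we end up with
   rpo_numbers[2] = 0, rpo_numbers[3] = 1 and rpo_numbers[5] = 2.  */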
3079 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3080 rpo_numbers[rpo_numbers_temp[j]] = j;
3081
3082 XDELETE (rpo_numbers_temp);
3083
3084 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3085
3086 /* Create the VN_INFO structures, and initialize value numbers to
3087 TOP. */
3088 for (i = 0; i < num_ssa_names; i++)
3089 {
3090 tree name = ssa_name (i);
3091 if (name)
3092 {
3093 VN_INFO_GET (name)->valnum = VN_TOP;
3094 VN_INFO (name)->expr = NULL_TREE;
3095 VN_INFO (name)->value_id = 0;
3096 }
3097 }
3098
3099 renumber_gimple_stmt_uids ();
3100
3101 /* Create the valid and optimistic value numbering tables. */
3102 valid_info = XCNEW (struct vn_tables_s);
3103 allocate_vn_table (valid_info);
3104 optimistic_info = XCNEW (struct vn_tables_s);
3105 allocate_vn_table (optimistic_info);
3106 }
3107
3108 void
3109 free_scc_vn (void)
3110 {
3111 size_t i;
3112
3113 htab_delete (constant_to_value_id);
3114 BITMAP_FREE (constant_value_ids);
3115 VEC_free (tree, heap, shared_lookup_phiargs);
3116 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3117 XDELETEVEC (rpo_numbers);
3118
3119 for (i = 0; i < num_ssa_names; i++)
3120 {
3121 tree name = ssa_name (i);
3122 if (name
3123 && VN_INFO (name)->needs_insertion)
3124 release_ssa_name (name);
3125 }
3126 obstack_free (&vn_ssa_aux_obstack, NULL);
3127 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3128
3129 VEC_free (tree, heap, sccstack);
3130 free_vn_table (valid_info);
3131 XDELETE (valid_info);
3132 free_vn_table (optimistic_info);
3133 XDELETE (optimistic_info);
3134 }
3135
3136 /* Set the value ids in the valid hash tables. */
3137
3138 static void
3139 set_hashtable_value_ids (void)
3140 {
3141 htab_iterator hi;
3142 vn_nary_op_t vno;
3143 vn_reference_t vr;
3144 vn_phi_t vp;
3145
3146 /* Now set the value ids of the things we had put in the hash
3147 table. */
3148
3149 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3150 vno, vn_nary_op_t, hi)
3151 {
3152 if (vno->result)
3153 {
3154 if (TREE_CODE (vno->result) == SSA_NAME)
3155 vno->value_id = VN_INFO (vno->result)->value_id;
3156 else if (is_gimple_min_invariant (vno->result))
3157 vno->value_id = get_or_alloc_constant_value_id (vno->result);
3158 }
3159 }
3160
3161 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3162 vp, vn_phi_t, hi)
3163 {
3164 if (vp->result)
3165 {
3166 if (TREE_CODE (vp->result) == SSA_NAME)
3167 vp->value_id = VN_INFO (vp->result)->value_id;
3168 else if (is_gimple_min_invariant (vp->result))
3169 vp->value_id = get_or_alloc_constant_value_id (vp->result);
3170 }
3171 }
3172
3173 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3174 vr, vn_reference_t, hi)
3175 {
3176 if (vr->result)
3177 {
3178 if (TREE_CODE (vr->result) == SSA_NAME)
3179 vr->value_id = VN_INFO (vr->result)->value_id;
3180 else if (is_gimple_min_invariant (vr->result))
3181 vr->value_id = get_or_alloc_constant_value_id (vr->result);
3182 }
3183 }
3184 }
3185
3186 /* Do SCCVN. Returns true if it finished, false if we bailed out
3187 due to resource constraints. */
3188
3189 bool
3190 run_scc_vn (bool may_insert_arg)
3191 {
3192 size_t i;
3193 tree param;
3194 bool changed = true;
3195
3196 may_insert = may_insert_arg;
3197
3198 init_scc_vn ();
3199 current_info = valid_info;
3200
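/* Default definitions of parameters are not defined by any statement,
   so value number them to themselves up front.  */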
3201 for (param = DECL_ARGUMENTS (current_function_decl);
3202 param;
3203 param = TREE_CHAIN (param))
3204 {
3205 tree def = gimple_default_def (cfun, param);
3206 if (def)
3207 VN_INFO (def)->valnum = def;
3210 }
3211
3212 for (i = 1; i < num_ssa_names; ++i)
3213 {
3214 tree name = ssa_name (i);
3215 if (name
3216 && VN_INFO (name)->visited == false
3217 && !has_zero_uses (name))
3218 if (!DFS (name))
3219 {
3220 free_scc_vn ();
3221 may_insert = false;
3222 return false;
3223 }
3224 }
3225
3226 /* Initialize the value ids. */
3227
3228 for (i = 1; i < num_ssa_names; ++i)
3229 {
3230 tree name = ssa_name (i);
3231 vn_ssa_aux_t info;
3232 if (!name)
3233 continue;
3234 info = VN_INFO (name);
3235 if (info->valnum == name
3236 || info->valnum == VN_TOP)
3237 info->value_id = get_next_value_id ();
3238 else if (is_gimple_min_invariant (info->valnum))
3239 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3240 }
3241
3242 /* Propagate the value ids until they stop changing. */
3243 while (changed)
3244 {
3245 changed = false;
3246 for (i = 1; i < num_ssa_names; ++i)
3247 {
3248 tree name = ssa_name (i);
3249 vn_ssa_aux_t info;
3250 if (!name)
3251 continue;
3252 info = VN_INFO (name);
3253 if (TREE_CODE (info->valnum) == SSA_NAME
3254 && info->valnum != name
3255 && info->value_id != VN_INFO (info->valnum)->value_id)
3256 {
3257 changed = true;
3258 info->value_id = VN_INFO (info->valnum)->value_id;
3259 }
3260 }
3261 }
3262
3263 set_hashtable_value_ids ();
3264
3265 if (dump_file && (dump_flags & TDF_DETAILS))
3266 {
3267 fprintf (dump_file, "Value numbers:\n");
3268 for (i = 0; i < num_ssa_names; i++)
3269 {
3270 tree name = ssa_name (i);
3271 if (name
3272 && VN_INFO (name)->visited
3273 && SSA_VAL (name) != name)
3274 {
3275 print_generic_expr (dump_file, name, 0);
3276 fprintf (dump_file, " = ");
3277 print_generic_expr (dump_file, SSA_VAL (name), 0);
3278 fprintf (dump_file, "\n");
3279 }
3280 }
3281 }
3282
3283 may_insert = false;
3284 return true;
3285 }
3286
3287 /* Return the maximum value id we have ever seen. */
3288
3289 unsigned int
3290 get_max_value_id (void)
3291 {
3292 return next_value_id;
3293 }
3294
3295 /* Return the next unique value id. */
3296
3297 unsigned int
3298 get_next_value_id (void)
3299 {
3300 return next_value_id++;
3301 }
3302
3303
3304 /* Compare two expressions E1 and E2 and return true if they are equal. */
3305
3306 bool
3307 expressions_equal_p (tree e1, tree e2)
3308 {
3309 /* The obvious case. */
3310 if (e1 == e2)
3311 return true;
3312
3313 /* If only one of them is null, they cannot be equal. */
3314 if (!e1 || !e2)
3315 return false;
3316
3317 /* Now perform the actual comparison. */
3318 if (TREE_CODE (e1) == TREE_CODE (e2)
3319 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3320 return true;
3321
3322 return false;
3323 }
3324
3325
3326 /* Return true if the nary operation NARY may trap. This is a copy
3327 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3328
3329 bool
3330 vn_nary_may_trap (vn_nary_op_t nary)
3331 {
3332 tree type;
3333 tree rhs2;
3334 bool honor_nans = false;
3335 bool honor_snans = false;
3336 bool fp_operation = false;
3337 bool honor_trapv = false;
3338 bool handled, ret;
3339 unsigned i;
3340
3341 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3342 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3343 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3344 {
3345 type = nary->type;
3346 fp_operation = FLOAT_TYPE_P (type);
3347 if (fp_operation)
3348 {
3349 honor_nans = flag_trapping_math && !flag_finite_math_only;
3350 honor_snans = flag_signaling_nans != 0;
3351 }
3352 else if (INTEGRAL_TYPE_P (type)
3353 && TYPE_OVERFLOW_TRAPS (type))
3354 honor_trapv = true;
3355 }
3356 rhs2 = nary->op[1];
3357 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3358 honor_trapv,
3359 honor_nans, honor_snans, rhs2,
3360 &handled);
3361 if (handled
3362 && ret)
3363 return true;
3364
3365 for (i = 0; i < nary->length; ++i)
3366 if (tree_could_trap_p (nary->op[i]))
3367 return true;
3368
3369 return false;
3370 }