/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight-line code,
   it is equivalent to a regular hash-based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming
   from *outside of that SCC*, so we do not need to do anything special
   to ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
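
/* As a small, purely illustrative example of the optimistic SCC
   iteration, consider the cycle

     i_1 = PHI <0 (entry), i_2 (latch)>
     i_2 = i_1 + 0;

   On the first iteration i_2 is still valued VN_TOP, so the PHI is
   optimistically valued 0, and i_2 = 0 + 0 then also gets value 0.
   The optimistic table is cleared and the SCC re-iterated until no
   value number changes, at which point the results are recorded in
   the valid tables.  */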

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

static bool may_insert;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}


/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
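
/* Purely as an illustration: if x_3 has value number y_2 and y_2 is
   defined by y_2 = z_1 + 1, then vn_get_expr_for (x_3) builds and
   caches the expression z_1 + 1 as the representative for x_3's
   value, which callers can then try to fold against.  */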

/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    result = vn_reference_op_compute_hash (vro, result);
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  int i;
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))
    return false;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
                             vro))
      return false;

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;
      tree base;

      base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      if (!base)
        base = build_int_cst (ptr_type_node, 0);

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (base);
      temp.op0 = base;
      temp.op1 = TMR_ORIGINAL (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
        {
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          /* The only operand is the address, which gets its own
             vn_reference_op_s structure.  */
          break;
        case MISALIGNED_INDIRECT_REF:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          /* If this is a reference to a union member, record the union
             member size as operand.  Do so only if we are doing
             expression insertion (during FRE), as PRE currently gets
             confused with this.  */
          if (may_insert
              && temp.op1 == NULL_TREE
              && TREE_CODE (DECL_CONTEXT (temp.op0)) == UNION_TYPE
              && integer_zerop (DECL_FIELD_OFFSET (temp.op0))
              && integer_zerop (DECL_FIELD_BIT_OFFSET (temp.op0))
              && host_integerp (DECL_SIZE (temp.op0), 0))
            temp.op0 = DECL_SIZE (temp.op0);
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (i.e. they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
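
/* As an illustrative sketch, a reference a.b[i_1] is decomposed
   outermost-first into the operand vector

     { ARRAY_REF <op0 = i_1, op1 = lower bound, op2 = element size>,
       COMPONENT_REF <op0 = FIELD_DECL for b>,
       VAR_DECL <op0 = a> }

   so that equivalent references hash and compare equal piecewise via
   vn_reference_op_eq.  */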

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    {
      if (TREE_CODE (op->op0) == INTEGER_CST)
        size_tree = op->op0;
      else
        size_tree = DECL_SIZE (op->op0);
    }
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case CALL_EXPR:
        case ADDR_EXPR:
          return false;

        /* Record the base objects.  */
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case MISALIGNED_INDIRECT_REF:
          *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            /* Our union trick, done for offset zero only.  */
            if (TREE_CODE (field) == INTEGER_CST)
              ;
            else if (op->op1
                     || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  ref->base_alias_set = -1;

  return true;
}
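
/* Illustrative numbers only: for an access a.b[3] where field b
   starts at byte offset 4 and has 4-byte int elements with a zero
   lower bound, the walk above accumulates 3 * 4 * 8 bits from the
   ARRAY_REF plus 4 * 8 bits from the COMPONENT_REF, giving base a,
   offset 128 and size == max_size == 32 bits.  */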

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  VEC(vn_reference_op_s, heap) *mem = NULL;
  vn_reference_op_t op;
  unsigned int i = *i_p;
  unsigned int j;

  /* Get ops for the addressed object.  */
  op = VEC_index (vn_reference_op_s, *ops, i);
  /* ??? If this is our usual type of &ARRAY vs. &ARRAY[0] problem, work
     around it to avoid later ICEs.  */
  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
    {
      vn_reference_op_s aref;
      tree dom;
      aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
      aref.opcode = ARRAY_REF;
      aref.op0 = integer_zero_node;
      if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
          && TYPE_MIN_VALUE (dom))
        aref.op0 = TYPE_MIN_VALUE (dom);
      aref.op1 = aref.op0;
      aref.op2 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op->op0)));
      VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
    }
  copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);

  /* Do the replacement - we should have at least one op in mem now.  */
  if (VEC_length (vn_reference_op_s, mem) == 1)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_ordered_remove (vn_reference_op_s, *ops, i);
      i--;
    }
  else if (VEC_length (vn_reference_op_s, mem) == 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
    }
  else if (VEC_length (vn_reference_op_s, mem) > 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
      /* ??? There is no VEC_splice.  */
      for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
        VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
    }
  else
    gcc_unreachable ();

  VEC_free (vn_reference_op_s, heap, mem);
  *i_p = i;
}
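
/* For example (illustrative): if valueization turns the ops
   { INDIRECT_REF, SSA_NAME p_1 } into { INDIRECT_REF, ADDR_EXPR <&a> },
   the pair represents *&a and is replaced here by the ops for plain a,
   so loads through p_1 value number the same as direct loads from a.  */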

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
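
/* Illustrative examples of the two cases above: a builtin call whose
   arguments valueized to constants, e.g. strlen ("foo"), folds to 3,
   and a read from a constant string such as "foo"[1] folds to the
   character constant 'o'.  */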

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
          /* If it transforms from an SSA_NAME to an address, fold with
             a preceding indirect reference.  */
          if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
              && VEC_index (vn_reference_op_s,
                            orig, i - 1)->opcode == INDIRECT_REF)
            {
              vn_reference_fold_indirect (&orig, &i);
              continue;
            }
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
    }

  return orig;
}

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup an SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree fndecl;
  tree base;
  HOST_WIDE_INT offset, maxsize;

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && is_gimple_call (def_stmt)
      && (fndecl = gimple_call_fndecl (def_stmt))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || INDIRECT_REF_P (gimple_assign_rhs1 (def_stmt))
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      /* See if the assignment kills REF.  */
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (!operand_equal_p (base, base2, 0)
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  */
      copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs, j)))
        {
          i--;
          j--;
        }

      VEC_free (vn_reference_op_s, heap, lhs);
      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to prepend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
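
/* Two illustrative cases.  For

     memset (&a, 0, sizeof (a));
     x = a.f;

   case 1 values the load of a.f as the zero constant of its type.
   For an aggregate copy

     b = a;
     x = b.f;

   the last case rewrites the lookup of b.f into a lookup of a.f and
   lets the VUSE walk continue from there.  */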

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && maywalk
      && vr1.vuse)
    {
      ao_ref r;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, bool maywalk,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (maywalk
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}


/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}
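
/* Because commutative operands are canonicalized before hashing,
   e.g. 5 + a_1 is swapped to a_1 + 5, both operand orders of a
   commutative operation receive the same hash and thus the same
   value number.  */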

/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}

/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = gimple_expr_type (stmt);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = gimple_expr_type (stmt);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
             + (INTEGRAL_TYPE_P (type)
                ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
        continue;
      result = iterative_hash_expr (phi1op, result);
    }

  return result;
}

/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
                               TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
        return false;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
        {
          tree phi2op = VEC_index (tree, vp2->phiargs, i);
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}
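
/* Skipping VN_TOP implements the optimistic assumption: an argument
   still valued VN_TOP belongs to an SSA name whose SCC has not been
   value numbered yet.  E.g. PHI <a_2, VN_TOP> and PHI <VN_TOP, a_2>
   in the same block hash and compare equal; if the assumption turns
   out to be wrong, the SCC iteration revisits the PHIs with updated
   value numbers.  */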
1782
1783 static VEC(tree, heap) *shared_lookup_phiargs;
1784
1785 /* Lookup PHI in the current hash table, and return the resulting
1786 value number if it exists in the hash table. Return NULL_TREE if
1787 it does not exist in the hash table. */
1788
1789 static tree
1790 vn_phi_lookup (gimple phi)
1791 {
1792 void **slot;
1793 struct vn_phi_s vp1;
1794 unsigned i;
1795
1796 VEC_truncate (tree, shared_lookup_phiargs, 0);
1797
1798 /* Canonicalize the SSA_NAME's to their value number. */
1799 for (i = 0; i < gimple_phi_num_args (phi); i++)
1800 {
1801 tree def = PHI_ARG_DEF (phi, i);
1802 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1803 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
1804 }
1805 vp1.phiargs = shared_lookup_phiargs;
1806 vp1.block = gimple_bb (phi);
1807 vp1.hashcode = vn_phi_compute_hash (&vp1);
1808 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
1809 NO_INSERT);
1810 if (!slot && current_info == optimistic_info)
1811 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
1812 NO_INSERT);
1813 if (!slot)
1814 return NULL_TREE;
1815 return ((vn_phi_t)*slot)->result;
1816 }
1817
1818 /* Insert PHI into the current hash table with a value number of
1819 RESULT. */
1820
1821 static vn_phi_t
1822 vn_phi_insert (gimple phi, tree result)
1823 {
1824 void **slot;
1825 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
1826 unsigned i;
1827 VEC (tree, heap) *args = NULL;
1828
1829 /* Canonicalize the SSA_NAMEs to their value numbers. */
1830 for (i = 0; i < gimple_phi_num_args (phi); i++)
1831 {
1832 tree def = PHI_ARG_DEF (phi, i);
1833 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1834 VEC_safe_push (tree, heap, args, def);
1835 }
1836 vp1->value_id = VN_INFO (result)->value_id;
1837 vp1->phiargs = args;
1838 vp1->block = gimple_bb (phi);
1839 vp1->result = result;
1840 vp1->hashcode = vn_phi_compute_hash (vp1);
1841
1842 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
1843 INSERT);
1844
1845 /* Because we iterate over phi operations more than once, it's
1846 possible the slot might already exist here, hence no assert. */
1847 *slot = vp1;
1848 return vp1;
1849 }
1850
1851
1852 /* Print set of components in strongly connected component SCC to OUT. */
1853
1854 static void
1855 print_scc (FILE *out, VEC (tree, heap) *scc)
1856 {
1857 tree var;
1858 unsigned int i;
1859
1860 fprintf (out, "SCC consists of: ");
1861 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1862 {
1863 print_generic_expr (out, var, 0);
1864 fprintf (out, " ");
1865 }
1866 fprintf (out, "\n");
1867 }
1868
1869 /* Set the value number of FROM to TO, return true if it has changed
1870 as a result. */
1871
1872 static inline bool
1873 set_ssa_val_to (tree from, tree to)
1874 {
1875 tree currval;
1876
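  /* An SSA name that occurs in an abnormal PHI must remain coalesceable
     with its other occurrences, so never use such a name as the value of
     another name; fall back to FROM valuing itself.  */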
1877 if (from != to
1878 && TREE_CODE (to) == SSA_NAME
1879 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1880 to = from;
1881
1882 /* The only things we allow as value numbers are VN_TOP, SSA names
1883 and invariants, so assert that here. */
1884 gcc_assert (to != NULL_TREE
1885 && (to == VN_TOP
1886 || TREE_CODE (to) == SSA_NAME
1887 || is_gimple_min_invariant (to)));
1888
1889 if (dump_file && (dump_flags & TDF_DETAILS))
1890 {
1891 fprintf (dump_file, "Setting value number of ");
1892 print_generic_expr (dump_file, from, 0);
1893 fprintf (dump_file, " to ");
1894 print_generic_expr (dump_file, to, 0);
1895 }
1896
1897 currval = SSA_VAL (from);
1898
1899 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1900 {
1901 VN_INFO (from)->valnum = to;
1902 if (dump_file && (dump_flags & TDF_DETAILS))
1903 fprintf (dump_file, " (changed)\n");
1904 return true;
1905 }
1906 if (dump_file && (dump_flags & TDF_DETAILS))
1907 fprintf (dump_file, "\n");
1908 return false;
1909 }
1910
1911 /* Value number all definitions in STMT to themselves.
1912 Return true if a value number changed. */
1913
1914 static bool
1915 defs_to_varying (gimple stmt)
1916 {
1917 bool changed = false;
1918 ssa_op_iter iter;
1919 def_operand_p defp;
1920
1921 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1922 {
1923 tree def = DEF_FROM_PTR (defp);
1924
1925 VN_INFO (def)->use_processed = true;
1926 changed |= set_ssa_val_to (def, def);
1927 }
1928 return changed;
1929 }
1930
1931 static bool expr_has_constants (tree expr);
1932 static tree valueize_expr (tree expr);
1933
1934 /* Visit a copy between LHS and RHS, return true if the value number
1935 changed. */
1936
1937 static bool
1938 visit_copy (tree lhs, tree rhs)
1939 {
1940 /* Follow chains of copies to their destination. */
1941 while (TREE_CODE (rhs) == SSA_NAME
1942 && SSA_VAL (rhs) != rhs)
1943 rhs = SSA_VAL (rhs);
1944
1945 /* The copy may have a more interesting constant-filled expression
1946 (we don't, since we know our RHS is just an SSA name). */
1947 if (TREE_CODE (rhs) == SSA_NAME)
1948 {
1949 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1950 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1951 }
1952
1953 return set_ssa_val_to (lhs, rhs);
1954 }
1955
1956 /* Visit a unary operator RHS, value number it, and return true if the
1957 value number of LHS has changed as a result. */
1958
1959 static bool
1960 visit_unary_op (tree lhs, gimple stmt)
1961 {
1962 bool changed = false;
1963 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1964
1965 if (result)
1966 {
1967 changed = set_ssa_val_to (lhs, result);
1968 }
1969 else
1970 {
1971 changed = set_ssa_val_to (lhs, lhs);
1972 vn_nary_op_insert_stmt (stmt, lhs);
1973 }
1974
1975 return changed;
1976 }
1977
1978 /* Visit a binary operator RHS, value number it, and return true if the
1979 value number of LHS has changed as a result. */
1980
1981 static bool
1982 visit_binary_op (tree lhs, gimple stmt)
1983 {
1984 bool changed = false;
1985 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1986
1987 if (result)
1988 {
1989 changed = set_ssa_val_to (lhs, result);
1990 }
1991 else
1992 {
1993 changed = set_ssa_val_to (lhs, lhs);
1994 vn_nary_op_insert_stmt (stmt, lhs);
1995 }
1996
1997 return changed;
1998 }
1999
2000 /* Visit a call STMT storing into LHS. Return true if the value number
2001 of the LHS has changed as a result. */
2002
2003 static bool
2004 visit_reference_op_call (tree lhs, gimple stmt)
2005 {
2006 bool changed = false;
2007 struct vn_reference_s vr1;
2008 tree result;
2009 tree vuse = gimple_vuse (stmt);
2010
2011 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2012 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2013 vr1.type = gimple_expr_type (stmt);
2014 vr1.set = 0;
2015 vr1.hashcode = vn_reference_compute_hash (&vr1);
2016 result = vn_reference_lookup_1 (&vr1, NULL);
2017 if (result)
2018 {
2019 changed = set_ssa_val_to (lhs, result);
2020 if (TREE_CODE (result) == SSA_NAME
2021 && VN_INFO (result)->has_constants)
2022 VN_INFO (lhs)->has_constants = true;
2023 }
2024 else
2025 {
2026 void **slot;
2027 vn_reference_t vr2;
2028 changed = set_ssa_val_to (lhs, lhs);
2029 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2030 vr2->vuse = vr1.vuse;
2031 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2032 vr2->type = vr1.type;
2033 vr2->set = vr1.set;
2034 vr2->hashcode = vr1.hashcode;
2035 vr2->result = lhs;
2036 slot = htab_find_slot_with_hash (current_info->references,
2037 vr2, vr2->hashcode, INSERT);
2038 if (*slot)
2039 free_reference (*slot);
2040 *slot = vr2;
2041 }
2042
2043 return changed;
2044 }
2045
2046 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2047 and return true if the value number of the LHS has changed as a result. */
2048
2049 static bool
2050 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2051 {
2052 bool changed = false;
2053 tree last_vuse;
2054 tree result;
2055
2056 last_vuse = gimple_vuse (stmt);
2057 last_vuse_ptr = &last_vuse;
2058 result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
2059 last_vuse_ptr = NULL;
2060
2061 /* If we have a VCE (VIEW_CONVERT_EXPR), try looking up its operand
2062 as it might be stored in a different type. */
2063 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2064 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2065 true, NULL);
2066
2067 /* We handle type-punning through unions by value-numbering based
2068 on offset and size of the access. Be prepared to handle a
2069 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
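  /* For example, with union { int i; float f; } u, a load of u.f
     following a store to u.i is found via offset and size, and the
     type mismatch is papered over with VIEW_CONVERT_EXPR <float>.  */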
2070 if (result
2071 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2072 {
2073 /* We will be setting the value number of lhs to the value number
2074 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2075 So first simplify and lookup this expression to see if it
2076 is already available. */
2077 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2078 if ((CONVERT_EXPR_P (val)
2079 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2080 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2081 {
2082 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2083 if ((CONVERT_EXPR_P (tem)
2084 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2085 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2086 TREE_TYPE (val), tem)))
2087 val = tem;
2088 }
2089 result = val;
2090 if (!is_gimple_min_invariant (val)
2091 && TREE_CODE (val) != SSA_NAME)
2092 result = vn_nary_op_lookup (val, NULL);
2093 /* If the expression is not yet available, value-number lhs to
2094 a new SSA_NAME we create. */
2095 if (!result && may_insert)
2096 {
2097 result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
2098 /* Initialize value-number information properly. */
2099 VN_INFO_GET (result)->valnum = result;
2100 VN_INFO (result)->value_id = get_next_value_id ();
2101 VN_INFO (result)->expr = val;
2102 VN_INFO (result)->has_constants = expr_has_constants (val);
2103 VN_INFO (result)->needs_insertion = true;
2104 /* As all "inserted" statements are singleton SCCs, insert
2105 into the valid table. This is strictly needed to
2106 avoid re-generating new value SSA_NAMEs for the same
2107 expression during SCC iteration over and over (the
2108 optimistic table gets cleared after each iteration).
2109 We do not need to insert into the optimistic table, as
2110 lookups there will fall back to the valid table. */
2111 if (current_info == optimistic_info)
2112 {
2113 current_info = valid_info;
2114 vn_nary_op_insert (val, result);
2115 current_info = optimistic_info;
2116 }
2117 else
2118 vn_nary_op_insert (val, result);
2119 if (dump_file && (dump_flags & TDF_DETAILS))
2120 {
2121 fprintf (dump_file, "Inserting name ");
2122 print_generic_expr (dump_file, result, 0);
2123 fprintf (dump_file, " for expression ");
2124 print_generic_expr (dump_file, val, 0);
2125 fprintf (dump_file, "\n");
2126 }
2127 }
2128 }
2129
2130 if (result)
2131 {
2132 changed = set_ssa_val_to (lhs, result);
2133 if (TREE_CODE (result) == SSA_NAME
2134 && VN_INFO (result)->has_constants)
2135 {
2136 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2137 VN_INFO (lhs)->has_constants = true;
2138 }
2139 }
2140 else
2141 {
2142 changed = set_ssa_val_to (lhs, lhs);
2143 vn_reference_insert (op, lhs, last_vuse);
2144 }
2145
2146 return changed;
2147 }
2148
2149
2150 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2151 and return true if the value number of the LHS has changed as a result. */
2152
2153 static bool
2154 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2155 {
2156 bool changed = false;
2157 tree result;
2158 bool resultsame = false;
2159
2160 /* First we want to lookup using the *vuses* from the store and see
2161 whether the last store to this location with the same address
2162 had the same value.
2163
2164 The vuses represent the memory state before the store. If the
2165 memory state, address, and value of the store are the same as those
2166 of the last store to this location, then this store will produce the
2167 same memory state as that store.
2168
2169 In this case the vdef versions for this store are value numbered to those
2170 vuse versions, since they represent the same memory state after
2171 this store.
2172
2173 Otherwise, the vdefs for the store are used when inserting into
2174 the table, since the store generates a new memory state. */
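  /* For example, given *p_1 = x_2; ... *p_1 = x_2; with no intervening
     clobber, the second store's vdef is valued the same as its vuse,
     exposing the second store as redundant to our callers.  */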
2175
2176 result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);
2177
2178 if (result)
2179 {
2180 if (TREE_CODE (result) == SSA_NAME)
2181 result = SSA_VAL (result);
2182 if (TREE_CODE (op) == SSA_NAME)
2183 op = SSA_VAL (op);
2184 resultsame = expressions_equal_p (result, op);
2185 }
2186
2187 if (!result || !resultsame)
2188 {
2189 tree vdef;
2190
2191 if (dump_file && (dump_flags & TDF_DETAILS))
2192 {
2193 fprintf (dump_file, "No store match\n");
2194 fprintf (dump_file, "Value numbering store ");
2195 print_generic_expr (dump_file, lhs, 0);
2196 fprintf (dump_file, " to ");
2197 print_generic_expr (dump_file, op, 0);
2198 fprintf (dump_file, "\n");
2199 }
2200 /* Have to set value numbers before insert, since insert is
2201 going to valueize the references in-place. */
2202 if ((vdef = gimple_vdef (stmt)))
2203 {
2204 VN_INFO (vdef)->use_processed = true;
2205 changed |= set_ssa_val_to (vdef, vdef);
2206 }
2207
2208 /* Do not insert structure copies into the tables. */
2209 if (is_gimple_min_invariant (op)
2210 || is_gimple_reg (op))
2211 vn_reference_insert (lhs, op, vdef);
2212 }
2213 else
2214 {
2215 /* We had a match, so value number the vdef to have the value
2216 number of the vuse it came from. */
2217 tree def, use;
2218
2219 if (dump_file && (dump_flags & TDF_DETAILS))
2220 fprintf (dump_file, "Store matched earlier value, "
2221 "value numbering store vdefs to matching vuses.\n");
2222
2223 def = gimple_vdef (stmt);
2224 use = gimple_vuse (stmt);
2225
2226 VN_INFO (def)->use_processed = true;
2227 changed |= set_ssa_val_to (def, SSA_VAL (use));
2228 }
2229
2230 return changed;
2231 }
2232
2233 /* Visit and value number PHI, return true if the value number
2234 changed. */
2235
2236 static bool
2237 visit_phi (gimple phi)
2238 {
2239 bool changed = false;
2240 tree result;
2241 tree sameval = VN_TOP;
2242 bool allsame = true;
2243 unsigned i;
2244
2245 /* TODO: We could check for this in init_sccvn, and replace this
2246 with a gcc_assert. */
2247 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2248 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2249
2250 /* See if all non-TOP arguments have the same value. TOP is
2251 equivalent to everything, so we can ignore it. */
2252 for (i = 0; i < gimple_phi_num_args (phi); i++)
2253 {
2254 tree def = PHI_ARG_DEF (phi, i);
2255
2256 if (TREE_CODE (def) == SSA_NAME)
2257 def = SSA_VAL (def);
2258 if (def == VN_TOP)
2259 continue;
2260 if (sameval == VN_TOP)
2261 {
2262 sameval = def;
2263 }
2264 else
2265 {
2266 if (!expressions_equal_p (def, sameval))
2267 {
2268 allsame = false;
2269 break;
2270 }
2271 }
2272 }
2273
2274 /* If all arguments value numbered to the same value, the PHI node
2275 has that value. */
2276 if (allsame)
2277 {
2278 if (is_gimple_min_invariant (sameval))
2279 {
2280 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2281 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2282 }
2283 else
2284 {
2285 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2286 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2287 }
2288
2289 if (TREE_CODE (sameval) == SSA_NAME)
2290 return visit_copy (PHI_RESULT (phi), sameval);
2291
2292 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2293 }
2294
2295 /* Otherwise, see if it is equivalent to a phi node in this block. */
2296 result = vn_phi_lookup (phi);
2297 if (result)
2298 {
2299 if (TREE_CODE (result) == SSA_NAME)
2300 changed = visit_copy (PHI_RESULT (phi), result);
2301 else
2302 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2303 }
2304 else
2305 {
2306 vn_phi_insert (phi, PHI_RESULT (phi));
2307 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2308 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2309 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2310 }
2311
2312 return changed;
2313 }
2314
2315 /* Return true if EXPR contains constants. */
2316
2317 static bool
2318 expr_has_constants (tree expr)
2319 {
2320 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2321 {
2322 case tcc_unary:
2323 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2324
2325 case tcc_binary:
2326 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2327 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2328 /* Constants inside reference ops are rarely interesting, but
2329 it can take a lot of looking to find them. */
2330 case tcc_reference:
2331 case tcc_declaration:
2332 return false;
2333 default:
2334 return is_gimple_min_invariant (expr);
2335 }
2336 return false;
2337 }
2338
2339 /* Return true if STMT contains constants. */
2340
2341 static bool
2342 stmt_has_constants (gimple stmt)
2343 {
2344 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2345 return false;
2346
2347 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2348 {
2349 case GIMPLE_UNARY_RHS:
2350 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2351
2352 case GIMPLE_BINARY_RHS:
2353 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2354 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2355 case GIMPLE_TERNARY_RHS:
2356 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2357 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2358 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2359 case GIMPLE_SINGLE_RHS:
2360 /* Constants inside reference ops are rarely interesting, but
2361 it can take a lot of looking to find them. */
2362 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2363 default:
2364 gcc_unreachable ();
2365 }
2366 return false;
2367 }
2368
2369 /* Replace SSA_NAMEs in EXPR with their value numbers, and return the
2370 result.
2371 This is performed in place. */
2372
2373 static tree
2374 valueize_expr (tree expr)
2375 {
2376 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2377 {
2378 case tcc_unary:
2379 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2380 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2381 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2382 break;
2383 case tcc_binary:
2384 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2385 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2386 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2387 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2388 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2389 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2390 break;
2391 default:
2392 break;
2393 }
2394 return expr;
2395 }
2396
2397 /* Simplify the binary expression on the RHS of STMT, and return the
2398 result if simplified. */
2399
2400 static tree
2401 simplify_binary_expression (gimple stmt)
2402 {
2403 tree result = NULL_TREE;
2404 tree op0 = gimple_assign_rhs1 (stmt);
2405 tree op1 = gimple_assign_rhs2 (stmt);
2406
2407 /* This will not catch every single case we could combine, but will
2408 catch those with constants. The goal here is to combine
2409 constants between expressions while avoiding infinite
2410 expansion of expressions during simplification. */
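  /* E.g. if op0's value-number expression is the constant 4 and op1
     valueizes to the constant 5, fold_binary below reduces a
     PLUS_EXPR to the constant 9.  */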
2411 if (TREE_CODE (op0) == SSA_NAME)
2412 {
2413 if (VN_INFO (op0)->has_constants
2414 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2415 op0 = valueize_expr (vn_get_expr_for (op0));
2416 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2417 op0 = SSA_VAL (op0);
2418 }
2419
2420 if (TREE_CODE (op1) == SSA_NAME)
2421 {
2422 if (VN_INFO (op1)->has_constants)
2423 op1 = valueize_expr (vn_get_expr_for (op1));
2424 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2425 op1 = SSA_VAL (op1);
2426 }
2427
2428 /* Avoid folding if nothing changed. */
2429 if (op0 == gimple_assign_rhs1 (stmt)
2430 && op1 == gimple_assign_rhs2 (stmt))
2431 return NULL_TREE;
2432
2433 fold_defer_overflow_warnings ();
2434
2435 result = fold_binary (gimple_assign_rhs_code (stmt),
2436 gimple_expr_type (stmt), op0, op1);
2437 if (result)
2438 STRIP_USELESS_TYPE_CONVERSION (result);
2439
2440 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2441 stmt, 0);
2442
2443 /* Make sure result is not a complex expression consisting
2444 of operators of operators (i.e. (a + b) + (a + c)).
2445 Otherwise, we will end up with unbounded expressions if
2446 fold does anything at all. */
2447 if (result && valid_gimple_rhs_p (result))
2448 return result;
2449
2450 return NULL_TREE;
2451 }
2452
2453 /* Simplify the unary expression on the RHS of STMT, and return the
2454 result if simplified. */
2455
2456 static tree
2457 simplify_unary_expression (gimple stmt)
2458 {
2459 tree result = NULL_TREE;
2460 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2461
2462 /* We handle some tcc_reference codes here that are all
2463 GIMPLE_ASSIGN_SINGLE codes. */
2464 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2465 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2466 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2467 op0 = TREE_OPERAND (op0, 0);
2468
2469 if (TREE_CODE (op0) != SSA_NAME)
2470 return NULL_TREE;
2471
2472 orig_op0 = op0;
2473 if (VN_INFO (op0)->has_constants)
2474 op0 = valueize_expr (vn_get_expr_for (op0));
2475 else if (gimple_assign_cast_p (stmt)
2476 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2477 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2478 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2479 {
2480 /* We want to do tree-combining on conversion-like expressions.
2481 Make sure we feed only SSA_NAMEs or constants to fold though. */
2482 tree tem = valueize_expr (vn_get_expr_for (op0));
2483 if (UNARY_CLASS_P (tem)
2484 || BINARY_CLASS_P (tem)
2485 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2486 || TREE_CODE (tem) == SSA_NAME
2487 || is_gimple_min_invariant (tem))
2488 op0 = tem;
2489 }
2490
2491 /* Avoid folding if nothing changed. */
2492 if (op0 == orig_op0)
2493 return NULL_TREE;
2494
2495 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2496 gimple_expr_type (stmt), op0);
2497 if (result)
2498 {
2499 STRIP_USELESS_TYPE_CONVERSION (result);
2500 if (valid_gimple_rhs_p (result))
2501 return result;
2502 }
2503
2504 return NULL_TREE;
2505 }
2506
2507 /* Try to simplify RHS using equivalences and constant folding. */
2508
2509 static tree
2510 try_to_simplify (gimple stmt)
2511 {
2512 tree tem;
2513
2514 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
2515 in this case; there is no point in doing extra work. */
2516 if (gimple_assign_copy_p (stmt)
2517 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2518 return NULL_TREE;
2519
2520 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2521 {
2522 case tcc_declaration:
2523 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2524 if (tem)
2525 return tem;
2526 break;
2527
2528 case tcc_reference:
2529 /* Do not do full-blown reference lookup here, but simplify
2530 reads from constant aggregates. */
2531 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2532 if (tem)
2533 return tem;
2534
2535 /* Fallthrough for some codes that can operate on registers. */
2536 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2537 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2538 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2539 break;
2540 /* We could do a little more with unary ops, if they expand
2541 into binary ops, but it's debatable whether it is worth it. */
2542 case tcc_unary:
2543 return simplify_unary_expression (stmt);
2544 break;
2545 case tcc_comparison:
2546 case tcc_binary:
2547 return simplify_binary_expression (stmt);
2548 break;
2549 default:
2550 break;
2551 }
2552
2553 return NULL_TREE;
2554 }
2555
2556 /* Visit and value number USE, return true if the value number
2557 changed. */
2558
2559 static bool
2560 visit_use (tree use)
2561 {
2562 bool changed = false;
2563 gimple stmt = SSA_NAME_DEF_STMT (use);
2564
2565 VN_INFO (use)->use_processed = true;
2566
2567 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2568 if (dump_file && (dump_flags & TDF_DETAILS)
2569 && !SSA_NAME_IS_DEFAULT_DEF (use))
2570 {
2571 fprintf (dump_file, "Value numbering ");
2572 print_generic_expr (dump_file, use, 0);
2573 fprintf (dump_file, " stmt = ");
2574 print_gimple_stmt (dump_file, stmt, 0, 0);
2575 }
2576
2577 /* Handle uninitialized uses. */
2578 if (SSA_NAME_IS_DEFAULT_DEF (use))
2579 changed = set_ssa_val_to (use, use);
2580 else
2581 {
2582 if (gimple_code (stmt) == GIMPLE_PHI)
2583 changed = visit_phi (stmt);
2584 else if (!gimple_has_lhs (stmt)
2585 || gimple_has_volatile_ops (stmt)
2586 || stmt_could_throw_p (stmt))
2587 changed = defs_to_varying (stmt);
2588 else if (is_gimple_assign (stmt))
2589 {
2590 tree lhs = gimple_assign_lhs (stmt);
2591 tree simplified;
2592
2593 /* Shortcut for copies. Simplifying copies is pointless,
2594 since we copy the expression and value they represent. */
2595 if (gimple_assign_copy_p (stmt)
2596 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2597 && TREE_CODE (lhs) == SSA_NAME)
2598 {
2599 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2600 goto done;
2601 }
2602 simplified = try_to_simplify (stmt);
2603 if (simplified)
2604 {
2605 if (dump_file && (dump_flags & TDF_DETAILS))
2606 {
2607 fprintf (dump_file, "RHS ");
2608 print_gimple_expr (dump_file, stmt, 0, 0);
2609 fprintf (dump_file, " simplified to ");
2610 print_generic_expr (dump_file, simplified, 0);
2611 if (TREE_CODE (lhs) == SSA_NAME)
2612 fprintf (dump_file, " has constants %d\n",
2613 expr_has_constants (simplified));
2614 else
2615 fprintf (dump_file, "\n");
2616 }
2617 }
2618 /* Setting value numbers to constants will occasionally
2619 screw up phi congruence because constants are not
2620 uniquely associated with a single ssa name that can be
2621 looked up. */
2622 if (simplified
2623 && is_gimple_min_invariant (simplified)
2624 && TREE_CODE (lhs) == SSA_NAME)
2625 {
2626 VN_INFO (lhs)->expr = simplified;
2627 VN_INFO (lhs)->has_constants = true;
2628 changed = set_ssa_val_to (lhs, simplified);
2629 goto done;
2630 }
2631 else if (simplified
2632 && TREE_CODE (simplified) == SSA_NAME
2633 && TREE_CODE (lhs) == SSA_NAME)
2634 {
2635 changed = visit_copy (lhs, simplified);
2636 goto done;
2637 }
2638 else if (simplified)
2639 {
2640 if (TREE_CODE (lhs) == SSA_NAME)
2641 {
2642 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2643 /* We have to unshare the expression or else
2644 valueizing may change the IL stream. */
2645 VN_INFO (lhs)->expr = unshare_expr (simplified);
2646 }
2647 }
2648 else if (stmt_has_constants (stmt)
2649 && TREE_CODE (lhs) == SSA_NAME)
2650 VN_INFO (lhs)->has_constants = true;
2651 else if (TREE_CODE (lhs) == SSA_NAME)
2652 {
2653 /* We reset expr and constantness here because we may
2654 have been value numbering optimistically, and
2655 iterating. They may become non-constant in this case,
2656 even if they were optimistically constant. */
2657
2658 VN_INFO (lhs)->has_constants = false;
2659 VN_INFO (lhs)->expr = NULL_TREE;
2660 }
2661
2662 if ((TREE_CODE (lhs) == SSA_NAME
2663 /* We can substitute SSA_NAMEs that are live over
2664 abnormal edges with their constant value. */
2665 && !(gimple_assign_copy_p (stmt)
2666 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2667 && !(simplified
2668 && is_gimple_min_invariant (simplified))
2669 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2670 /* Stores or copies from SSA_NAMEs that are live over
2671 abnormal edges are a problem. */
2672 || (gimple_assign_single_p (stmt)
2673 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2674 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
2675 changed = defs_to_varying (stmt);
2676 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2677 {
2678 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2679 }
2680 else if (TREE_CODE (lhs) == SSA_NAME)
2681 {
2682 if ((gimple_assign_copy_p (stmt)
2683 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2684 || (simplified
2685 && is_gimple_min_invariant (simplified)))
2686 {
2687 VN_INFO (lhs)->has_constants = true;
2688 if (simplified)
2689 changed = set_ssa_val_to (lhs, simplified);
2690 else
2691 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
2692 }
2693 else
2694 {
2695 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2696 {
2697 case GIMPLE_UNARY_RHS:
2698 changed = visit_unary_op (lhs, stmt);
2699 break;
2700 case GIMPLE_BINARY_RHS:
2701 changed = visit_binary_op (lhs, stmt);
2702 break;
2703 case GIMPLE_SINGLE_RHS:
2704 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2705 {
2706 case tcc_reference:
2707 /* VOP-less references can go through the unary case. */
2708 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2709 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2710 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2711 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2712 {
2713 changed = visit_unary_op (lhs, stmt);
2714 break;
2715 }
2716 /* Fallthrough. */
2717 case tcc_declaration:
2718 changed = visit_reference_op_load
2719 (lhs, gimple_assign_rhs1 (stmt), stmt);
2720 break;
2721 case tcc_expression:
2722 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2723 {
2724 changed = visit_unary_op (lhs, stmt);
2725 break;
2726 }
2727 /* Fallthrough. */
2728 default:
2729 changed = defs_to_varying (stmt);
2730 }
2731 break;
2732 default:
2733 changed = defs_to_varying (stmt);
2734 break;
2735 }
2736 }
2737 }
2738 else
2739 changed = defs_to_varying (stmt);
2740 }
2741 else if (is_gimple_call (stmt))
2742 {
2743 tree lhs = gimple_call_lhs (stmt);
2744
2745 /* ??? We could try to simplify calls. */
2746
2747 if (stmt_has_constants (stmt)
2748 && TREE_CODE (lhs) == SSA_NAME)
2749 VN_INFO (lhs)->has_constants = true;
2750 else if (TREE_CODE (lhs) == SSA_NAME)
2751 {
2752 /* We reset expr and constantness here because we may
2753 have been value numbering optimistically, and
2754 iterating. They may become non-constant in this case,
2755 even if they were optimistically constant. */
2756 VN_INFO (lhs)->has_constants = false;
2757 VN_INFO (lhs)->expr = NULL_TREE;
2758 }
2759
2760 if (TREE_CODE (lhs) == SSA_NAME
2761 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2762 changed = defs_to_varying (stmt);
2763 /* ??? We should handle stores from calls. */
2764 else if (TREE_CODE (lhs) == SSA_NAME)
2765 {
2766 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2767 changed = visit_reference_op_call (lhs, stmt);
2768 else
2769 changed = defs_to_varying (stmt);
2770 }
2771 else
2772 changed = defs_to_varying (stmt);
2773 }
2774 }
2775 done:
2776 return changed;
2777 }
2778
2779 /* Compare two operands by reverse postorder index. */
2780
2781 static int
2782 compare_ops (const void *pa, const void *pb)
2783 {
2784 const tree opa = *((const tree *)pa);
2785 const tree opb = *((const tree *)pb);
2786 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2787 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2788 basic_block bba;
2789 basic_block bbb;
2790
2791 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2792 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2793 else if (gimple_nop_p (opstmta))
2794 return -1;
2795 else if (gimple_nop_p (opstmtb))
2796 return 1;
2797
2798 bba = gimple_bb (opstmta);
2799 bbb = gimple_bb (opstmtb);
2800
2801 if (!bba && !bbb)
2802 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2803 else if (!bba)
2804 return -1;
2805 else if (!bbb)
2806 return 1;
2807
2808 if (bba == bbb)
2809 {
2810 if (gimple_code (opstmta) == GIMPLE_PHI
2811 && gimple_code (opstmtb) == GIMPLE_PHI)
2812 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2813 else if (gimple_code (opstmta) == GIMPLE_PHI)
2814 return -1;
2815 else if (gimple_code (opstmtb) == GIMPLE_PHI)
2816 return 1;
2817 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
2818 return gimple_uid (opstmta) - gimple_uid (opstmtb);
2819 else
2820 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2821 }
2822 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
2823 }
2824
2825 /* Sort an array containing members of a strongly connected component
2826 SCC so that the members are ordered by RPO number.
2827 When the sort is complete, iterating through the array visits
2828 the members in RPO order. */
2829
2830 static void
2831 sort_scc (VEC (tree, heap) *scc)
2832 {
2833 qsort (VEC_address (tree, scc),
2834 VEC_length (tree, scc),
2835 sizeof (tree),
2836 compare_ops);
2837 }
2838
2839 /* Insert the no longer used nary ONARY into the hash table INFO. */
2840
2841 static void
2842 copy_nary (vn_nary_op_t onary, vn_tables_t info)
2843 {
2844 size_t size = (sizeof (struct vn_nary_op_s)
2845 - sizeof (tree) * (4 - onary->length));
2846 vn_nary_op_t nary = (vn_nary_op_t) obstack_alloc (&info->nary_obstack, size);
2847 void **slot;
2848 memcpy (nary, onary, size);
2849 slot = htab_find_slot_with_hash (info->nary, nary, nary->hashcode, INSERT);
2850 gcc_assert (!*slot);
2851 *slot = nary;
2852 }
2853
2854 /* Insert the no longer used phi OPHI into the hash table INFO. */
2855
2856 static void
2857 copy_phi (vn_phi_t ophi, vn_tables_t info)
2858 {
2859 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
2860 void **slot;
2861 memcpy (phi, ophi, sizeof (*phi));
2862 ophi->phiargs = NULL;
2863 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
2864 gcc_assert (!*slot);
2865 *slot = phi;
2866 }
2867
2868 /* Insert the no longer used reference OREF into the hash table INFO. */
2869
2870 static void
2871 copy_reference (vn_reference_t oref, vn_tables_t info)
2872 {
2873 vn_reference_t ref;
2874 void **slot;
2875 ref = (vn_reference_t) pool_alloc (info->references_pool);
2876 memcpy (ref, oref, sizeof (*ref));
2877 oref->operands = NULL;
2878 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
2879 INSERT);
2880 if (*slot)
2881 free_reference (*slot);
2882 *slot = ref;
2883 }
2884
2885 /* Process a strongly connected component in the SSA graph. */
2886
2887 static void
2888 process_scc (VEC (tree, heap) *scc)
2889 {
2890 tree var;
2891 unsigned int i;
2892 unsigned int iterations = 0;
2893 bool changed = true;
2894 htab_iterator hi;
2895 vn_nary_op_t nary;
2896 vn_phi_t phi;
2897 vn_reference_t ref;
2898
2899 /* If the SCC has a single member, just visit it. */
2900 if (VEC_length (tree, scc) == 1)
2901 {
2902 tree use = VEC_index (tree, scc, 0);
2903 if (!VN_INFO (use)->use_processed)
2904 visit_use (use);
2905 return;
2906 }
2907
2908 /* Iterate over the SCC with the optimistic table until it stops
2909 changing. */
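  /* A multi-member SCC typically comes from a cycle like
       i_1 = PHI <0(entry), i_2(latch)>;  i_2 = i_1 + 0;
     where starting from VN_TOP and iterating proves that both names
     have the value 0, which a pessimistic approach starting from
     VARYING would miss.  */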
2910 current_info = optimistic_info;
2911 while (changed)
2912 {
2913 changed = false;
2914 iterations++;
2915 /* As we are value-numbering optimistically we have to
2916 clear the expression tables and the simplified expressions
2917 in each iteration until we converge. */
2918 htab_empty (optimistic_info->nary);
2919 htab_empty (optimistic_info->phis);
2920 htab_empty (optimistic_info->references);
2921 obstack_free (&optimistic_info->nary_obstack, NULL);
2922 gcc_obstack_init (&optimistic_info->nary_obstack);
2923 empty_alloc_pool (optimistic_info->phis_pool);
2924 empty_alloc_pool (optimistic_info->references_pool);
2925 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2926 VN_INFO (var)->expr = NULL_TREE;
2927 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2928 changed |= visit_use (var);
2929 }
2930
2931 statistics_histogram_event (cfun, "SCC iterations", iterations);
2932
2933 /* Finally, copy the contents of the no longer used optimistic
2934 table to the valid table. */
2935 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
2936 copy_nary (nary, valid_info);
2937 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
2938 copy_phi (phi, valid_info);
2939 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
2940 copy_reference (ref, valid_info);
2941
2942 current_info = valid_info;
2943 }
2944
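/* Heap-allocated object vectors of ssa_op_iter, used by DFS below to
   save and restore the operand walk state when simulating recursion.  */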
2945 DEF_VEC_O(ssa_op_iter);
2946 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
2947
2948 /* Pop the components of the found SCC for NAME off the SCC stack
2949 and process them. Returns true if all went well, false if
2950 we run into resource limits. */
2951
2952 static bool
2953 extract_and_process_scc_for_name (tree name)
2954 {
2955 VEC (tree, heap) *scc = NULL;
2956 tree x;
2957
2958 /* Found an SCC, pop the components off the SCC stack and
2959 process them. */
2960 do
2961 {
2962 x = VEC_pop (tree, sccstack);
2963
2964 VN_INFO (x)->on_sccstack = false;
2965 VEC_safe_push (tree, heap, scc, x);
2966 } while (x != name);
2967
2968 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
2969 if (VEC_length (tree, scc)
2970 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
2971 {
2972 if (dump_file)
2973 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
2974 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
2975 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
2976 return false;
2977 }
2978
2979 if (VEC_length (tree, scc) > 1)
2980 sort_scc (scc);
2981
2982 if (dump_file && (dump_flags & TDF_DETAILS))
2983 print_scc (dump_file, scc);
2984
2985 process_scc (scc);
2986
2987 VEC_free (tree, heap, scc);
2988
2989 return true;
2990 }
2991
2992 /* Depth first search on NAME to discover and process SCCs in the SSA
2993 graph.
2994 Execution of this algorithm relies on the fact that the SCCs are
2995 popped off the stack in topological order.
2996 Returns true if successful, false if we stopped processing SCCs due
2997 to resource constraints. */
2998
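/* This is an iterative, not recursive, formulation of Tarjan's SCC
   algorithm: the walk state is kept on the explicit NAMEVEC and ITERVEC
   stacks, so arbitrarily deep SSA graphs cannot overflow the C stack.  */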
2999 static bool
3000 DFS (tree name)
3001 {
3002 VEC(ssa_op_iter, heap) *itervec = NULL;
3003 VEC(tree, heap) *namevec = NULL;
3004 use_operand_p usep = NULL;
3005 gimple defstmt;
3006 tree use;
3007 ssa_op_iter iter;
3008
3009 start_over:
3010 /* SCC info. */
3011 VN_INFO (name)->dfsnum = next_dfs_num++;
3012 VN_INFO (name)->visited = true;
3013 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3014
3015 VEC_safe_push (tree, heap, sccstack, name);
3016 VN_INFO (name)->on_sccstack = true;
3017 defstmt = SSA_NAME_DEF_STMT (name);
3018
3019 /* Recursively DFS on our operands, looking for SCCs. */
3020 if (!gimple_nop_p (defstmt))
3021 {
3022 /* Push a new iterator. */
3023 if (gimple_code (defstmt) == GIMPLE_PHI)
3024 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3025 else
3026 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3027 }
3028 else
3029 clear_and_done_ssa_iter (&iter);
3030
3031 while (1)
3032 {
3033 /* If we are done processing uses of a name, go up the stack
3034 of iterators and process SCCs as we found them. */
3035 if (op_iter_done (&iter))
3036 {
3037 /* See if we found an SCC. */
3038 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3039 if (!extract_and_process_scc_for_name (name))
3040 {
3041 VEC_free (tree, heap, namevec);
3042 VEC_free (ssa_op_iter, heap, itervec);
3043 return false;
3044 }
3045
3046 /* Check if we are done. */
3047 if (VEC_empty (tree, namevec))
3048 {
3049 VEC_free (tree, heap, namevec);
3050 VEC_free (ssa_op_iter, heap, itervec);
3051 return true;
3052 }
3053
3054 /* Restore the last use walker and continue walking there. */
3055 use = name;
3056 name = VEC_pop (tree, namevec);
3057 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3058 sizeof (ssa_op_iter));
3059 VEC_pop (ssa_op_iter, itervec);
3060 goto continue_walking;
3061 }
3062
3063 use = USE_FROM_PTR (usep);
3064
3065 /* Since we handle phi nodes, we will sometimes get
3066 invariants in the use expression. */
3067 if (TREE_CODE (use) == SSA_NAME)
3068 {
3069 if (! (VN_INFO (use)->visited))
3070 {
3071 /* Recurse by pushing the current use walking state on
3072 the stack and starting over. */
3073 VEC_safe_push (ssa_op_iter, heap, itervec, &iter);
3074 VEC_safe_push (tree, heap, namevec, name);
3075 name = use;
3076 goto start_over;
3077
3078 continue_walking:
3079 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3080 VN_INFO (use)->low);
3081 }
3082 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3083 && VN_INFO (use)->on_sccstack)
3084 {
3085 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3086 VN_INFO (name)->low);
3087 }
3088 }
3089
3090 usep = op_iter_next_use (&iter);
3091 }
3092 }
3093
3094 /* Allocate a value number table. */
3095
3096 static void
3097 allocate_vn_table (vn_tables_t table)
3098 {
3099 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3100 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3101 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3102 free_reference);
3103
3104 gcc_obstack_init (&table->nary_obstack);
3105 table->phis_pool = create_alloc_pool ("VN phis",
3106 sizeof (struct vn_phi_s),
3107 30);
3108 table->references_pool = create_alloc_pool ("VN references",
3109 sizeof (struct vn_reference_s),
3110 30);
3111 }
3112
3113 /* Free a value number table. */
3114
3115 static void
3116 free_vn_table (vn_tables_t table)
3117 {
3118 htab_delete (table->phis);
3119 htab_delete (table->nary);
3120 htab_delete (table->references);
3121 obstack_free (&table->nary_obstack, NULL);
3122 free_alloc_pool (table->phis_pool);
3123 free_alloc_pool (table->references_pool);
3124 }
3125
3126 static void
3127 init_scc_vn (void)
3128 {
3129 size_t i;
3130 int j;
3131 int *rpo_numbers_temp;
3132
3133 calculate_dominance_info (CDI_DOMINATORS);
3134 sccstack = NULL;
3135 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3136 free);
3137
3138 constant_value_ids = BITMAP_ALLOC (NULL);
3139
3140 next_dfs_num = 1;
3141 next_value_id = 1;
3142
3143 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3144 /* VEC_alloc doesn't actually grow it to the right size; it just
3145 preallocates the space to do so. */
3146 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3147 gcc_obstack_init (&vn_ssa_aux_obstack);
3148
3149 shared_lookup_phiargs = NULL;
3150 shared_lookup_references = NULL;
3151 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3152 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3153 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3154
3155 /* rpo_numbers_temp holds the RPO ordering: rpo[i] = bb means that
3156 the i'th block in RPO order is bb. We want to map block indices
3157 to RPO numbers, so we need to invert this array. */
3158 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3159 rpo_numbers[rpo_numbers_temp[j]] = j;
3160
3161 XDELETE (rpo_numbers_temp);
3162
3163 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3164
3165 /* Create the VN_INFO structures, and initialize value numbers to
3166 TOP. */
3167 for (i = 0; i < num_ssa_names; i++)
3168 {
3169 tree name = ssa_name (i);
3170 if (name)
3171 {
3172 VN_INFO_GET (name)->valnum = VN_TOP;
3173 VN_INFO (name)->expr = NULL_TREE;
3174 VN_INFO (name)->value_id = 0;
3175 }
3176 }
3177
3178 renumber_gimple_stmt_uids ();
3179
3180 /* Create the valid and optimistic value numbering tables. */
3181 valid_info = XCNEW (struct vn_tables_s);
3182 allocate_vn_table (valid_info);
3183 optimistic_info = XCNEW (struct vn_tables_s);
3184 allocate_vn_table (optimistic_info);
3185 }
3186
3187 void
3188 free_scc_vn (void)
3189 {
3190 size_t i;
3191
3192 htab_delete (constant_to_value_id);
3193 BITMAP_FREE (constant_value_ids);
3194 VEC_free (tree, heap, shared_lookup_phiargs);
3195 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3196 XDELETEVEC (rpo_numbers);
3197
3198 for (i = 0; i < num_ssa_names; i++)
3199 {
3200 tree name = ssa_name (i);
3201 if (name
3202 && VN_INFO (name)->needs_insertion)
3203 release_ssa_name (name);
3204 }
3205 obstack_free (&vn_ssa_aux_obstack, NULL);
3206 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3207
3208 VEC_free (tree, heap, sccstack);
3209 free_vn_table (valid_info);
3210 XDELETE (valid_info);
3211 free_vn_table (optimistic_info);
3212 XDELETE (optimistic_info);
3213 }
3214
3215 /* Set the value ids in the valid hash tables. */
3216
3217 static void
3218 set_hashtable_value_ids (void)
3219 {
3220 htab_iterator hi;
3221 vn_nary_op_t vno;
3222 vn_reference_t vr;
3223 vn_phi_t vp;
3224
3225 /* Now set the value ids of the things we had put in the hash
3226 table. */
3227
3228 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3229 vno, vn_nary_op_t, hi)
3230 {
3231 if (vno->result)
3232 {
3233 if (TREE_CODE (vno->result) == SSA_NAME)
3234 vno->value_id = VN_INFO (vno->result)->value_id;
3235 else if (is_gimple_min_invariant (vno->result))
3236 vno->value_id = get_or_alloc_constant_value_id (vno->result);
3237 }
3238 }
3239
3240 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3241 vp, vn_phi_t, hi)
3242 {
3243 if (vp->result)
3244 {
3245 if (TREE_CODE (vp->result) == SSA_NAME)
3246 vp->value_id = VN_INFO (vp->result)->value_id;
3247 else if (is_gimple_min_invariant (vp->result))
3248 vp->value_id = get_or_alloc_constant_value_id (vp->result);
3249 }
3250 }
3251
3252 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3253 vr, vn_reference_t, hi)
3254 {
3255 if (vr->result)
3256 {
3257 if (TREE_CODE (vr->result) == SSA_NAME)
3258 vr->value_id = VN_INFO (vr->result)->value_id;
3259 else if (is_gimple_min_invariant (vr->result))
3260 vr->value_id = get_or_alloc_constant_value_id (vr->result);
3261 }
3262 }
3263 }
3264
3265 /* Do SCCVN. Returns true if it finished, false if we bailed out
3266 due to resource constraints. */
3267
3268 bool
3269 run_scc_vn (bool may_insert_arg)
3270 {
3271 size_t i;
3272 tree param;
3273 bool changed = true;
3274
3275 may_insert = may_insert_arg;
3276
3277 init_scc_vn ();
3278 current_info = valid_info;
3279
3280 for (param = DECL_ARGUMENTS (current_function_decl);
3281 param;
3282 param = TREE_CHAIN (param))
3283 {
3284 if (gimple_default_def (cfun, param) != NULL)
3285 {
3286 tree def = gimple_default_def (cfun, param);
3287 VN_INFO (def)->valnum = def;
3288 }
3289 }
3290
3291 for (i = 1; i < num_ssa_names; ++i)
3292 {
3293 tree name = ssa_name (i);
3294 if (name
3295 && VN_INFO (name)->visited == false
3296 && !has_zero_uses (name))
3297 if (!DFS (name))
3298 {
3299 free_scc_vn ();
3300 may_insert = false;
3301 return false;
3302 }
3303 }
3304
3305 /* Initialize the value ids. */
3306
3307 for (i = 1; i < num_ssa_names; ++i)
3308 {
3309 tree name = ssa_name (i);
3310 vn_ssa_aux_t info;
3311 if (!name)
3312 continue;
3313 info = VN_INFO (name);
3314 if (info->valnum == name
3315 || info->valnum == VN_TOP)
3316 info->value_id = get_next_value_id ();
3317 else if (is_gimple_min_invariant (info->valnum))
3318 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3319 }
3320
3321 /* Propagate until they stop changing. */
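  /* A name valued to another SSA name inherits that name's value id;
     iterate because chains of values can be several links long.  */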
3322 while (changed)
3323 {
3324 changed = false;
3325 for (i = 1; i < num_ssa_names; ++i)
3326 {
3327 tree name = ssa_name (i);
3328 vn_ssa_aux_t info;
3329 if (!name)
3330 continue;
3331 info = VN_INFO (name);
3332 if (TREE_CODE (info->valnum) == SSA_NAME
3333 && info->valnum != name
3334 && info->value_id != VN_INFO (info->valnum)->value_id)
3335 {
3336 changed = true;
3337 info->value_id = VN_INFO (info->valnum)->value_id;
3338 }
3339 }
3340 }
3341
3342 set_hashtable_value_ids ();
3343
3344 if (dump_file && (dump_flags & TDF_DETAILS))
3345 {
3346 fprintf (dump_file, "Value numbers:\n");
3347 for (i = 0; i < num_ssa_names; i++)
3348 {
3349 tree name = ssa_name (i);
3350 if (name
3351 && VN_INFO (name)->visited
3352 && SSA_VAL (name) != name)
3353 {
3354 print_generic_expr (dump_file, name, 0);
3355 fprintf (dump_file, " = ");
3356 print_generic_expr (dump_file, SSA_VAL (name), 0);
3357 fprintf (dump_file, "\n");
3358 }
3359 }
3360 }
3361
3362 may_insert = false;
3363 return true;
3364 }
3365
3366 /* Return the maximum value id we have ever seen. */
3367
3368 unsigned int
3369 get_max_value_id (void)
3370 {
3371 return next_value_id;
3372 }
3373
3374 /* Return the next unique value id. */
3375
3376 unsigned int
3377 get_next_value_id (void)
3378 {
3379 return next_value_id++;
3380 }
3381
3382
3383 /* Compare two expressions E1 and E2 and return true if they are equal. */
3384
3385 bool
3386 expressions_equal_p (tree e1, tree e2)
3387 {
3388 /* The obvious case. */
3389 if (e1 == e2)
3390 return true;
3391
3392 /* If only one of them is null, they cannot be equal. */
3393 if (!e1 || !e2)
3394 return false;
3395
3396 /* Now perform the actual comparison. */
3397 if (TREE_CODE (e1) == TREE_CODE (e2)
3398 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3399 return true;
3400
3401 return false;
3402 }
3403
3404
3405 /* Return true if the nary operation NARY may trap. This is a copy
3406 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3407
3408 bool
3409 vn_nary_may_trap (vn_nary_op_t nary)
3410 {
3411 tree type;
3412 tree rhs2 = NULL_TREE;
3413 bool honor_nans = false;
3414 bool honor_snans = false;
3415 bool fp_operation = false;
3416 bool honor_trapv = false;
3417 bool handled, ret;
3418 unsigned i;
3419
3420 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3421 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3422 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3423 {
3424 type = nary->type;
3425 fp_operation = FLOAT_TYPE_P (type);
3426 if (fp_operation)
3427 {
3428 honor_nans = flag_trapping_math && !flag_finite_math_only;
3429 honor_snans = flag_signaling_nans != 0;
3430 }
3431 else if (INTEGRAL_TYPE_P (type)
3432 && TYPE_OVERFLOW_TRAPS (type))
3433 honor_trapv = true;
3434 }
3435 if (nary->length >= 2)
3436 rhs2 = nary->op[1];
3437 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3438 honor_trapv,
3439 honor_nans, honor_snans, rhs2,
3440 &handled);
3441 if (handled
3442 && ret)
3443 return true;
3444
3445 for (i = 0; i < nary->length; ++i)
3446 if (tree_could_trap_p (nary->op[i]))
3447 return true;
3448
3449 return false;
3450 }