/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "real.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight-line code, it is equivalent to a regular hash-based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
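
/* As an illustrative sketch (not from the paper), consider the
   optimistic treatment of a simple loop-carried cycle:

     i_1 = PHI <0 (entry), i_2 (latch)>
     i_2 = i_1 + 0

   The SCC {i_1, i_2} is value numbered with the optimistic table:
   the not-yet-visited back edge argument i_2 starts out as VN_TOP
   and is ignored, so i_1 gets the value 0; then i_2 = 0 + 0
   simplifies to 0, and a second iteration changes nothing.  Both
   names end up value numbered to the constant 0, which a single
   non-iterating pass would miss.  */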

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

static bool may_insert;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}


/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}

/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    result = vn_reference_op_compute_hash (vro, result);
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  int i;
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))
    return false;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
                             vro))
      return false;

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;
      tree base;

      base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      if (!base)
        base = build_int_cst (ptr_type_node, 0);

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (base);
      temp.op0 = base;
      temp.op1 = TMR_ORIGINAL (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
        {
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          /* The only operand is the address, which gets its own
             vn_reference_op_s structure.  */
          break;
        case MISALIGNED_INDIRECT_REF:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          /* If this is a reference to a union member, record the union
             member size as operand.  Do so only if we are doing
             expression insertion (during FRE), as PRE currently gets
             confused with this.  */
          if (may_insert
              && temp.op1 == NULL_TREE
              && TREE_CODE (DECL_CONTEXT (temp.op0)) == UNION_TYPE
              && integer_zerop (DECL_FIELD_OFFSET (temp.op0))
              && integer_zerop (DECL_FIELD_BIT_OFFSET (temp.op0))
              && host_integerp (DECL_SIZE (temp.op0), 0))
            temp.op0 = DECL_SIZE (temp.op0);
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (i.e. they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
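
/* As an illustrative sketch, a load from a.b[i_1] with VAR_DECL a is
   decomposed outermost-first into roughly

     { ARRAY_REF,     op0 = i_1, op1 = low bound, op2 = element size }
     { COMPONENT_REF, op0 = FIELD_DECL for b }
     { VAR_DECL,      op0 = a }

   SSA_NAME operands like i_1 are later replaced by their value
   numbers in valueize_refs, so equivalent references end up with
   equal operand vectors.  */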

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    {
      if (TREE_CODE (op->op0) == INTEGER_CST)
        size_tree = op->op0;
      else
        size_tree = DECL_SIZE (op->op0);
    }
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case CALL_EXPR:
        case ADDR_EXPR:
          return false;

        /* Record the base objects.  */
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case MISALIGNED_INDIRECT_REF:
          *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            /* Our union trick, done for offset zero only.  */
            if (TREE_CODE (field) == INTEGER_CST)
              ;
            else if (op->op1
                     || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  ref->base_alias_set = -1;

  return true;
}
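
/* Illustrative sketch of the offset computation above: for the ops of
   s.f[2], where field f sits at byte offset 4 and the array elements
   are 4 bytes wide with a zero low bound, the loop accumulates
   offset = 4*8 + (2-0)*4*8 = 96 bits, while size and max_size are the
   32 bits of the accessed element.  */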

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  VEC(vn_reference_op_s, heap) *mem = NULL;
  vn_reference_op_t op;
  unsigned int i = *i_p;
  unsigned int j;

  /* Get ops for the addressed object.  */
  op = VEC_index (vn_reference_op_s, *ops, i);
  /* ??? If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
     around it to avoid later ICEs.  */
  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
    {
      vn_reference_op_s aref;
      tree dom;
      aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
      aref.opcode = ARRAY_REF;
      aref.op0 = integer_zero_node;
      if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
          && TYPE_MIN_VALUE (dom))
        aref.op0 = TYPE_MIN_VALUE (dom);
      aref.op1 = aref.op0;
      aref.op2 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op->op0)));
      VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
    }
  copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);

  /* Do the replacement - we should have at least one op in mem now.  */
  if (VEC_length (vn_reference_op_s, mem) == 1)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_ordered_remove (vn_reference_op_s, *ops, i);
      i--;
    }
  else if (VEC_length (vn_reference_op_s, mem) == 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
    }
  else if (VEC_length (vn_reference_op_s, mem) > 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
      /* ??? There is no VEC_splice.  */
      for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
        VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
    }
  else
    gcc_unreachable ();

  VEC_free (vn_reference_op_s, heap, mem);
  *i_p = i;
}

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
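
/* Illustrative sketch: a character load that reaches here as
   { ARRAY_REF, op0 = 1, op1 = 0 } over a STRING_CST "abc" folds to
   the constant 'b' above, and a builtin call with a constant
   argument, e.g. __builtin_abs (-3), can fold to 3.  */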

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
          /* If it transforms from an SSA_NAME to an address, fold with
             a preceding indirect reference.  */
          if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
              && VEC_index (vn_reference_op_s,
                            orig, i - 1)->opcode == INDIRECT_REF)
            {
              vn_reference_fold_indirect (&orig, &i);
              continue;
            }
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
    }

  return orig;
}

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
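
/* Note the hash fixup above relies on vn_reference_compute_hash mixing
   the vuse in additively (result += SSA_NAME_VERSION (vuse)), so
   re-basing the lookup on a different VUSE only requires subtracting
   the old version number and adding the new one, instead of rehashing
   all the operands.  */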

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree fndecl;
  tree base;
  HOST_WIDE_INT offset, maxsize;

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && is_gimple_call (def_stmt)
      && (fndecl = gimple_call_fndecl (def_stmt))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 3) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || INDIRECT_REF_P (gimple_assign_rhs1 (def_stmt))
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      /* See if the assignment kills REF.  */
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (!operand_equal_p (base, base2, 0)
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  */
      copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs, j)))
        {
          i--;
          j--;
        }

      VEC_free (vn_reference_op_s, heap, lhs);
      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
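
/* Illustrative sketch of the aggregate-copy case above: for the copy
   a = b followed by the load x_1 = a.f, looking up a.f reaches the
   copy, the common base a is matched off the end of the operand
   vector, and the remaining .f component is re-applied to b.  The
   walk then continues looking for a value of b.f instead.  */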

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && maywalk
      && vr1.vuse)
    {
      ao_ref r;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, bool maywalk,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (maywalk
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}


/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}
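
/* Because of the canonical operand order established above for
   commutative opcodes, expressions such as a_1 + b_2 and b_2 + a_1
   hash identically and later compare equal in vn_nary_op_eq, so both
   forms share a single value number.  */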

/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}

/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = gimple_expr_type (stmt);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = gimple_expr_type (stmt);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
             + (INTEGRAL_TYPE_P (type)
                ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
        continue;
      result = iterative_hash_expr (phi1op, result);
    }

  return result;
}

/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
                               TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
        return false;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
        {
          tree phi2op = VEC_index (tree, vp2->phiargs, i);
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}
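
/* For example, two PHIs in the same block such as
     x_3 = PHI <a_1, b_2>  and  y_4 = PHI <a_1, b_2>
   hash and compare equal here (arguments within one block are stored
   in the same edge order), so y_4 receives x_3's value number.
   VN_TOP arguments are skipped in the comparison, matching the
   optimistic treatment of not-yet-visited values.  */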
1783
1784 static VEC(tree, heap) *shared_lookup_phiargs;
1785
1786 /* Lookup PHI in the current hash table, and return the resulting
1787 value number if it exists in the hash table. Return NULL_TREE if
1788 it does not exist in the hash table. */
1789
1790 static tree
1791 vn_phi_lookup (gimple phi)
1792 {
1793 void **slot;
1794 struct vn_phi_s vp1;
1795 unsigned i;
1796
1797 VEC_truncate (tree, shared_lookup_phiargs, 0);
1798
1799 /* Canonicalize the SSA_NAME's to their value number. */
1800 for (i = 0; i < gimple_phi_num_args (phi); i++)
1801 {
1802 tree def = PHI_ARG_DEF (phi, i);
1803 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1804 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
1805 }
1806 vp1.phiargs = shared_lookup_phiargs;
1807 vp1.block = gimple_bb (phi);
1808 vp1.hashcode = vn_phi_compute_hash (&vp1);
1809 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
1810 NO_INSERT);
1811 if (!slot && current_info == optimistic_info)
1812 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
1813 NO_INSERT);
1814 if (!slot)
1815 return NULL_TREE;
1816 return ((vn_phi_t)*slot)->result;
1817 }
1818
1819 /* Insert PHI into the current hash table with a value number of
1820 RESULT. */
1821
1822 static vn_phi_t
1823 vn_phi_insert (gimple phi, tree result)
1824 {
1825 void **slot;
1826 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
1827 unsigned i;
1828 VEC (tree, heap) *args = NULL;
1829
1830   /* Canonicalize the SSA_NAMEs to their value numbers. */
1831 for (i = 0; i < gimple_phi_num_args (phi); i++)
1832 {
1833 tree def = PHI_ARG_DEF (phi, i);
1834 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1835 VEC_safe_push (tree, heap, args, def);
1836 }
1837 vp1->value_id = VN_INFO (result)->value_id;
1838 vp1->phiargs = args;
1839 vp1->block = gimple_bb (phi);
1840 vp1->result = result;
1841 vp1->hashcode = vn_phi_compute_hash (vp1);
1842
1843 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
1844 INSERT);
1845
1846   /* Because we iterate over phi operations more than once, it's
1847      possible the slot might already exist here, hence no assert. */
1848 *slot = vp1;
1849 return vp1;
1850 }
1851
1852
1853 /* Print the components of the strongly connected component SCC to OUT. */
1854
1855 static void
1856 print_scc (FILE *out, VEC (tree, heap) *scc)
1857 {
1858 tree var;
1859 unsigned int i;
1860
1861 fprintf (out, "SCC consists of: ");
1862 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1863 {
1864 print_generic_expr (out, var, 0);
1865 fprintf (out, " ");
1866 }
1867 fprintf (out, "\n");
1868 }
1869
1870 /* Set the value number of FROM to TO, return true if it has changed
1871 as a result. */
1872
1873 static inline bool
1874 set_ssa_val_to (tree from, tree to)
1875 {
1876 tree currval;
1877
1878 if (from != to
1879 && TREE_CODE (to) == SSA_NAME
1880 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1881 to = from;
1882
1883   /* The only things we allow as value numbers are VN_TOP, SSA names
1884      and invariants, so assert that here. */
1885 gcc_assert (to != NULL_TREE
1886 && (to == VN_TOP
1887 || TREE_CODE (to) == SSA_NAME
1888 || is_gimple_min_invariant (to)));
1889
1890 if (dump_file && (dump_flags & TDF_DETAILS))
1891 {
1892 fprintf (dump_file, "Setting value number of ");
1893 print_generic_expr (dump_file, from, 0);
1894 fprintf (dump_file, " to ");
1895 print_generic_expr (dump_file, to, 0);
1896 }
1897
1898 currval = SSA_VAL (from);
1899
1900 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1901 {
1902 VN_INFO (from)->valnum = to;
1903 if (dump_file && (dump_flags & TDF_DETAILS))
1904 fprintf (dump_file, " (changed)\n");
1905 return true;
1906 }
1907 if (dump_file && (dump_flags & TDF_DETAILS))
1908 fprintf (dump_file, "\n");
1909 return false;
1910 }
1911
1912 /* Value number all definitions in STMT to themselves.
1913 Return true if a value number changed. */
1914
1915 static bool
1916 defs_to_varying (gimple stmt)
1917 {
1918 bool changed = false;
1919 ssa_op_iter iter;
1920 def_operand_p defp;
1921
1922 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1923 {
1924 tree def = DEF_FROM_PTR (defp);
1925
1926 VN_INFO (def)->use_processed = true;
1927 changed |= set_ssa_val_to (def, def);
1928 }
1929 return changed;
1930 }
1931
1932 static bool expr_has_constants (tree expr);
1933 static tree valueize_expr (tree expr);
1934
1935 /* Visit a copy between LHS and RHS, return true if the value number
1936 changed. */
1937
1938 static bool
1939 visit_copy (tree lhs, tree rhs)
1940 {
1941 /* Follow chains of copies to their destination. */
1942 while (TREE_CODE (rhs) == SSA_NAME
1943 && SSA_VAL (rhs) != rhs)
1944 rhs = SSA_VAL (rhs);
1945
1946   /* The copy may have a more interesting constant-filled expression
1947      (we don't, since we know our RHS is just an SSA name). */
1948 if (TREE_CODE (rhs) == SSA_NAME)
1949 {
1950 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1951 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1952 }
1953
1954 return set_ssa_val_to (lhs, rhs);
1955 }
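
/* Documentation-only sketch of the copy-chain walk above: keep
   replacing a name by its value number until the mapping reaches a
   fixed point.  The VAL array stands in for SSA_VAL and is
   hypothetical.  */
#if 0
static int
example_follow_copies (const int *val, int name)
{
  while (val[name] != name)	/* Not yet its own value number.  */
    name = val[name];
  return name;
}
#endif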
1956
1957 /* Visit a unary operator RHS, value number it, and return true if the
1958 value number of LHS has changed as a result. */
1959
1960 static bool
1961 visit_unary_op (tree lhs, gimple stmt)
1962 {
1963 bool changed = false;
1964 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1965
1966 if (result)
1967 {
1968 changed = set_ssa_val_to (lhs, result);
1969 }
1970 else
1971 {
1972 changed = set_ssa_val_to (lhs, lhs);
1973 vn_nary_op_insert_stmt (stmt, lhs);
1974 }
1975
1976 return changed;
1977 }
1978
1979 /* Visit a binary operator RHS, value number it, and return true if the
1980 value number of LHS has changed as a result. */
1981
1982 static bool
1983 visit_binary_op (tree lhs, gimple stmt)
1984 {
1985 bool changed = false;
1986 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1987
1988 if (result)
1989 {
1990 changed = set_ssa_val_to (lhs, result);
1991 }
1992 else
1993 {
1994 changed = set_ssa_val_to (lhs, lhs);
1995 vn_nary_op_insert_stmt (stmt, lhs);
1996 }
1997
1998 return changed;
1999 }
2000
2001 /* Visit a call STMT storing into LHS. Return true if the value number
2002 of the LHS has changed as a result. */
2003
2004 static bool
2005 visit_reference_op_call (tree lhs, gimple stmt)
2006 {
2007 bool changed = false;
2008 struct vn_reference_s vr1;
2009 tree result;
2010 tree vuse = gimple_vuse (stmt);
2011
2012 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2013 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2014 vr1.type = gimple_expr_type (stmt);
2015 vr1.set = 0;
2016 vr1.hashcode = vn_reference_compute_hash (&vr1);
2017 result = vn_reference_lookup_1 (&vr1, NULL);
2018 if (result)
2019 {
2020 changed = set_ssa_val_to (lhs, result);
2021 if (TREE_CODE (result) == SSA_NAME
2022 && VN_INFO (result)->has_constants)
2023 VN_INFO (lhs)->has_constants = true;
2024 }
2025 else
2026 {
2027 void **slot;
2028 vn_reference_t vr2;
2029 changed = set_ssa_val_to (lhs, lhs);
2030 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2031 vr2->vuse = vr1.vuse;
2032 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2033 vr2->type = vr1.type;
2034 vr2->set = vr1.set;
2035 vr2->hashcode = vr1.hashcode;
2036 vr2->result = lhs;
2037 slot = htab_find_slot_with_hash (current_info->references,
2038 vr2, vr2->hashcode, INSERT);
2039 if (*slot)
2040 free_reference (*slot);
2041 *slot = vr2;
2042 }
2043
2044 return changed;
2045 }
2046
2047 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2048 and return true if the value number of the LHS has changed as a result. */
2049
2050 static bool
2051 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2052 {
2053 bool changed = false;
2054 tree last_vuse;
2055 tree result;
2056
2057 last_vuse = gimple_vuse (stmt);
2058 last_vuse_ptr = &last_vuse;
2059 result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
2060 last_vuse_ptr = NULL;
2061
2062 /* If we have a VCE, try looking up its operand as it might be stored in
2063 a different type. */
2064 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2065 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2066 true, NULL);
2067
2068 /* We handle type-punning through unions by value-numbering based
2069 on offset and size of the access. Be prepared to handle a
2070 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2071 if (result
2072 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2073 {
2074 /* We will be setting the value number of lhs to the value number
2075 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2076 So first simplify and lookup this expression to see if it
2077 is already available. */
2078 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2079 if ((CONVERT_EXPR_P (val)
2080 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2081 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2082 {
2083 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2084 if ((CONVERT_EXPR_P (tem)
2085 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2086 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2087 TREE_TYPE (val), tem)))
2088 val = tem;
2089 }
2090 result = val;
2091 if (!is_gimple_min_invariant (val)
2092 && TREE_CODE (val) != SSA_NAME)
2093 result = vn_nary_op_lookup (val, NULL);
2094 /* If the expression is not yet available, value-number lhs to
2095 a new SSA_NAME we create. */
2096 if (!result && may_insert)
2097 {
2098 result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
2099 /* Initialize value-number information properly. */
2100 VN_INFO_GET (result)->valnum = result;
2101 VN_INFO (result)->value_id = get_next_value_id ();
2102 VN_INFO (result)->expr = val;
2103 VN_INFO (result)->has_constants = expr_has_constants (val);
2104 VN_INFO (result)->needs_insertion = true;
2105 	  /* As all "inserted" statements are singleton SCCs, insert
2106 	     into the valid table. This is strictly needed to
2107 avoid re-generating new value SSA_NAMEs for the same
2108 expression during SCC iteration over and over (the
2109 optimistic table gets cleared after each iteration).
2110 We do not need to insert into the optimistic table, as
2111 lookups there will fall back to the valid table. */
2112 if (current_info == optimistic_info)
2113 {
2114 current_info = valid_info;
2115 vn_nary_op_insert (val, result);
2116 current_info = optimistic_info;
2117 }
2118 else
2119 vn_nary_op_insert (val, result);
2120 if (dump_file && (dump_flags & TDF_DETAILS))
2121 {
2122 fprintf (dump_file, "Inserting name ");
2123 print_generic_expr (dump_file, result, 0);
2124 fprintf (dump_file, " for expression ");
2125 print_generic_expr (dump_file, val, 0);
2126 fprintf (dump_file, "\n");
2127 }
2128 }
2129 }
2130
2131 if (result)
2132 {
2133 changed = set_ssa_val_to (lhs, result);
2134 if (TREE_CODE (result) == SSA_NAME
2135 && VN_INFO (result)->has_constants)
2136 {
2137 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2138 VN_INFO (lhs)->has_constants = true;
2139 }
2140 }
2141 else
2142 {
2143 changed = set_ssa_val_to (lhs, lhs);
2144 vn_reference_insert (op, lhs, last_vuse);
2145 }
2146
2147 return changed;
2148 }
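
/* Source-level illustration (documentation only) of the type-punning
   case handled above: the load of u.f reads the bits stored through
   u.i at the same offset and size, so the lookup succeeds and the
   type mismatch is repaired with a VIEW_CONVERT_EXPR.  */
#if 0
union example_u { int i; float f; };

static float
example_pun (int x)
{
  union example_u u;
  u.i = x;	/* Store as int.  */
  return u.f;	/* Load as float: same offset and size.  */
}
#endif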
2149
2150
2151 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2152 and return true if the value number of the LHS has changed as a result. */
2153
2154 static bool
2155 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2156 {
2157 bool changed = false;
2158 tree result;
2159 bool resultsame = false;
2160
2161   /* First we want to lookup using the *vuses* from the store and see
2162      if the last store to this location with the same address had
2163      the same value.
2164
2165 The vuses represent the memory state before the store. If the
2166 memory state, address, and value of the store is the same as the
2167 last store to this location, then this store will produce the
2168 same memory state as that store.
2169
2170 In this case the vdef versions for this store are value numbered to those
2171 vuse versions, since they represent the same memory state after
2172 this store.
2173
2174 Otherwise, the vdefs for the store are used when inserting into
2175 the table, since the store generates a new memory state. */
2176
2177 result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);
2178
2179 if (result)
2180 {
2181 if (TREE_CODE (result) == SSA_NAME)
2182 result = SSA_VAL (result);
2183 if (TREE_CODE (op) == SSA_NAME)
2184 op = SSA_VAL (op);
2185 resultsame = expressions_equal_p (result, op);
2186 }
2187
2188 if (!result || !resultsame)
2189 {
2190 tree vdef;
2191
2192 if (dump_file && (dump_flags & TDF_DETAILS))
2193 {
2194 fprintf (dump_file, "No store match\n");
2195 fprintf (dump_file, "Value numbering store ");
2196 print_generic_expr (dump_file, lhs, 0);
2197 fprintf (dump_file, " to ");
2198 print_generic_expr (dump_file, op, 0);
2199 fprintf (dump_file, "\n");
2200 }
2201 /* Have to set value numbers before insert, since insert is
2202 going to valueize the references in-place. */
2203 if ((vdef = gimple_vdef (stmt)))
2204 {
2205 VN_INFO (vdef)->use_processed = true;
2206 changed |= set_ssa_val_to (vdef, vdef);
2207 }
2208
2209 /* Do not insert structure copies into the tables. */
2210 if (is_gimple_min_invariant (op)
2211 || is_gimple_reg (op))
2212 vn_reference_insert (lhs, op, vdef);
2213 }
2214 else
2215 {
2216 /* We had a match, so value number the vdef to have the value
2217 number of the vuse it came from. */
2218 tree def, use;
2219
2220 if (dump_file && (dump_flags & TDF_DETAILS))
2221 	fprintf (dump_file, "Store matched earlier value, "
2222 		 "value numbering store vdefs to matching vuses.\n");
2223
2224 def = gimple_vdef (stmt);
2225 use = gimple_vuse (stmt);
2226
2227 VN_INFO (def)->use_processed = true;
2228 changed |= set_ssa_val_to (def, SSA_VAL (use));
2229 }
2230
2231 return changed;
2232 }
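
/* Source-level illustration (documentation only) of the store
   matching above: the second store writes the value the location is
   already known to hold, so its vdef is value numbered to its vuse
   and no new memory state is created.  */
#if 0
static void
example_redundant_store (int *p, int x)
{
  *p = x;
  /* No intervening store to *P.  */
  *p = x;	/* Lookup finds the earlier store of X.  */
}
#endif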
2233
2234 /* Visit and value number PHI, return true if the value number
2235 changed. */
2236
2237 static bool
2238 visit_phi (gimple phi)
2239 {
2240 bool changed = false;
2241 tree result;
2242 tree sameval = VN_TOP;
2243 bool allsame = true;
2244 unsigned i;
2245
2246 /* TODO: We could check for this in init_sccvn, and replace this
2247 with a gcc_assert. */
2248 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2249 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2250
2251 /* See if all non-TOP arguments have the same value. TOP is
2252 equivalent to everything, so we can ignore it. */
2253 for (i = 0; i < gimple_phi_num_args (phi); i++)
2254 {
2255 tree def = PHI_ARG_DEF (phi, i);
2256
2257 if (TREE_CODE (def) == SSA_NAME)
2258 def = SSA_VAL (def);
2259 if (def == VN_TOP)
2260 continue;
2261 if (sameval == VN_TOP)
2262 {
2263 sameval = def;
2264 }
2265 else
2266 {
2267 if (!expressions_equal_p (def, sameval))
2268 {
2269 allsame = false;
2270 break;
2271 }
2272 }
2273 }
2274
2275   /* If all arguments value numbered to the same value, the phi node
2276      has that value. */
2277 if (allsame)
2278 {
2279 if (is_gimple_min_invariant (sameval))
2280 {
2281 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2282 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2283 }
2284 else
2285 {
2286 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2287 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2288 }
2289
2290 if (TREE_CODE (sameval) == SSA_NAME)
2291 return visit_copy (PHI_RESULT (phi), sameval);
2292
2293 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2294 }
2295
2296 /* Otherwise, see if it is equivalent to a phi node in this block. */
2297 result = vn_phi_lookup (phi);
2298 if (result)
2299 {
2300 if (TREE_CODE (result) == SSA_NAME)
2301 changed = visit_copy (PHI_RESULT (phi), result);
2302 else
2303 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2304 }
2305 else
2306 {
2307 vn_phi_insert (phi, PHI_RESULT (phi));
2308 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2309 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2310 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2311 }
2312
2313 return changed;
2314 }
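
/* Source-level illustration (documentation only) of the all-same
   case above: both PHI arguments value number to 5, so the PHI
   result is value numbered to the constant itself.  */
#if 0
static int
example_phi (int c)
{
  int x;
  if (c)
    x = 5;
  else
    x = 5;
  return x;	/* x = PHI <5, 5> gets value number 5.  */
}
#endif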
2315
2316 /* Return true if EXPR contains constants. */
2317
2318 static bool
2319 expr_has_constants (tree expr)
2320 {
2321 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2322 {
2323 case tcc_unary:
2324 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2325
2326 case tcc_binary:
2327 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2328 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2329 /* Constants inside reference ops are rarely interesting, but
2330 it can take a lot of looking to find them. */
2331 case tcc_reference:
2332 case tcc_declaration:
2333 return false;
2334 default:
2335 return is_gimple_min_invariant (expr);
2336 }
2337 return false;
2338 }
2339
2340 /* Return true if STMT contains constants. */
2341
2342 static bool
2343 stmt_has_constants (gimple stmt)
2344 {
2345 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2346 return false;
2347
2348 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2349 {
2350 case GIMPLE_UNARY_RHS:
2351 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2352
2353 case GIMPLE_BINARY_RHS:
2354 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2355 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2356 case GIMPLE_SINGLE_RHS:
2357 /* Constants inside reference ops are rarely interesting, but
2358 it can take a lot of looking to find them. */
2359 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2360 default:
2361 gcc_unreachable ();
2362 }
2363 return false;
2364 }
2365
2366 /* Replace SSA_NAMEs in EXPR with their value numbers and return the
2367    result.
2368    This is performed in place. */
2369
2370 static tree
2371 valueize_expr (tree expr)
2372 {
2373 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2374 {
2375 case tcc_unary:
2376 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2377 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2378 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2379 break;
2380 case tcc_binary:
2381 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2382 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2383 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2384 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2385 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2386 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2387 break;
2388 default:
2389 break;
2390 }
2391 return expr;
2392 }
2393
2394 /* Simplify the binary expression RHS, and return the result if
2395 simplified. */
2396
2397 static tree
2398 simplify_binary_expression (gimple stmt)
2399 {
2400 tree result = NULL_TREE;
2401 tree op0 = gimple_assign_rhs1 (stmt);
2402 tree op1 = gimple_assign_rhs2 (stmt);
2403
2404 /* This will not catch every single case we could combine, but will
2405 catch those with constants. The goal here is to simultaneously
2406 combine constants between expressions, but avoid infinite
2407 expansion of expressions during simplification. */
2408 if (TREE_CODE (op0) == SSA_NAME)
2409 {
2410 if (VN_INFO (op0)->has_constants
2411 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2412 op0 = valueize_expr (vn_get_expr_for (op0));
2413 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2414 op0 = SSA_VAL (op0);
2415 }
2416
2417 if (TREE_CODE (op1) == SSA_NAME)
2418 {
2419 if (VN_INFO (op1)->has_constants)
2420 op1 = valueize_expr (vn_get_expr_for (op1));
2421 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2422 op1 = SSA_VAL (op1);
2423 }
2424
2425 /* Avoid folding if nothing changed. */
2426 if (op0 == gimple_assign_rhs1 (stmt)
2427 && op1 == gimple_assign_rhs2 (stmt))
2428 return NULL_TREE;
2429
2430 fold_defer_overflow_warnings ();
2431
2432 result = fold_binary (gimple_assign_rhs_code (stmt),
2433 gimple_expr_type (stmt), op0, op1);
2434 if (result)
2435 STRIP_USELESS_TYPE_CONVERSION (result);
2436
2437 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2438 stmt, 0);
2439
2440 /* Make sure result is not a complex expression consisting
2441      of operators of operators (i.e. (a + b) + (a + c)).
2442 Otherwise, we will end up with unbounded expressions if
2443 fold does anything at all. */
2444 if (result && valid_gimple_rhs_p (result))
2445 return result;
2446
2447 return NULL_TREE;
2448 }
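
/* Source-level illustration (documentation only) of the constant
   combining above: valueizing B to its constant-filled expression
   lets fold collapse the two additions without unbounded growth.  */
#if 0
static int
example_combine (int a)
{
  int b = a + 1;
  int c = b + 2;	/* Simplified as (a + 1) + 2 == a + 3.  */
  return c;
}
#endif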
2449
2450 /* Simplify the unary expression RHS, and return the result if
2451 simplified. */
2452
2453 static tree
2454 simplify_unary_expression (gimple stmt)
2455 {
2456 tree result = NULL_TREE;
2457 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2458
2459 /* We handle some tcc_reference codes here that are all
2460 GIMPLE_ASSIGN_SINGLE codes. */
2461 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2462 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2463 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2464 op0 = TREE_OPERAND (op0, 0);
2465
2466 if (TREE_CODE (op0) != SSA_NAME)
2467 return NULL_TREE;
2468
2469 orig_op0 = op0;
2470 if (VN_INFO (op0)->has_constants)
2471 op0 = valueize_expr (vn_get_expr_for (op0));
2472 else if (gimple_assign_cast_p (stmt)
2473 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2474 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2475 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2476 {
2477 /* We want to do tree-combining on conversion-like expressions.
2478 Make sure we feed only SSA_NAMEs or constants to fold though. */
2479 tree tem = valueize_expr (vn_get_expr_for (op0));
2480 if (UNARY_CLASS_P (tem)
2481 || BINARY_CLASS_P (tem)
2482 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2483 || TREE_CODE (tem) == SSA_NAME
2484 || is_gimple_min_invariant (tem))
2485 op0 = tem;
2486 }
2487
2488 /* Avoid folding if nothing changed, but remember the expression. */
2489 if (op0 == orig_op0)
2490 return NULL_TREE;
2491
2492 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2493 gimple_expr_type (stmt), op0);
2494 if (result)
2495 {
2496 STRIP_USELESS_TYPE_CONVERSION (result);
2497 if (valid_gimple_rhs_p (result))
2498 return result;
2499 }
2500
2501 return NULL_TREE;
2502 }
2503
2504 /* Try to simplify RHS using equivalences and constant folding. */
2505
2506 static tree
2507 try_to_simplify (gimple stmt)
2508 {
2509 tree tem;
2510
2511   /* For stores we can end up simplifying an SSA_NAME rhs. Just return
2512      in this case; there is no point in doing extra work. */
2513 if (gimple_assign_copy_p (stmt)
2514 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2515 return NULL_TREE;
2516
2517 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2518 {
2519 case tcc_declaration:
2520 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2521 if (tem)
2522 return tem;
2523 break;
2524
2525 case tcc_reference:
2526 /* Do not do full-blown reference lookup here, but simplify
2527 reads from constant aggregates. */
2528 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2529 if (tem)
2530 return tem;
2531
2532 /* Fallthrough for some codes that can operate on registers. */
2533 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2534 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2535 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2536 break;
2537 /* We could do a little more with unary ops, if they expand
2538 into binary ops, but it's debatable whether it is worth it. */
2539 case tcc_unary:
2540 return simplify_unary_expression (stmt);
2541 break;
2542 case tcc_comparison:
2543 case tcc_binary:
2544 return simplify_binary_expression (stmt);
2545 break;
2546 default:
2547 break;
2548 }
2549
2550 return NULL_TREE;
2551 }
2552
2553 /* Visit and value number USE, return true if the value number
2554 changed. */
2555
2556 static bool
2557 visit_use (tree use)
2558 {
2559 bool changed = false;
2560 gimple stmt = SSA_NAME_DEF_STMT (use);
2561
2562 VN_INFO (use)->use_processed = true;
2563
2564 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2565 if (dump_file && (dump_flags & TDF_DETAILS)
2566 && !SSA_NAME_IS_DEFAULT_DEF (use))
2567 {
2568 fprintf (dump_file, "Value numbering ");
2569 print_generic_expr (dump_file, use, 0);
2570 fprintf (dump_file, " stmt = ");
2571 print_gimple_stmt (dump_file, stmt, 0, 0);
2572 }
2573
2574 /* Handle uninitialized uses. */
2575 if (SSA_NAME_IS_DEFAULT_DEF (use))
2576 changed = set_ssa_val_to (use, use);
2577 else
2578 {
2579 if (gimple_code (stmt) == GIMPLE_PHI)
2580 changed = visit_phi (stmt);
2581 else if (!gimple_has_lhs (stmt)
2582 || gimple_has_volatile_ops (stmt)
2583 || stmt_could_throw_p (stmt))
2584 changed = defs_to_varying (stmt);
2585 else if (is_gimple_assign (stmt))
2586 {
2587 tree lhs = gimple_assign_lhs (stmt);
2588 tree simplified;
2589
2590 /* Shortcut for copies. Simplifying copies is pointless,
2591 since we copy the expression and value they represent. */
2592 if (gimple_assign_copy_p (stmt)
2593 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2594 && TREE_CODE (lhs) == SSA_NAME)
2595 {
2596 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2597 goto done;
2598 }
2599 simplified = try_to_simplify (stmt);
2600 if (simplified)
2601 {
2602 if (dump_file && (dump_flags & TDF_DETAILS))
2603 {
2604 fprintf (dump_file, "RHS ");
2605 print_gimple_expr (dump_file, stmt, 0, 0);
2606 fprintf (dump_file, " simplified to ");
2607 print_generic_expr (dump_file, simplified, 0);
2608 if (TREE_CODE (lhs) == SSA_NAME)
2609 fprintf (dump_file, " has constants %d\n",
2610 expr_has_constants (simplified));
2611 else
2612 fprintf (dump_file, "\n");
2613 }
2614 }
2615 /* Setting value numbers to constants will occasionally
2616 screw up phi congruence because constants are not
2617 uniquely associated with a single ssa name that can be
2618 looked up. */
2619 if (simplified
2620 && is_gimple_min_invariant (simplified)
2621 && TREE_CODE (lhs) == SSA_NAME)
2622 {
2623 VN_INFO (lhs)->expr = simplified;
2624 VN_INFO (lhs)->has_constants = true;
2625 changed = set_ssa_val_to (lhs, simplified);
2626 goto done;
2627 }
2628 else if (simplified
2629 && TREE_CODE (simplified) == SSA_NAME
2630 && TREE_CODE (lhs) == SSA_NAME)
2631 {
2632 changed = visit_copy (lhs, simplified);
2633 goto done;
2634 }
2635 else if (simplified)
2636 {
2637 if (TREE_CODE (lhs) == SSA_NAME)
2638 {
2639 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2640 /* We have to unshare the expression or else
2641 		   valueizing may change the IL stream. */
2642 VN_INFO (lhs)->expr = unshare_expr (simplified);
2643 }
2644 }
2645 else if (stmt_has_constants (stmt)
2646 && TREE_CODE (lhs) == SSA_NAME)
2647 VN_INFO (lhs)->has_constants = true;
2648 else if (TREE_CODE (lhs) == SSA_NAME)
2649 {
2650 /* We reset expr and constantness here because we may
2651 have been value numbering optimistically, and
2652 iterating. They may become non-constant in this case,
2653 even if they were optimistically constant. */
2654
2655 VN_INFO (lhs)->has_constants = false;
2656 VN_INFO (lhs)->expr = NULL_TREE;
2657 }
2658
2659 if ((TREE_CODE (lhs) == SSA_NAME
2660 /* We can substitute SSA_NAMEs that are live over
2661 abnormal edges with their constant value. */
2662 && !(gimple_assign_copy_p (stmt)
2663 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2664 && !(simplified
2665 && is_gimple_min_invariant (simplified))
2666 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2667 /* Stores or copies from SSA_NAMEs that are live over
2668 abnormal edges are a problem. */
2669 || (gimple_assign_single_p (stmt)
2670 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2671 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
2672 changed = defs_to_varying (stmt);
2673 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2674 {
2675 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2676 }
2677 else if (TREE_CODE (lhs) == SSA_NAME)
2678 {
2679 if ((gimple_assign_copy_p (stmt)
2680 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2681 || (simplified
2682 && is_gimple_min_invariant (simplified)))
2683 {
2684 VN_INFO (lhs)->has_constants = true;
2685 if (simplified)
2686 changed = set_ssa_val_to (lhs, simplified);
2687 else
2688 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
2689 }
2690 else
2691 {
2692 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2693 {
2694 case GIMPLE_UNARY_RHS:
2695 changed = visit_unary_op (lhs, stmt);
2696 break;
2697 case GIMPLE_BINARY_RHS:
2698 changed = visit_binary_op (lhs, stmt);
2699 break;
2700 case GIMPLE_SINGLE_RHS:
2701 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2702 {
2703 case tcc_reference:
2704 		/* VOP-less references can go through the unary case. */
2705 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2706 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2707 		     || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2708 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2709 {
2710 changed = visit_unary_op (lhs, stmt);
2711 break;
2712 }
2713 /* Fallthrough. */
2714 case tcc_declaration:
2715 changed = visit_reference_op_load
2716 (lhs, gimple_assign_rhs1 (stmt), stmt);
2717 break;
2718 case tcc_expression:
2719 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2720 {
2721 changed = visit_unary_op (lhs, stmt);
2722 break;
2723 }
2724 /* Fallthrough. */
2725 default:
2726 changed = defs_to_varying (stmt);
2727 }
2728 break;
2729 default:
2730 changed = defs_to_varying (stmt);
2731 break;
2732 }
2733 }
2734 }
2735 else
2736 changed = defs_to_varying (stmt);
2737 }
2738 else if (is_gimple_call (stmt))
2739 {
2740 tree lhs = gimple_call_lhs (stmt);
2741
2742 /* ??? We could try to simplify calls. */
2743
2744 if (stmt_has_constants (stmt)
2745 && TREE_CODE (lhs) == SSA_NAME)
2746 VN_INFO (lhs)->has_constants = true;
2747 else if (TREE_CODE (lhs) == SSA_NAME)
2748 {
2749 /* We reset expr and constantness here because we may
2750 have been value numbering optimistically, and
2751 iterating. They may become non-constant in this case,
2752 even if they were optimistically constant. */
2753 VN_INFO (lhs)->has_constants = false;
2754 VN_INFO (lhs)->expr = NULL_TREE;
2755 }
2756
2757 if (TREE_CODE (lhs) == SSA_NAME
2758 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2759 changed = defs_to_varying (stmt);
2760 /* ??? We should handle stores from calls. */
2761 else if (TREE_CODE (lhs) == SSA_NAME)
2762 {
2763 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2764 changed = visit_reference_op_call (lhs, stmt);
2765 else
2766 changed = defs_to_varying (stmt);
2767 }
2768 else
2769 changed = defs_to_varying (stmt);
2770 }
2771 }
2772 done:
2773 return changed;
2774 }
2775
2776 /* Compare two operands by reverse postorder index. */
2777
2778 static int
2779 compare_ops (const void *pa, const void *pb)
2780 {
2781 const tree opa = *((const tree *)pa);
2782 const tree opb = *((const tree *)pb);
2783 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2784 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2785 basic_block bba;
2786 basic_block bbb;
2787
2788 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2789 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2790 else if (gimple_nop_p (opstmta))
2791 return -1;
2792 else if (gimple_nop_p (opstmtb))
2793 return 1;
2794
2795 bba = gimple_bb (opstmta);
2796 bbb = gimple_bb (opstmtb);
2797
2798 if (!bba && !bbb)
2799 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2800 else if (!bba)
2801 return -1;
2802 else if (!bbb)
2803 return 1;
2804
2805 if (bba == bbb)
2806 {
2807 if (gimple_code (opstmta) == GIMPLE_PHI
2808 && gimple_code (opstmtb) == GIMPLE_PHI)
2809 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2810 else if (gimple_code (opstmta) == GIMPLE_PHI)
2811 return -1;
2812 else if (gimple_code (opstmtb) == GIMPLE_PHI)
2813 return 1;
2814 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
2815 return gimple_uid (opstmta) - gimple_uid (opstmtb);
2816 else
2817 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2818 }
2819 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
2820 }
2821
2822 /* Sort an array containing members of a strongly connected component
2823 SCC so that the members are ordered by RPO number.
2824 This means that when the sort is complete, iterating through the
2825 array will give you the members in RPO order. */
2826
2827 static void
2828 sort_scc (VEC (tree, heap) *scc)
2829 {
2830 qsort (VEC_address (tree, scc),
2831 VEC_length (tree, scc),
2832 sizeof (tree),
2833 compare_ops);
2834 }
2835
2836 /* Copy the no longer used nary *ENTRY into the current hash table. */
2837
2838 static int
2839 copy_nary (void **entry, void *data ATTRIBUTE_UNUSED)
2840 {
2841 vn_nary_op_t onary = (vn_nary_op_t) *entry;
2842 size_t size = (sizeof (struct vn_nary_op_s)
2843 - sizeof (tree) * (4 - onary->length));
2844 vn_nary_op_t nary = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
2845 size);
2846 void **slot;
2847 memcpy (nary, onary, size);
2848 slot = htab_find_slot_with_hash (current_info->nary, nary, nary->hashcode,
2849 INSERT);
2850 gcc_assert (!*slot);
2851 *slot = nary;
2852 return 1;
2853 }
2854
2855 /* Copy the no longer used phi *ENTRY into the current hash table. */
2856
2857 static int
2858 copy_phis (void **entry, void *data ATTRIBUTE_UNUSED)
2859 {
2860 vn_phi_t ophi = (vn_phi_t) *entry;
2861 vn_phi_t phi = (vn_phi_t) pool_alloc (current_info->phis_pool);
2862 void **slot;
2863 memcpy (phi, ophi, sizeof (*phi));
2864 ophi->phiargs = NULL;
2865 slot = htab_find_slot_with_hash (current_info->phis, phi, phi->hashcode,
2866 INSERT);
2867 *slot = phi;
2868 return 1;
2869 }
2870
2871 /* Copy the no longer used reference *ENTRY into the current hash table. */
2872
2873 static int
2874 copy_references (void **entry, void *data ATTRIBUTE_UNUSED)
2875 {
2876 vn_reference_t oref = (vn_reference_t) *entry;
2877 vn_reference_t ref;
2878 void **slot;
2879 ref = (vn_reference_t) pool_alloc (current_info->references_pool);
2880 memcpy (ref, oref, sizeof (*ref));
2881 oref->operands = NULL;
2882 slot = htab_find_slot_with_hash (current_info->references, ref, ref->hashcode,
2883 INSERT);
2884 if (*slot)
2885 free_reference (*slot);
2886 *slot = ref;
2887 return 1;
2888 }
2889
2890 /* Process a strongly connected component in the SSA graph. */
2891
2892 static void
2893 process_scc (VEC (tree, heap) *scc)
2894 {
2895 /* If the SCC has a single member, just visit it. */
2896
2897 if (VEC_length (tree, scc) == 1)
2898 {
2899 tree use = VEC_index (tree, scc, 0);
2900 if (!VN_INFO (use)->use_processed)
2901 visit_use (use);
2902 }
2903 else
2904 {
2905 tree var;
2906 unsigned int i;
2907 unsigned int iterations = 0;
2908 bool changed = true;
2909
2910 /* Iterate over the SCC with the optimistic table until it stops
2911 changing. */
2912 current_info = optimistic_info;
2913 while (changed)
2914 {
2915 changed = false;
2916 iterations++;
2917 /* As we are value-numbering optimistically we have to
2918 clear the expression tables and the simplified expressions
2919 in each iteration until we converge. */
2920 htab_empty (optimistic_info->nary);
2921 htab_empty (optimistic_info->phis);
2922 htab_empty (optimistic_info->references);
2923 obstack_free (&optimistic_info->nary_obstack, NULL);
2924 gcc_obstack_init (&optimistic_info->nary_obstack);
2925 empty_alloc_pool (optimistic_info->phis_pool);
2926 empty_alloc_pool (optimistic_info->references_pool);
2927 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2928 VN_INFO (var)->expr = NULL_TREE;
2929 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2930 changed |= visit_use (var);
2931 }
2932
2933 statistics_histogram_event (cfun, "SCC iterations", iterations);
2934
2935 /* Finally, copy the contents of the no longer used optimistic
2936 table to the valid table. */
2937 current_info = valid_info;
2938 htab_traverse (optimistic_info->nary, copy_nary, NULL);
2939 htab_traverse (optimistic_info->phis, copy_phis, NULL);
2940 htab_traverse (optimistic_info->references, copy_references, NULL);
2941 }
2942 }
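
/* Documentation-only sketch of the optimistic iteration above: visit
   every member of the SCC until no value number changes.  The VISIT
   callback stands in for visit_use and is hypothetical.  */
#if 0
static void
example_iterate_scc (const int *members, int n, int (*visit) (int))
{
  int changed = 1;
  while (changed)
    {
      int i;
      changed = 0;
      /* The real loop also clears the optimistic tables here.  */
      for (i = 0; i < n; ++i)
	changed |= visit (members[i]);
    }
}
#endif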
2943
2944 DEF_VEC_O(ssa_op_iter);
2945 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
2946
2947 /* Pop the components of the found SCC for NAME off the SCC stack
2948 and process them. Returns true if all went well, false if
2949    we ran into resource limits. */
2950
2951 static bool
2952 extract_and_process_scc_for_name (tree name)
2953 {
2954 VEC (tree, heap) *scc = NULL;
2955 tree x;
2956
2957 /* Found an SCC, pop the components off the SCC stack and
2958 process them. */
2959 do
2960 {
2961 x = VEC_pop (tree, sccstack);
2962
2963 VN_INFO (x)->on_sccstack = false;
2964 VEC_safe_push (tree, heap, scc, x);
2965 } while (x != name);
2966
2967   /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
2968 if (VEC_length (tree, scc)
2969 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
2970 {
2971 if (dump_file)
2972 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
2973 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
2974 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
2975 return false;
2976 }
2977
2978 if (VEC_length (tree, scc) > 1)
2979 sort_scc (scc);
2980
2981 if (dump_file && (dump_flags & TDF_DETAILS))
2982 print_scc (dump_file, scc);
2983
2984 process_scc (scc);
2985
2986 VEC_free (tree, heap, scc);
2987
2988 return true;
2989 }
2990
2991 /* Depth-first search on NAME to discover and process SCCs in the SSA
2992    graph.
2993    Execution of this algorithm relies on the fact that the SCCs are
2994    popped off the stack in topological order.
2995    Returns true if successful, false if we stopped processing SCCs due
2996 to resource constraints. */
2997
2998 static bool
2999 DFS (tree name)
3000 {
3001 VEC(ssa_op_iter, heap) *itervec = NULL;
3002 VEC(tree, heap) *namevec = NULL;
3003 use_operand_p usep = NULL;
3004 gimple defstmt;
3005 tree use;
3006 ssa_op_iter iter;
3007
3008 start_over:
3009 /* SCC info */
3010 VN_INFO (name)->dfsnum = next_dfs_num++;
3011 VN_INFO (name)->visited = true;
3012 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3013
3014 VEC_safe_push (tree, heap, sccstack, name);
3015 VN_INFO (name)->on_sccstack = true;
3016 defstmt = SSA_NAME_DEF_STMT (name);
3017
3018   /* Recursively DFS on our operands, looking for SCCs. */
3019 if (!gimple_nop_p (defstmt))
3020 {
3021 /* Push a new iterator. */
3022 if (gimple_code (defstmt) == GIMPLE_PHI)
3023 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3024 else
3025 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3026 }
3027 else
3028 clear_and_done_ssa_iter (&iter);
3029
3030 while (1)
3031 {
3032 /* If we are done processing uses of a name, go up the stack
3033 of iterators and process SCCs as we found them. */
3034 if (op_iter_done (&iter))
3035 {
3036 /* See if we found an SCC. */
3037 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3038 if (!extract_and_process_scc_for_name (name))
3039 {
3040 VEC_free (tree, heap, namevec);
3041 VEC_free (ssa_op_iter, heap, itervec);
3042 return false;
3043 }
3044
3045 /* Check if we are done. */
3046 if (VEC_empty (tree, namevec))
3047 {
3048 VEC_free (tree, heap, namevec);
3049 VEC_free (ssa_op_iter, heap, itervec);
3050 return true;
3051 }
3052
3053 /* Restore the last use walker and continue walking there. */
3054 use = name;
3055 name = VEC_pop (tree, namevec);
3056 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3057 sizeof (ssa_op_iter));
3058 VEC_pop (ssa_op_iter, itervec);
3059 goto continue_walking;
3060 }
3061
3062 use = USE_FROM_PTR (usep);
3063
3064 /* Since we handle phi nodes, we will sometimes get
3065 invariants in the use expression. */
3066 if (TREE_CODE (use) == SSA_NAME)
3067 {
3068 if (! (VN_INFO (use)->visited))
3069 {
3070 /* Recurse by pushing the current use walking state on
3071 the stack and starting over. */
3072 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3073 VEC_safe_push(tree, heap, namevec, name);
3074 name = use;
3075 goto start_over;
3076
3077 continue_walking:
3078 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3079 VN_INFO (use)->low);
3080 }
3081 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3082 && VN_INFO (use)->on_sccstack)
3083 {
3084 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3085 VN_INFO (name)->low);
3086 }
3087 }
3088
3089 usep = op_iter_next_use (&iter);
3090 }
3091 }
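
/* Documentation-only sketch of the walk above in its classical
   recursive form; the real function is iterative to survive deep SSA
   graphs.  The globals (DFSNUM, LOW, VISITED, ON_STACK, SUCC, NSUCC)
   and the PUSH and POP_AND_PROCESS_SCC helpers are hypothetical.  */
#if 0
static void
example_tarjan (int v)
{
  int i;
  dfsnum[v] = low[v] = next_dfs_num++;
  visited[v] = 1;
  push (v);				/* on_stack[v] becomes true.  */
  for (i = 0; i < nsucc[v]; ++i)
    {
      int w = succ[v][i];
      if (!visited[w])
	{
	  example_tarjan (w);
	  low[v] = MIN (low[v], low[w]);
	}
      else if (on_stack[w] && dfsnum[w] < dfsnum[v])
	low[v] = MIN (low[v], dfsnum[w]);
    }
  if (low[v] == dfsnum[v])		/* V is the root of an SCC.  */
    pop_and_process_scc (v);		/* Pops everything down to V.  */
}
#endif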
3092
3093 /* Allocate a value number table. */
3094
3095 static void
3096 allocate_vn_table (vn_tables_t table)
3097 {
3098 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3099 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3100 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3101 free_reference);
3102
3103 gcc_obstack_init (&table->nary_obstack);
3104 table->phis_pool = create_alloc_pool ("VN phis",
3105 sizeof (struct vn_phi_s),
3106 30);
3107 table->references_pool = create_alloc_pool ("VN references",
3108 sizeof (struct vn_reference_s),
3109 30);
3110 }
3111
3112 /* Free a value number table. */
3113
3114 static void
3115 free_vn_table (vn_tables_t table)
3116 {
3117 htab_delete (table->phis);
3118 htab_delete (table->nary);
3119 htab_delete (table->references);
3120 obstack_free (&table->nary_obstack, NULL);
3121 free_alloc_pool (table->phis_pool);
3122 free_alloc_pool (table->references_pool);
3123 }
3124
3125 static void
3126 init_scc_vn (void)
3127 {
3128 size_t i;
3129 int j;
3130 int *rpo_numbers_temp;
3131
3132 calculate_dominance_info (CDI_DOMINATORS);
3133 sccstack = NULL;
3134 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3135 free);
3136
3137 constant_value_ids = BITMAP_ALLOC (NULL);
3138
3139 next_dfs_num = 1;
3140 next_value_id = 1;
3141
3142 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3143   /* VEC_alloc doesn't actually grow it to the right size; it just
3144 preallocates the space to do so. */
3145 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3146 gcc_obstack_init (&vn_ssa_aux_obstack);
3147
3148 shared_lookup_phiargs = NULL;
3149 shared_lookup_references = NULL;
3150 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3151 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3152 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3153
3154   /* rpo_numbers_temp is an array of rpo ordering: rpo[i] = bb means
3155      that the i'th block in RPO order is bb. We want to map bb's to
3156      RPO numbers, so we need to rearrange this array. */
3157 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3158 rpo_numbers[rpo_numbers_temp[j]] = j;
3159
3160 XDELETE (rpo_numbers_temp);
3161
3162 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3163
3164 /* Create the VN_INFO structures, and initialize value numbers to
3165 TOP. */
3166 for (i = 0; i < num_ssa_names; i++)
3167 {
3168 tree name = ssa_name (i);
3169 if (name)
3170 {
3171 VN_INFO_GET (name)->valnum = VN_TOP;
3172 VN_INFO (name)->expr = NULL_TREE;
3173 VN_INFO (name)->value_id = 0;
3174 }
3175 }
3176
3177 renumber_gimple_stmt_uids ();
3178
3179 /* Create the valid and optimistic value numbering tables. */
3180 valid_info = XCNEW (struct vn_tables_s);
3181 allocate_vn_table (valid_info);
3182 optimistic_info = XCNEW (struct vn_tables_s);
3183 allocate_vn_table (optimistic_info);
3184 }
3185
3186 void
3187 free_scc_vn (void)
3188 {
3189 size_t i;
3190
3191 htab_delete (constant_to_value_id);
3192 BITMAP_FREE (constant_value_ids);
3193 VEC_free (tree, heap, shared_lookup_phiargs);
3194 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3195 XDELETEVEC (rpo_numbers);
3196
3197 for (i = 0; i < num_ssa_names; i++)
3198 {
3199 tree name = ssa_name (i);
3200 if (name
3201 && VN_INFO (name)->needs_insertion)
3202 release_ssa_name (name);
3203 }
3204 obstack_free (&vn_ssa_aux_obstack, NULL);
3205 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3206
3207 VEC_free (tree, heap, sccstack);
3208 free_vn_table (valid_info);
3209 XDELETE (valid_info);
3210 free_vn_table (optimistic_info);
3211 XDELETE (optimistic_info);
3212 }
3213
3214 /* Set the value ids in the valid hash tables. */
3215
3216 static void
3217 set_hashtable_value_ids (void)
3218 {
3219 htab_iterator hi;
3220 vn_nary_op_t vno;
3221 vn_reference_t vr;
3222 vn_phi_t vp;
3223
3224 /* Now set the value ids of the things we had put in the hash
3225 table. */
3226
3227 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3228 vno, vn_nary_op_t, hi)
3229 {
3230 if (vno->result)
3231 {
3232 if (TREE_CODE (vno->result) == SSA_NAME)
3233 vno->value_id = VN_INFO (vno->result)->value_id;
3234 else if (is_gimple_min_invariant (vno->result))
3235 vno->value_id = get_or_alloc_constant_value_id (vno->result);
3236 }
3237 }
3238
3239 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3240 vp, vn_phi_t, hi)
3241 {
3242 if (vp->result)
3243 {
3244 if (TREE_CODE (vp->result) == SSA_NAME)
3245 vp->value_id = VN_INFO (vp->result)->value_id;
3246 else if (is_gimple_min_invariant (vp->result))
3247 vp->value_id = get_or_alloc_constant_value_id (vp->result);
3248 }
3249 }
3250
3251 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3252 vr, vn_reference_t, hi)
3253 {
3254 if (vr->result)
3255 {
3256 if (TREE_CODE (vr->result) == SSA_NAME)
3257 vr->value_id = VN_INFO (vr->result)->value_id;
3258 else if (is_gimple_min_invariant (vr->result))
3259 vr->value_id = get_or_alloc_constant_value_id (vr->result);
3260 }
3261 }
3262 }
3263
3264 /* Do SCCVN. Returns true if it finished, false if we bailed out
3265 due to resource constraints. */
3266
3267 bool
3268 run_scc_vn (bool may_insert_arg)
3269 {
3270 size_t i;
3271 tree param;
3272 bool changed = true;
3273
3274 may_insert = may_insert_arg;
3275
3276 init_scc_vn ();
3277 current_info = valid_info;
3278
3279 for (param = DECL_ARGUMENTS (current_function_decl);
3280 param;
3281 param = TREE_CHAIN (param))
3282 {
3283 if (gimple_default_def (cfun, param) != NULL)
3284 {
3285 tree def = gimple_default_def (cfun, param);
3286 VN_INFO (def)->valnum = def;
3287 }
3288 }
3289
3290 for (i = 1; i < num_ssa_names; ++i)
3291 {
3292 tree name = ssa_name (i);
3293 if (name
3294 && VN_INFO (name)->visited == false
3295 && !has_zero_uses (name))
3296 if (!DFS (name))
3297 {
3298 free_scc_vn ();
3299 may_insert = false;
3300 return false;
3301 }
3302 }
3303
3304 /* Initialize the value ids. */
3305
3306 for (i = 1; i < num_ssa_names; ++i)
3307 {
3308 tree name = ssa_name (i);
3309 vn_ssa_aux_t info;
3310 if (!name)
3311 continue;
3312 info = VN_INFO (name);
3313 if (info->valnum == name
3314 || info->valnum == VN_TOP)
3315 info->value_id = get_next_value_id ();
3316 else if (is_gimple_min_invariant (info->valnum))
3317 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3318 }
3319
3320   /* Propagate value ids until they stop changing. */
3321 while (changed)
3322 {
3323 changed = false;
3324 for (i = 1; i < num_ssa_names; ++i)
3325 {
3326 tree name = ssa_name (i);
3327 vn_ssa_aux_t info;
3328 if (!name)
3329 continue;
3330 info = VN_INFO (name);
3331 if (TREE_CODE (info->valnum) == SSA_NAME
3332 && info->valnum != name
3333 && info->value_id != VN_INFO (info->valnum)->value_id)
3334 {
3335 changed = true;
3336 info->value_id = VN_INFO (info->valnum)->value_id;
3337 }
3338 }
3339 }
3340
3341 set_hashtable_value_ids ();
3342
3343 if (dump_file && (dump_flags & TDF_DETAILS))
3344 {
3345 fprintf (dump_file, "Value numbers:\n");
3346 for (i = 0; i < num_ssa_names; i++)
3347 {
3348 tree name = ssa_name (i);
3349 if (name
3350 && VN_INFO (name)->visited
3351 && SSA_VAL (name) != name)
3352 {
3353 print_generic_expr (dump_file, name, 0);
3354 fprintf (dump_file, " = ");
3355 print_generic_expr (dump_file, SSA_VAL (name), 0);
3356 fprintf (dump_file, "\n");
3357 }
3358 }
3359 }
3360
3361 may_insert = false;
3362 return true;
3363 }
3364
3365 /* Return the maximum value id we have ever seen. */
3366
3367 unsigned int
3368 get_max_value_id (void)
3369 {
3370 return next_value_id;
3371 }
3372
3373 /* Return the next unique value id. */
3374
3375 unsigned int
3376 get_next_value_id (void)
3377 {
3378 return next_value_id++;
3379 }
3380
3381
3382 /* Compare two expressions E1 and E2 and return true if they are equal. */
3383
3384 bool
3385 expressions_equal_p (tree e1, tree e2)
3386 {
3387 /* The obvious case. */
3388 if (e1 == e2)
3389 return true;
3390
3391 /* If only one of them is null, they cannot be equal. */
3392 if (!e1 || !e2)
3393 return false;
3394
3395 /* Now perform the actual comparison. */
3396 if (TREE_CODE (e1) == TREE_CODE (e2)
3397 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3398 return true;
3399
3400 return false;
3401 }
3402
3403
3404 /* Return true if the nary operation NARY may trap. This is a copy
3405 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3406
3407 bool
3408 vn_nary_may_trap (vn_nary_op_t nary)
3409 {
3410 tree type;
3411 tree rhs2;
3412 bool honor_nans = false;
3413 bool honor_snans = false;
3414 bool fp_operation = false;
3415 bool honor_trapv = false;
3416 bool handled, ret;
3417 unsigned i;
3418
3419 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3420 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3421 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3422 {
3423 type = nary->type;
3424 fp_operation = FLOAT_TYPE_P (type);
3425 if (fp_operation)
3426 {
3427 honor_nans = flag_trapping_math && !flag_finite_math_only;
3428 honor_snans = flag_signaling_nans != 0;
3429 }
3430 else if (INTEGRAL_TYPE_P (type)
3431 && TYPE_OVERFLOW_TRAPS (type))
3432 honor_trapv = true;
3433 }
3434 rhs2 = nary->op[1];
3435 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3436 honor_trapv,
3437 honor_nans, honor_snans, rhs2,
3438 &handled);
3439 if (handled
3440 && ret)
3441 return true;
3442
3443 for (i = 0; i < nary->length; ++i)
3444 if (tree_could_trap_p (nary->op[i]))
3445 return true;
3446
3447 return false;
3448 }