/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight-line code,
   it is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/
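
/* For illustration only (a hypothetical example, not part of the pass):
   a simple counting loop such as

     int i = 0;
     while (i < n)
       i = i + 1;

   becomes, in SSA form,

     i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 1;

   i_1 and i_2 reference each other and thus form a strongly connected
   component in the SSA graph.  The SCC walk iterates just these two
   names (using the optimistic table) until their value numbers stop
   changing, instead of re-walking the whole function.  */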

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

static bool may_insert;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}


/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
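
/* For illustration only (a hypothetical example, not part of the pass):
   if x_1 has value number y_2, and y_2 is defined by

     y_2 = z_3 + 1;

   then vn_get_expr_for (x_1) builds and caches the representative
   expression z_3 + 1 via fold_build2, which later simplification can
   match against.  */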

/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist, return 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT; if one does not exist yet, create
   a new one.  Return the value id.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}
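
/* For illustration only (a hypothetical example, not part of the pass):
   because hashing and equality go through vn_hash_constant_with_type
   and vn_constant_eq_with_type, the same numeric value with
   incompatible types, say (int) 0 and (unsigned int) 0, is given two
   distinct constant value ids.  */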

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    result = vn_reference_op_compute_hash (vro, result);
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  int i;
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))
    return false;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
                             vro))
      return false;

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;
      tree base;

      base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      if (!base)
        base = build_int_cst (ptr_type_node, 0);

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (base);
      temp.op0 = base;
      temp.op1 = TMR_ORIGINAL (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
        {
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          /* The only operand is the address, which gets its own
             vn_reference_op_s structure.  */
          break;
        case MISALIGNED_INDIRECT_REF:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          /* If this is a reference to a union member, record the union
             member size as operand.  Do so only if we are doing
             expression insertion (during FRE), as PRE currently gets
             confused with this.  */
          if (may_insert
              && temp.op1 == NULL_TREE
              && TREE_CODE (DECL_CONTEXT (temp.op0)) == UNION_TYPE
              && integer_zerop (DECL_FIELD_OFFSET (temp.op0))
              && integer_zerop (DECL_FIELD_BIT_OFFSET (temp.op0))
              && host_integerp (DECL_SIZE (temp.op0), 0))
            temp.op0 = DECL_SIZE (temp.op0);
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (i.e. they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
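
/* For illustration only (a hypothetical example, not part of the pass):
   the reference a.b[i_4] is decomposed outermost-first into

     { ARRAY_REF     (op0 = i_4, op1 = low bound, op2 = element size),
       COMPONENT_REF (op0 = FIELD_DECL b),
       VAR_DECL      (op0 = a) }

   so that two syntactically different trees for the same access
   compare equal operand-by-operand.  */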

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    {
      if (TREE_CODE (op->op0) == INTEGER_CST)
        size_tree = op->op0;
      else
        size_tree = DECL_SIZE (op->op0);
    }
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case CALL_EXPR:
        case ADDR_EXPR:
          return false;

        /* Record the base objects.  */
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case MISALIGNED_INDIRECT_REF:
          *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            /* Our union trick, done for offset zero only.  */
            if (TREE_CODE (field) == INTEGER_CST)
              ;
            else if (op->op1
                     || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  ref->base_alias_set = -1;

  return true;
}
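
/* For illustration only (a hypothetical example, not part of the pass):
   for struct { int x; int y; } s, the operands of s.y accumulate

     offset = DECL_FIELD_OFFSET (y) * BITS_PER_UNIT
              + DECL_FIELD_BIT_OFFSET (y)

   which is 32 bits on a typical target with 4-byte int, with
   size = max_size = 32 and base = s.  */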

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  VEC(vn_reference_op_s, heap) *mem = NULL;
  vn_reference_op_t op;
  unsigned int i = *i_p;
  unsigned int j;

  /* Get ops for the addressed object.  */
  op = VEC_index (vn_reference_op_s, *ops, i);
  /* ??? If this is our usual type of &ARRAY vs. &ARRAY[0] problem, work
     around it to avoid later ICEs.  */
  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
    {
      vn_reference_op_s aref;
      tree dom;
      aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
      aref.opcode = ARRAY_REF;
      aref.op0 = integer_zero_node;
      if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
          && TYPE_MIN_VALUE (dom))
        aref.op0 = TYPE_MIN_VALUE (dom);
      aref.op1 = aref.op0;
      aref.op2 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op->op0)));
      VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
    }
  copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);

  /* Do the replacement - we should have at least one op in mem now.  */
  if (VEC_length (vn_reference_op_s, mem) == 1)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_ordered_remove (vn_reference_op_s, *ops, i);
      i--;
    }
  else if (VEC_length (vn_reference_op_s, mem) == 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
    }
  else if (VEC_length (vn_reference_op_s, mem) > 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
      /* ??? There is no VEC_splice.  */
      for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
        VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
    }
  else
    gcc_unreachable ();

  VEC_free (vn_reference_op_s, heap, mem);
  *i_p = i;
}
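
/* For illustration only (a hypothetical example, not part of the pass):
   when valueization turns a pointer SSA name into &a, the op pair

     { INDIRECT_REF, ADDR_EXPR &a }

   is rewritten here into the ops of a itself, so that *p and a are
   value numbered alike once p is known to equal &a.  */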

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
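
/* For illustration only (a hypothetical example, not part of the pass):
   a read such as "abc"[1] reaches this function as the operands

     { ARRAY_REF (op0 = 1, op1 = 0), STRING_CST "abc" }

   and the second branch above folds it to the character constant 'b'.  */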

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
          /* If it transforms from an SSA_NAME to an address, fold with
             a preceding indirect reference.  */
          if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
              && VEC_index (vn_reference_op_s,
                            orig, i - 1)->opcode == INDIRECT_REF)
            {
              vn_reference_fold_indirect (&orig, &i);
              continue;
            }
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
    }

  return orig;
}
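
/* For illustration only (a hypothetical example, not part of the pass):
   if the index i_4 in a[i_4] has value number 3, valueize_refs rewrites
   the ARRAY_REF operand to the constant 3, so a later lookup of a[i_4]
   can find an earlier access to a[3].  */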

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree fndecl;
  tree base;
  HOST_WIDE_INT offset, maxsize;

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && is_gimple_call (def_stmt)
      && (fndecl = gimple_call_fndecl (def_stmt))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || INDIRECT_REF_P (gimple_assign_rhs1 (def_stmt))
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      /* See if the assignment kills REF.  */
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (!operand_equal_p (base, base2, 0)
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  */
      copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs, j)))
        {
          i--;
          j--;
        }

      VEC_free (vn_reference_op_s, heap, lhs);
      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
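
/* For illustration only (a hypothetical example, not part of the pass):
   given

     memset (&a, 0, sizeof (a));
     x_1 = a.f;

   the VUSE walk from the load reaches the memset; case 1) above sees
   that the store covers the read and records zero (converted to the
   type of a.f) as its value.  Similarly, for an aggregate copy

     b = a;
     x_1 = b.f;

   the last case translates the read of b.f into a read of a.f and the
   walk continues from the memory state before the copy.  */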

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && maywalk
      && vr1.vuse)
    {
      ao_ref r;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, bool maywalk,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (maywalk
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}


/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}
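
/* For illustration only (a hypothetical example, not part of the pass):
   because operands of commutative codes are canonicalized via
   tree_swap_operands_p before hashing, a_1 + b_2 and b_2 + a_1 hash
   and compare identically and therefore receive the same value
   number.  */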

/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}

/* Lookup an n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = gimple_expr_type (stmt);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Insert an n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = gimple_expr_type (stmt);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
             + (INTEGRAL_TYPE_P (type)
                ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
        continue;
      result = iterative_hash_expr (phi1op, result);
    }

  return result;
}

/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
                               TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
        return false;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
        {
          tree phi2op = VEC_index (tree, vp2->phiargs, i);
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}
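
/* For illustration only (a hypothetical example, not part of the pass):
   two PHIs in the same block,

     x_3 = PHI <a_1(e1), b_2(e2)>
     y_4 = PHI <a_1(e1), b_2(e2)>

   have equal (valueized) argument vectors and thus get the same value
   number; a VN_TOP argument is optimistically treated as matching
   anything.  */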
1782
1783 static VEC(tree, heap) *shared_lookup_phiargs;
1784
1785 /* Lookup PHI in the current hash table, and return the resulting
1786 value number if it exists in the hash table. Return NULL_TREE if
1787 it does not exist in the hash table. */
1788
1789 static tree
1790 vn_phi_lookup (gimple phi)
1791 {
1792 void **slot;
1793 struct vn_phi_s vp1;
1794 unsigned i;
1795
1796 VEC_truncate (tree, shared_lookup_phiargs, 0);
1797
1798 /* Canonicalize the SSA_NAME's to their value number. */
1799 for (i = 0; i < gimple_phi_num_args (phi); i++)
1800 {
1801 tree def = PHI_ARG_DEF (phi, i);
1802 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1803 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
1804 }
1805 vp1.phiargs = shared_lookup_phiargs;
1806 vp1.block = gimple_bb (phi);
1807 vp1.hashcode = vn_phi_compute_hash (&vp1);
1808 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
1809 NO_INSERT);
1810 if (!slot && current_info == optimistic_info)
1811 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
1812 NO_INSERT);
1813 if (!slot)
1814 return NULL_TREE;
1815 return ((vn_phi_t)*slot)->result;
1816 }
1817
1818 /* Insert PHI into the current hash table with a value number of
1819 RESULT. */
1820
1821 static vn_phi_t
1822 vn_phi_insert (gimple phi, tree result)
1823 {
1824 void **slot;
1825 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
1826 unsigned i;
1827 VEC (tree, heap) *args = NULL;
1828
1829 /* Canonicalize the SSA_NAMEs to their value number. */
1830 for (i = 0; i < gimple_phi_num_args (phi); i++)
1831 {
1832 tree def = PHI_ARG_DEF (phi, i);
1833 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1834 VEC_safe_push (tree, heap, args, def);
1835 }
1836 vp1->value_id = VN_INFO (result)->value_id;
1837 vp1->phiargs = args;
1838 vp1->block = gimple_bb (phi);
1839 vp1->result = result;
1840 vp1->hashcode = vn_phi_compute_hash (vp1);
1841
1842 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
1843 INSERT);
1844
1845 /* Because we iterate over phi operations more than once, it's
1846 possible the slot might already exist here, hence no assert. */
1847 *slot = vp1;
1848 return vp1;
1849 }
1850
1851
1852 /* Print set of components in strongly connected component SCC to OUT. */
1853
1854 static void
1855 print_scc (FILE *out, VEC (tree, heap) *scc)
1856 {
1857 tree var;
1858 unsigned int i;
1859
1860 fprintf (out, "SCC consists of: ");
1861 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1862 {
1863 print_generic_expr (out, var, 0);
1864 fprintf (out, " ");
1865 }
1866 fprintf (out, "\n");
1867 }
1868
1869 /* Set the value number of FROM to TO, return true if it has changed
1870 as a result. */
1871
1872 static inline bool
1873 set_ssa_val_to (tree from, tree to)
1874 {
1875 tree currval;
1876
1877 if (from != to
1878 && TREE_CODE (to) == SSA_NAME
1879 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1880 to = from;
1881
1882 /* The only things we allow as value numbers are VN_TOP, SSA names
1883 and invariants. So assert that here. */
1884 gcc_assert (to != NULL_TREE
1885 && (to == VN_TOP
1886 || TREE_CODE (to) == SSA_NAME
1887 || is_gimple_min_invariant (to)));
1888
1889 if (dump_file && (dump_flags & TDF_DETAILS))
1890 {
1891 fprintf (dump_file, "Setting value number of ");
1892 print_generic_expr (dump_file, from, 0);
1893 fprintf (dump_file, " to ");
1894 print_generic_expr (dump_file, to, 0);
1895 }
1896
1897 currval = SSA_VAL (from);
1898
1899 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1900 {
1901 VN_INFO (from)->valnum = to;
1902 if (dump_file && (dump_flags & TDF_DETAILS))
1903 fprintf (dump_file, " (changed)\n");
1904 return true;
1905 }
1906 if (dump_file && (dump_flags & TDF_DETAILS))
1907 fprintf (dump_file, "\n");
1908 return false;
1909 }
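/* For example, while iterating an SCC the value number of i_1 may
   first be set from VN_TOP to the constant 0 (returning true); a
   later visit that computes 0 again returns false, and it is this
   "no change" result that lets the SCC iteration reach a
   fixpoint.  */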
1910
1911 /* Set all definitions in STMT to value number themselves.
1912 Return true if a value number changed. */
1913
1914 static bool
1915 defs_to_varying (gimple stmt)
1916 {
1917 bool changed = false;
1918 ssa_op_iter iter;
1919 def_operand_p defp;
1920
1921 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1922 {
1923 tree def = DEF_FROM_PTR (defp);
1924
1925 VN_INFO (def)->use_processed = true;
1926 changed |= set_ssa_val_to (def, def);
1927 }
1928 return changed;
1929 }
1930
1931 static bool expr_has_constants (tree expr);
1932 static tree valueize_expr (tree expr);
1933
1934 /* Visit a copy between LHS and RHS, return true if the value number
1935 changed. */
1936
1937 static bool
1938 visit_copy (tree lhs, tree rhs)
1939 {
1940 /* Follow chains of copies to their destination. */
1941 while (TREE_CODE (rhs) == SSA_NAME
1942 && SSA_VAL (rhs) != rhs)
1943 rhs = SSA_VAL (rhs);
1944
1945 /* The copy may have a more interesting constant-filled expression
1946 (we don't, since we know our RHS is just an SSA name). */
1947 if (TREE_CODE (rhs) == SSA_NAME)
1948 {
1949 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1950 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1951 }
1952
1953 return set_ssa_val_to (lhs, rhs);
1954 }
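/* For example, given
     b_2 = a_1;
     c_3 = b_2;
   visiting c_3 follows the chain b_2 -> a_1 and value numbers c_3
   directly to a_1, so copy chains collapse to a single value.  */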
1955
1956 /* Visit a unary operator RHS, value number it, and return true if the
1957 value number of LHS has changed as a result. */
1958
1959 static bool
1960 visit_unary_op (tree lhs, gimple stmt)
1961 {
1962 bool changed = false;
1963 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1964
1965 if (result)
1966 {
1967 changed = set_ssa_val_to (lhs, result);
1968 }
1969 else
1970 {
1971 changed = set_ssa_val_to (lhs, lhs);
1972 vn_nary_op_insert_stmt (stmt, lhs);
1973 }
1974
1975 return changed;
1976 }
1977
1978 /* Visit a binary operator RHS, value number it, and return true if the
1979 value number of LHS has changed as a result. */
1980
1981 static bool
1982 visit_binary_op (tree lhs, gimple stmt)
1983 {
1984 bool changed = false;
1985 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1986
1987 if (result)
1988 {
1989 changed = set_ssa_val_to (lhs, result);
1990 }
1991 else
1992 {
1993 changed = set_ssa_val_to (lhs, lhs);
1994 vn_nary_op_insert_stmt (stmt, lhs);
1995 }
1996
1997 return changed;
1998 }
1999
2000 /* Visit a call STMT storing into LHS. Return true if the value number
2001 of the LHS has changed as a result. */
2002
2003 static bool
2004 visit_reference_op_call (tree lhs, gimple stmt)
2005 {
2006 bool changed = false;
2007 struct vn_reference_s vr1;
2008 tree result;
2009 tree vuse = gimple_vuse (stmt);
2010
2011 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2012 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2013 vr1.type = gimple_expr_type (stmt);
2014 vr1.set = 0;
2015 vr1.hashcode = vn_reference_compute_hash (&vr1);
2016 result = vn_reference_lookup_1 (&vr1, NULL);
2017 if (result)
2018 {
2019 changed = set_ssa_val_to (lhs, result);
2020 if (TREE_CODE (result) == SSA_NAME
2021 && VN_INFO (result)->has_constants)
2022 VN_INFO (lhs)->has_constants = true;
2023 }
2024 else
2025 {
2026 void **slot;
2027 vn_reference_t vr2;
2028 changed = set_ssa_val_to (lhs, lhs);
2029 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2030 vr2->vuse = vr1.vuse;
2031 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2032 vr2->type = vr1.type;
2033 vr2->set = vr1.set;
2034 vr2->hashcode = vr1.hashcode;
2035 vr2->result = lhs;
2036 slot = htab_find_slot_with_hash (current_info->references,
2037 vr2, vr2->hashcode, INSERT);
2038 if (*slot)
2039 free_reference (*slot);
2040 *slot = vr2;
2041 }
2042
2043 return changed;
2044 }
2045
2046 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2047 and return true if the value number of the LHS has changed as a result. */
2048
2049 static bool
2050 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2051 {
2052 bool changed = false;
2053 tree last_vuse;
2054 tree result;
2055
2056 last_vuse = gimple_vuse (stmt);
2057 last_vuse_ptr = &last_vuse;
2058 result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
2059 last_vuse_ptr = NULL;
2060
2061 /* If we have a VCE, try looking up its operand as it might be stored in
2062 a different type. */
2063 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2064 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2065 true, NULL);
2066
2067 /* We handle type-punning through unions by value-numbering based
2068 on offset and size of the access. Be prepared to handle a
2069 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
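/* For example, for
     union { int i; float f; } u;
     u.i = x_1;
     tem_2 = u.f;
   the load from u.f finds the stored value x_1 via matching offset
   and size, and tem_2 is value numbered based on
   VIEW_CONVERT_EXPR <float> (x_1), possibly simplified below.  */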
2070 if (result
2071 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2072 {
2073 /* We will be setting the value number of lhs to the value number
2074 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2075 So first simplify and lookup this expression to see if it
2076 is already available. */
2077 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2078 if ((CONVERT_EXPR_P (val)
2079 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2080 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2081 {
2082 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2083 if ((CONVERT_EXPR_P (tem)
2084 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2085 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2086 TREE_TYPE (val), tem)))
2087 val = tem;
2088 }
2089 result = val;
2090 if (!is_gimple_min_invariant (val)
2091 && TREE_CODE (val) != SSA_NAME)
2092 result = vn_nary_op_lookup (val, NULL);
2093 /* If the expression is not yet available, value-number lhs to
2094 a new SSA_NAME we create. */
2095 if (!result && may_insert)
2096 {
2097 result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
2098 /* Initialize value-number information properly. */
2099 VN_INFO_GET (result)->valnum = result;
2100 VN_INFO (result)->value_id = get_next_value_id ();
2101 VN_INFO (result)->expr = val;
2102 VN_INFO (result)->has_constants = expr_has_constants (val);
2103 VN_INFO (result)->needs_insertion = true;
2104 /* As all "inserted" statements are singleton SCCs, insert
2105 into the valid table. This is strictly needed to
2106 avoid re-generating new value SSA_NAMEs for the same
2107 expression during SCC iteration over and over (the
2108 optimistic table gets cleared after each iteration).
2109 We do not need to insert into the optimistic table, as
2110 lookups there will fall back to the valid table. */
2111 if (current_info == optimistic_info)
2112 {
2113 current_info = valid_info;
2114 vn_nary_op_insert (val, result);
2115 current_info = optimistic_info;
2116 }
2117 else
2118 vn_nary_op_insert (val, result);
2119 if (dump_file && (dump_flags & TDF_DETAILS))
2120 {
2121 fprintf (dump_file, "Inserting name ");
2122 print_generic_expr (dump_file, result, 0);
2123 fprintf (dump_file, " for expression ");
2124 print_generic_expr (dump_file, val, 0);
2125 fprintf (dump_file, "\n");
2126 }
2127 }
2128 }
2129
2130 if (result)
2131 {
2132 changed = set_ssa_val_to (lhs, result);
2133 if (TREE_CODE (result) == SSA_NAME
2134 && VN_INFO (result)->has_constants)
2135 {
2136 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2137 VN_INFO (lhs)->has_constants = true;
2138 }
2139 }
2140 else
2141 {
2142 changed = set_ssa_val_to (lhs, lhs);
2143 vn_reference_insert (op, lhs, last_vuse);
2144 }
2145
2146 return changed;
2147 }
2148
2149
2150 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2151 and return true if the value number of the LHS has changed as a result. */
2152
2153 static bool
2154 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2155 {
2156 bool changed = false;
2157 tree result;
2158 bool resultsame = false;
2159
2160 /* First we want to look up using the *vuses* from the store and
2161 see if the last store to this location with the same address
2162 had the same value.
2163
2164 The vuses represent the memory state before the store. If the
2165 memory state, address, and value of the store is the same as the
2166 last store to this location, then this store will produce the
2167 same memory state as that store.
2168
2169 In this case the vdef versions for this store are value numbered to those
2170 vuse versions, since they represent the same memory state after
2171 this store.
2172
2173 Otherwise, the vdefs for the store are used when inserting into
2174 the table, since the store generates a new memory state. */
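/* For example, if the memory state .MEM_3 already reflects
     *p_1 = x_2;
   then a second store *p_1 = x_2 with vuse .MEM_3 matches, and its
   vdef is value numbered to .MEM_3 rather than starting a new
   memory state.  */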
2175
2176 result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);
2177
2178 if (result)
2179 {
2180 if (TREE_CODE (result) == SSA_NAME)
2181 result = SSA_VAL (result);
2182 if (TREE_CODE (op) == SSA_NAME)
2183 op = SSA_VAL (op);
2184 resultsame = expressions_equal_p (result, op);
2185 }
2186
2187 if (!result || !resultsame)
2188 {
2189 tree vdef;
2190
2191 if (dump_file && (dump_flags & TDF_DETAILS))
2192 {
2193 fprintf (dump_file, "No store match\n");
2194 fprintf (dump_file, "Value numbering store ");
2195 print_generic_expr (dump_file, lhs, 0);
2196 fprintf (dump_file, " to ");
2197 print_generic_expr (dump_file, op, 0);
2198 fprintf (dump_file, "\n");
2199 }
2200 /* Have to set value numbers before insert, since insert is
2201 going to valueize the references in-place. */
2202 if ((vdef = gimple_vdef (stmt)))
2203 {
2204 VN_INFO (vdef)->use_processed = true;
2205 changed |= set_ssa_val_to (vdef, vdef);
2206 }
2207
2208 /* Do not insert structure copies into the tables. */
2209 if (is_gimple_min_invariant (op)
2210 || is_gimple_reg (op))
2211 vn_reference_insert (lhs, op, vdef);
2212 }
2213 else
2214 {
2215 /* We had a match, so value number the vdef to have the value
2216 number of the vuse it came from. */
2217 tree def, use;
2218
2219 if (dump_file && (dump_flags & TDF_DETAILS))
2220 fprintf (dump_file, "Store matched earlier value, "
2221 "value numbering store vdefs to matching vuses.\n");
2222
2223 def = gimple_vdef (stmt);
2224 use = gimple_vuse (stmt);
2225
2226 VN_INFO (def)->use_processed = true;
2227 changed |= set_ssa_val_to (def, SSA_VAL (use));
2228 }
2229
2230 return changed;
2231 }
2232
2233 /* Visit and value number PHI, return true if the value number
2234 changed. */
2235
2236 static bool
2237 visit_phi (gimple phi)
2238 {
2239 bool changed = false;
2240 tree result;
2241 tree sameval = VN_TOP;
2242 bool allsame = true;
2243 unsigned i;
2244
2245 /* TODO: We could check for this in init_sccvn, and replace this
2246 with a gcc_assert. */
2247 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2248 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2249
2250 /* See if all non-TOP arguments have the same value. TOP is
2251 equivalent to everything, so we can ignore it. */
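/* E.g. x_2 = PHI <a_1(3), VN_TOP(4)> value numbers to a_1, and
   x_2 = PHI <7(3), 7(4)> value numbers to the constant 7.  */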
2252 for (i = 0; i < gimple_phi_num_args (phi); i++)
2253 {
2254 tree def = PHI_ARG_DEF (phi, i);
2255
2256 if (TREE_CODE (def) == SSA_NAME)
2257 def = SSA_VAL (def);
2258 if (def == VN_TOP)
2259 continue;
2260 if (sameval == VN_TOP)
2261 {
2262 sameval = def;
2263 }
2264 else
2265 {
2266 if (!expressions_equal_p (def, sameval))
2267 {
2268 allsame = false;
2269 break;
2270 }
2271 }
2272 }
2273
2274 /* If all arguments value numbered to the same value, the phi node
2275 has that value. */
2276 if (allsame)
2277 {
2278 if (is_gimple_min_invariant (sameval))
2279 {
2280 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2281 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2282 }
2283 else
2284 {
2285 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2286 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2287 }
2288
2289 if (TREE_CODE (sameval) == SSA_NAME)
2290 return visit_copy (PHI_RESULT (phi), sameval);
2291
2292 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2293 }
2294
2295 /* Otherwise, see if it is equivalent to a phi node in this block. */
2296 result = vn_phi_lookup (phi);
2297 if (result)
2298 {
2299 if (TREE_CODE (result) == SSA_NAME)
2300 changed = visit_copy (PHI_RESULT (phi), result);
2301 else
2302 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2303 }
2304 else
2305 {
2306 vn_phi_insert (phi, PHI_RESULT (phi));
2307 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2308 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2309 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2310 }
2311
2312 return changed;
2313 }
2314
2315 /* Return true if EXPR contains constants. */
2316
2317 static bool
2318 expr_has_constants (tree expr)
2319 {
2320 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2321 {
2322 case tcc_unary:
2323 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2324
2325 case tcc_binary:
2326 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2327 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2328 /* Constants inside reference ops are rarely interesting, but
2329 it can take a lot of looking to find them. */
2330 case tcc_reference:
2331 case tcc_declaration:
2332 return false;
2333 default:
2334 return is_gimple_min_invariant (expr);
2335 }
2336 return false;
2337 }
2338
2339 /* Return true if STMT contains constants. */
2340
2341 static bool
2342 stmt_has_constants (gimple stmt)
2343 {
2344 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2345 return false;
2346
2347 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2348 {
2349 case GIMPLE_UNARY_RHS:
2350 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2351
2352 case GIMPLE_BINARY_RHS:
2353 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2354 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2355 case GIMPLE_SINGLE_RHS:
2356 /* Constants inside reference ops are rarely interesting, but
2357 it can take a lot of looking to find them. */
2358 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2359 default:
2360 gcc_unreachable ();
2361 }
2362 return false;
2363 }
2364
2365 /* Replace SSA_NAMEs in expr with their value numbers, and return the
2366 result.
2367 This is performed in place. */
2368
2369 static tree
2370 valueize_expr (tree expr)
2371 {
2372 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2373 {
2374 case tcc_unary:
2375 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2376 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2377 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2378 break;
2379 case tcc_binary:
2380 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2381 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2382 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2383 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2384 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2385 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2386 break;
2387 default:
2388 break;
2389 }
2390 return expr;
2391 }
2392
2393 /* Simplify the binary expression RHS, and return the result if
2394 simplified. */
2395
2396 static tree
2397 simplify_binary_expression (gimple stmt)
2398 {
2399 tree result = NULL_TREE;
2400 tree op0 = gimple_assign_rhs1 (stmt);
2401 tree op1 = gimple_assign_rhs2 (stmt);
2402
2403 /* This will not catch every single case we could combine, but will
2404 catch those with constants. The goal here is to simultaneously
2405 combine constants between expressions, but avoid infinite
2406 expansion of expressions during simplification. */
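/* For example, if i_2 carries the expression j_3 + 1, then for
   i_4 = i_2 + 1 we hand (j_3 + 1) + 1 to fold, which can combine
   the constants to j_3 + 2 without expanding j_3 any further.  */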
2407 if (TREE_CODE (op0) == SSA_NAME)
2408 {
2409 if (VN_INFO (op0)->has_constants
2410 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2411 op0 = valueize_expr (vn_get_expr_for (op0));
2412 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2413 op0 = SSA_VAL (op0);
2414 }
2415
2416 if (TREE_CODE (op1) == SSA_NAME)
2417 {
2418 if (VN_INFO (op1)->has_constants)
2419 op1 = valueize_expr (vn_get_expr_for (op1));
2420 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2421 op1 = SSA_VAL (op1);
2422 }
2423
2424 /* Avoid folding if nothing changed. */
2425 if (op0 == gimple_assign_rhs1 (stmt)
2426 && op1 == gimple_assign_rhs2 (stmt))
2427 return NULL_TREE;
2428
2429 fold_defer_overflow_warnings ();
2430
2431 result = fold_binary (gimple_assign_rhs_code (stmt),
2432 gimple_expr_type (stmt), op0, op1);
2433 if (result)
2434 STRIP_USELESS_TYPE_CONVERSION (result);
2435
2436 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2437 stmt, 0);
2438
2439 /* Make sure result is not a complex expression consisting
2440 of operators of operators (IE (a + b) + (a + c))
2441 Otherwise, we will end up with unbounded expressions if
2442 fold does anything at all. */
2443 if (result && valid_gimple_rhs_p (result))
2444 return result;
2445
2446 return NULL_TREE;
2447 }
2448
2449 /* Simplify the unary expression RHS, and return the result if
2450 simplified. */
2451
2452 static tree
2453 simplify_unary_expression (gimple stmt)
2454 {
2455 tree result = NULL_TREE;
2456 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2457
2458 /* We handle some tcc_reference codes here that are all
2459 GIMPLE_ASSIGN_SINGLE codes. */
2460 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2461 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2462 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2463 op0 = TREE_OPERAND (op0, 0);
2464
2465 if (TREE_CODE (op0) != SSA_NAME)
2466 return NULL_TREE;
2467
2468 orig_op0 = op0;
2469 if (VN_INFO (op0)->has_constants)
2470 op0 = valueize_expr (vn_get_expr_for (op0));
2471 else if (gimple_assign_cast_p (stmt)
2472 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2473 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2474 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2475 {
2476 /* We want to do tree-combining on conversion-like expressions.
2477 Make sure we feed only SSA_NAMEs or constants to fold though. */
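/* E.g. if op0's recorded expression is the conversion
   (short int) y_2, a cast x_1 = (int) op0 feeds
   (int) (short int) y_2 to fold, which may combine the two
   conversions into one.  */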
2478 tree tem = valueize_expr (vn_get_expr_for (op0));
2479 if (UNARY_CLASS_P (tem)
2480 || BINARY_CLASS_P (tem)
2481 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2482 || TREE_CODE (tem) == SSA_NAME
2483 || is_gimple_min_invariant (tem))
2484 op0 = tem;
2485 }
2486
2487 /* Avoid folding if nothing changed, but remember the expression. */
2488 if (op0 == orig_op0)
2489 return NULL_TREE;
2490
2491 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2492 gimple_expr_type (stmt), op0);
2493 if (result)
2494 {
2495 STRIP_USELESS_TYPE_CONVERSION (result);
2496 if (valid_gimple_rhs_p (result))
2497 return result;
2498 }
2499
2500 return NULL_TREE;
2501 }
2502
2503 /* Try to simplify RHS using equivalences and constant folding. */
2504
2505 static tree
2506 try_to_simplify (gimple stmt)
2507 {
2508 tree tem;
2509
2510 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
2511 in this case; there is no point in doing extra work. */
2512 if (gimple_assign_copy_p (stmt)
2513 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2514 return NULL_TREE;
2515
2516 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2517 {
2518 case tcc_declaration:
2519 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2520 if (tem)
2521 return tem;
2522 break;
2523
2524 case tcc_reference:
2525 /* Do not do full-blown reference lookup here, but simplify
2526 reads from constant aggregates. */
2527 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2528 if (tem)
2529 return tem;
2530
2531 /* Fallthrough for some codes that can operate on registers. */
2532 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2533 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2534 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2535 break;
2536 /* We could do a little more with unary ops, if they expand
2537 into binary ops, but it's debatable whether it is worth it. */
2538 case tcc_unary:
2539 return simplify_unary_expression (stmt);
2540 break;
2541 case tcc_comparison:
2542 case tcc_binary:
2543 return simplify_binary_expression (stmt);
2544 break;
2545 default:
2546 break;
2547 }
2548
2549 return NULL_TREE;
2550 }
2551
2552 /* Visit and value number USE, return true if the value number
2553 changed. */
2554
2555 static bool
2556 visit_use (tree use)
2557 {
2558 bool changed = false;
2559 gimple stmt = SSA_NAME_DEF_STMT (use);
2560
2561 VN_INFO (use)->use_processed = true;
2562
2563 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2564 if (dump_file && (dump_flags & TDF_DETAILS)
2565 && !SSA_NAME_IS_DEFAULT_DEF (use))
2566 {
2567 fprintf (dump_file, "Value numbering ");
2568 print_generic_expr (dump_file, use, 0);
2569 fprintf (dump_file, " stmt = ");
2570 print_gimple_stmt (dump_file, stmt, 0, 0);
2571 }
2572
2573 /* Handle uninitialized uses. */
2574 if (SSA_NAME_IS_DEFAULT_DEF (use))
2575 changed = set_ssa_val_to (use, use);
2576 else
2577 {
2578 if (gimple_code (stmt) == GIMPLE_PHI)
2579 changed = visit_phi (stmt);
2580 else if (!gimple_has_lhs (stmt)
2581 || gimple_has_volatile_ops (stmt)
2582 || stmt_could_throw_p (stmt))
2583 changed = defs_to_varying (stmt);
2584 else if (is_gimple_assign (stmt))
2585 {
2586 tree lhs = gimple_assign_lhs (stmt);
2587 tree simplified;
2588
2589 /* Shortcut for copies. Simplifying copies is pointless,
2590 since we copy the expression and value they represent. */
2591 if (gimple_assign_copy_p (stmt)
2592 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2593 && TREE_CODE (lhs) == SSA_NAME)
2594 {
2595 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2596 goto done;
2597 }
2598 simplified = try_to_simplify (stmt);
2599 if (simplified)
2600 {
2601 if (dump_file && (dump_flags & TDF_DETAILS))
2602 {
2603 fprintf (dump_file, "RHS ");
2604 print_gimple_expr (dump_file, stmt, 0, 0);
2605 fprintf (dump_file, " simplified to ");
2606 print_generic_expr (dump_file, simplified, 0);
2607 if (TREE_CODE (lhs) == SSA_NAME)
2608 fprintf (dump_file, " has constants %d\n",
2609 expr_has_constants (simplified));
2610 else
2611 fprintf (dump_file, "\n");
2612 }
2613 }
2614 /* Setting value numbers to constants will occasionally
2615 screw up phi congruence because constants are not
2616 uniquely associated with a single ssa name that can be
2617 looked up. */
2618 if (simplified
2619 && is_gimple_min_invariant (simplified)
2620 && TREE_CODE (lhs) == SSA_NAME)
2621 {
2622 VN_INFO (lhs)->expr = simplified;
2623 VN_INFO (lhs)->has_constants = true;
2624 changed = set_ssa_val_to (lhs, simplified);
2625 goto done;
2626 }
2627 else if (simplified
2628 && TREE_CODE (simplified) == SSA_NAME
2629 && TREE_CODE (lhs) == SSA_NAME)
2630 {
2631 changed = visit_copy (lhs, simplified);
2632 goto done;
2633 }
2634 else if (simplified)
2635 {
2636 if (TREE_CODE (lhs) == SSA_NAME)
2637 {
2638 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2639 /* We have to unshare the expression or else
2640 valueizing may change the IL stream. */
2641 VN_INFO (lhs)->expr = unshare_expr (simplified);
2642 }
2643 }
2644 else if (stmt_has_constants (stmt)
2645 && TREE_CODE (lhs) == SSA_NAME)
2646 VN_INFO (lhs)->has_constants = true;
2647 else if (TREE_CODE (lhs) == SSA_NAME)
2648 {
2649 /* We reset expr and constantness here because we may
2650 have been value numbering optimistically, and
2651 iterating. They may become non-constant in this case,
2652 even if they were optimistically constant. */
2653
2654 VN_INFO (lhs)->has_constants = false;
2655 VN_INFO (lhs)->expr = NULL_TREE;
2656 }
2657
2658 if ((TREE_CODE (lhs) == SSA_NAME
2659 /* We can substitute SSA_NAMEs that are live over
2660 abnormal edges with their constant value. */
2661 && !(gimple_assign_copy_p (stmt)
2662 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2663 && !(simplified
2664 && is_gimple_min_invariant (simplified))
2665 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2666 /* Stores or copies from SSA_NAMEs that are live over
2667 abnormal edges are a problem. */
2668 || (gimple_assign_single_p (stmt)
2669 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2670 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
2671 changed = defs_to_varying (stmt);
2672 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2673 {
2674 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2675 }
2676 else if (TREE_CODE (lhs) == SSA_NAME)
2677 {
2678 if ((gimple_assign_copy_p (stmt)
2679 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2680 || (simplified
2681 && is_gimple_min_invariant (simplified)))
2682 {
2683 VN_INFO (lhs)->has_constants = true;
2684 if (simplified)
2685 changed = set_ssa_val_to (lhs, simplified);
2686 else
2687 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
2688 }
2689 else
2690 {
2691 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2692 {
2693 case GIMPLE_UNARY_RHS:
2694 changed = visit_unary_op (lhs, stmt);
2695 break;
2696 case GIMPLE_BINARY_RHS:
2697 changed = visit_binary_op (lhs, stmt);
2698 break;
2699 case GIMPLE_SINGLE_RHS:
2700 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2701 {
2702 case tcc_reference:
2703 /* VOP-less references can go through the unary case. */
2704 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2705 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2706 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2707 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2708 {
2709 changed = visit_unary_op (lhs, stmt);
2710 break;
2711 }
2712 /* Fallthrough. */
2713 case tcc_declaration:
2714 changed = visit_reference_op_load
2715 (lhs, gimple_assign_rhs1 (stmt), stmt);
2716 break;
2717 case tcc_expression:
2718 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2719 {
2720 changed = visit_unary_op (lhs, stmt);
2721 break;
2722 }
2723 /* Fallthrough. */
2724 default:
2725 changed = defs_to_varying (stmt);
2726 }
2727 break;
2728 default:
2729 changed = defs_to_varying (stmt);
2730 break;
2731 }
2732 }
2733 }
2734 else
2735 changed = defs_to_varying (stmt);
2736 }
2737 else if (is_gimple_call (stmt))
2738 {
2739 tree lhs = gimple_call_lhs (stmt);
2740
2741 /* ??? We could try to simplify calls. */
2742
2743 if (stmt_has_constants (stmt)
2744 && TREE_CODE (lhs) == SSA_NAME)
2745 VN_INFO (lhs)->has_constants = true;
2746 else if (TREE_CODE (lhs) == SSA_NAME)
2747 {
2748 /* We reset expr and constantness here because we may
2749 have been value numbering optimistically, and
2750 iterating. They may become non-constant in this case,
2751 even if they were optimistically constant. */
2752 VN_INFO (lhs)->has_constants = false;
2753 VN_INFO (lhs)->expr = NULL_TREE;
2754 }
2755
2756 if (TREE_CODE (lhs) == SSA_NAME
2757 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2758 changed = defs_to_varying (stmt);
2759 /* ??? We should handle stores from calls. */
2760 else if (TREE_CODE (lhs) == SSA_NAME)
2761 {
2762 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2763 changed = visit_reference_op_call (lhs, stmt);
2764 else
2765 changed = defs_to_varying (stmt);
2766 }
2767 else
2768 changed = defs_to_varying (stmt);
2769 }
2770 }
2771 done:
2772 return changed;
2773 }
2774
2775 /* Compare two operands by reverse postorder index. */
2776
2777 static int
2778 compare_ops (const void *pa, const void *pb)
2779 {
2780 const tree opa = *((const tree *)pa);
2781 const tree opb = *((const tree *)pb);
2782 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2783 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2784 basic_block bba;
2785 basic_block bbb;
2786
2787 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2788 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2789 else if (gimple_nop_p (opstmta))
2790 return -1;
2791 else if (gimple_nop_p (opstmtb))
2792 return 1;
2793
2794 bba = gimple_bb (opstmta);
2795 bbb = gimple_bb (opstmtb);
2796
2797 if (!bba && !bbb)
2798 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2799 else if (!bba)
2800 return -1;
2801 else if (!bbb)
2802 return 1;
2803
2804 if (bba == bbb)
2805 {
2806 if (gimple_code (opstmta) == GIMPLE_PHI
2807 && gimple_code (opstmtb) == GIMPLE_PHI)
2808 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2809 else if (gimple_code (opstmta) == GIMPLE_PHI)
2810 return -1;
2811 else if (gimple_code (opstmtb) == GIMPLE_PHI)
2812 return 1;
2813 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
2814 return gimple_uid (opstmta) - gimple_uid (opstmtb);
2815 else
2816 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2817 }
2818 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
2819 }
2820
2821 /* Sort an array containing members of a strongly connected component
2822 SCC so that the members are ordered by RPO number.
2823 This means that when the sort is complete, iterating through the
2824 array will give you the members in RPO order. */
2825
2826 static void
2827 sort_scc (VEC (tree, heap) *scc)
2828 {
2829 qsort (VEC_address (tree, scc),
2830 VEC_length (tree, scc),
2831 sizeof (tree),
2832 compare_ops);
2833 }
2834
2835 /* Insert the no longer used nary operation ONARY into the hash table INFO. */
2836
2837 static void
2838 copy_nary (vn_nary_op_t onary, vn_tables_t info)
2839 {
2840 size_t size = (sizeof (struct vn_nary_op_s)
2841 - sizeof (tree) * (4 - onary->length));
2842 vn_nary_op_t nary = (vn_nary_op_t) obstack_alloc (&info->nary_obstack, size);
2843 void **slot;
2844 memcpy (nary, onary, size);
2845 slot = htab_find_slot_with_hash (info->nary, nary, nary->hashcode, INSERT);
2846 gcc_assert (!*slot);
2847 *slot = nary;
2848 }
2849
2850 /* Insert the no longer used phi OPHI into the hash table INFO. */
2851
2852 static void
2853 copy_phi (vn_phi_t ophi, vn_tables_t info)
2854 {
2855 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
2856 void **slot;
2857 memcpy (phi, ophi, sizeof (*phi));
2858 ophi->phiargs = NULL;
2859 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
2860 gcc_assert (!*slot);
2861 *slot = phi;
2862 }
2863
2864 /* Insert the no longer used reference OREF into the hash table INFO. */
2865
2866 static void
2867 copy_reference (vn_reference_t oref, vn_tables_t info)
2868 {
2869 vn_reference_t ref;
2870 void **slot;
2871 ref = (vn_reference_t) pool_alloc (info->references_pool);
2872 memcpy (ref, oref, sizeof (*ref));
2873 oref->operands = NULL;
2874 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
2875 INSERT);
2876 if (*slot)
2877 free_reference (*slot);
2878 *slot = ref;
2879 }
2880
2881 /* Process a strongly connected component in the SSA graph. */
2882
2883 static void
2884 process_scc (VEC (tree, heap) *scc)
2885 {
2886 tree var;
2887 unsigned int i;
2888 unsigned int iterations = 0;
2889 bool changed = true;
2890 htab_iterator hi;
2891 vn_nary_op_t nary;
2892 vn_phi_t phi;
2893 vn_reference_t ref;
2894
2895 /* If the SCC has a single member, just visit it. */
2896 if (VEC_length (tree, scc) == 1)
2897 {
2898 tree use = VEC_index (tree, scc, 0);
2899 if (!VN_INFO (use)->use_processed)
2900 visit_use (use);
2901 return;
2902 }
2903
2904 /* Iterate over the SCC with the optimistic table until it stops
2905 changing. */
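/* For example, for the cycle
     i_1 = PHI <0(2), i_2(3)>
     i_2 = i_1 + 0;
   the first iteration sees SSA_VAL (i_2) == VN_TOP, so the PHI
   value numbers to 0 and i_2 = i_1 + 0 simplifies to 0 as well;
   the second iteration verifies that this optimistic result is
   stable.  */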
2906 current_info = optimistic_info;
2907 while (changed)
2908 {
2909 changed = false;
2910 iterations++;
2911 /* As we are value-numbering optimistically we have to
2912 clear the expression tables and the simplified expressions
2913 in each iteration until we converge. */
2914 htab_empty (optimistic_info->nary);
2915 htab_empty (optimistic_info->phis);
2916 htab_empty (optimistic_info->references);
2917 obstack_free (&optimistic_info->nary_obstack, NULL);
2918 gcc_obstack_init (&optimistic_info->nary_obstack);
2919 empty_alloc_pool (optimistic_info->phis_pool);
2920 empty_alloc_pool (optimistic_info->references_pool);
2921 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2922 VN_INFO (var)->expr = NULL_TREE;
2923 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2924 changed |= visit_use (var);
2925 }
2926
2927 statistics_histogram_event (cfun, "SCC iterations", iterations);
2928
2929 /* Finally, copy the contents of the no longer used optimistic
2930 table to the valid table. */
2931 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
2932 copy_nary (nary, valid_info);
2933 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
2934 copy_phi (phi, valid_info);
2935 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
2936 copy_reference (ref, valid_info);
2937
2938 current_info = valid_info;
2939 }
2940
2941 DEF_VEC_O(ssa_op_iter);
2942 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
2943
2944 /* Pop the components of the found SCC for NAME off the SCC stack
2945 and process them. Returns true if all went well, false if
2946 we run into resource limits. */
2947
2948 static bool
2949 extract_and_process_scc_for_name (tree name)
2950 {
2951 VEC (tree, heap) *scc = NULL;
2952 tree x;
2953
2954 /* Found an SCC, pop the components off the SCC stack and
2955 process them. */
2956 do
2957 {
2958 x = VEC_pop (tree, sccstack);
2959
2960 VN_INFO (x)->on_sccstack = false;
2961 VEC_safe_push (tree, heap, scc, x);
2962 } while (x != name);
2963
2964 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
2965 if (VEC_length (tree, scc)
2966 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
2967 {
2968 if (dump_file)
2969 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
2970 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
2971 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
2972 return false;
2973 }
2974
2975 if (VEC_length (tree, scc) > 1)
2976 sort_scc (scc);
2977
2978 if (dump_file && (dump_flags & TDF_DETAILS))
2979 print_scc (dump_file, scc);
2980
2981 process_scc (scc);
2982
2983 VEC_free (tree, heap, scc);
2984
2985 return true;
2986 }
2987
2988 /* Depth first search on NAME to discover and process SCCs in the SSA
2989 graph.
2990 Execution of this algorithm relies on the fact that the SCCs are
2991 popped off the stack in topological order.
2992 Returns true if successful, false if we stopped processing SCCs due
2993 to resource constraints. */
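/* This is an iterative implementation of Tarjan's SCC algorithm:
   NAMEVEC and ITERVEC act as an explicit recursion stack, so deep
   use-def chains cannot overflow the real call stack.  */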
2994
2995 static bool
2996 DFS (tree name)
2997 {
2998 VEC(ssa_op_iter, heap) *itervec = NULL;
2999 VEC(tree, heap) *namevec = NULL;
3000 use_operand_p usep = NULL;
3001 gimple defstmt;
3002 tree use;
3003 ssa_op_iter iter;
3004
3005 start_over:
3006 /* SCC info */
3007 VN_INFO (name)->dfsnum = next_dfs_num++;
3008 VN_INFO (name)->visited = true;
3009 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3010
3011 VEC_safe_push (tree, heap, sccstack, name);
3012 VN_INFO (name)->on_sccstack = true;
3013 defstmt = SSA_NAME_DEF_STMT (name);
3014
3015 /* Recursively DFS on our operands, looking for SCC's. */
3016 if (!gimple_nop_p (defstmt))
3017 {
3018 /* Push a new iterator. */
3019 if (gimple_code (defstmt) == GIMPLE_PHI)
3020 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3021 else
3022 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3023 }
3024 else
3025 clear_and_done_ssa_iter (&iter);
3026
3027 while (1)
3028 {
3029 /* If we are done processing uses of a name, go up the stack
3030 of iterators and process SCCs as we found them. */
3031 if (op_iter_done (&iter))
3032 {
3033 /* See if we found an SCC. */
3034 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3035 if (!extract_and_process_scc_for_name (name))
3036 {
3037 VEC_free (tree, heap, namevec);
3038 VEC_free (ssa_op_iter, heap, itervec);
3039 return false;
3040 }
3041
3042 /* Check if we are done. */
3043 if (VEC_empty (tree, namevec))
3044 {
3045 VEC_free (tree, heap, namevec);
3046 VEC_free (ssa_op_iter, heap, itervec);
3047 return true;
3048 }
3049
3050 /* Restore the last use walker and continue walking there. */
3051 use = name;
3052 name = VEC_pop (tree, namevec);
3053 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3054 sizeof (ssa_op_iter));
3055 VEC_pop (ssa_op_iter, itervec);
3056 goto continue_walking;
3057 }
3058
3059 use = USE_FROM_PTR (usep);
3060
3061 /* Since we handle phi nodes, we will sometimes get
3062 invariants in the use expression. */
3063 if (TREE_CODE (use) == SSA_NAME)
3064 {
3065 if (! (VN_INFO (use)->visited))
3066 {
3067 /* Recurse by pushing the current use walking state on
3068 the stack and starting over. */
3069 VEC_safe_push (ssa_op_iter, heap, itervec, &iter);
3070 VEC_safe_push (tree, heap, namevec, name);
3071 name = use;
3072 goto start_over;
3073
3074 continue_walking:
3075 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3076 VN_INFO (use)->low);
3077 }
3078 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3079 && VN_INFO (use)->on_sccstack)
3080 {
3081 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3082 VN_INFO (name)->low);
3083 }
3084 }
3085
3086 usep = op_iter_next_use (&iter);
3087 }
3088 }
3089
3090 /* Allocate a value number table. */
3091
3092 static void
3093 allocate_vn_table (vn_tables_t table)
3094 {
3095 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3096 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3097 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3098 free_reference);
3099
3100 gcc_obstack_init (&table->nary_obstack);
3101 table->phis_pool = create_alloc_pool ("VN phis",
3102 sizeof (struct vn_phi_s),
3103 30);
3104 table->references_pool = create_alloc_pool ("VN references",
3105 sizeof (struct vn_reference_s),
3106 30);
3107 }
3108
3109 /* Free a value number table. */
3110
3111 static void
3112 free_vn_table (vn_tables_t table)
3113 {
3114 htab_delete (table->phis);
3115 htab_delete (table->nary);
3116 htab_delete (table->references);
3117 obstack_free (&table->nary_obstack, NULL);
3118 free_alloc_pool (table->phis_pool);
3119 free_alloc_pool (table->references_pool);
3120 }
3121
3122 static void
3123 init_scc_vn (void)
3124 {
3125 size_t i;
3126 int j;
3127 int *rpo_numbers_temp;
3128
3129 calculate_dominance_info (CDI_DOMINATORS);
3130 sccstack = NULL;
3131 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3132 free);
3133
3134 constant_value_ids = BITMAP_ALLOC (NULL);
3135
3136 next_dfs_num = 1;
3137 next_value_id = 1;
3138
3139 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3140 /* VEC_alloc doesn't actually grow it to the right size; it just
3141 preallocates the space to do so. */
3142 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3143 gcc_obstack_init (&vn_ssa_aux_obstack);
3144
3145 shared_lookup_phiargs = NULL;
3146 shared_lookup_references = NULL;
3147 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3148 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3149 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3150
3151 /* rpo_numbers_temp holds the RPO ordering: rpo_numbers_temp[i] = bb
3152 means that the i'th block in RPO order is bb. We want the inverse
3153 map from block index to RPO number, so we rearrange this array. */
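/* For example, if rpo_numbers_temp is {2, 4, 3}, then block 2 is
   first in RPO and rpo_numbers[2] becomes 0, rpo_numbers[4]
   becomes 1, and rpo_numbers[3] becomes 2.  */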
3154 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3155 rpo_numbers[rpo_numbers_temp[j]] = j;
3156
3157 XDELETE (rpo_numbers_temp);
3158
3159 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3160
3161 /* Create the VN_INFO structures, and initialize value numbers to
3162 TOP. */
3163 for (i = 0; i < num_ssa_names; i++)
3164 {
3165 tree name = ssa_name (i);
3166 if (name)
3167 {
3168 VN_INFO_GET (name)->valnum = VN_TOP;
3169 VN_INFO (name)->expr = NULL_TREE;
3170 VN_INFO (name)->value_id = 0;
3171 }
3172 }
3173
3174 renumber_gimple_stmt_uids ();
3175
3176 /* Create the valid and optimistic value numbering tables. */
3177 valid_info = XCNEW (struct vn_tables_s);
3178 allocate_vn_table (valid_info);
3179 optimistic_info = XCNEW (struct vn_tables_s);
3180 allocate_vn_table (optimistic_info);
3181 }
3182
3183 void
3184 free_scc_vn (void)
3185 {
3186 size_t i;
3187
3188 htab_delete (constant_to_value_id);
3189 BITMAP_FREE (constant_value_ids);
3190 VEC_free (tree, heap, shared_lookup_phiargs);
3191 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3192 XDELETEVEC (rpo_numbers);
3193
3194 for (i = 0; i < num_ssa_names; i++)
3195 {
3196 tree name = ssa_name (i);
3197 if (name
3198 && VN_INFO (name)->needs_insertion)
3199 release_ssa_name (name);
3200 }
3201 obstack_free (&vn_ssa_aux_obstack, NULL);
3202 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3203
3204 VEC_free (tree, heap, sccstack);
3205 free_vn_table (valid_info);
3206 XDELETE (valid_info);
3207 free_vn_table (optimistic_info);
3208 XDELETE (optimistic_info);
3209 }
3210
3211 /* Set the value ids in the valid hash tables. */
3212
3213 static void
3214 set_hashtable_value_ids (void)
3215 {
3216 htab_iterator hi;
3217 vn_nary_op_t vno;
3218 vn_reference_t vr;
3219 vn_phi_t vp;
3220
3221 /* Now set the value ids of the things we had put in the hash
3222 table. */
3223
3224 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3225 vno, vn_nary_op_t, hi)
3226 {
3227 if (vno->result)
3228 {
3229 if (TREE_CODE (vno->result) == SSA_NAME)
3230 vno->value_id = VN_INFO (vno->result)->value_id;
3231 else if (is_gimple_min_invariant (vno->result))
3232 vno->value_id = get_or_alloc_constant_value_id (vno->result);
3233 }
3234 }
3235
3236 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3237 vp, vn_phi_t, hi)
3238 {
3239 if (vp->result)
3240 {
3241 if (TREE_CODE (vp->result) == SSA_NAME)
3242 vp->value_id = VN_INFO (vp->result)->value_id;
3243 else if (is_gimple_min_invariant (vp->result))
3244 vp->value_id = get_or_alloc_constant_value_id (vp->result);
3245 }
3246 }
3247
3248 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3249 vr, vn_reference_t, hi)
3250 {
3251 if (vr->result)
3252 {
3253 if (TREE_CODE (vr->result) == SSA_NAME)
3254 vr->value_id = VN_INFO (vr->result)->value_id;
3255 else if (is_gimple_min_invariant (vr->result))
3256 vr->value_id = get_or_alloc_constant_value_id (vr->result);
3257 }
3258 }
3259 }
3260
3261 /* Do SCCVN. Returns true if it finished, false if we bailed out
3262 due to resource constraints. */
3263
3264 bool
3265 run_scc_vn (bool may_insert_arg)
3266 {
3267 size_t i;
3268 tree param;
3269 bool changed = true;
3270
3271 may_insert = may_insert_arg;
3272
3273 init_scc_vn ();
3274 current_info = valid_info;
3275
3276 for (param = DECL_ARGUMENTS (current_function_decl);
3277 param;
3278 param = TREE_CHAIN (param))
3279 {
3280 if (gimple_default_def (cfun, param) != NULL)
3281 {
3282 tree def = gimple_default_def (cfun, param);
3283 VN_INFO (def)->valnum = def;
3284 }
3285 }
3286
3287 for (i = 1; i < num_ssa_names; ++i)
3288 {
3289 tree name = ssa_name (i);
3290 if (name
3291 && VN_INFO (name)->visited == false
3292 && !has_zero_uses (name))
3293 if (!DFS (name))
3294 {
3295 free_scc_vn ();
3296 may_insert = false;
3297 return false;
3298 }
3299 }
3300
3301 /* Initialize the value ids. */
3302
3303 for (i = 1; i < num_ssa_names; ++i)
3304 {
3305 tree name = ssa_name (i);
3306 vn_ssa_aux_t info;
3307 if (!name)
3308 continue;
3309 info = VN_INFO (name);
3310 if (info->valnum == name
3311 || info->valnum == VN_TOP)
3312 info->value_id = get_next_value_id ();
3313 else if (is_gimple_min_invariant (info->valnum))
3314 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3315 }
3316
3317 /* Propagate the value ids until they stop changing. */
3318 while (changed)
3319 {
3320 changed = false;
3321 for (i = 1; i < num_ssa_names; ++i)
3322 {
3323 tree name = ssa_name (i);
3324 vn_ssa_aux_t info;
3325 if (!name)
3326 continue;
3327 info = VN_INFO (name);
3328 if (TREE_CODE (info->valnum) == SSA_NAME
3329 && info->valnum != name
3330 && info->value_id != VN_INFO (info->valnum)->value_id)
3331 {
3332 changed = true;
3333 info->value_id = VN_INFO (info->valnum)->value_id;
3334 }
3335 }
3336 }
3337
3338 set_hashtable_value_ids ();
3339
3340 if (dump_file && (dump_flags & TDF_DETAILS))
3341 {
3342 fprintf (dump_file, "Value numbers:\n");
3343 for (i = 0; i < num_ssa_names; i++)
3344 {
3345 tree name = ssa_name (i);
3346 if (name
3347 && VN_INFO (name)->visited
3348 && SSA_VAL (name) != name)
3349 {
3350 print_generic_expr (dump_file, name, 0);
3351 fprintf (dump_file, " = ");
3352 print_generic_expr (dump_file, SSA_VAL (name), 0);
3353 fprintf (dump_file, "\n");
3354 }
3355 }
3356 }
3357
3358 may_insert = false;
3359 return true;
3360 }
3361
3362 /* Return the maximum value id we have ever seen. */
3363
3364 unsigned int
3365 get_max_value_id (void)
3366 {
3367 return next_value_id;
3368 }
3369
3370 /* Return the next unique value id. */
3371
3372 unsigned int
3373 get_next_value_id (void)
3374 {
3375 return next_value_id++;
3376 }
3377
3378
3379 /* Compare two expressions E1 and E2 and return true if they are equal. */
3380
3381 bool
3382 expressions_equal_p (tree e1, tree e2)
3383 {
3384 /* The obvious case. */
3385 if (e1 == e2)
3386 return true;
3387
3388 /* If only one of them is null, they cannot be equal. */
3389 if (!e1 || !e2)
3390 return false;
3391
3392 /* Now perform the actual comparison. */
3393 if (TREE_CODE (e1) == TREE_CODE (e2)
3394 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3395 return true;
3396
3397 return false;
3398 }
3399
3400
3401 /* Return true if the nary operation NARY may trap. This is a copy
3402 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3403
3404 bool
3405 vn_nary_may_trap (vn_nary_op_t nary)
3406 {
3407 tree type;
3408 tree rhs2 = NULL_TREE;
3409 bool honor_nans = false;
3410 bool honor_snans = false;
3411 bool fp_operation = false;
3412 bool honor_trapv = false;
3413 bool handled, ret;
3414 unsigned i;
3415
3416 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3417 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3418 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3419 {
3420 type = nary->type;
3421 fp_operation = FLOAT_TYPE_P (type);
3422 if (fp_operation)
3423 {
3424 honor_nans = flag_trapping_math && !flag_finite_math_only;
3425 honor_snans = flag_signaling_nans != 0;
3426 }
3427 else if (INTEGRAL_TYPE_P (type)
3428 && TYPE_OVERFLOW_TRAPS (type))
3429 honor_trapv = true;
3430 }
3431 if (nary->length >= 2)
3432 rhs2 = nary->op[1];
3433 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3434 honor_trapv,
3435 honor_nans, honor_snans, rhs2,
3436 &handled);
3437 if (handled
3438 && ret)
3439 return true;
3440
3441 for (i = 0; i < nary->length; ++i)
3442 if (tree_could_trap_p (nary->op[i]))
3443 return true;
3444
3445 return false;
3446 }