1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
27 #include "basic-block.h"
28 #include "tree-pretty-print.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
33 #include "tree-dump.h"
37 #include "tree-iterator.h"
38 #include "alloc-pool.h"
39 #include "tree-pass.h"
42 #include "langhooks.h"
45 #include "tree-ssa-propagate.h"
46 #include "tree-ssa-sccvn.h"
47 #include "gimple-fold.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
106 /* The set of hashtables and alloc_pool's for their items. */
108 typedef struct vn_tables_s
113 struct obstack nary_obstack
;
114 alloc_pool phis_pool
;
115 alloc_pool references_pool
;
118 static htab_t constant_to_value_id
;
119 static bitmap constant_value_ids
;
122 /* Valid hashtables storing information we have proven to be
125 static vn_tables_t valid_info
;
127 /* Optimistic hashtables storing information we are making assumptions about
128 during iterations. */
130 static vn_tables_t optimistic_info
;
132 /* Pointer to the set of hashtables that is currently being used.
133 Should always point to either the optimistic_info, or the
136 static vn_tables_t current_info
;
139 /* Reverse post order index for each basic block. */
141 static int *rpo_numbers
;
143 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
145 /* This represents the top of the VN lattice, which is the universal
150 /* Unique counter for our value ids. */
152 static unsigned int next_value_id
;
154 /* Next DFS number and the stack for strongly connected component
157 static unsigned int next_dfs_num
;
158 static VEC (tree
, heap
) *sccstack
;
161 DEF_VEC_P(vn_ssa_aux_t
);
162 DEF_VEC_ALLOC_P(vn_ssa_aux_t
, heap
);
164 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
165 are allocated on an obstack for locality reasons, and to free them
166 without looping over the VEC. */
168 static VEC (vn_ssa_aux_t
, heap
) *vn_ssa_aux_table
;
169 static struct obstack vn_ssa_aux_obstack
;
171 /* Return the value numbering information for a given SSA name. */
176 vn_ssa_aux_t res
= VEC_index (vn_ssa_aux_t
, vn_ssa_aux_table
,
177 SSA_NAME_VERSION (name
));
178 gcc_checking_assert (res
);
182 /* Set the value numbering info for a given SSA name to a given
186 VN_INFO_SET (tree name
, vn_ssa_aux_t value
)
188 VEC_replace (vn_ssa_aux_t
, vn_ssa_aux_table
,
189 SSA_NAME_VERSION (name
), value
);
192 /* Initialize the value numbering info for a given SSA name.
193 This should be called just once for every SSA name. */
196 VN_INFO_GET (tree name
)
198 vn_ssa_aux_t newinfo
;
200 newinfo
= XOBNEW (&vn_ssa_aux_obstack
, struct vn_ssa_aux
);
201 memset (newinfo
, 0, sizeof (struct vn_ssa_aux
));
202 if (SSA_NAME_VERSION (name
) >= VEC_length (vn_ssa_aux_t
, vn_ssa_aux_table
))
203 VEC_safe_grow (vn_ssa_aux_t
, heap
, vn_ssa_aux_table
,
204 SSA_NAME_VERSION (name
) + 1);
205 VEC_replace (vn_ssa_aux_t
, vn_ssa_aux_table
,
206 SSA_NAME_VERSION (name
), newinfo
);
211 /* Get the representative expression for the SSA_NAME NAME. Returns
212 the representative SSA_NAME if there is no expression associated with it. */
215 vn_get_expr_for (tree name
)
217 vn_ssa_aux_t vn
= VN_INFO (name
);
219 tree expr
= NULL_TREE
;
222 if (vn
->valnum
== VN_TOP
)
225 /* If the value-number is a constant it is the representative
227 if (TREE_CODE (vn
->valnum
) != SSA_NAME
)
230 /* Get to the information of the value of this SSA_NAME. */
231 vn
= VN_INFO (vn
->valnum
);
233 /* If the value-number is a constant it is the representative
235 if (TREE_CODE (vn
->valnum
) != SSA_NAME
)
238 /* Else if we have an expression, return it. */
239 if (vn
->expr
!= NULL_TREE
)
242 /* Otherwise use the defining statement to build the expression. */
243 def_stmt
= SSA_NAME_DEF_STMT (vn
->valnum
);
245 /* If the value number is not an assignment use it directly. */
246 if (!is_gimple_assign (def_stmt
))
249 /* FIXME tuples. This is incomplete and likely will miss some
251 code
= gimple_assign_rhs_code (def_stmt
);
252 switch (TREE_CODE_CLASS (code
))
255 if ((code
== REALPART_EXPR
256 || code
== IMAGPART_EXPR
257 || code
== VIEW_CONVERT_EXPR
)
258 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt
),
260 expr
= fold_build1 (code
,
261 gimple_expr_type (def_stmt
),
262 TREE_OPERAND (gimple_assign_rhs1 (def_stmt
), 0));
266 expr
= fold_build1 (code
,
267 gimple_expr_type (def_stmt
),
268 gimple_assign_rhs1 (def_stmt
));
272 expr
= fold_build2 (code
,
273 gimple_expr_type (def_stmt
),
274 gimple_assign_rhs1 (def_stmt
),
275 gimple_assign_rhs2 (def_stmt
));
278 case tcc_exceptional
:
279 if (code
== CONSTRUCTOR
281 (TREE_TYPE (gimple_assign_rhs1 (def_stmt
))) == VECTOR_TYPE
)
282 expr
= gimple_assign_rhs1 (def_stmt
);
287 if (expr
== NULL_TREE
)
290 /* Cache the expression. */
297 /* Free a phi operation structure VP. */
302 vn_phi_t phi
= (vn_phi_t
) vp
;
303 VEC_free (tree
, heap
, phi
->phiargs
);
306 /* Free a reference operation structure VP. */
309 free_reference (void *vp
)
311 vn_reference_t vr
= (vn_reference_t
) vp
;
312 VEC_free (vn_reference_op_s
, heap
, vr
->operands
);
315 /* Hash table equality function for vn_constant_t. */
318 vn_constant_eq (const void *p1
, const void *p2
)
320 const struct vn_constant_s
*vc1
= (const struct vn_constant_s
*) p1
;
321 const struct vn_constant_s
*vc2
= (const struct vn_constant_s
*) p2
;
323 if (vc1
->hashcode
!= vc2
->hashcode
)
326 return vn_constant_eq_with_type (vc1
->constant
, vc2
->constant
);
329 /* Hash table hash function for vn_constant_t. */
332 vn_constant_hash (const void *p1
)
334 const struct vn_constant_s
*vc1
= (const struct vn_constant_s
*) p1
;
335 return vc1
->hashcode
;
338 /* Lookup a value id for CONSTANT and return it. If it does not
342 get_constant_value_id (tree constant
)
345 struct vn_constant_s vc
;
347 vc
.hashcode
= vn_hash_constant_with_type (constant
);
348 vc
.constant
= constant
;
349 slot
= htab_find_slot_with_hash (constant_to_value_id
, &vc
,
350 vc
.hashcode
, NO_INSERT
);
352 return ((vn_constant_t
)*slot
)->value_id
;
356 /* Lookup a value id for CONSTANT, and if it does not exist, create a
357 new one and return it. If it does exist, return it. */
360 get_or_alloc_constant_value_id (tree constant
)
363 struct vn_constant_s vc
;
366 vc
.hashcode
= vn_hash_constant_with_type (constant
);
367 vc
.constant
= constant
;
368 slot
= htab_find_slot_with_hash (constant_to_value_id
, &vc
,
369 vc
.hashcode
, INSERT
);
371 return ((vn_constant_t
)*slot
)->value_id
;
373 vcp
= XNEW (struct vn_constant_s
);
374 vcp
->hashcode
= vc
.hashcode
;
375 vcp
->constant
= constant
;
376 vcp
->value_id
= get_next_value_id ();
377 *slot
= (void *) vcp
;
378 bitmap_set_bit (constant_value_ids
, vcp
->value_id
);
379 return vcp
->value_id
;
382 /* Return true if V is a value id for a constant. */
385 value_id_constant_p (unsigned int v
)
387 return bitmap_bit_p (constant_value_ids
, v
);
390 /* Compare two reference operands P1 and P2 for equality. Return true if
391 they are equal, and false otherwise. */
394 vn_reference_op_eq (const void *p1
, const void *p2
)
396 const_vn_reference_op_t
const vro1
= (const_vn_reference_op_t
) p1
;
397 const_vn_reference_op_t
const vro2
= (const_vn_reference_op_t
) p2
;
399 return (vro1
->opcode
== vro2
->opcode
400 /* We do not care for differences in type qualification. */
401 && (vro1
->type
== vro2
->type
402 || (vro1
->type
&& vro2
->type
403 && types_compatible_p (TYPE_MAIN_VARIANT (vro1
->type
),
404 TYPE_MAIN_VARIANT (vro2
->type
))))
405 && expressions_equal_p (vro1
->op0
, vro2
->op0
)
406 && expressions_equal_p (vro1
->op1
, vro2
->op1
)
407 && expressions_equal_p (vro1
->op2
, vro2
->op2
));
410 /* Compute the hash for a reference operand VRO1. */
413 vn_reference_op_compute_hash (const vn_reference_op_t vro1
, hashval_t result
)
415 result
= iterative_hash_hashval_t (vro1
->opcode
, result
);
417 result
= iterative_hash_expr (vro1
->op0
, result
);
419 result
= iterative_hash_expr (vro1
->op1
, result
);
421 result
= iterative_hash_expr (vro1
->op2
, result
);
425 /* Return the hashcode for a given reference operation P1. */
428 vn_reference_hash (const void *p1
)
430 const_vn_reference_t
const vr1
= (const_vn_reference_t
) p1
;
431 return vr1
->hashcode
;
434 /* Compute a hash for the reference operation VR1 and return it. */
437 vn_reference_compute_hash (const vn_reference_t vr1
)
439 hashval_t result
= 0;
441 vn_reference_op_t vro
;
442 HOST_WIDE_INT off
= -1;
445 FOR_EACH_VEC_ELT (vn_reference_op_s
, vr1
->operands
, i
, vro
)
447 if (vro
->opcode
== MEM_REF
)
449 else if (vro
->opcode
!= ADDR_EXPR
)
461 result
= iterative_hash_hashval_t (off
, result
);
464 && vro
->opcode
== ADDR_EXPR
)
468 tree op
= TREE_OPERAND (vro
->op0
, 0);
469 result
= iterative_hash_hashval_t (TREE_CODE (op
), result
);
470 result
= iterative_hash_expr (op
, result
);
474 result
= vn_reference_op_compute_hash (vro
, result
);
478 result
+= SSA_NAME_VERSION (vr1
->vuse
);
483 /* Return true if reference operations P1 and P2 are equivalent. This
484 means they have the same set of operands and vuses. */
487 vn_reference_eq (const void *p1
, const void *p2
)
491 const_vn_reference_t
const vr1
= (const_vn_reference_t
) p1
;
492 const_vn_reference_t
const vr2
= (const_vn_reference_t
) p2
;
493 if (vr1
->hashcode
!= vr2
->hashcode
)
496 /* Early out if this is not a hash collision. */
497 if (vr1
->hashcode
!= vr2
->hashcode
)
500 /* The VOP needs to be the same. */
501 if (vr1
->vuse
!= vr2
->vuse
)
504 /* If the operands are the same we are done. */
505 if (vr1
->operands
== vr2
->operands
)
508 if (!expressions_equal_p (TYPE_SIZE (vr1
->type
), TYPE_SIZE (vr2
->type
)))
511 if (INTEGRAL_TYPE_P (vr1
->type
)
512 && INTEGRAL_TYPE_P (vr2
->type
))
514 if (TYPE_PRECISION (vr1
->type
) != TYPE_PRECISION (vr2
->type
))
517 else if (INTEGRAL_TYPE_P (vr1
->type
)
518 && (TYPE_PRECISION (vr1
->type
)
519 != TREE_INT_CST_LOW (TYPE_SIZE (vr1
->type
))))
521 else if (INTEGRAL_TYPE_P (vr2
->type
)
522 && (TYPE_PRECISION (vr2
->type
)
523 != TREE_INT_CST_LOW (TYPE_SIZE (vr2
->type
))))
530 HOST_WIDE_INT off1
= 0, off2
= 0;
531 vn_reference_op_t vro1
, vro2
;
532 vn_reference_op_s tem1
, tem2
;
533 bool deref1
= false, deref2
= false;
534 for (; VEC_iterate (vn_reference_op_s
, vr1
->operands
, i
, vro1
); i
++)
536 if (vro1
->opcode
== MEM_REF
)
542 for (; VEC_iterate (vn_reference_op_s
, vr2
->operands
, j
, vro2
); j
++)
544 if (vro2
->opcode
== MEM_REF
)
552 if (deref1
&& vro1
->opcode
== ADDR_EXPR
)
554 memset (&tem1
, 0, sizeof (tem1
));
555 tem1
.op0
= TREE_OPERAND (vro1
->op0
, 0);
556 tem1
.type
= TREE_TYPE (tem1
.op0
);
557 tem1
.opcode
= TREE_CODE (tem1
.op0
);
560 if (deref2
&& vro2
->opcode
== ADDR_EXPR
)
562 memset (&tem2
, 0, sizeof (tem2
));
563 tem2
.op0
= TREE_OPERAND (vro2
->op0
, 0);
564 tem2
.type
= TREE_TYPE (tem2
.op0
);
565 tem2
.opcode
= TREE_CODE (tem2
.op0
);
568 if (!vn_reference_op_eq (vro1
, vro2
))
573 while (VEC_length (vn_reference_op_s
, vr1
->operands
) != i
574 || VEC_length (vn_reference_op_s
, vr2
->operands
) != j
);
579 /* Copy the operations present in load/store REF into RESULT, a vector of
580 vn_reference_op_s's. */
583 copy_reference_ops_from_ref (tree ref
, VEC(vn_reference_op_s
, heap
) **result
)
585 if (TREE_CODE (ref
) == TARGET_MEM_REF
)
587 vn_reference_op_s temp
;
589 memset (&temp
, 0, sizeof (temp
));
590 temp
.type
= TREE_TYPE (ref
);
591 temp
.opcode
= TREE_CODE (ref
);
592 temp
.op0
= TMR_INDEX (ref
);
593 temp
.op1
= TMR_STEP (ref
);
594 temp
.op2
= TMR_OFFSET (ref
);
596 VEC_safe_push (vn_reference_op_s
, heap
, *result
, &temp
);
598 memset (&temp
, 0, sizeof (temp
));
599 temp
.type
= NULL_TREE
;
600 temp
.opcode
= ERROR_MARK
;
601 temp
.op0
= TMR_INDEX2 (ref
);
603 VEC_safe_push (vn_reference_op_s
, heap
, *result
, &temp
);
605 memset (&temp
, 0, sizeof (temp
));
606 temp
.type
= NULL_TREE
;
607 temp
.opcode
= TREE_CODE (TMR_BASE (ref
));
608 temp
.op0
= TMR_BASE (ref
);
610 VEC_safe_push (vn_reference_op_s
, heap
, *result
, &temp
);
614 /* For non-calls, store the information that makes up the address. */
618 vn_reference_op_s temp
;
620 memset (&temp
, 0, sizeof (temp
));
621 temp
.type
= TREE_TYPE (ref
);
622 temp
.opcode
= TREE_CODE (ref
);
628 /* The base address gets its own vn_reference_op_s structure. */
629 temp
.op0
= TREE_OPERAND (ref
, 1);
630 if (host_integerp (TREE_OPERAND (ref
, 1), 0))
631 temp
.off
= TREE_INT_CST_LOW (TREE_OPERAND (ref
, 1));
634 /* Record bits and position. */
635 temp
.op0
= TREE_OPERAND (ref
, 1);
636 temp
.op1
= TREE_OPERAND (ref
, 2);
639 /* The field decl is enough to unambiguously specify the field,
640 a matching type is not necessary and a mismatching type
641 is always a spurious difference. */
642 temp
.type
= NULL_TREE
;
643 temp
.op0
= TREE_OPERAND (ref
, 1);
644 temp
.op1
= TREE_OPERAND (ref
, 2);
646 tree this_offset
= component_ref_field_offset (ref
);
648 && TREE_CODE (this_offset
) == INTEGER_CST
)
650 tree bit_offset
= DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref
, 1));
651 if (TREE_INT_CST_LOW (bit_offset
) % BITS_PER_UNIT
== 0)
654 = double_int_add (tree_to_double_int (this_offset
),
656 (tree_to_double_int (bit_offset
),
658 ? 3 : exact_log2 (BITS_PER_UNIT
),
659 HOST_BITS_PER_DOUBLE_INT
, true));
660 if (double_int_fits_in_shwi_p (off
))
666 case ARRAY_RANGE_REF
:
668 /* Record index as operand. */
669 temp
.op0
= TREE_OPERAND (ref
, 1);
670 /* Always record lower bounds and element size. */
671 temp
.op1
= array_ref_low_bound (ref
);
672 temp
.op2
= array_ref_element_size (ref
);
673 if (TREE_CODE (temp
.op0
) == INTEGER_CST
674 && TREE_CODE (temp
.op1
) == INTEGER_CST
675 && TREE_CODE (temp
.op2
) == INTEGER_CST
)
677 double_int off
= tree_to_double_int (temp
.op0
);
678 off
= double_int_add (off
,
680 (tree_to_double_int (temp
.op1
)));
681 off
= double_int_mul (off
, tree_to_double_int (temp
.op2
));
682 if (double_int_fits_in_shwi_p (off
))
687 if (DECL_HARD_REGISTER (ref
))
696 /* Canonicalize decls to MEM[&decl] which is what we end up with
697 when valueizing MEM[ptr] with ptr = &decl. */
698 temp
.opcode
= MEM_REF
;
699 temp
.op0
= build_int_cst (build_pointer_type (TREE_TYPE (ref
)), 0);
701 VEC_safe_push (vn_reference_op_s
, heap
, *result
, &temp
);
702 temp
.opcode
= ADDR_EXPR
;
703 temp
.op0
= build_fold_addr_expr (ref
);
704 temp
.type
= TREE_TYPE (temp
.op0
);
718 if (is_gimple_min_invariant (ref
))
724 /* These are only interesting for their operands, their
725 existence, and their type. They will never be the last
726 ref in the chain of references (IE they require an
727 operand), so we don't have to put anything
728 for op* as it will be handled by the iteration */
730 case VIEW_CONVERT_EXPR
:
734 /* This is only interesting for its constant offset. */
735 temp
.off
= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref
)));
740 VEC_safe_push (vn_reference_op_s
, heap
, *result
, &temp
);
742 if (REFERENCE_CLASS_P (ref
)
743 || (TREE_CODE (ref
) == ADDR_EXPR
744 && !is_gimple_min_invariant (ref
)))
745 ref
= TREE_OPERAND (ref
, 0);
751 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
752 operands in *OPS, the reference alias set SET and the reference type TYPE.
753 Return true if something useful was produced. */
756 ao_ref_init_from_vn_reference (ao_ref
*ref
,
757 alias_set_type set
, tree type
,
758 VEC (vn_reference_op_s
, heap
) *ops
)
760 vn_reference_op_t op
;
762 tree base
= NULL_TREE
;
764 HOST_WIDE_INT offset
= 0;
765 HOST_WIDE_INT max_size
;
766 HOST_WIDE_INT size
= -1;
767 tree size_tree
= NULL_TREE
;
768 alias_set_type base_alias_set
= -1;
770 /* First get the final access size from just the outermost expression. */
771 op
= VEC_index (vn_reference_op_s
, ops
, 0);
772 if (op
->opcode
== COMPONENT_REF
)
773 size_tree
= DECL_SIZE (op
->op0
);
774 else if (op
->opcode
== BIT_FIELD_REF
)
778 enum machine_mode mode
= TYPE_MODE (type
);
780 size_tree
= TYPE_SIZE (type
);
782 size
= GET_MODE_BITSIZE (mode
);
784 if (size_tree
!= NULL_TREE
)
786 if (!host_integerp (size_tree
, 1))
789 size
= TREE_INT_CST_LOW (size_tree
);
792 /* Initially, maxsize is the same as the accessed element size.
793 In the following it will only grow (or become -1). */
796 /* Compute cumulative bit-offset for nested component-refs and array-refs,
797 and find the ultimate containing object. */
798 FOR_EACH_VEC_ELT (vn_reference_op_s
, ops
, i
, op
)
802 /* These may be in the reference ops, but we cannot do anything
803 sensible with them here. */
805 /* Apart from ADDR_EXPR arguments to MEM_REF. */
806 if (base
!= NULL_TREE
807 && TREE_CODE (base
) == MEM_REF
809 && DECL_P (TREE_OPERAND (op
->op0
, 0)))
811 vn_reference_op_t pop
= VEC_index (vn_reference_op_s
, ops
, i
-1);
812 base
= TREE_OPERAND (op
->op0
, 0);
819 offset
+= pop
->off
* BITS_PER_UNIT
;
827 /* Record the base objects. */
829 base_alias_set
= get_deref_alias_set (op
->op0
);
830 *op0_p
= build2 (MEM_REF
, op
->type
,
832 op0_p
= &TREE_OPERAND (*op0_p
, 0);
843 /* And now the usual component-reference style ops. */
845 offset
+= tree_low_cst (op
->op1
, 0);
850 tree field
= op
->op0
;
851 /* We do not have a complete COMPONENT_REF tree here so we
852 cannot use component_ref_field_offset. Do the interesting
856 || !host_integerp (DECL_FIELD_OFFSET (field
), 1))
860 offset
+= (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field
))
862 offset
+= TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field
));
867 case ARRAY_RANGE_REF
:
869 /* We recorded the lower bound and the element size. */
870 if (!host_integerp (op
->op0
, 0)
871 || !host_integerp (op
->op1
, 0)
872 || !host_integerp (op
->op2
, 0))
876 HOST_WIDE_INT hindex
= TREE_INT_CST_LOW (op
->op0
);
877 hindex
-= TREE_INT_CST_LOW (op
->op1
);
878 hindex
*= TREE_INT_CST_LOW (op
->op2
);
879 hindex
*= BITS_PER_UNIT
;
891 case VIEW_CONVERT_EXPR
:
908 if (base
== NULL_TREE
)
911 ref
->ref
= NULL_TREE
;
913 ref
->offset
= offset
;
915 ref
->max_size
= max_size
;
916 ref
->ref_alias_set
= set
;
917 if (base_alias_set
!= -1)
918 ref
->base_alias_set
= base_alias_set
;
920 ref
->base_alias_set
= get_alias_set (base
);
925 /* Copy the operations present in load/store/call REF into RESULT, a vector of
926 vn_reference_op_s's. */
929 copy_reference_ops_from_call (gimple call
,
930 VEC(vn_reference_op_s
, heap
) **result
)
932 vn_reference_op_s temp
;
935 /* Copy the type, opcode, function being called and static chain. */
936 memset (&temp
, 0, sizeof (temp
));
937 temp
.type
= gimple_call_return_type (call
);
938 temp
.opcode
= CALL_EXPR
;
939 temp
.op0
= gimple_call_fn (call
);
940 temp
.op1
= gimple_call_chain (call
);
942 VEC_safe_push (vn_reference_op_s
, heap
, *result
, &temp
);
944 /* Copy the call arguments. As they can be references as well,
945 just chain them together. */
946 for (i
= 0; i
< gimple_call_num_args (call
); ++i
)
948 tree callarg
= gimple_call_arg (call
, i
);
949 copy_reference_ops_from_ref (callarg
, result
);
953 /* Create a vector of vn_reference_op_s structures from REF, a
954 REFERENCE_CLASS_P tree. The vector is not shared. */
956 static VEC(vn_reference_op_s
, heap
) *
957 create_reference_ops_from_ref (tree ref
)
959 VEC (vn_reference_op_s
, heap
) *result
= NULL
;
961 copy_reference_ops_from_ref (ref
, &result
);
965 /* Create a vector of vn_reference_op_s structures from CALL, a
966 call statement. The vector is not shared. */
968 static VEC(vn_reference_op_s
, heap
) *
969 create_reference_ops_from_call (gimple call
)
971 VEC (vn_reference_op_s
, heap
) *result
= NULL
;
973 copy_reference_ops_from_call (call
, &result
);
977 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
978 *I_P to point to the last element of the replacement. */
980 vn_reference_fold_indirect (VEC (vn_reference_op_s
, heap
) **ops
,
983 unsigned int i
= *i_p
;
984 vn_reference_op_t op
= VEC_index (vn_reference_op_s
, *ops
, i
);
985 vn_reference_op_t mem_op
= VEC_index (vn_reference_op_s
, *ops
, i
- 1);
987 HOST_WIDE_INT addr_offset
;
989 /* The only thing we have to do is from &OBJ.foo.bar add the offset
990 from .foo.bar to the preceding MEM_REF offset and replace the
991 address with &OBJ. */
992 addr_base
= get_addr_base_and_unit_offset (TREE_OPERAND (op
->op0
, 0),
994 gcc_checking_assert (addr_base
&& TREE_CODE (addr_base
) != MEM_REF
);
995 if (addr_base
!= op
->op0
)
997 double_int off
= tree_to_double_int (mem_op
->op0
);
998 off
= double_int_sext (off
, TYPE_PRECISION (TREE_TYPE (mem_op
->op0
)));
999 off
= double_int_add (off
, shwi_to_double_int (addr_offset
));
1000 mem_op
->op0
= double_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
1001 op
->op0
= build_fold_addr_expr (addr_base
);
1002 if (host_integerp (mem_op
->op0
, 0))
1003 mem_op
->off
= TREE_INT_CST_LOW (mem_op
->op0
);
1009 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1010 *I_P to point to the last element of the replacement. */
1012 vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s
, heap
) **ops
,
1015 unsigned int i
= *i_p
;
1016 vn_reference_op_t op
= VEC_index (vn_reference_op_s
, *ops
, i
);
1017 vn_reference_op_t mem_op
= VEC_index (vn_reference_op_s
, *ops
, i
- 1);
1019 enum tree_code code
;
1022 def_stmt
= SSA_NAME_DEF_STMT (op
->op0
);
1023 if (!is_gimple_assign (def_stmt
))
1026 code
= gimple_assign_rhs_code (def_stmt
);
1027 if (code
!= ADDR_EXPR
1028 && code
!= POINTER_PLUS_EXPR
)
1031 off
= tree_to_double_int (mem_op
->op0
);
1032 off
= double_int_sext (off
, TYPE_PRECISION (TREE_TYPE (mem_op
->op0
)));
1034 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1035 from .foo.bar to the preceding MEM_REF offset and replace the
1036 address with &OBJ. */
1037 if (code
== ADDR_EXPR
)
1039 tree addr
, addr_base
;
1040 HOST_WIDE_INT addr_offset
;
1042 addr
= gimple_assign_rhs1 (def_stmt
);
1043 addr_base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
1046 || TREE_CODE (addr_base
) != MEM_REF
)
1049 off
= double_int_add (off
, shwi_to_double_int (addr_offset
));
1050 off
= double_int_add (off
, mem_ref_offset (addr_base
));
1051 op
->op0
= TREE_OPERAND (addr_base
, 0);
1056 ptr
= gimple_assign_rhs1 (def_stmt
);
1057 ptroff
= gimple_assign_rhs2 (def_stmt
);
1058 if (TREE_CODE (ptr
) != SSA_NAME
1059 || TREE_CODE (ptroff
) != INTEGER_CST
)
1062 off
= double_int_add (off
, tree_to_double_int (ptroff
));
1066 mem_op
->op0
= double_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
1067 if (host_integerp (mem_op
->op0
, 0))
1068 mem_op
->off
= TREE_INT_CST_LOW (mem_op
->op0
);
1071 if (TREE_CODE (op
->op0
) == SSA_NAME
)
1072 op
->op0
= SSA_VAL (op
->op0
);
1073 if (TREE_CODE (op
->op0
) != SSA_NAME
)
1074 op
->opcode
= TREE_CODE (op
->op0
);
1077 if (TREE_CODE (op
->op0
) == SSA_NAME
)
1078 vn_reference_maybe_forwprop_address (ops
, i_p
);
1079 else if (TREE_CODE (op
->op0
) == ADDR_EXPR
)
1080 vn_reference_fold_indirect (ops
, i_p
);
1083 /* Optimize the reference REF to a constant if possible or return
1084 NULL_TREE if not. */
1087 fully_constant_vn_reference_p (vn_reference_t ref
)
1089 VEC (vn_reference_op_s
, heap
) *operands
= ref
->operands
;
1090 vn_reference_op_t op
;
1092 /* Try to simplify the translated expression if it is
1093 a call to a builtin function with at most two arguments. */
1094 op
= VEC_index (vn_reference_op_s
, operands
, 0);
1095 if (op
->opcode
== CALL_EXPR
1096 && TREE_CODE (op
->op0
) == ADDR_EXPR
1097 && TREE_CODE (TREE_OPERAND (op
->op0
, 0)) == FUNCTION_DECL
1098 && DECL_BUILT_IN (TREE_OPERAND (op
->op0
, 0))
1099 && VEC_length (vn_reference_op_s
, operands
) >= 2
1100 && VEC_length (vn_reference_op_s
, operands
) <= 3)
1102 vn_reference_op_t arg0
, arg1
= NULL
;
1103 bool anyconst
= false;
1104 arg0
= VEC_index (vn_reference_op_s
, operands
, 1);
1105 if (VEC_length (vn_reference_op_s
, operands
) > 2)
1106 arg1
= VEC_index (vn_reference_op_s
, operands
, 2);
1107 if (TREE_CODE_CLASS (arg0
->opcode
) == tcc_constant
1108 || (arg0
->opcode
== ADDR_EXPR
1109 && is_gimple_min_invariant (arg0
->op0
)))
1112 && (TREE_CODE_CLASS (arg1
->opcode
) == tcc_constant
1113 || (arg1
->opcode
== ADDR_EXPR
1114 && is_gimple_min_invariant (arg1
->op0
))))
1118 tree folded
= build_call_expr (TREE_OPERAND (op
->op0
, 0),
1121 arg1
? arg1
->op0
: NULL
);
1123 && TREE_CODE (folded
) == NOP_EXPR
)
1124 folded
= TREE_OPERAND (folded
, 0);
1126 && is_gimple_min_invariant (folded
))
1131 /* Simplify reads from constant strings. */
1132 else if (op
->opcode
== ARRAY_REF
1133 && TREE_CODE (op
->op0
) == INTEGER_CST
1134 && integer_zerop (op
->op1
)
1135 && VEC_length (vn_reference_op_s
, operands
) == 2)
1137 vn_reference_op_t arg0
;
1138 arg0
= VEC_index (vn_reference_op_s
, operands
, 1);
1139 if (arg0
->opcode
== STRING_CST
1140 && (TYPE_MODE (op
->type
)
1141 == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0
->op0
))))
1142 && GET_MODE_CLASS (TYPE_MODE (op
->type
)) == MODE_INT
1143 && GET_MODE_SIZE (TYPE_MODE (op
->type
)) == 1
1144 && compare_tree_int (op
->op0
, TREE_STRING_LENGTH (arg0
->op0
)) < 0)
1145 return build_int_cst_type (op
->type
,
1146 (TREE_STRING_POINTER (arg0
->op0
)
1147 [TREE_INT_CST_LOW (op
->op0
)]));
1153 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1154 structures into their value numbers. This is done in-place, and
1155 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1156 whether any operands were valueized. */
1158 static VEC (vn_reference_op_s
, heap
) *
1159 valueize_refs_1 (VEC (vn_reference_op_s
, heap
) *orig
, bool *valueized_anything
)
1161 vn_reference_op_t vro
;
1164 *valueized_anything
= false;
1166 FOR_EACH_VEC_ELT (vn_reference_op_s
, orig
, i
, vro
)
1168 if (vro
->opcode
== SSA_NAME
1169 || (vro
->op0
&& TREE_CODE (vro
->op0
) == SSA_NAME
))
1171 tree tem
= SSA_VAL (vro
->op0
);
1172 if (tem
!= vro
->op0
)
1174 *valueized_anything
= true;
1177 /* If it transforms from an SSA_NAME to a constant, update
1179 if (TREE_CODE (vro
->op0
) != SSA_NAME
&& vro
->opcode
== SSA_NAME
)
1180 vro
->opcode
= TREE_CODE (vro
->op0
);
1182 if (vro
->op1
&& TREE_CODE (vro
->op1
) == SSA_NAME
)
1184 tree tem
= SSA_VAL (vro
->op1
);
1185 if (tem
!= vro
->op1
)
1187 *valueized_anything
= true;
1191 if (vro
->op2
&& TREE_CODE (vro
->op2
) == SSA_NAME
)
1193 tree tem
= SSA_VAL (vro
->op2
);
1194 if (tem
!= vro
->op2
)
1196 *valueized_anything
= true;
1200 /* If it transforms from an SSA_NAME to an address, fold with
1201 a preceding indirect reference. */
1204 && TREE_CODE (vro
->op0
) == ADDR_EXPR
1205 && VEC_index (vn_reference_op_s
,
1206 orig
, i
- 1)->opcode
== MEM_REF
)
1207 vn_reference_fold_indirect (&orig
, &i
);
1209 && vro
->opcode
== SSA_NAME
1210 && VEC_index (vn_reference_op_s
,
1211 orig
, i
- 1)->opcode
== MEM_REF
)
1212 vn_reference_maybe_forwprop_address (&orig
, &i
);
1213 /* If it transforms a non-constant ARRAY_REF into a constant
1214 one, adjust the constant offset. */
1215 else if (vro
->opcode
== ARRAY_REF
1217 && TREE_CODE (vro
->op0
) == INTEGER_CST
1218 && TREE_CODE (vro
->op1
) == INTEGER_CST
1219 && TREE_CODE (vro
->op2
) == INTEGER_CST
)
1221 double_int off
= tree_to_double_int (vro
->op0
);
1222 off
= double_int_add (off
,
1224 (tree_to_double_int (vro
->op1
)));
1225 off
= double_int_mul (off
, tree_to_double_int (vro
->op2
));
1226 if (double_int_fits_in_shwi_p (off
))
1234 static VEC (vn_reference_op_s
, heap
) *
1235 valueize_refs (VEC (vn_reference_op_s
, heap
) *orig
)
1238 return valueize_refs_1 (orig
, &tem
);
1241 static VEC(vn_reference_op_s
, heap
) *shared_lookup_references
;
1243 /* Create a vector of vn_reference_op_s structures from REF, a
1244 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1245 this function. *VALUEIZED_ANYTHING will specify whether any
1246 operands were valueized. */
1248 static VEC(vn_reference_op_s
, heap
) *
1249 valueize_shared_reference_ops_from_ref (tree ref
, bool *valueized_anything
)
1253 VEC_truncate (vn_reference_op_s
, shared_lookup_references
, 0);
1254 copy_reference_ops_from_ref (ref
, &shared_lookup_references
);
1255 shared_lookup_references
= valueize_refs_1 (shared_lookup_references
,
1256 valueized_anything
);
1257 return shared_lookup_references
;
1260 /* Create a vector of vn_reference_op_s structures from CALL, a
1261 call statement. The vector is shared among all callers of
1264 static VEC(vn_reference_op_s
, heap
) *
1265 valueize_shared_reference_ops_from_call (gimple call
)
1269 VEC_truncate (vn_reference_op_s
, shared_lookup_references
, 0);
1270 copy_reference_ops_from_call (call
, &shared_lookup_references
);
1271 shared_lookup_references
= valueize_refs (shared_lookup_references
);
1272 return shared_lookup_references
;
1275 /* Lookup a SCCVN reference operation VR in the current hash table.
1276 Returns the resulting value number if it exists in the hash table,
1277 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1278 vn_reference_t stored in the hashtable if something is found. */
1281 vn_reference_lookup_1 (vn_reference_t vr
, vn_reference_t
*vnresult
)
1286 hash
= vr
->hashcode
;
1287 slot
= htab_find_slot_with_hash (current_info
->references
, vr
,
1289 if (!slot
&& current_info
== optimistic_info
)
1290 slot
= htab_find_slot_with_hash (valid_info
->references
, vr
,
1295 *vnresult
= (vn_reference_t
)*slot
;
1296 return ((vn_reference_t
)*slot
)->result
;
1302 static tree
*last_vuse_ptr
;
1303 static vn_lookup_kind vn_walk_kind
;
1304 static vn_lookup_kind default_vn_walk_kind
;
1306 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1307 with the current VUSE and performs the expression lookup. */
1310 vn_reference_lookup_2 (ao_ref
*op ATTRIBUTE_UNUSED
, tree vuse
, void *vr_
)
1312 vn_reference_t vr
= (vn_reference_t
)vr_
;
1317 *last_vuse_ptr
= vuse
;
1319 /* Fixup vuse and hash. */
1321 vr
->hashcode
= vr
->hashcode
- SSA_NAME_VERSION (vr
->vuse
);
1322 vr
->vuse
= SSA_VAL (vuse
);
1324 vr
->hashcode
= vr
->hashcode
+ SSA_NAME_VERSION (vr
->vuse
);
1326 hash
= vr
->hashcode
;
1327 slot
= htab_find_slot_with_hash (current_info
->references
, vr
,
1329 if (!slot
&& current_info
== optimistic_info
)
1330 slot
= htab_find_slot_with_hash (valid_info
->references
, vr
,
1338 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1339 from the statement defining VUSE and if not successful tries to
1340 translate *REFP and VR_ through an aggregate copy at the defintion
1344 vn_reference_lookup_3 (ao_ref
*ref
, tree vuse
, void *vr_
)
1346 vn_reference_t vr
= (vn_reference_t
)vr_
;
1347 gimple def_stmt
= SSA_NAME_DEF_STMT (vuse
);
1349 HOST_WIDE_INT offset
, maxsize
;
1350 static VEC (vn_reference_op_s
, heap
) *lhs_ops
= NULL
;
1352 bool lhs_ref_ok
= false;
1354 /* First try to disambiguate after value-replacing in the definitions LHS. */
1355 if (is_gimple_assign (def_stmt
))
1357 VEC (vn_reference_op_s
, heap
) *tem
;
1358 tree lhs
= gimple_assign_lhs (def_stmt
);
1359 bool valueized_anything
= false;
1360 /* Avoid re-allocation overhead. */
1361 VEC_truncate (vn_reference_op_s
, lhs_ops
, 0);
1362 copy_reference_ops_from_ref (lhs
, &lhs_ops
);
1364 lhs_ops
= valueize_refs_1 (lhs_ops
, &valueized_anything
);
1365 gcc_assert (lhs_ops
== tem
);
1366 if (valueized_anything
)
1368 lhs_ref_ok
= ao_ref_init_from_vn_reference (&lhs_ref
,
1369 get_alias_set (lhs
),
1370 TREE_TYPE (lhs
), lhs_ops
);
1372 && !refs_may_alias_p_1 (ref
, &lhs_ref
, true))
1377 ao_ref_init (&lhs_ref
, lhs
);
1382 base
= ao_ref_base (ref
);
1383 offset
= ref
->offset
;
1384 maxsize
= ref
->max_size
;
1386 /* If we cannot constrain the size of the reference we cannot
1387 test if anything kills it. */
1391 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1392 from that defintion.
1394 if (is_gimple_reg_type (vr
->type
)
1395 && gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMSET
)
1396 && integer_zerop (gimple_call_arg (def_stmt
, 1))
1397 && host_integerp (gimple_call_arg (def_stmt
, 2), 1)
1398 && TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
)
1400 tree ref2
= TREE_OPERAND (gimple_call_arg (def_stmt
, 0), 0);
1402 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1403 base2
= get_ref_base_and_extent (ref2
, &offset2
, &size2
, &maxsize2
);
1404 size2
= TREE_INT_CST_LOW (gimple_call_arg (def_stmt
, 2)) * 8;
1405 if ((unsigned HOST_WIDE_INT
)size2
/ 8
1406 == TREE_INT_CST_LOW (gimple_call_arg (def_stmt
, 2))
1408 && operand_equal_p (base
, base2
, 0)
1409 && offset2
<= offset
1410 && offset2
+ size2
>= offset
+ maxsize
)
1412 tree val
= build_zero_cst (vr
->type
);
1413 unsigned int value_id
= get_or_alloc_constant_value_id (val
);
1414 return vn_reference_insert_pieces (vuse
, vr
->set
, vr
->type
,
1415 VEC_copy (vn_reference_op_s
,
1416 heap
, vr
->operands
),
1421 /* 2) Assignment from an empty CONSTRUCTOR. */
1422 else if (is_gimple_reg_type (vr
->type
)
1423 && gimple_assign_single_p (def_stmt
)
1424 && gimple_assign_rhs_code (def_stmt
) == CONSTRUCTOR
1425 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt
)) == 0)
1428 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1429 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1430 &offset2
, &size2
, &maxsize2
);
1432 && operand_equal_p (base
, base2
, 0)
1433 && offset2
<= offset
1434 && offset2
+ size2
>= offset
+ maxsize
)
1436 tree val
= build_zero_cst (vr
->type
);
1437 unsigned int value_id
= get_or_alloc_constant_value_id (val
);
1438 return vn_reference_insert_pieces (vuse
, vr
->set
, vr
->type
,
1439 VEC_copy (vn_reference_op_s
,
1440 heap
, vr
->operands
),
1445 /* 3) For aggregate copies translate the reference through them if
1446 the copy kills ref. */
1447 else if (vn_walk_kind
== VN_WALKREWRITE
1448 && gimple_assign_single_p (def_stmt
)
1449 && (DECL_P (gimple_assign_rhs1 (def_stmt
))
1450 || TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == MEM_REF
1451 || handled_component_p (gimple_assign_rhs1 (def_stmt
))))
1454 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1456 VEC (vn_reference_op_s
, heap
) *rhs
= NULL
;
1457 vn_reference_op_t vro
;
1463 /* See if the assignment kills REF. */
1464 base2
= ao_ref_base (&lhs_ref
);
1465 offset2
= lhs_ref
.offset
;
1466 size2
= lhs_ref
.size
;
1467 maxsize2
= lhs_ref
.max_size
;
1469 || (base
!= base2
&& !operand_equal_p (base
, base2
, 0))
1471 || offset2
+ size2
< offset
+ maxsize
)
1474 /* Find the common base of ref and the lhs. lhs_ops already
1475 contains valueized operands for the lhs. */
1476 i
= VEC_length (vn_reference_op_s
, vr
->operands
) - 1;
1477 j
= VEC_length (vn_reference_op_s
, lhs_ops
) - 1;
1478 while (j
>= 0 && i
>= 0
1479 && vn_reference_op_eq (VEC_index (vn_reference_op_s
,
1481 VEC_index (vn_reference_op_s
, lhs_ops
, j
)))
1487 /* ??? The innermost op should always be a MEM_REF and we already
1488 checked that the assignment to the lhs kills vr. Thus for
1489 aggregate copies using char[] types the vn_reference_op_eq
1490 may fail when comparing types for compatibility. But we really
1491 don't care here - further lookups with the rewritten operands
1492 will simply fail if we messed up types too badly. */
1493 if (j
== 0 && i
>= 0
1494 && VEC_index (vn_reference_op_s
, lhs_ops
, 0)->opcode
== MEM_REF
1495 && VEC_index (vn_reference_op_s
, lhs_ops
, 0)->off
!= -1
1496 && (VEC_index (vn_reference_op_s
, lhs_ops
, 0)->off
1497 == VEC_index (vn_reference_op_s
, vr
->operands
, i
)->off
))
1500 /* i now points to the first additional op.
1501 ??? LHS may not be completely contained in VR, one or more
1502 VIEW_CONVERT_EXPRs could be in its way. We could at least
1503 try handling outermost VIEW_CONVERT_EXPRs. */
1507 /* Now re-write REF to be based on the rhs of the assignment. */
1508 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt
), &rhs
);
1509 /* We need to pre-pend vr->operands[0..i] to rhs. */
1510 if (i
+ 1 + VEC_length (vn_reference_op_s
, rhs
)
1511 > VEC_length (vn_reference_op_s
, vr
->operands
))
1513 VEC (vn_reference_op_s
, heap
) *old
= vr
->operands
;
1514 VEC_safe_grow (vn_reference_op_s
, heap
, vr
->operands
,
1515 i
+ 1 + VEC_length (vn_reference_op_s
, rhs
));
1516 if (old
== shared_lookup_references
1517 && vr
->operands
!= old
)
1518 shared_lookup_references
= NULL
;
1521 VEC_truncate (vn_reference_op_s
, vr
->operands
,
1522 i
+ 1 + VEC_length (vn_reference_op_s
, rhs
));
1523 FOR_EACH_VEC_ELT (vn_reference_op_s
, rhs
, j
, vro
)
1524 VEC_replace (vn_reference_op_s
, vr
->operands
, i
+ 1 + j
, vro
);
1525 VEC_free (vn_reference_op_s
, heap
, rhs
);
1526 vr
->hashcode
= vn_reference_compute_hash (vr
);
1528 /* Adjust *ref from the new operands. */
1529 if (!ao_ref_init_from_vn_reference (&r
, vr
->set
, vr
->type
, vr
->operands
))
1531 /* This can happen with bitfields. */
1532 if (ref
->size
!= r
.size
)
1536 /* Do not update last seen VUSE after translating. */
1537 last_vuse_ptr
= NULL
;
1539 /* Keep looking for the adjusted *REF / VR pair. */
1543 /* 4) For memcpy copies translate the reference through them if
1544 the copy kills ref. */
1545 else if (vn_walk_kind
== VN_WALKREWRITE
1546 && is_gimple_reg_type (vr
->type
)
1547 /* ??? Handle BCOPY as well. */
1548 && (gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMCPY
)
1549 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMPCPY
)
1550 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMMOVE
))
1551 && (TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
1552 || TREE_CODE (gimple_call_arg (def_stmt
, 0)) == SSA_NAME
)
1553 && (TREE_CODE (gimple_call_arg (def_stmt
, 1)) == ADDR_EXPR
1554 || TREE_CODE (gimple_call_arg (def_stmt
, 1)) == SSA_NAME
)
1555 && host_integerp (gimple_call_arg (def_stmt
, 2), 1))
1559 HOST_WIDE_INT rhs_offset
, copy_size
, lhs_offset
;
1560 vn_reference_op_s op
;
1564 /* Only handle non-variable, addressable refs. */
1565 if (ref
->size
!= maxsize
1566 || offset
% BITS_PER_UNIT
!= 0
1567 || ref
->size
% BITS_PER_UNIT
!= 0)
1570 /* Extract a pointer base and an offset for the destination. */
1571 lhs
= gimple_call_arg (def_stmt
, 0);
1573 if (TREE_CODE (lhs
) == SSA_NAME
)
1574 lhs
= SSA_VAL (lhs
);
1575 if (TREE_CODE (lhs
) == ADDR_EXPR
)
1577 tree tem
= get_addr_base_and_unit_offset (TREE_OPERAND (lhs
, 0),
1581 if (TREE_CODE (tem
) == MEM_REF
1582 && host_integerp (TREE_OPERAND (tem
, 1), 1))
1584 lhs
= TREE_OPERAND (tem
, 0);
1585 lhs_offset
+= TREE_INT_CST_LOW (TREE_OPERAND (tem
, 1));
1587 else if (DECL_P (tem
))
1588 lhs
= build_fold_addr_expr (tem
);
1592 if (TREE_CODE (lhs
) != SSA_NAME
1593 && TREE_CODE (lhs
) != ADDR_EXPR
)
1596 /* Extract a pointer base and an offset for the source. */
1597 rhs
= gimple_call_arg (def_stmt
, 1);
1599 if (TREE_CODE (rhs
) == SSA_NAME
)
1600 rhs
= SSA_VAL (rhs
);
1601 if (TREE_CODE (rhs
) == ADDR_EXPR
)
1603 tree tem
= get_addr_base_and_unit_offset (TREE_OPERAND (rhs
, 0),
1607 if (TREE_CODE (tem
) == MEM_REF
1608 && host_integerp (TREE_OPERAND (tem
, 1), 1))
1610 rhs
= TREE_OPERAND (tem
, 0);
1611 rhs_offset
+= TREE_INT_CST_LOW (TREE_OPERAND (tem
, 1));
1613 else if (DECL_P (tem
))
1614 rhs
= build_fold_addr_expr (tem
);
1618 if (TREE_CODE (rhs
) != SSA_NAME
1619 && TREE_CODE (rhs
) != ADDR_EXPR
)
1622 copy_size
= TREE_INT_CST_LOW (gimple_call_arg (def_stmt
, 2));
1624 /* The bases of the destination and the references have to agree. */
1625 if ((TREE_CODE (base
) != MEM_REF
1627 || (TREE_CODE (base
) == MEM_REF
1628 && (TREE_OPERAND (base
, 0) != lhs
1629 || !host_integerp (TREE_OPERAND (base
, 1), 1)))
1631 && (TREE_CODE (lhs
) != ADDR_EXPR
1632 || TREE_OPERAND (lhs
, 0) != base
)))
1635 /* And the access has to be contained within the memcpy destination. */
1636 at
= offset
/ BITS_PER_UNIT
;
1637 if (TREE_CODE (base
) == MEM_REF
)
1638 at
+= TREE_INT_CST_LOW (TREE_OPERAND (base
, 1));
1640 || lhs_offset
+ copy_size
< at
+ maxsize
/ BITS_PER_UNIT
)
1643 /* Make room for 2 operands in the new reference. */
1644 if (VEC_length (vn_reference_op_s
, vr
->operands
) < 2)
1646 VEC (vn_reference_op_s
, heap
) *old
= vr
->operands
;
1647 VEC_safe_grow (vn_reference_op_s
, heap
, vr
->operands
, 2);
1648 if (old
== shared_lookup_references
1649 && vr
->operands
!= old
)
1650 shared_lookup_references
= NULL
;
1653 VEC_truncate (vn_reference_op_s
, vr
->operands
, 2);
1655 /* The looked-through reference is a simple MEM_REF. */
1656 memset (&op
, 0, sizeof (op
));
1658 op
.opcode
= MEM_REF
;
1659 op
.op0
= build_int_cst (ptr_type_node
, at
- rhs_offset
);
1660 op
.off
= at
- lhs_offset
+ rhs_offset
;
1661 VEC_replace (vn_reference_op_s
, vr
->operands
, 0, &op
);
1662 op
.type
= TREE_TYPE (rhs
);
1663 op
.opcode
= TREE_CODE (rhs
);
1666 VEC_replace (vn_reference_op_s
, vr
->operands
, 1, &op
);
1667 vr
->hashcode
= vn_reference_compute_hash (vr
);
1669 /* Adjust *ref from the new operands. */
1670 if (!ao_ref_init_from_vn_reference (&r
, vr
->set
, vr
->type
, vr
->operands
))
1672 /* This can happen with bitfields. */
1673 if (ref
->size
!= r
.size
)
1677 /* Do not update last seen VUSE after translating. */
1678 last_vuse_ptr
= NULL
;
1680 /* Keep looking for the adjusted *REF / VR pair. */
1684 /* Bail out and stop walking. */
1688 /* Lookup a reference operation by it's parts, in the current hash table.
1689 Returns the resulting value number if it exists in the hash table,
1690 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1691 vn_reference_t stored in the hashtable if something is found. */
1694 vn_reference_lookup_pieces (tree vuse
, alias_set_type set
, tree type
,
1695 VEC (vn_reference_op_s
, heap
) *operands
,
1696 vn_reference_t
*vnresult
, vn_lookup_kind kind
)
1698 struct vn_reference_s vr1
;
1706 vr1
.vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
1707 VEC_truncate (vn_reference_op_s
, shared_lookup_references
, 0);
1708 VEC_safe_grow (vn_reference_op_s
, heap
, shared_lookup_references
,
1709 VEC_length (vn_reference_op_s
, operands
));
1710 memcpy (VEC_address (vn_reference_op_s
, shared_lookup_references
),
1711 VEC_address (vn_reference_op_s
, operands
),
1712 sizeof (vn_reference_op_s
)
1713 * VEC_length (vn_reference_op_s
, operands
));
1714 vr1
.operands
= operands
= shared_lookup_references
1715 = valueize_refs (shared_lookup_references
);
1718 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
1719 if ((cst
= fully_constant_vn_reference_p (&vr1
)))
1722 vn_reference_lookup_1 (&vr1
, vnresult
);
1724 && kind
!= VN_NOWALK
1728 vn_walk_kind
= kind
;
1729 if (ao_ref_init_from_vn_reference (&r
, set
, type
, vr1
.operands
))
1731 (vn_reference_t
)walk_non_aliased_vuses (&r
, vr1
.vuse
,
1732 vn_reference_lookup_2
,
1733 vn_reference_lookup_3
, &vr1
);
1734 if (vr1
.operands
!= operands
)
1735 VEC_free (vn_reference_op_s
, heap
, vr1
.operands
);
1739 return (*vnresult
)->result
;
1744 /* Lookup OP in the current hash table, and return the resulting value
1745 number if it exists in the hash table. Return NULL_TREE if it does
1746 not exist in the hash table or if the result field of the structure
1747 was NULL.. VNRESULT will be filled in with the vn_reference_t
1748 stored in the hashtable if one exists. */
1751 vn_reference_lookup (tree op
, tree vuse
, vn_lookup_kind kind
,
1752 vn_reference_t
*vnresult
)
1754 VEC (vn_reference_op_s
, heap
) *operands
;
1755 struct vn_reference_s vr1
;
1757 bool valuezied_anything
;
1762 vr1
.vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
1763 vr1
.operands
= operands
1764 = valueize_shared_reference_ops_from_ref (op
, &valuezied_anything
);
1765 vr1
.type
= TREE_TYPE (op
);
1766 vr1
.set
= get_alias_set (op
);
1767 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
1768 if ((cst
= fully_constant_vn_reference_p (&vr1
)))
1771 if (kind
!= VN_NOWALK
1774 vn_reference_t wvnresult
;
1776 /* Make sure to use a valueized reference if we valueized anything.
1777 Otherwise preserve the full reference for advanced TBAA. */
1778 if (!valuezied_anything
1779 || !ao_ref_init_from_vn_reference (&r
, vr1
.set
, vr1
.type
,
1781 ao_ref_init (&r
, op
);
1782 vn_walk_kind
= kind
;
1784 (vn_reference_t
)walk_non_aliased_vuses (&r
, vr1
.vuse
,
1785 vn_reference_lookup_2
,
1786 vn_reference_lookup_3
, &vr1
);
1787 if (vr1
.operands
!= operands
)
1788 VEC_free (vn_reference_op_s
, heap
, vr1
.operands
);
1792 *vnresult
= wvnresult
;
1793 return wvnresult
->result
;
1799 return vn_reference_lookup_1 (&vr1
, vnresult
);
1803 /* Insert OP into the current hash table with a value number of
1804 RESULT, and return the resulting reference structure we created. */
1807 vn_reference_insert (tree op
, tree result
, tree vuse
)
1812 vr1
= (vn_reference_t
) pool_alloc (current_info
->references_pool
);
1813 if (TREE_CODE (result
) == SSA_NAME
)
1814 vr1
->value_id
= VN_INFO (result
)->value_id
;
1816 vr1
->value_id
= get_or_alloc_constant_value_id (result
);
1817 vr1
->vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
1818 vr1
->operands
= valueize_refs (create_reference_ops_from_ref (op
));
1819 vr1
->type
= TREE_TYPE (op
);
1820 vr1
->set
= get_alias_set (op
);
1821 vr1
->hashcode
= vn_reference_compute_hash (vr1
);
1822 vr1
->result
= TREE_CODE (result
) == SSA_NAME
? SSA_VAL (result
) : result
;
1824 slot
= htab_find_slot_with_hash (current_info
->references
, vr1
, vr1
->hashcode
,
1827 /* Because we lookup stores using vuses, and value number failures
1828 using the vdefs (see visit_reference_op_store for how and why),
1829 it's possible that on failure we may try to insert an already
1830 inserted store. This is not wrong, there is no ssa name for a
1831 store that we could use as a differentiator anyway. Thus, unlike
1832 the other lookup functions, you cannot gcc_assert (!*slot)
1835 /* But free the old slot in case of a collision. */
1837 free_reference (*slot
);
1843 /* Insert a reference by it's pieces into the current hash table with
1844 a value number of RESULT. Return the resulting reference
1845 structure we created. */
1848 vn_reference_insert_pieces (tree vuse
, alias_set_type set
, tree type
,
1849 VEC (vn_reference_op_s
, heap
) *operands
,
1850 tree result
, unsigned int value_id
)
1856 vr1
= (vn_reference_t
) pool_alloc (current_info
->references_pool
);
1857 vr1
->value_id
= value_id
;
1858 vr1
->vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
1859 vr1
->operands
= valueize_refs (operands
);
1862 vr1
->hashcode
= vn_reference_compute_hash (vr1
);
1863 if (result
&& TREE_CODE (result
) == SSA_NAME
)
1864 result
= SSA_VAL (result
);
1865 vr1
->result
= result
;
1867 slot
= htab_find_slot_with_hash (current_info
->references
, vr1
, vr1
->hashcode
,
1870 /* At this point we should have all the things inserted that we have
1871 seen before, and we should never try inserting something that
1873 gcc_assert (!*slot
);
1875 free_reference (*slot
);
1881 /* Compute and return the hash value for nary operation VBO1. */
1884 vn_nary_op_compute_hash (const vn_nary_op_t vno1
)
1889 for (i
= 0; i
< vno1
->length
; ++i
)
1890 if (TREE_CODE (vno1
->op
[i
]) == SSA_NAME
)
1891 vno1
->op
[i
] = SSA_VAL (vno1
->op
[i
]);
1893 if (vno1
->length
== 2
1894 && commutative_tree_code (vno1
->opcode
)
1895 && tree_swap_operands_p (vno1
->op
[0], vno1
->op
[1], false))
1897 tree temp
= vno1
->op
[0];
1898 vno1
->op
[0] = vno1
->op
[1];
1902 hash
= iterative_hash_hashval_t (vno1
->opcode
, 0);
1903 for (i
= 0; i
< vno1
->length
; ++i
)
1904 hash
= iterative_hash_expr (vno1
->op
[i
], hash
);
1909 /* Return the computed hashcode for nary operation P1. */
1912 vn_nary_op_hash (const void *p1
)
1914 const_vn_nary_op_t
const vno1
= (const_vn_nary_op_t
) p1
;
1915 return vno1
->hashcode
;
1918 /* Compare nary operations P1 and P2 and return true if they are
1922 vn_nary_op_eq (const void *p1
, const void *p2
)
1924 const_vn_nary_op_t
const vno1
= (const_vn_nary_op_t
) p1
;
1925 const_vn_nary_op_t
const vno2
= (const_vn_nary_op_t
) p2
;
1928 if (vno1
->hashcode
!= vno2
->hashcode
)
1931 if (vno1
->length
!= vno2
->length
)
1934 if (vno1
->opcode
!= vno2
->opcode
1935 || !types_compatible_p (vno1
->type
, vno2
->type
))
1938 for (i
= 0; i
< vno1
->length
; ++i
)
1939 if (!expressions_equal_p (vno1
->op
[i
], vno2
->op
[i
]))
1945 /* Initialize VNO from the pieces provided. */
1948 init_vn_nary_op_from_pieces (vn_nary_op_t vno
, unsigned int length
,
1949 enum tree_code code
, tree type
, tree
*ops
)
1952 vno
->length
= length
;
1954 memcpy (&vno
->op
[0], ops
, sizeof (tree
) * length
);
1957 /* Initialize VNO from OP. */
1960 init_vn_nary_op_from_op (vn_nary_op_t vno
, tree op
)
1964 vno
->opcode
= TREE_CODE (op
);
1965 vno
->length
= TREE_CODE_LENGTH (TREE_CODE (op
));
1966 vno
->type
= TREE_TYPE (op
);
1967 for (i
= 0; i
< vno
->length
; ++i
)
1968 vno
->op
[i
] = TREE_OPERAND (op
, i
);
1971 /* Return the number of operands for a vn_nary ops structure from STMT. */
1974 vn_nary_length_from_stmt (gimple stmt
)
1976 switch (gimple_assign_rhs_code (stmt
))
1980 case VIEW_CONVERT_EXPR
:
1984 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt
));
1987 return gimple_num_ops (stmt
) - 1;
1991 /* Initialize VNO from STMT. */
1994 init_vn_nary_op_from_stmt (vn_nary_op_t vno
, gimple stmt
)
1998 vno
->opcode
= gimple_assign_rhs_code (stmt
);
1999 vno
->type
= gimple_expr_type (stmt
);
2000 switch (vno
->opcode
)
2004 case VIEW_CONVERT_EXPR
:
2006 vno
->op
[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
2010 vno
->length
= CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt
));
2011 for (i
= 0; i
< vno
->length
; ++i
)
2012 vno
->op
[i
] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt
), i
)->value
;
2016 vno
->length
= gimple_num_ops (stmt
) - 1;
2017 for (i
= 0; i
< vno
->length
; ++i
)
2018 vno
->op
[i
] = gimple_op (stmt
, i
+ 1);
2022 /* Compute the hashcode for VNO and look for it in the hash table;
2023 return the resulting value number if it exists in the hash table.
2024 Return NULL_TREE if it does not exist in the hash table or if the
2025 result field of the operation is NULL. VNRESULT will contain the
2026 vn_nary_op_t from the hashtable if it exists. */
2029 vn_nary_op_lookup_1 (vn_nary_op_t vno
, vn_nary_op_t
*vnresult
)
2036 vno
->hashcode
= vn_nary_op_compute_hash (vno
);
2037 slot
= htab_find_slot_with_hash (current_info
->nary
, vno
, vno
->hashcode
,
2039 if (!slot
&& current_info
== optimistic_info
)
2040 slot
= htab_find_slot_with_hash (valid_info
->nary
, vno
, vno
->hashcode
,
2045 *vnresult
= (vn_nary_op_t
)*slot
;
2046 return ((vn_nary_op_t
)*slot
)->result
;
2049 /* Lookup a n-ary operation by its pieces and return the resulting value
2050 number if it exists in the hash table. Return NULL_TREE if it does
2051 not exist in the hash table or if the result field of the operation
2052 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2056 vn_nary_op_lookup_pieces (unsigned int length
, enum tree_code code
,
2057 tree type
, tree
*ops
, vn_nary_op_t
*vnresult
)
2059 vn_nary_op_t vno1
= XALLOCAVAR (struct vn_nary_op_s
,
2060 sizeof_vn_nary_op (length
));
2061 init_vn_nary_op_from_pieces (vno1
, length
, code
, type
, ops
);
2062 return vn_nary_op_lookup_1 (vno1
, vnresult
);
2065 /* Lookup OP in the current hash table, and return the resulting value
2066 number if it exists in the hash table. Return NULL_TREE if it does
2067 not exist in the hash table or if the result field of the operation
2068 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2072 vn_nary_op_lookup (tree op
, vn_nary_op_t
*vnresult
)
2075 = XALLOCAVAR (struct vn_nary_op_s
,
2076 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op
))));
2077 init_vn_nary_op_from_op (vno1
, op
);
2078 return vn_nary_op_lookup_1 (vno1
, vnresult
);
2081 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2082 value number if it exists in the hash table. Return NULL_TREE if
2083 it does not exist in the hash table. VNRESULT will contain the
2084 vn_nary_op_t from the hashtable if it exists. */
2087 vn_nary_op_lookup_stmt (gimple stmt
, vn_nary_op_t
*vnresult
)
2090 = XALLOCAVAR (struct vn_nary_op_s
,
2091 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt
)));
2092 init_vn_nary_op_from_stmt (vno1
, stmt
);
2093 return vn_nary_op_lookup_1 (vno1
, vnresult
);
2096 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2099 alloc_vn_nary_op_noinit (unsigned int length
, struct obstack
*stack
)
2101 return (vn_nary_op_t
) obstack_alloc (stack
, sizeof_vn_nary_op (length
));
2104 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2108 alloc_vn_nary_op (unsigned int length
, tree result
, unsigned int value_id
)
2110 vn_nary_op_t vno1
= alloc_vn_nary_op_noinit (length
,
2111 ¤t_info
->nary_obstack
);
2113 vno1
->value_id
= value_id
;
2114 vno1
->length
= length
;
2115 vno1
->result
= result
;
2120 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2121 VNO->HASHCODE first. */
2124 vn_nary_op_insert_into (vn_nary_op_t vno
, htab_t table
, bool compute_hash
)
2129 vno
->hashcode
= vn_nary_op_compute_hash (vno
);
2131 slot
= htab_find_slot_with_hash (table
, vno
, vno
->hashcode
, INSERT
);
2132 gcc_assert (!*slot
);
2138 /* Insert a n-ary operation into the current hash table using it's
2139 pieces. Return the vn_nary_op_t structure we created and put in
2143 vn_nary_op_insert_pieces (unsigned int length
, enum tree_code code
,
2144 tree type
, tree
*ops
,
2145 tree result
, unsigned int value_id
)
2147 vn_nary_op_t vno1
= alloc_vn_nary_op (length
, result
, value_id
);
2148 init_vn_nary_op_from_pieces (vno1
, length
, code
, type
, ops
);
2149 return vn_nary_op_insert_into (vno1
, current_info
->nary
, true);
2152 /* Insert OP into the current hash table with a value number of
2153 RESULT. Return the vn_nary_op_t structure we created and put in
2157 vn_nary_op_insert (tree op
, tree result
)
2159 unsigned length
= TREE_CODE_LENGTH (TREE_CODE (op
));
2162 vno1
= alloc_vn_nary_op (length
, result
, VN_INFO (result
)->value_id
);
2163 init_vn_nary_op_from_op (vno1
, op
);
2164 return vn_nary_op_insert_into (vno1
, current_info
->nary
, true);
2167 /* Insert the rhs of STMT into the current hash table with a value number of
2171 vn_nary_op_insert_stmt (gimple stmt
, tree result
)
2174 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt
),
2175 result
, VN_INFO (result
)->value_id
);
2176 init_vn_nary_op_from_stmt (vno1
, stmt
);
2177 return vn_nary_op_insert_into (vno1
, current_info
->nary
, true);
2180 /* Compute a hashcode for PHI operation VP1 and return it. */
2182 static inline hashval_t
2183 vn_phi_compute_hash (vn_phi_t vp1
)
2190 result
= vp1
->block
->index
;
2192 /* If all PHI arguments are constants we need to distinguish
2193 the PHI node via its type. */
2194 type
= TREE_TYPE (VEC_index (tree
, vp1
->phiargs
, 0));
2195 result
+= (INTEGRAL_TYPE_P (type
)
2196 + (INTEGRAL_TYPE_P (type
)
2197 ? TYPE_PRECISION (type
) + TYPE_UNSIGNED (type
) : 0));
2199 FOR_EACH_VEC_ELT (tree
, vp1
->phiargs
, i
, phi1op
)
2201 if (phi1op
== VN_TOP
)
2203 result
= iterative_hash_expr (phi1op
, result
);
2209 /* Return the computed hashcode for phi operation P1. */
2212 vn_phi_hash (const void *p1
)
2214 const_vn_phi_t
const vp1
= (const_vn_phi_t
) p1
;
2215 return vp1
->hashcode
;
2218 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2221 vn_phi_eq (const void *p1
, const void *p2
)
2223 const_vn_phi_t
const vp1
= (const_vn_phi_t
) p1
;
2224 const_vn_phi_t
const vp2
= (const_vn_phi_t
) p2
;
2226 if (vp1
->hashcode
!= vp2
->hashcode
)
2229 if (vp1
->block
== vp2
->block
)
2234 /* If the PHI nodes do not have compatible types
2235 they are not the same. */
2236 if (!types_compatible_p (TREE_TYPE (VEC_index (tree
, vp1
->phiargs
, 0)),
2237 TREE_TYPE (VEC_index (tree
, vp2
->phiargs
, 0))))
2240 /* Any phi in the same block will have it's arguments in the
2241 same edge order, because of how we store phi nodes. */
2242 FOR_EACH_VEC_ELT (tree
, vp1
->phiargs
, i
, phi1op
)
2244 tree phi2op
= VEC_index (tree
, vp2
->phiargs
, i
);
2245 if (phi1op
== VN_TOP
|| phi2op
== VN_TOP
)
2247 if (!expressions_equal_p (phi1op
, phi2op
))
2255 static VEC(tree
, heap
) *shared_lookup_phiargs
;
2257 /* Lookup PHI in the current hash table, and return the resulting
2258 value number if it exists in the hash table. Return NULL_TREE if
2259 it does not exist in the hash table. */
2262 vn_phi_lookup (gimple phi
)
2265 struct vn_phi_s vp1
;
2268 VEC_truncate (tree
, shared_lookup_phiargs
, 0);
2270 /* Canonicalize the SSA_NAME's to their value number. */
2271 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
2273 tree def
= PHI_ARG_DEF (phi
, i
);
2274 def
= TREE_CODE (def
) == SSA_NAME
? SSA_VAL (def
) : def
;
2275 VEC_safe_push (tree
, heap
, shared_lookup_phiargs
, def
);
2277 vp1
.phiargs
= shared_lookup_phiargs
;
2278 vp1
.block
= gimple_bb (phi
);
2279 vp1
.hashcode
= vn_phi_compute_hash (&vp1
);
2280 slot
= htab_find_slot_with_hash (current_info
->phis
, &vp1
, vp1
.hashcode
,
2282 if (!slot
&& current_info
== optimistic_info
)
2283 slot
= htab_find_slot_with_hash (valid_info
->phis
, &vp1
, vp1
.hashcode
,
2287 return ((vn_phi_t
)*slot
)->result
;
2290 /* Insert PHI into the current hash table with a value number of
2294 vn_phi_insert (gimple phi
, tree result
)
2297 vn_phi_t vp1
= (vn_phi_t
) pool_alloc (current_info
->phis_pool
);
2299 VEC (tree
, heap
) *args
= NULL
;
2301 /* Canonicalize the SSA_NAME's to their value number. */
2302 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
2304 tree def
= PHI_ARG_DEF (phi
, i
);
2305 def
= TREE_CODE (def
) == SSA_NAME
? SSA_VAL (def
) : def
;
2306 VEC_safe_push (tree
, heap
, args
, def
);
2308 vp1
->value_id
= VN_INFO (result
)->value_id
;
2309 vp1
->phiargs
= args
;
2310 vp1
->block
= gimple_bb (phi
);
2311 vp1
->result
= result
;
2312 vp1
->hashcode
= vn_phi_compute_hash (vp1
);
2314 slot
= htab_find_slot_with_hash (current_info
->phis
, vp1
, vp1
->hashcode
,
2317 /* Because we iterate over phi operations more than once, it's
2318 possible the slot might already exist here, hence no assert.*/
2324 /* Print set of components in strongly connected component SCC to OUT. */
2327 print_scc (FILE *out
, VEC (tree
, heap
) *scc
)
2332 fprintf (out
, "SCC consists of: ");
2333 FOR_EACH_VEC_ELT (tree
, scc
, i
, var
)
2335 print_generic_expr (out
, var
, 0);
2338 fprintf (out
, "\n");
2341 /* Set the value number of FROM to TO, return true if it has changed
2345 set_ssa_val_to (tree from
, tree to
)
2347 tree currval
= SSA_VAL (from
);
2351 if (currval
== from
)
2353 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2355 fprintf (dump_file
, "Not changing value number of ");
2356 print_generic_expr (dump_file
, from
, 0);
2357 fprintf (dump_file
, " from VARYING to ");
2358 print_generic_expr (dump_file
, to
, 0);
2359 fprintf (dump_file
, "\n");
2363 else if (TREE_CODE (to
) == SSA_NAME
2364 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to
))
2368 /* The only thing we allow as value numbers are VN_TOP, ssa_names
2369 and invariants. So assert that here. */
2370 gcc_assert (to
!= NULL_TREE
2372 || TREE_CODE (to
) == SSA_NAME
2373 || is_gimple_min_invariant (to
)));
2375 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2377 fprintf (dump_file
, "Setting value number of ");
2378 print_generic_expr (dump_file
, from
, 0);
2379 fprintf (dump_file
, " to ");
2380 print_generic_expr (dump_file
, to
, 0);
2383 if (currval
!= to
&& !operand_equal_p (currval
, to
, OEP_PURE_SAME
))
2385 VN_INFO (from
)->valnum
= to
;
2386 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2387 fprintf (dump_file
, " (changed)\n");
2390 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2391 fprintf (dump_file
, "\n");
2395 /* Set all definitions in STMT to value number to themselves.
2396 Return true if a value number changed. */
2399 defs_to_varying (gimple stmt
)
2401 bool changed
= false;
2405 FOR_EACH_SSA_DEF_OPERAND (defp
, stmt
, iter
, SSA_OP_ALL_DEFS
)
2407 tree def
= DEF_FROM_PTR (defp
);
2409 VN_INFO (def
)->use_processed
= true;
2410 changed
|= set_ssa_val_to (def
, def
);
2415 static bool expr_has_constants (tree expr
);
2416 static tree
valueize_expr (tree expr
);
2418 /* Visit a copy between LHS and RHS, return true if the value number
2422 visit_copy (tree lhs
, tree rhs
)
2424 /* Follow chains of copies to their destination. */
2425 while (TREE_CODE (rhs
) == SSA_NAME
2426 && SSA_VAL (rhs
) != rhs
)
2427 rhs
= SSA_VAL (rhs
);
2429 /* The copy may have a more interesting constant filled expression
2430 (we don't, since we know our RHS is just an SSA name). */
2431 if (TREE_CODE (rhs
) == SSA_NAME
)
2433 VN_INFO (lhs
)->has_constants
= VN_INFO (rhs
)->has_constants
;
2434 VN_INFO (lhs
)->expr
= VN_INFO (rhs
)->expr
;
2437 return set_ssa_val_to (lhs
, rhs
);
2440 /* Visit a nary operator RHS, value number it, and return true if the
2441 value number of LHS has changed as a result. */
2444 visit_nary_op (tree lhs
, gimple stmt
)
2446 bool changed
= false;
2447 tree result
= vn_nary_op_lookup_stmt (stmt
, NULL
);
2450 changed
= set_ssa_val_to (lhs
, result
);
2453 changed
= set_ssa_val_to (lhs
, lhs
);
2454 vn_nary_op_insert_stmt (stmt
, lhs
);
2460 /* Visit a call STMT storing into LHS. Return true if the value number
2461 of the LHS has changed as a result. */
2464 visit_reference_op_call (tree lhs
, gimple stmt
)
2466 bool changed
= false;
2467 struct vn_reference_s vr1
;
2469 tree vuse
= gimple_vuse (stmt
);
2471 vr1
.vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
2472 vr1
.operands
= valueize_shared_reference_ops_from_call (stmt
);
2473 vr1
.type
= gimple_expr_type (stmt
);
2475 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
2476 result
= vn_reference_lookup_1 (&vr1
, NULL
);
2479 changed
= set_ssa_val_to (lhs
, result
);
2480 if (TREE_CODE (result
) == SSA_NAME
2481 && VN_INFO (result
)->has_constants
)
2482 VN_INFO (lhs
)->has_constants
= true;
2488 changed
= set_ssa_val_to (lhs
, lhs
);
2489 vr2
= (vn_reference_t
) pool_alloc (current_info
->references_pool
);
2490 vr2
->vuse
= vr1
.vuse
;
2491 vr2
->operands
= valueize_refs (create_reference_ops_from_call (stmt
));
2492 vr2
->type
= vr1
.type
;
2494 vr2
->hashcode
= vr1
.hashcode
;
2496 slot
= htab_find_slot_with_hash (current_info
->references
,
2497 vr2
, vr2
->hashcode
, INSERT
);
2499 free_reference (*slot
);
2506 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2507 and return true if the value number of the LHS has changed as a result. */
2510 visit_reference_op_load (tree lhs
, tree op
, gimple stmt
)
2512 bool changed
= false;
2516 last_vuse
= gimple_vuse (stmt
);
2517 last_vuse_ptr
= &last_vuse
;
2518 result
= vn_reference_lookup (op
, gimple_vuse (stmt
),
2519 default_vn_walk_kind
, NULL
);
2520 last_vuse_ptr
= NULL
;
2522 /* If we have a VCE, try looking up its operand as it might be stored in
2523 a different type. */
2524 if (!result
&& TREE_CODE (op
) == VIEW_CONVERT_EXPR
)
2525 result
= vn_reference_lookup (TREE_OPERAND (op
, 0), gimple_vuse (stmt
),
2526 default_vn_walk_kind
, NULL
);
2528 /* We handle type-punning through unions by value-numbering based
2529 on offset and size of the access. Be prepared to handle a
2530 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2532 && !useless_type_conversion_p (TREE_TYPE (result
), TREE_TYPE (op
)))
2534 /* We will be setting the value number of lhs to the value number
2535 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2536 So first simplify and lookup this expression to see if it
2537 is already available. */
2538 tree val
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (op
), result
);
2539 if ((CONVERT_EXPR_P (val
)
2540 || TREE_CODE (val
) == VIEW_CONVERT_EXPR
)
2541 && TREE_CODE (TREE_OPERAND (val
, 0)) == SSA_NAME
)
2543 tree tem
= valueize_expr (vn_get_expr_for (TREE_OPERAND (val
, 0)));
2544 if ((CONVERT_EXPR_P (tem
)
2545 || TREE_CODE (tem
) == VIEW_CONVERT_EXPR
)
2546 && (tem
= fold_unary_ignore_overflow (TREE_CODE (val
),
2547 TREE_TYPE (val
), tem
)))
2551 if (!is_gimple_min_invariant (val
)
2552 && TREE_CODE (val
) != SSA_NAME
)
2553 result
= vn_nary_op_lookup (val
, NULL
);
2554 /* If the expression is not yet available, value-number lhs to
2555 a new SSA_NAME we create. */
2558 result
= make_ssa_name (SSA_NAME_VAR (lhs
), gimple_build_nop ());
2559 /* Initialize value-number information properly. */
2560 VN_INFO_GET (result
)->valnum
= result
;
2561 VN_INFO (result
)->value_id
= get_next_value_id ();
2562 VN_INFO (result
)->expr
= val
;
2563 VN_INFO (result
)->has_constants
= expr_has_constants (val
);
2564 VN_INFO (result
)->needs_insertion
= true;
2565 /* As all "inserted" statements are singleton SCCs, insert
2566 to the valid table. This is strictly needed to
2567 avoid re-generating new value SSA_NAMEs for the same
2568 expression during SCC iteration over and over (the
2569 optimistic table gets cleared after each iteration).
2570 We do not need to insert into the optimistic table, as
2571 lookups there will fall back to the valid table. */
2572 if (current_info
== optimistic_info
)
2574 current_info
= valid_info
;
2575 vn_nary_op_insert (val
, result
);
2576 current_info
= optimistic_info
;
2579 vn_nary_op_insert (val
, result
);
2580 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2582 fprintf (dump_file
, "Inserting name ");
2583 print_generic_expr (dump_file
, result
, 0);
2584 fprintf (dump_file
, " for expression ");
2585 print_generic_expr (dump_file
, val
, 0);
2586 fprintf (dump_file
, "\n");
2593 changed
= set_ssa_val_to (lhs
, result
);
2594 if (TREE_CODE (result
) == SSA_NAME
2595 && VN_INFO (result
)->has_constants
)
2597 VN_INFO (lhs
)->expr
= VN_INFO (result
)->expr
;
2598 VN_INFO (lhs
)->has_constants
= true;
2603 changed
= set_ssa_val_to (lhs
, lhs
);
2604 vn_reference_insert (op
, lhs
, last_vuse
);
2611 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2612 and return true if the value number of the LHS has changed as a result. */
2615 visit_reference_op_store (tree lhs
, tree op
, gimple stmt
)
2617 bool changed
= false;
2619 bool resultsame
= false;
2621 /* First we want to lookup using the *vuses* from the store and see
2622 if there the last store to this location with the same address
2625 The vuses represent the memory state before the store. If the
2626 memory state, address, and value of the store is the same as the
2627 last store to this location, then this store will produce the
2628 same memory state as that store.
2630 In this case the vdef versions for this store are value numbered to those
2631 vuse versions, since they represent the same memory state after
2634 Otherwise, the vdefs for the store are used when inserting into
2635 the table, since the store generates a new memory state. */
2637 result
= vn_reference_lookup (lhs
, gimple_vuse (stmt
), VN_NOWALK
, NULL
);
2641 if (TREE_CODE (result
) == SSA_NAME
)
2642 result
= SSA_VAL (result
);
2643 if (TREE_CODE (op
) == SSA_NAME
)
2645 resultsame
= expressions_equal_p (result
, op
);
2648 if (!result
|| !resultsame
)
2652 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2654 fprintf (dump_file
, "No store match\n");
2655 fprintf (dump_file
, "Value numbering store ");
2656 print_generic_expr (dump_file
, lhs
, 0);
2657 fprintf (dump_file
, " to ");
2658 print_generic_expr (dump_file
, op
, 0);
2659 fprintf (dump_file
, "\n");
2661 /* Have to set value numbers before insert, since insert is
2662 going to valueize the references in-place. */
2663 if ((vdef
= gimple_vdef (stmt
)))
2665 VN_INFO (vdef
)->use_processed
= true;
2666 changed
|= set_ssa_val_to (vdef
, vdef
);
2669 /* Do not insert structure copies into the tables. */
2670 if (is_gimple_min_invariant (op
)
2671 || is_gimple_reg (op
))
2672 vn_reference_insert (lhs
, op
, vdef
);
2676 /* We had a match, so value number the vdef to have the value
2677 number of the vuse it came from. */
2680 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2681 fprintf (dump_file
, "Store matched earlier value,"
2682 "value numbering store vdefs to matching vuses.\n");
2684 def
= gimple_vdef (stmt
);
2685 use
= gimple_vuse (stmt
);
2687 VN_INFO (def
)->use_processed
= true;
2688 changed
|= set_ssa_val_to (def
, SSA_VAL (use
));
2694 /* Visit and value number PHI, return true if the value number
2698 visit_phi (gimple phi
)
2700 bool changed
= false;
2702 tree sameval
= VN_TOP
;
2703 bool allsame
= true;
2706 /* TODO: We could check for this in init_sccvn, and replace this
2707 with a gcc_assert. */
2708 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi
)))
2709 return set_ssa_val_to (PHI_RESULT (phi
), PHI_RESULT (phi
));
2711 /* See if all non-TOP arguments have the same value. TOP is
2712 equivalent to everything, so we can ignore it. */
2713 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
2715 tree def
= PHI_ARG_DEF (phi
, i
);
2717 if (TREE_CODE (def
) == SSA_NAME
)
2718 def
= SSA_VAL (def
);
2721 if (sameval
== VN_TOP
)
2727 if (!expressions_equal_p (def
, sameval
))
2735 /* If all value numbered to the same value, the phi node has that
2739 if (is_gimple_min_invariant (sameval
))
2741 VN_INFO (PHI_RESULT (phi
))->has_constants
= true;
2742 VN_INFO (PHI_RESULT (phi
))->expr
= sameval
;
2746 VN_INFO (PHI_RESULT (phi
))->has_constants
= false;
2747 VN_INFO (PHI_RESULT (phi
))->expr
= sameval
;
2750 if (TREE_CODE (sameval
) == SSA_NAME
)
2751 return visit_copy (PHI_RESULT (phi
), sameval
);
2753 return set_ssa_val_to (PHI_RESULT (phi
), sameval
);
2756 /* Otherwise, see if it is equivalent to a phi node in this block. */
2757 result
= vn_phi_lookup (phi
);
2760 if (TREE_CODE (result
) == SSA_NAME
)
2761 changed
= visit_copy (PHI_RESULT (phi
), result
);
2763 changed
= set_ssa_val_to (PHI_RESULT (phi
), result
);
2767 vn_phi_insert (phi
, PHI_RESULT (phi
));
2768 VN_INFO (PHI_RESULT (phi
))->has_constants
= false;
2769 VN_INFO (PHI_RESULT (phi
))->expr
= PHI_RESULT (phi
);
2770 changed
= set_ssa_val_to (PHI_RESULT (phi
), PHI_RESULT (phi
));
2776 /* Return true if EXPR contains constants. */
2779 expr_has_constants (tree expr
)
2781 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
2784 return is_gimple_min_invariant (TREE_OPERAND (expr
, 0));
2787 return is_gimple_min_invariant (TREE_OPERAND (expr
, 0))
2788 || is_gimple_min_invariant (TREE_OPERAND (expr
, 1));
2789 /* Constants inside reference ops are rarely interesting, but
2790 it can take a lot of looking to find them. */
2792 case tcc_declaration
:
2795 return is_gimple_min_invariant (expr
);
2800 /* Return true if STMT contains constants. */
2803 stmt_has_constants (gimple stmt
)
2805 if (gimple_code (stmt
) != GIMPLE_ASSIGN
)
2808 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
)))
2810 case GIMPLE_UNARY_RHS
:
2811 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt
));
2813 case GIMPLE_BINARY_RHS
:
2814 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt
))
2815 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt
)));
2816 case GIMPLE_TERNARY_RHS
:
2817 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt
))
2818 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt
))
2819 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt
)));
2820 case GIMPLE_SINGLE_RHS
:
2821 /* Constants inside reference ops are rarely interesting, but
2822 it can take a lot of looking to find them. */
2823 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt
));
2830 /* Replace SSA_NAMES in expr with their value numbers, and return the
2832 This is performed in place. */
2835 valueize_expr (tree expr
)
2837 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
2840 TREE_OPERAND (expr
, 1) = vn_valueize (TREE_OPERAND (expr
, 1));
2843 TREE_OPERAND (expr
, 0) = vn_valueize (TREE_OPERAND (expr
, 0));
2850 /* Simplify the binary expression RHS, and return the result if
2854 simplify_binary_expression (gimple stmt
)
2856 tree result
= NULL_TREE
;
2857 tree op0
= gimple_assign_rhs1 (stmt
);
2858 tree op1
= gimple_assign_rhs2 (stmt
);
2859 enum tree_code code
= gimple_assign_rhs_code (stmt
);
2861 /* This will not catch every single case we could combine, but will
2862 catch those with constants. The goal here is to simultaneously
2863 combine constants between expressions, but avoid infinite
2864 expansion of expressions during simplification. */
2865 if (TREE_CODE (op0
) == SSA_NAME
)
2867 if (VN_INFO (op0
)->has_constants
2868 || TREE_CODE_CLASS (code
) == tcc_comparison
2869 || code
== COMPLEX_EXPR
)
2870 op0
= valueize_expr (vn_get_expr_for (op0
));
2872 op0
= vn_valueize (op0
);
2875 if (TREE_CODE (op1
) == SSA_NAME
)
2877 if (VN_INFO (op1
)->has_constants
2878 || code
== COMPLEX_EXPR
)
2879 op1
= valueize_expr (vn_get_expr_for (op1
));
2881 op1
= vn_valueize (op1
);
2884 /* Pointer plus constant can be represented as invariant address.
2885 Do so to allow further propatation, see also tree forwprop. */
2886 if (code
== POINTER_PLUS_EXPR
2887 && host_integerp (op1
, 1)
2888 && TREE_CODE (op0
) == ADDR_EXPR
2889 && is_gimple_min_invariant (op0
))
2890 return build_invariant_address (TREE_TYPE (op0
),
2891 TREE_OPERAND (op0
, 0),
2892 TREE_INT_CST_LOW (op1
));
2894 /* Avoid folding if nothing changed. */
2895 if (op0
== gimple_assign_rhs1 (stmt
)
2896 && op1
== gimple_assign_rhs2 (stmt
))
2899 fold_defer_overflow_warnings ();
2901 result
= fold_binary (code
, gimple_expr_type (stmt
), op0
, op1
);
2903 STRIP_USELESS_TYPE_CONVERSION (result
);
2905 fold_undefer_overflow_warnings (result
&& valid_gimple_rhs_p (result
),
2908 /* Make sure result is not a complex expression consisting
2909 of operators of operators (IE (a + b) + (a + c))
2910 Otherwise, we will end up with unbounded expressions if
2911 fold does anything at all. */
2912 if (result
&& valid_gimple_rhs_p (result
))
2918 /* Simplify the unary expression RHS, and return the result if
2922 simplify_unary_expression (gimple stmt
)
2924 tree result
= NULL_TREE
;
2925 tree orig_op0
, op0
= gimple_assign_rhs1 (stmt
);
2926 enum tree_code code
= gimple_assign_rhs_code (stmt
);
2928 /* We handle some tcc_reference codes here that are all
2929 GIMPLE_ASSIGN_SINGLE codes. */
2930 if (code
== REALPART_EXPR
2931 || code
== IMAGPART_EXPR
2932 || code
== VIEW_CONVERT_EXPR
2933 || code
== BIT_FIELD_REF
)
2934 op0
= TREE_OPERAND (op0
, 0);
2936 if (TREE_CODE (op0
) != SSA_NAME
)
2940 if (VN_INFO (op0
)->has_constants
)
2941 op0
= valueize_expr (vn_get_expr_for (op0
));
2942 else if (CONVERT_EXPR_CODE_P (code
)
2943 || code
== REALPART_EXPR
2944 || code
== IMAGPART_EXPR
2945 || code
== VIEW_CONVERT_EXPR
2946 || code
== BIT_FIELD_REF
)
2948 /* We want to do tree-combining on conversion-like expressions.
2949 Make sure we feed only SSA_NAMEs or constants to fold though. */
2950 tree tem
= valueize_expr (vn_get_expr_for (op0
));
2951 if (UNARY_CLASS_P (tem
)
2952 || BINARY_CLASS_P (tem
)
2953 || TREE_CODE (tem
) == VIEW_CONVERT_EXPR
2954 || TREE_CODE (tem
) == SSA_NAME
2955 || TREE_CODE (tem
) == CONSTRUCTOR
2956 || is_gimple_min_invariant (tem
))
2960 /* Avoid folding if nothing changed, but remember the expression. */
2961 if (op0
== orig_op0
)
2964 if (code
== BIT_FIELD_REF
)
2966 tree rhs
= gimple_assign_rhs1 (stmt
);
2967 result
= fold_ternary (BIT_FIELD_REF
, TREE_TYPE (rhs
),
2968 op0
, TREE_OPERAND (rhs
, 1), TREE_OPERAND (rhs
, 2));
2971 result
= fold_unary_ignore_overflow (code
, gimple_expr_type (stmt
), op0
);
2974 STRIP_USELESS_TYPE_CONVERSION (result
);
2975 if (valid_gimple_rhs_p (result
))
2982 /* Try to simplify RHS using equivalences and constant folding. */
2985 try_to_simplify (gimple stmt
)
2987 enum tree_code code
= gimple_assign_rhs_code (stmt
);
2990 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
2991 in this case, there is no point in doing extra work. */
2992 if (code
== SSA_NAME
)
2995 /* First try constant folding based on our current lattice. */
2996 tem
= gimple_fold_stmt_to_constant_1 (stmt
, vn_valueize
);
2998 && (TREE_CODE (tem
) == SSA_NAME
2999 || is_gimple_min_invariant (tem
)))
3002 /* If that didn't work try combining multiple statements. */
3003 switch (TREE_CODE_CLASS (code
))
3006 /* Fallthrough for some unary codes that can operate on registers. */
3007 if (!(code
== REALPART_EXPR
3008 || code
== IMAGPART_EXPR
3009 || code
== VIEW_CONVERT_EXPR
3010 || code
== BIT_FIELD_REF
))
3012 /* We could do a little more with unary ops, if they expand
3013 into binary ops, but it's debatable whether it is worth it. */
3015 return simplify_unary_expression (stmt
);
3017 case tcc_comparison
:
3019 return simplify_binary_expression (stmt
);
3028 /* Visit and value number USE, return true if the value number
3032 visit_use (tree use
)
3034 bool changed
= false;
3035 gimple stmt
= SSA_NAME_DEF_STMT (use
);
3037 VN_INFO (use
)->use_processed
= true;
3039 gcc_assert (!SSA_NAME_IN_FREE_LIST (use
));
3040 if (dump_file
&& (dump_flags
& TDF_DETAILS
)
3041 && !SSA_NAME_IS_DEFAULT_DEF (use
))
3043 fprintf (dump_file
, "Value numbering ");
3044 print_generic_expr (dump_file
, use
, 0);
3045 fprintf (dump_file
, " stmt = ");
3046 print_gimple_stmt (dump_file
, stmt
, 0, 0);
3049 /* Handle uninitialized uses. */
3050 if (SSA_NAME_IS_DEFAULT_DEF (use
))
3051 changed
= set_ssa_val_to (use
, use
);
3054 if (gimple_code (stmt
) == GIMPLE_PHI
)
3055 changed
= visit_phi (stmt
);
3056 else if (!gimple_has_lhs (stmt
)
3057 || gimple_has_volatile_ops (stmt
)
3058 || stmt_could_throw_p (stmt
))
3059 changed
= defs_to_varying (stmt
);
3060 else if (is_gimple_assign (stmt
))
3062 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3063 tree lhs
= gimple_assign_lhs (stmt
);
3064 tree rhs1
= gimple_assign_rhs1 (stmt
);
3067 /* Shortcut for copies. Simplifying copies is pointless,
3068 since we copy the expression and value they represent. */
3069 if (code
== SSA_NAME
3070 && TREE_CODE (lhs
) == SSA_NAME
)
3072 changed
= visit_copy (lhs
, rhs1
);
3075 simplified
= try_to_simplify (stmt
);
3078 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3080 fprintf (dump_file
, "RHS ");
3081 print_gimple_expr (dump_file
, stmt
, 0, 0);
3082 fprintf (dump_file
, " simplified to ");
3083 print_generic_expr (dump_file
, simplified
, 0);
3084 if (TREE_CODE (lhs
) == SSA_NAME
)
3085 fprintf (dump_file
, " has constants %d\n",
3086 expr_has_constants (simplified
));
3088 fprintf (dump_file
, "\n");
3091 /* Setting value numbers to constants will occasionally
3092 screw up phi congruence because constants are not
3093 uniquely associated with a single ssa name that can be
3096 && is_gimple_min_invariant (simplified
)
3097 && TREE_CODE (lhs
) == SSA_NAME
)
3099 VN_INFO (lhs
)->expr
= simplified
;
3100 VN_INFO (lhs
)->has_constants
= true;
3101 changed
= set_ssa_val_to (lhs
, simplified
);
3105 && TREE_CODE (simplified
) == SSA_NAME
3106 && TREE_CODE (lhs
) == SSA_NAME
)
3108 changed
= visit_copy (lhs
, simplified
);
3111 else if (simplified
)
3113 if (TREE_CODE (lhs
) == SSA_NAME
)
3115 VN_INFO (lhs
)->has_constants
= expr_has_constants (simplified
);
3116 /* We have to unshare the expression or else
3117 valuizing may change the IL stream. */
3118 VN_INFO (lhs
)->expr
= unshare_expr (simplified
);
3121 else if (stmt_has_constants (stmt
)
3122 && TREE_CODE (lhs
) == SSA_NAME
)
3123 VN_INFO (lhs
)->has_constants
= true;
3124 else if (TREE_CODE (lhs
) == SSA_NAME
)
3126 /* We reset expr and constantness here because we may
3127 have been value numbering optimistically, and
3128 iterating. They may become non-constant in this case,
3129 even if they were optimistically constant. */
3131 VN_INFO (lhs
)->has_constants
= false;
3132 VN_INFO (lhs
)->expr
= NULL_TREE
;
3135 if ((TREE_CODE (lhs
) == SSA_NAME
3136 /* We can substitute SSA_NAMEs that are live over
3137 abnormal edges with their constant value. */
3138 && !(gimple_assign_copy_p (stmt
)
3139 && is_gimple_min_invariant (rhs1
))
3141 && is_gimple_min_invariant (simplified
))
3142 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
3143 /* Stores or copies from SSA_NAMEs that are live over
3144 abnormal edges are a problem. */
3145 || (code
== SSA_NAME
3146 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1
)))
3147 changed
= defs_to_varying (stmt
);
3148 else if (REFERENCE_CLASS_P (lhs
)
3150 changed
= visit_reference_op_store (lhs
, rhs1
, stmt
);
3151 else if (TREE_CODE (lhs
) == SSA_NAME
)
3153 if ((gimple_assign_copy_p (stmt
)
3154 && is_gimple_min_invariant (rhs1
))
3156 && is_gimple_min_invariant (simplified
)))
3158 VN_INFO (lhs
)->has_constants
= true;
3160 changed
= set_ssa_val_to (lhs
, simplified
);
3162 changed
= set_ssa_val_to (lhs
, rhs1
);
3166 switch (get_gimple_rhs_class (code
))
3168 case GIMPLE_UNARY_RHS
:
3169 case GIMPLE_BINARY_RHS
:
3170 case GIMPLE_TERNARY_RHS
:
3171 changed
= visit_nary_op (lhs
, stmt
);
3173 case GIMPLE_SINGLE_RHS
:
3174 switch (TREE_CODE_CLASS (code
))
3177 /* VOP-less references can go through unary case. */
3178 if ((code
== REALPART_EXPR
3179 || code
== IMAGPART_EXPR
3180 || code
== VIEW_CONVERT_EXPR
3181 || code
== BIT_FIELD_REF
)
3182 && TREE_CODE (TREE_OPERAND (rhs1
, 0)) == SSA_NAME
)
3184 changed
= visit_nary_op (lhs
, stmt
);
3188 case tcc_declaration
:
3189 changed
= visit_reference_op_load (lhs
, rhs1
, stmt
);
3192 if (code
== ADDR_EXPR
)
3194 changed
= visit_nary_op (lhs
, stmt
);
3197 else if (code
== CONSTRUCTOR
)
3199 changed
= visit_nary_op (lhs
, stmt
);
3202 changed
= defs_to_varying (stmt
);
3206 changed
= defs_to_varying (stmt
);
3212 changed
= defs_to_varying (stmt
);
3214 else if (is_gimple_call (stmt
))
3216 tree lhs
= gimple_call_lhs (stmt
);
3218 /* ??? We could try to simplify calls. */
3220 if (stmt_has_constants (stmt
)
3221 && TREE_CODE (lhs
) == SSA_NAME
)
3222 VN_INFO (lhs
)->has_constants
= true;
3223 else if (TREE_CODE (lhs
) == SSA_NAME
)
3225 /* We reset expr and constantness here because we may
3226 have been value numbering optimistically, and
3227 iterating. They may become non-constant in this case,
3228 even if they were optimistically constant. */
3229 VN_INFO (lhs
)->has_constants
= false;
3230 VN_INFO (lhs
)->expr
= NULL_TREE
;
3233 if (TREE_CODE (lhs
) == SSA_NAME
3234 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
3235 changed
= defs_to_varying (stmt
);
3236 /* ??? We should handle stores from calls. */
3237 else if (TREE_CODE (lhs
) == SSA_NAME
)
3239 if (!gimple_call_internal_p (stmt
)
3240 && gimple_call_flags (stmt
) & (ECF_PURE
| ECF_CONST
))
3241 changed
= visit_reference_op_call (lhs
, stmt
);
3243 changed
= defs_to_varying (stmt
);
3246 changed
= defs_to_varying (stmt
);
3253 /* Compare two operands by reverse postorder index */
3256 compare_ops (const void *pa
, const void *pb
)
3258 const tree opa
= *((const tree
*)pa
);
3259 const tree opb
= *((const tree
*)pb
);
3260 gimple opstmta
= SSA_NAME_DEF_STMT (opa
);
3261 gimple opstmtb
= SSA_NAME_DEF_STMT (opb
);
3265 if (gimple_nop_p (opstmta
) && gimple_nop_p (opstmtb
))
3266 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
3267 else if (gimple_nop_p (opstmta
))
3269 else if (gimple_nop_p (opstmtb
))
3272 bba
= gimple_bb (opstmta
);
3273 bbb
= gimple_bb (opstmtb
);
3276 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
3284 if (gimple_code (opstmta
) == GIMPLE_PHI
3285 && gimple_code (opstmtb
) == GIMPLE_PHI
)
3286 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
3287 else if (gimple_code (opstmta
) == GIMPLE_PHI
)
3289 else if (gimple_code (opstmtb
) == GIMPLE_PHI
)
3291 else if (gimple_uid (opstmta
) != gimple_uid (opstmtb
))
3292 return gimple_uid (opstmta
) - gimple_uid (opstmtb
);
3294 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
3296 return rpo_numbers
[bba
->index
] - rpo_numbers
[bbb
->index
];
3299 /* Sort an array containing members of a strongly connected component
3300 SCC so that the members are ordered by RPO number.
3301 This means that when the sort is complete, iterating through the
3302 array will give you the members in RPO order. */
3305 sort_scc (VEC (tree
, heap
) *scc
)
3307 VEC_qsort (tree
, scc
, compare_ops
);
3310 /* Insert the no longer used nary ONARY to the hash INFO. */
3313 copy_nary (vn_nary_op_t onary
, vn_tables_t info
)
3315 size_t size
= sizeof_vn_nary_op (onary
->length
);
3316 vn_nary_op_t nary
= alloc_vn_nary_op_noinit (onary
->length
,
3317 &info
->nary_obstack
);
3318 memcpy (nary
, onary
, size
);
3319 vn_nary_op_insert_into (nary
, info
->nary
, false);
3322 /* Insert the no longer used phi OPHI to the hash INFO. */
3325 copy_phi (vn_phi_t ophi
, vn_tables_t info
)
3327 vn_phi_t phi
= (vn_phi_t
) pool_alloc (info
->phis_pool
);
3329 memcpy (phi
, ophi
, sizeof (*phi
));
3330 ophi
->phiargs
= NULL
;
3331 slot
= htab_find_slot_with_hash (info
->phis
, phi
, phi
->hashcode
, INSERT
);
3332 gcc_assert (!*slot
);
3336 /* Insert the no longer used reference OREF to the hash INFO. */
3339 copy_reference (vn_reference_t oref
, vn_tables_t info
)
3343 ref
= (vn_reference_t
) pool_alloc (info
->references_pool
);
3344 memcpy (ref
, oref
, sizeof (*ref
));
3345 oref
->operands
= NULL
;
3346 slot
= htab_find_slot_with_hash (info
->references
, ref
, ref
->hashcode
,
3349 free_reference (*slot
);
3353 /* Process a strongly connected component in the SSA graph. */
3356 process_scc (VEC (tree
, heap
) *scc
)
3360 unsigned int iterations
= 0;
3361 bool changed
= true;
3367 /* If the SCC has a single member, just visit it. */
3368 if (VEC_length (tree
, scc
) == 1)
3370 tree use
= VEC_index (tree
, scc
, 0);
3371 if (VN_INFO (use
)->use_processed
)
3373 /* We need to make sure it doesn't form a cycle itself, which can
3374 happen for self-referential PHI nodes. In that case we would
3375 end up inserting an expression with VN_TOP operands into the
3376 valid table which makes us derive bogus equivalences later.
3377 The cheapest way to check this is to assume it for all PHI nodes. */
3378 if (gimple_code (SSA_NAME_DEF_STMT (use
)) == GIMPLE_PHI
)
3379 /* Fallthru to iteration. */ ;
3387 /* Iterate over the SCC with the optimistic table until it stops
3389 current_info
= optimistic_info
;
3394 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3395 fprintf (dump_file
, "Starting iteration %d\n", iterations
);
3396 /* As we are value-numbering optimistically we have to
3397 clear the expression tables and the simplified expressions
3398 in each iteration until we converge. */
3399 htab_empty (optimistic_info
->nary
);
3400 htab_empty (optimistic_info
->phis
);
3401 htab_empty (optimistic_info
->references
);
3402 obstack_free (&optimistic_info
->nary_obstack
, NULL
);
3403 gcc_obstack_init (&optimistic_info
->nary_obstack
);
3404 empty_alloc_pool (optimistic_info
->phis_pool
);
3405 empty_alloc_pool (optimistic_info
->references_pool
);
3406 FOR_EACH_VEC_ELT (tree
, scc
, i
, var
)
3407 VN_INFO (var
)->expr
= NULL_TREE
;
3408 FOR_EACH_VEC_ELT (tree
, scc
, i
, var
)
3409 changed
|= visit_use (var
);
3412 statistics_histogram_event (cfun
, "SCC iterations", iterations
);
3414 /* Finally, copy the contents of the no longer used optimistic
3415 table to the valid table. */
3416 FOR_EACH_HTAB_ELEMENT (optimistic_info
->nary
, nary
, vn_nary_op_t
, hi
)
3417 copy_nary (nary
, valid_info
);
3418 FOR_EACH_HTAB_ELEMENT (optimistic_info
->phis
, phi
, vn_phi_t
, hi
)
3419 copy_phi (phi
, valid_info
);
3420 FOR_EACH_HTAB_ELEMENT (optimistic_info
->references
, ref
, vn_reference_t
, hi
)
3421 copy_reference (ref
, valid_info
);
3423 current_info
= valid_info
;
3426 DEF_VEC_O(ssa_op_iter
);
3427 DEF_VEC_ALLOC_O(ssa_op_iter
,heap
);
3429 /* Pop the components of the found SCC for NAME off the SCC stack
3430 and process them. Returns true if all went well, false if
3431 we run into resource limits. */
3434 extract_and_process_scc_for_name (tree name
)
3436 VEC (tree
, heap
) *scc
= NULL
;
3439 /* Found an SCC, pop the components off the SCC stack and
3443 x
= VEC_pop (tree
, sccstack
);
3445 VN_INFO (x
)->on_sccstack
= false;
3446 VEC_safe_push (tree
, heap
, scc
, x
);
3447 } while (x
!= name
);
3449 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
3450 if (VEC_length (tree
, scc
)
3451 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE
))
3454 fprintf (dump_file
, "WARNING: Giving up with SCCVN due to "
3455 "SCC size %u exceeding %u\n", VEC_length (tree
, scc
),
3456 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE
));
3460 if (VEC_length (tree
, scc
) > 1)
3463 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3464 print_scc (dump_file
, scc
);
3468 VEC_free (tree
, heap
, scc
);
3473 /* Depth first search on NAME to discover and process SCC's in the SSA
3475 Execution of this algorithm relies on the fact that the SCC's are
3476 popped off the stack in topological order.
3477 Returns true if successful, false if we stopped processing SCC's due
3478 to resource constraints. */
3483 VEC(ssa_op_iter
, heap
) *itervec
= NULL
;
3484 VEC(tree
, heap
) *namevec
= NULL
;
3485 use_operand_p usep
= NULL
;
3492 VN_INFO (name
)->dfsnum
= next_dfs_num
++;
3493 VN_INFO (name
)->visited
= true;
3494 VN_INFO (name
)->low
= VN_INFO (name
)->dfsnum
;
3496 VEC_safe_push (tree
, heap
, sccstack
, name
);
3497 VN_INFO (name
)->on_sccstack
= true;
3498 defstmt
= SSA_NAME_DEF_STMT (name
);
3500 /* Recursively DFS on our operands, looking for SCC's. */
3501 if (!gimple_nop_p (defstmt
))
3503 /* Push a new iterator. */
3504 if (gimple_code (defstmt
) == GIMPLE_PHI
)
3505 usep
= op_iter_init_phiuse (&iter
, defstmt
, SSA_OP_ALL_USES
);
3507 usep
= op_iter_init_use (&iter
, defstmt
, SSA_OP_ALL_USES
);
3510 clear_and_done_ssa_iter (&iter
);
3514 /* If we are done processing uses of a name, go up the stack
3515 of iterators and process SCCs as we found them. */
3516 if (op_iter_done (&iter
))
3518 /* See if we found an SCC. */
3519 if (VN_INFO (name
)->low
== VN_INFO (name
)->dfsnum
)
3520 if (!extract_and_process_scc_for_name (name
))
3522 VEC_free (tree
, heap
, namevec
);
3523 VEC_free (ssa_op_iter
, heap
, itervec
);
3527 /* Check if we are done. */
3528 if (VEC_empty (tree
, namevec
))
3530 VEC_free (tree
, heap
, namevec
);
3531 VEC_free (ssa_op_iter
, heap
, itervec
);
3535 /* Restore the last use walker and continue walking there. */
3537 name
= VEC_pop (tree
, namevec
);
3538 memcpy (&iter
, VEC_last (ssa_op_iter
, itervec
),
3539 sizeof (ssa_op_iter
));
3540 VEC_pop (ssa_op_iter
, itervec
);
3541 goto continue_walking
;
3544 use
= USE_FROM_PTR (usep
);
3546 /* Since we handle phi nodes, we will sometimes get
3547 invariants in the use expression. */
3548 if (TREE_CODE (use
) == SSA_NAME
)
3550 if (! (VN_INFO (use
)->visited
))
3552 /* Recurse by pushing the current use walking state on
3553 the stack and starting over. */
3554 VEC_safe_push(ssa_op_iter
, heap
, itervec
, &iter
);
3555 VEC_safe_push(tree
, heap
, namevec
, name
);
3560 VN_INFO (name
)->low
= MIN (VN_INFO (name
)->low
,
3561 VN_INFO (use
)->low
);
3563 if (VN_INFO (use
)->dfsnum
< VN_INFO (name
)->dfsnum
3564 && VN_INFO (use
)->on_sccstack
)
3566 VN_INFO (name
)->low
= MIN (VN_INFO (use
)->dfsnum
,
3567 VN_INFO (name
)->low
);
3571 usep
= op_iter_next_use (&iter
);
3575 /* Allocate a value number table. */
3578 allocate_vn_table (vn_tables_t table
)
3580 table
->phis
= htab_create (23, vn_phi_hash
, vn_phi_eq
, free_phi
);
3581 table
->nary
= htab_create (23, vn_nary_op_hash
, vn_nary_op_eq
, NULL
);
3582 table
->references
= htab_create (23, vn_reference_hash
, vn_reference_eq
,
3585 gcc_obstack_init (&table
->nary_obstack
);
3586 table
->phis_pool
= create_alloc_pool ("VN phis",
3587 sizeof (struct vn_phi_s
),
3589 table
->references_pool
= create_alloc_pool ("VN references",
3590 sizeof (struct vn_reference_s
),
3594 /* Free a value number table. */
3597 free_vn_table (vn_tables_t table
)
3599 htab_delete (table
->phis
);
3600 htab_delete (table
->nary
);
3601 htab_delete (table
->references
);
3602 obstack_free (&table
->nary_obstack
, NULL
);
3603 free_alloc_pool (table
->phis_pool
);
3604 free_alloc_pool (table
->references_pool
);
3612 int *rpo_numbers_temp
;
3614 calculate_dominance_info (CDI_DOMINATORS
);
3616 constant_to_value_id
= htab_create (23, vn_constant_hash
, vn_constant_eq
,
3619 constant_value_ids
= BITMAP_ALLOC (NULL
);
3624 vn_ssa_aux_table
= VEC_alloc (vn_ssa_aux_t
, heap
, num_ssa_names
+ 1);
3625 /* VEC_alloc doesn't actually grow it to the right size, it just
3626 preallocates the space to do so. */
3627 VEC_safe_grow_cleared (vn_ssa_aux_t
, heap
, vn_ssa_aux_table
, num_ssa_names
+ 1);
3628 gcc_obstack_init (&vn_ssa_aux_obstack
);
3630 shared_lookup_phiargs
= NULL
;
3631 shared_lookup_references
= NULL
;
3632 rpo_numbers
= XCNEWVEC (int, last_basic_block
+ NUM_FIXED_BLOCKS
);
3633 rpo_numbers_temp
= XCNEWVEC (int, last_basic_block
+ NUM_FIXED_BLOCKS
);
3634 pre_and_rev_post_order_compute (NULL
, rpo_numbers_temp
, false);
3636 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
3637 the i'th block in RPO order is bb. We want to map bb's to RPO
3638 numbers, so we need to rearrange this array. */
3639 for (j
= 0; j
< n_basic_blocks
- NUM_FIXED_BLOCKS
; j
++)
3640 rpo_numbers
[rpo_numbers_temp
[j
]] = j
;
3642 XDELETE (rpo_numbers_temp
);
3644 VN_TOP
= create_tmp_var_raw (void_type_node
, "vn_top");
3646 /* Create the VN_INFO structures, and initialize value numbers to
3648 for (i
= 0; i
< num_ssa_names
; i
++)
3650 tree name
= ssa_name (i
);
3653 VN_INFO_GET (name
)->valnum
= VN_TOP
;
3654 VN_INFO (name
)->expr
= NULL_TREE
;
3655 VN_INFO (name
)->value_id
= 0;
3659 renumber_gimple_stmt_uids ();
3661 /* Create the valid and optimistic value numbering tables. */
3662 valid_info
= XCNEW (struct vn_tables_s
);
3663 allocate_vn_table (valid_info
);
3664 optimistic_info
= XCNEW (struct vn_tables_s
);
3665 allocate_vn_table (optimistic_info
);
3673 htab_delete (constant_to_value_id
);
3674 BITMAP_FREE (constant_value_ids
);
3675 VEC_free (tree
, heap
, shared_lookup_phiargs
);
3676 VEC_free (vn_reference_op_s
, heap
, shared_lookup_references
);
3677 XDELETEVEC (rpo_numbers
);
3679 for (i
= 0; i
< num_ssa_names
; i
++)
3681 tree name
= ssa_name (i
);
3683 && VN_INFO (name
)->needs_insertion
)
3684 release_ssa_name (name
);
3686 obstack_free (&vn_ssa_aux_obstack
, NULL
);
3687 VEC_free (vn_ssa_aux_t
, heap
, vn_ssa_aux_table
);
3689 VEC_free (tree
, heap
, sccstack
);
3690 free_vn_table (valid_info
);
3691 XDELETE (valid_info
);
3692 free_vn_table (optimistic_info
);
3693 XDELETE (optimistic_info
);
3696 /* Set *ID if we computed something useful in RESULT. */
3699 set_value_id_for_result (tree result
, unsigned int *id
)
3703 if (TREE_CODE (result
) == SSA_NAME
)
3704 *id
= VN_INFO (result
)->value_id
;
3705 else if (is_gimple_min_invariant (result
))
3706 *id
= get_or_alloc_constant_value_id (result
);
3710 /* Set the value ids in the valid hash tables. */
3713 set_hashtable_value_ids (void)
3720 /* Now set the value ids of the things we had put in the hash
3723 FOR_EACH_HTAB_ELEMENT (valid_info
->nary
,
3724 vno
, vn_nary_op_t
, hi
)
3725 set_value_id_for_result (vno
->result
, &vno
->value_id
);
3727 FOR_EACH_HTAB_ELEMENT (valid_info
->phis
,
3729 set_value_id_for_result (vp
->result
, &vp
->value_id
);
3731 FOR_EACH_HTAB_ELEMENT (valid_info
->references
,
3732 vr
, vn_reference_t
, hi
)
3733 set_value_id_for_result (vr
->result
, &vr
->value_id
);
3736 /* Do SCCVN. Returns true if it finished, false if we bailed out
3737 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
3738 how we use the alias oracle walking during the VN process. */
3741 run_scc_vn (vn_lookup_kind default_vn_walk_kind_
)
3745 bool changed
= true;
3747 default_vn_walk_kind
= default_vn_walk_kind_
;
3750 current_info
= valid_info
;
3752 for (param
= DECL_ARGUMENTS (current_function_decl
);
3754 param
= DECL_CHAIN (param
))
3756 if (gimple_default_def (cfun
, param
) != NULL
)
3758 tree def
= gimple_default_def (cfun
, param
);
3759 VN_INFO (def
)->valnum
= def
;
3763 for (i
= 1; i
< num_ssa_names
; ++i
)
3765 tree name
= ssa_name (i
);
3767 && VN_INFO (name
)->visited
== false
3768 && !has_zero_uses (name
))
3776 /* Initialize the value ids. */
3778 for (i
= 1; i
< num_ssa_names
; ++i
)
3780 tree name
= ssa_name (i
);
3784 info
= VN_INFO (name
);
3785 if (info
->valnum
== name
3786 || info
->valnum
== VN_TOP
)
3787 info
->value_id
= get_next_value_id ();
3788 else if (is_gimple_min_invariant (info
->valnum
))
3789 info
->value_id
= get_or_alloc_constant_value_id (info
->valnum
);
3792 /* Propagate until they stop changing. */
3796 for (i
= 1; i
< num_ssa_names
; ++i
)
3798 tree name
= ssa_name (i
);
3802 info
= VN_INFO (name
);
3803 if (TREE_CODE (info
->valnum
) == SSA_NAME
3804 && info
->valnum
!= name
3805 && info
->value_id
!= VN_INFO (info
->valnum
)->value_id
)
3808 info
->value_id
= VN_INFO (info
->valnum
)->value_id
;
3813 set_hashtable_value_ids ();
3815 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3817 fprintf (dump_file
, "Value numbers:\n");
3818 for (i
= 0; i
< num_ssa_names
; i
++)
3820 tree name
= ssa_name (i
);
3822 && VN_INFO (name
)->visited
3823 && SSA_VAL (name
) != name
)
3825 print_generic_expr (dump_file
, name
, 0);
3826 fprintf (dump_file
, " = ");
3827 print_generic_expr (dump_file
, SSA_VAL (name
), 0);
3828 fprintf (dump_file
, "\n");
3836 /* Return the maximum value id we have ever seen. */
3839 get_max_value_id (void)
3841 return next_value_id
;
3844 /* Return the next unique value id. */
3847 get_next_value_id (void)
3849 return next_value_id
++;
3853 /* Compare two expressions E1 and E2 and return true if they are equal. */
3856 expressions_equal_p (tree e1
, tree e2
)
3858 /* The obvious case. */
3862 /* If only one of them is null, they cannot be equal. */
3866 /* Now perform the actual comparison. */
3867 if (TREE_CODE (e1
) == TREE_CODE (e2
)
3868 && operand_equal_p (e1
, e2
, OEP_PURE_SAME
))
3875 /* Return true if the nary operation NARY may trap. This is a copy
3876 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3879 vn_nary_may_trap (vn_nary_op_t nary
)
3882 tree rhs2
= NULL_TREE
;
3883 bool honor_nans
= false;
3884 bool honor_snans
= false;
3885 bool fp_operation
= false;
3886 bool honor_trapv
= false;
3890 if (TREE_CODE_CLASS (nary
->opcode
) == tcc_comparison
3891 || TREE_CODE_CLASS (nary
->opcode
) == tcc_unary
3892 || TREE_CODE_CLASS (nary
->opcode
) == tcc_binary
)
3895 fp_operation
= FLOAT_TYPE_P (type
);
3898 honor_nans
= flag_trapping_math
&& !flag_finite_math_only
;
3899 honor_snans
= flag_signaling_nans
!= 0;
3901 else if (INTEGRAL_TYPE_P (type
)
3902 && TYPE_OVERFLOW_TRAPS (type
))
3905 if (nary
->length
>= 2)
3907 ret
= operation_could_trap_helper_p (nary
->opcode
, fp_operation
,
3909 honor_nans
, honor_snans
, rhs2
,
3915 for (i
= 0; i
< nary
->length
; ++i
)
3916 if (tree_could_trap_p (nary
->op
[i
]))