/* SCC value numbering for trees
   Copyright (C) 2006-2020 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "splay-tree.h"
#include "insn-config.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-ssa-propagate.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
#include "stringpool.h"
#include "tree-pass.h"
#include "statistics.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-loop-niter.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code,
   it is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
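/* As an illustration of the optimistic iteration, consider the cycle

     i_1 = PHI <0 (entry), i_2 (latch)>
     i_2 = i_1 + 0;

   Assuming i_1 and i_2 equal at first, i_2 simplifies to i_1, the PHI
   then merges 0 with itself, and re-iterating the SCC confirms both
   names have the value 0.  A purely pessimistic walk would have had to
   number both names VARYING.  */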
/* There's no BB_EXECUTABLE but we can use BB_VISITED.  */
#define BB_EXECUTABLE BB_VISITED

static vn_lookup_kind default_vn_walk_kind;
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
/* vn_phi hashtable helpers.  */

static bool
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vp1 == vp2 || vn_phi_eq (vp1, vp2);
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}
/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return v == c || vn_reference_eq (v, c);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
/* The set of VN hashtables.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
} *vn_tables_t;
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;
/* Obstack we allocate the vn-tables elements from.  */
static obstack vn_tables_obstack;
/* Special obstack we never unwind.  */
static obstack vn_tables_insert_obstack;

static vn_reference_t last_inserted_ref;
static vn_phi_t last_inserted_phi;
static vn_nary_op_t last_inserted_nary;

/* Valid hashtables storing information we have proven to be
   correct.  */
static vn_tables_t valid_info;
/* Valueization hook.  Valueize NAME if it is an SSA name, otherwise
   just return it.  */
tree (*vn_valueize) (tree);
tree
vn_valueize_wrapper (tree t, void* context ATTRIBUTE_UNUSED)
{
  return vn_valueize (t);
}

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;
/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
{
  typedef vn_ssa_aux_t value_type;
  typedef tree compare_type;
  static inline hashval_t hash (const value_type &);
  static inline bool equal (const value_type &, const compare_type &);
  static inline void mark_deleted (value_type &) {}
  static const bool empty_zero_p = true;
  static inline void mark_empty (value_type &e) { e = NULL; }
  static inline bool is_deleted (value_type &) { return false; }
  static inline bool is_empty (value_type &e) { return e == NULL; }
};

hashval_t
vn_ssa_aux_hasher::hash (const value_type &entry)
{
  return SSA_NAME_VERSION (entry->name);
}

bool
vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
{
  return name == entry->name;
}

static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
static struct obstack vn_ssa_aux_obstack;
static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
static unsigned int vn_nary_length_from_stmt (gimple *);
static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
					    vn_nary_op_table_type *, bool);
static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
					 enum tree_code, tree, tree *);
static tree vn_lookup_simplify_result (gimple_match_op *);
static vn_reference_t vn_reference_lookup_or_insert_for_pieces
	  (tree, alias_set_type, tree, vec<vn_reference_op_s, va_heap>, tree);
/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
}
vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t *res
    = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
					    INSERT);
  if (*res != NULL)
    return *res;

  vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  newinfo->name = name;
  newinfo->valnum = VN_TOP;
  /* We are using the visited flag to handle uses with defs not within the
     region being value-numbered.  */
  newinfo->visited = false;

  /* Given we create the VN_INFOs on-demand now we have to do initialization
     different than VN_TOP here.  */
  if (SSA_NAME_IS_DEFAULT_DEF (name))
    switch (TREE_CODE (SSA_NAME_VAR (name)))
      {
      case VAR_DECL:
	/* All undefined vars are VARYING.  */
	newinfo->valnum = name;
	newinfo->visited = true;
	break;

      case PARM_DECL:
	/* Parameters are VARYING but we can record a condition
	   if we know it is a non-NULL pointer.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	if (POINTER_TYPE_P (TREE_TYPE (name))
	    && nonnull_arg_p (SSA_NAME_VAR (name)))
	  {
	    tree ops[2];
	    ops[0] = name;
	    ops[1] = build_int_cst (TREE_TYPE (name), 0);
	    vn_nary_op_t nary;
	    /* Allocate from non-unwinding stack.  */
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_true_node;
	    vn_nary_op_insert_into (nary, valid_info->nary, true);
	    gcc_assert (nary->unwind_to == NULL);
	    /* Also do not link it into the undo chain.  */
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_false_node;
	    vn_nary_op_insert_into (nary, valid_info->nary, true);
	    gcc_assert (nary->unwind_to == NULL);
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Recording ");
		print_generic_expr (dump_file, name, TDF_SLIM);
		fprintf (dump_file, " != 0\n");
	      }
	  }
	break;

      case RESULT_DECL:
	/* If the result is passed by invisible reference the default
	   def is initialized, otherwise it's uninitialized.  Still
	   undefined is varying.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	break;

      default:
	gcc_unreachable ();
      }
  return newinfo;
}
/* Return the SSA value of X.  */

static inline tree
SSA_VAL (tree x, bool *visited = NULL)
{
  vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
  if (visited)
    *visited = tem && tem->visited;
  return tem && tem->visited ? tem->valnum : x;
}
/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
      gcc_assert (x != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
/* Similar to the above but used as callback for walk_non_aliases_vuses
   and thus should stop at unvisited VUSE to not walk across region
   boundaries.  */

static tree
vuse_valueize (tree vuse)
{
  do
    {
      bool visited;
      vuse = SSA_VAL (vuse, &visited);
      if (!visited)
	return NULL_TREE;
      gcc_assert (vuse != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (vuse));
  return vuse;
}
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
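/* For illustration: a GIMPLE_ASSIGN a_1 = b_2 + c_3 yields VN_NARY,
   a load a_1 = *p_2 yields VN_REFERENCE, and an invariant address
   a_1 = &x yields VN_CONSTANT per the ADDR_EXPR handling above.  */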
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}
/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  /* If the hashtable isn't initialized we're not running from PRE and thus
     do not need value-ids.  */
  if (!constant_to_value_id)
    return 0;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}
/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}
/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  poly_int64 off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (maybe_ne (vro->off, -1))
	{
	  if (known_eq (off, -1))
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (maybe_ne (off, -1)
	      && maybe_ne (off, 0))
	    hstate.add_poly_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      poly_int64 off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
	    return false;
	  if (known_eq (vro1->off, -1))
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
	    return false;
	  if (known_eq (vro2->off, -1))
	    break;
	  off2 += vro2->off;
	}
      if (maybe_ne (off1, off2))
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
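/* For illustration: operand lists that split a constant offset
   differently can still compare equal here because runs of operands
   with known offsets are accumulated into off1/off2 and only the sums
   are compared, while ADDR_EXPR bases are unwrapped via tem1/tem2
   before the per-operand comparison.  */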
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (!mem_ref_offset (ref).to_shwi (&temp.off))
	    temp.off = -1;
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case TARGET_MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TMR_INDEX (ref);
	  temp.op1 = TMR_STEP (ref);
	  temp.op2 = TMR_OFFSET (ref);
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  result->safe_push (temp);
	  memset (&temp, 0, sizeof (temp));
	  temp.type = NULL_TREE;
	  temp.opcode = ERROR_MARK;
	  temp.op0 = TMR_INDEX2 (ref);
	  temp.off = -1;
	  break;
	case BIT_FIELD_REF:
	  /* Record bits, position and storage order.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
	    temp.off = -1;
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& poly_int_tree_p (this_offset))
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    poly_offset_int off
		      = (wi::to_poly_offset (this_offset)
			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
		    /* Prohibit value-numbering zero offset components
		       of addresses the same before the pass folding
		       __builtin_object_size had a chance to run
		       (checking cfun->after_inlining does the
		       trick here).  */
		    if (TREE_CODE (orig) != ADDR_EXPR
			|| maybe_ne (off, 0)
			|| cfun->after_inlining)
		      off.to_shwi (&temp.off);
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  {
	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
	    /* Record index as operand.  */
	    temp.op0 = TREE_OPERAND (ref, 1);
	    /* Always record lower bounds and element size.  */
	    temp.op1 = array_ref_low_bound (ref);
	    /* But record element size in units of the type alignment.  */
	    temp.op2 = TREE_OPERAND (ref, 3);
	    temp.align = eltype->type_common.align;
	    if (!temp.op2)
	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
				     size_int (TYPE_ALIGN_UNIT (eltype)));
	    if (poly_int_tree_p (temp.op0)
		&& poly_int_tree_p (temp.op1)
		&& TREE_CODE (temp.op2) == INTEGER_CST)
	      {
		poly_offset_int off = ((wi::to_poly_offset (temp.op0)
					- wi::to_poly_offset (temp.op1))
				       * wi::to_offset (temp.op2)
				       * vn_ref_op_align_unit (&temp));
		off.to_shwi (&temp.off);
	      }
	  }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration  */
	case REALPART_EXPR:
	  temp.off = 0;
	  break;
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  temp.reverse = storage_order_barrier_p (ref);
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
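/* For illustration: a load from a.b[i_1] is decomposed outermost-first
   into { ARRAY_REF i_1, COMPONENT_REF b, MEM_REF 0, ADDR_EXPR &a },
   the decl base 'a' being canonicalized to MEM[&a] as noted above.  */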
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  poly_offset_int offset = 0;
  poly_offset_int max_size;
  poly_offset_int size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    size = wi::to_poly_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (known_eq (pop->off, -1))
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += wi::to_poly_offset (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    tree this_offset = DECL_FIELD_OFFSET (field);

	    if (op->op1 || !poly_int_tree_p (this_offset))
	      max_size = -1;
	    else
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		offset += woffset;
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!poly_int_tree_p (op->op0)
	      || !poly_int_tree_p (op->op1)
	      || TREE_CODE (op->op2) != INTEGER_CST)
	    max_size = -1;
	  else
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (op->op0)
			    - wi::to_poly_offset (op->op1),
			    TYPE_PRECISION (TREE_TYPE (op->op0)));
	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
	      woffset <<= LOG2_BITS_PER_UNIT;
	      offset += woffset;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
    {
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  if (!offset.to_shwi (&ref->offset))
    {
      ref->offset = 0;
      ref->max_size = -1;
      return true;
    }

  if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
    ref->max_size = -1;

  return true;
}
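/* For illustration: operands describing MEM[&a].b[2] produce an ao_ref
   whose base is the decl 'a' (the ADDR_EXPR case above strips the
   MEM_REF wrapper for decl bases), with the bit offsets of .b and [2]
   accumulated into ref->offset and the size derived from TYPE.  */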
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_fntype (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static bool
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  poly_int64 addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      poly_offset_int off
	= (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
				  SIGNED)
	   + addr_offset);
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      return true;
    }
  return false;
}
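/* For illustration: given ops for MEM[(&a.b), 8] this rewrites the
   address to &a and folds the unit offset of .b into the MEM_REF
   constant, yielding MEM[(&a), 8 + offset-of-b].  */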
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  bool changed = false;
  vn_reference_op_t op;

  do
    {
      unsigned int i = *i_p;
      op = &(*ops)[i];
      vn_reference_op_t mem_op = &(*ops)[i - 1];
      gimple *def_stmt;
      enum tree_code code;
      poly_offset_int off;

      def_stmt = SSA_NAME_DEF_STMT (op->op0);
      if (!is_gimple_assign (def_stmt))
	return changed;

      code = gimple_assign_rhs_code (def_stmt);
      if (code != ADDR_EXPR
	  && code != POINTER_PLUS_EXPR)
	return changed;

      off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);

      /* The only thing we have to do is from &OBJ.foo.bar add the offset
	 from .foo.bar to the preceding MEM_REF offset and replace the
	 address with &OBJ.  */
      if (code == ADDR_EXPR)
	{
	  tree addr, addr_base;
	  poly_int64 addr_offset;

	  addr = gimple_assign_rhs1 (def_stmt);
	  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						     &addr_offset);
	  /* If that didn't work because the address isn't invariant propagate
	     the reference tree from the address operation in case the current
	     dereference isn't offsetted.  */
	  if (!addr_base
	      && *i_p == ops->length () - 1
	      && known_eq (off, 0)
	      /* This makes us disable this transform for PRE where the
		 reference ops might be also used for code insertion which
		 is invalid.  */
	      && default_vn_walk_kind == VN_WALKREWRITE)
	    {
	      auto_vec<vn_reference_op_s, 32> tem;
	      copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	      /* Make sure to preserve TBAA info.  The only objects not
		 wrapped in MEM_REFs that can have their address taken are
		 STRING_CSTs.  */
	      if (tem.length () >= 2
		  && tem[tem.length () - 2].opcode == MEM_REF)
		{
		  vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
		  new_mem_op->op0
		    = wide_int_to_tree (TREE_TYPE (mem_op->op0),
					wi::to_poly_wide (new_mem_op->op0));
		}
	      else
		gcc_assert (tem.last ().opcode == STRING_CST);
	      ops->pop ();
	      ops->pop ();
	      ops->safe_splice (tem);
	      --*i_p;
	      return true;
	    }
	  if (!addr_base
	      || TREE_CODE (addr_base) != MEM_REF
	      || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
								    0))))
	    return changed;

	  off += addr_offset;
	  off += mem_ref_offset (addr_base);
	  op->op0 = TREE_OPERAND (addr_base, 0);
	}
      else
	{
	  tree ptr, ptroff;
	  ptr = gimple_assign_rhs1 (def_stmt);
	  ptroff = gimple_assign_rhs2 (def_stmt);
	  if (TREE_CODE (ptr) != SSA_NAME
	      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
	      /* Make sure to not endlessly recurse.
		 See gcc.dg/tree-ssa/20040408-1.c for an example.  Can easily
		 happen when we value-number a PHI to its backedge value.  */
	      || SSA_VAL (ptr) == op->op0
	      || !poly_int_tree_p (ptroff))
	    return changed;

	  off += wi::to_poly_offset (ptroff);
	  op->op0 = ptr;
	}

      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      /* ??? Can end up with endless recursion here!?
	 gcc.c-torture/execute/strcmp-1.c  */
      if (TREE_CODE (op->op0) == SSA_NAME)
	op->op0 = SSA_VAL (op->op0);
      if (TREE_CODE (op->op0) != SSA_NAME)
	op->opcode = TREE_CODE (op->op0);

      changed = true;
    }
  /* Tail-recurse.  */
  while (TREE_CODE (op->op0) == SSA_NAME);

  /* Fold a remaining *&.  */
  if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);

  return changed;
}
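/* For illustration: with p_1 = &a and q_2 = p_1 + 4, an access
   MEM[(q_2), 0] is rewritten step by step through the POINTER_PLUS_EXPR
   and ADDR_EXPR definitions above into MEM[(&a), 4].  */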
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && COMPLETE_TYPE_P (ref->type)
	   && is_gimple_reg_type (ref->type))
    {
      poly_int64 off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      else
	return NULL_TREE;
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
	    {
	      ++i;
	      break;
	    }
	  if (known_eq (operands[i].off, -1))
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  if (TREE_CODE (decl) == STRING_CST)
	    ctor = decl;
	  else
	    ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  HOST_WIDE_INT const_off;
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else if (off.is_constant (&const_off))
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      int len = native_encode_expr (ctor, buf, size, const_off);
	      if (len > 0)
		return native_interpret_expr (ref->type, buf, len);
	    }
	}
    }

  return NULL_TREE;
}
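/* For illustration: with 'static const int tab[2] = {1, 2};' a lookup
   of tab[1] finds the MEM_REF/ADDR_EXPR base above, obtains the
   initializer via ctor_for_folding and folds the read to the constant
   2, while a missing (all-zero) initializer folds to build_zero_cst.  */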
/* Return true if OPS contain a storage order barrier.  */

static bool
contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;

  FOR_EACH_VEC_ELT (ops, i, op)
    if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
      return true;

  return false;
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
		 bool with_avail = false)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_fold_indirect (&orig, &i))
	    *valueized_anything = true;
	}
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_maybe_forwprop_address (&orig, &i))
	    *valueized_anything = true;
	}
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && known_eq (vro->off, -1)
	       && poly_int_tree_p (vro->op0)
	       && poly_int_tree_p (vro->op1)
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
				  - wi::to_poly_offset (vro->op1))
				 * wi::to_offset (vro->op2)
				 * vn_ref_op_align_unit (vro));
	  off.to_shwi (&vro->off);
	}
    }

  return orig;
}
static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;
/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}
/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
/* Partial definition tracking support.  */

struct pd_range
{
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
};

struct pd_data
{
  tree rhs;
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
};
/* Context for alias walking.  */

struct vn_walk_cb_data
{
  vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
		   vn_lookup_kind vn_walk_kind_, bool tbaa_p_)
    : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
      vn_walk_kind (vn_walk_kind_), tbaa_p (tbaa_p_),
      saved_operands (vNULL), first_set (-2), known_ranges (NULL)
  {
    if (!last_vuse_ptr)
      last_vuse_ptr = &last_vuse;
    ao_ref_init (&orig_ref, orig_ref_);
  }
  ~vn_walk_cb_data ();
  void *finish (alias_set_type, tree);
  void *push_partial_def (const pd_data& pd, alias_set_type, HOST_WIDE_INT);

  vn_reference_t vr;
  ao_ref orig_ref;
  tree *last_vuse_ptr;
  tree last_vuse;
  vn_lookup_kind vn_walk_kind;
  bool tbaa_p;
  vec<vn_reference_op_s> saved_operands;

  /* The VDEFs of partial defs we come along.  */
  auto_vec<pd_data, 2> partial_defs;
  /* The first defs range to avoid splay tree setup in most cases.  */
  pd_range first_range;
  alias_set_type first_set;
  splay_tree known_ranges;
  obstack ranges_obstack;
};
vn_walk_cb_data::~vn_walk_cb_data ()
{
  if (known_ranges)
    {
      splay_tree_delete (known_ranges);
      obstack_free (&ranges_obstack, NULL);
    }
  saved_operands.release ();
}

void *
vn_walk_cb_data::finish (alias_set_type set, tree val)
{
  if (first_set != -2)
    set = first_set;
  return vn_reference_lookup_or_insert_for_pieces
      (last_vuse, set, vr->type,
       saved_operands.exists () ? saved_operands : vr->operands, val);
}
/* pd_range splay-tree helpers.  */

static int
pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
{
  HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
  HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
  if (offset1 < offset2)
    return -1;
  else if (offset1 > offset2)
    return 1;
  return 0;
}

static void *
pd_tree_alloc (int size, void *data_)
{
  vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
  return obstack_alloc (&data->ranges_obstack, size);
}

static void
pd_tree_dealloc (void *, void *)
{
}
/* Push PD to the vector of partial definitions returning a
   value when we are ready to combine things with VUSE, SET and MAXSIZEI,
   NULL when we want to continue looking for partial defs or -1
   on failure.  */

void *
vn_walk_cb_data::push_partial_def (const pd_data &pd,
				   alias_set_type set, HOST_WIDE_INT maxsizei)
{
  const HOST_WIDE_INT bufsize = 64;
  /* We're using a fixed buffer for encoding so fail early if the object
     we want to interpret is bigger.  */
  if (maxsizei > bufsize * BITS_PER_UNIT
      || CHAR_BIT != 8
      || BITS_PER_UNIT != 8
      /* Not prepared to handle PDP endian.  */
      || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
    return (void *)-1;

  bool pd_constant_p = (TREE_CODE (pd.rhs) == CONSTRUCTOR
			|| CONSTANT_CLASS_P (pd.rhs));
  if (partial_defs.is_empty ())
    {
      /* If we get a clobber upfront, fail.  */
      if (TREE_CLOBBER_P (pd.rhs))
	return (void *)-1;
      if (!pd_constant_p)
	return (void *)-1;
      partial_defs.safe_push (pd);
      first_range.offset = pd.offset;
      first_range.size = pd.size;
      first_set = set;
      last_vuse_ptr = NULL;
      /* Continue looking for partial defs.  */
      return NULL;
    }

  if (!known_ranges)
    {
      /* ???  Optimize the case where the 2nd partial def completes things.  */
      gcc_obstack_init (&ranges_obstack);
      known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
						    pd_tree_alloc,
						    pd_tree_dealloc, this);
      splay_tree_insert (known_ranges,
			 (splay_tree_key)&first_range.offset,
			 (splay_tree_value)&first_range);
    }

  pd_range newr = { pd.offset, pd.size };
  splay_tree_node n;
  pd_range *r;
  /* Lookup the predecessor of offset + 1 and see if we need to merge.  */
  HOST_WIDE_INT loffset = newr.offset + 1;
  if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset))
      && ((r = (pd_range *)n->value), true)
      && ranges_known_overlap_p (r->offset, r->size + 1,
				 newr.offset, newr.size))
    {
      /* Ignore partial defs already covered.  Here we also drop shadowed
	 clobbers arriving here at the floor.  */
      if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
	return NULL;
      r->size = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
    }
  else
    {
      /* newr.offset wasn't covered yet, insert the range.  */
      r = XOBNEW (&ranges_obstack, pd_range);
      *r = newr;
      splay_tree_insert (known_ranges, (splay_tree_key)&r->offset,
			 (splay_tree_value)r);
    }
  /* Merge r which now contains newr and is a member of the splay tree with
     adjacent overlapping ranges.  */
  pd_range *rafter;
  while ((n = splay_tree_successor (known_ranges, (splay_tree_key)&r->offset))
	 && ((rafter = (pd_range *)n->value), true)
	 && ranges_known_overlap_p (r->offset, r->size + 1,
				    rafter->offset, rafter->size))
    {
      r->size = MAX (r->offset + r->size,
		     rafter->offset + rafter->size) - r->offset;
      splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
    }
  /* If we get a clobber, fail.  */
  if (TREE_CLOBBER_P (pd.rhs))
    return (void *)-1;
  /* Non-constants are OK as long as they are shadowed by a constant.  */
  if (!pd_constant_p)
    return (void *)-1;
  partial_defs.safe_push (pd);

  /* Now we have merged newr into the range tree.  When we have covered
     [offseti, sizei] then the tree will contain exactly one node which has
     the desired properties and it will be 'r'.  */
  if (!known_subrange_p (0, maxsizei, r->offset, r->size))
    /* Continue looking for partial defs.  */
    return NULL;

  /* Now simply native encode all partial defs in reverse order.  */
  unsigned ndefs = partial_defs.length ();
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[bufsize + 1];
  unsigned char this_buffer[bufsize + 1];
  int len;

  memset (buffer, 0, bufsize + 1);
  unsigned needed_len = ROUND_UP (maxsizei, BITS_PER_UNIT) / BITS_PER_UNIT;
  while (!partial_defs.is_empty ())
    {
      pd_data pd = partial_defs.pop ();
      unsigned int amnt;
      if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
	{
	  /* Empty CONSTRUCTOR.  */
	  if (pd.size >= needed_len * BITS_PER_UNIT)
	    len = needed_len;
	  else
	    len = ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT;
	  memset (this_buffer, 0, len);
	}
      else
	{
	  len = native_encode_expr (pd.rhs, this_buffer, bufsize,
				    MAX (0, -pd.offset) / BITS_PER_UNIT);
	  if (len <= 0
	      || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
			- MAX (0, -pd.offset) / BITS_PER_UNIT))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "Failed to encode %u "
			 "partial definitions\n", ndefs);
	      return (void *)-1;
	    }
	}

      unsigned char *p = buffer;
      HOST_WIDE_INT size = pd.size;
      if (pd.offset < 0)
	size -= ROUND_DOWN (-pd.offset, BITS_PER_UNIT);
      this_buffer[len] = 0;
      if (BYTES_BIG_ENDIAN)
	{
	  /* LSB of this_buffer[len - 1] byte should be at
	     pd.offset + pd.size - 1 bits in buffer.  */
	  amnt = ((unsigned HOST_WIDE_INT) pd.offset
		  + pd.size) % BITS_PER_UNIT;
	  if (amnt)
	    shift_bytes_in_array_right (this_buffer, len + 1, amnt);
	  unsigned char *q = this_buffer;
	  unsigned int off = 0;
	  if (pd.offset >= 0)
	    {
	      unsigned int msk;
	      off = pd.offset / BITS_PER_UNIT;
	      gcc_assert (off < needed_len);
	      p = buffer + off;
	      if (size <= amnt)
		{
		  msk = ((1 << size) - 1) << (BITS_PER_UNIT - amnt);
		  *p = (*p & ~msk) | (this_buffer[len] & msk);
		  size = 0;
		}
	      else
		{
		  if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
		    q = (this_buffer + len
			 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
			    / BITS_PER_UNIT));
		  if (pd.offset % BITS_PER_UNIT)
		    {
		      msk = -1U << (BITS_PER_UNIT
				    - (pd.offset % BITS_PER_UNIT));
		      *p = (*p & msk) | (*q & ~msk);
		      p++;
		      q++;
		      off++;
		      size -= BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT);
		      gcc_assert (size >= 0);
		    }
		}
	    }
	  else if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
	    {
	      q = (this_buffer + len
		   - (ROUND_UP (size - amnt, BITS_PER_UNIT)
		      / BITS_PER_UNIT));
	      if (pd.offset % BITS_PER_UNIT)
		{
		  q++;
		  size -= BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) pd.offset
					   % BITS_PER_UNIT);
		  gcc_assert (size >= 0);
		}
	    }
	  if ((unsigned HOST_WIDE_INT) size / BITS_PER_UNIT + off
	      > needed_len)
	    size = (needed_len - off) * BITS_PER_UNIT;
	  memcpy (p, q, size / BITS_PER_UNIT);
	  if (size % BITS_PER_UNIT)
	    {
	      unsigned int msk
		= -1U << (BITS_PER_UNIT - (size % BITS_PER_UNIT));
	      p += size / BITS_PER_UNIT;
	      q += size / BITS_PER_UNIT;
	      *p = (*q & msk) | (*p & ~msk);
	    }
	}
      else /* !BYTES_BIG_ENDIAN */
	{
	  size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
	  if (pd.offset >= 0)
	    {
	      /* LSB of this_buffer[0] byte should be at pd.offset bits
		 in buffer.  */
	      unsigned int msk;
	      amnt = pd.offset % BITS_PER_UNIT;
	      if (amnt)
		shift_bytes_in_array_left (this_buffer, len + 1, amnt);
	      unsigned int off = pd.offset / BITS_PER_UNIT;
	      gcc_assert (off < needed_len);
	      p = buffer + off;
	      if (amnt + size < BITS_PER_UNIT)
		{
		  /* Low amnt bits come from *p, then size bits
		     from this_buffer[0] and the remaining again from
		     *p.  */
		  msk = ((1 << size) - 1) << amnt;
		  *p = (*p & ~msk) | (this_buffer[0] & msk);
		  size = 0;
		}
	      else if (amnt)
		{
		  msk = -1U << amnt;
		  *p = (*p & ~msk) | (this_buffer[0] & msk);
		  p++;
		  size -= (BITS_PER_UNIT - amnt);
		}
	    }
	  else
	    {
	      amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
	      if (amnt)
		shift_bytes_in_array_left (this_buffer, len + 1, amnt);
	    }
	  memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
	  p += size / BITS_PER_UNIT;
	  if (size % BITS_PER_UNIT)
	    {
	      unsigned int msk = -1U << (size % BITS_PER_UNIT);
	      *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
		    & ~msk) | (*p & msk);
	    }
	}
    }

  tree type = vr->type;
  /* Make sure to interpret in a type that has a range covering the whole
     access size.  */
  if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
    type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
  tree val;
  if (BYTES_BIG_ENDIAN)
    {
      unsigned sz = needed_len;
      if (maxsizei % BITS_PER_UNIT)
	shift_bytes_in_array_right (buffer, needed_len,
				    BITS_PER_UNIT
				    - (maxsizei % BITS_PER_UNIT));
      if (INTEGRAL_TYPE_P (type))
	sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
      if (sz > needed_len)
	{
	  memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
	  val = native_interpret_expr (type, this_buffer, sz);
	}
      else
	val = native_interpret_expr (type, buffer, needed_len);
    }
  else
    val = native_interpret_expr (type, buffer, bufsize);
  /* If we chop off bits because the type's precision doesn't match the memory
     access size this is ok when optimizing reads but not when called from
     the DSE code during elimination.  */
  if (val && type != vr->type)
    {
      if (! int_fits_type_p (val, vr->type))
	val = NULL_TREE;
      else
	val = fold_convert (vr->type, val);
    }

  if (val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "Successfully combined %u partial definitions\n", ndefs);
      /* We are using the alias-set of the first store we encounter which
	 should be appropriate here.  */
      return finish (first_set, val);
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "Failed to interpret %u encoded partial definitions\n", ndefs);
      return (void *)-1;
    }
}
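/* For illustration: two adjacent byte stores s.a = 1; s.b = 2; followed
   by a load of the whole of s are combined above by native-encoding
   each RHS into BUFFER once the recorded ranges cover the full access,
   then re-interpreting the bytes in the type of the load.  */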
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
{
  vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
  vn_reference_t vr = data->vr;
  vn_reference_s **slot;
  hashval_t hash;

  /* If we have partial definitions recorded we have to go through
     vn_reference_lookup_3.  */
  if (!data->partial_defs.is_empty ())
    return NULL;

  if (data->last_vuse_ptr)
    {
      *data->last_vuse_ptr = vuse;
      data->last_vuse = vuse;
    }

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if ((*slot)->result && data->saved_operands.exists ())
	return data->finish (vr->set, (*slot)->result);
      return *slot;
    }

  return NULL;
}
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the simplified result or by inserting the operation if
   INSERT is true.  */

static tree
vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
{
  tree result = NULL_TREE;
  /* We will be creating a value number for
       RCODE (OPS...).
     So first simplify and lookup this expression to see if it
     is already available.  */
  /* For simplification valueize.  */
  unsigned i;
  for (i = 0; i < res_op->num_ops; ++i)
    if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
      {
	tree tem = vn_valueize (res_op->ops[i]);
	if (!tem)
	  break;
	res_op->ops[i] = tem;
      }
  /* If valueization of an operand fails (it is not available), skip
     simplification.  */
  bool res = false;
  if (i == res_op->num_ops)
    {
      mprts_hook = vn_lookup_simplify_result;
      res = res_op->resimplify (NULL, vn_valueize);
      mprts_hook = NULL;
    }
  gimple *new_stmt = NULL;
  if (res
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      /* The expression is already available.  */
      result = res_op->ops[0];
      /* Valueize it, simplification returns sth in AVAIL only.  */
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
    }
  else
    {
      tree val = vn_lookup_simplify_result (res_op);
      if (!val && insert)
	{
	  gimple_seq stmts = NULL;
	  result = maybe_push_res_to_seq (res_op, &stmts);
	  if (result)
	    {
	      gcc_assert (gimple_seq_singleton_p (stmts));
	      new_stmt = gimple_seq_first_stmt (stmts);
	    }
	}
      else
	/* The expression is already available.  */
	result = val;
    }
  if (new_stmt)
    {
      /* The expression is not yet available, value-number lhs to
	 the new SSA_NAME we created.  */
      /* Initialize value-number information properly.  */
      vn_ssa_aux_t result_info = VN_INFO (result);
      result_info->valnum = result;
      result_info->value_id = get_next_value_id ();
      result_info->visited = 1;
      gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
					  new_stmt);
      result_info->needs_insertion = true;
      /* ??? PRE phi-translation inserts NARYs without corresponding
	 SSA name result.  Re-use those but set their result according
	 to the stmt we just built.  */
      vn_nary_op_t nary = NULL;
      vn_nary_op_lookup_stmt (new_stmt, &nary);
      if (nary)
	{
	  gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
	  nary->u.result = gimple_assign_lhs (new_stmt);
	}
      /* As all "inserted" statements are singleton SCCs, insert
	 to the valid table.  This is strictly needed to
	 avoid re-generating new value SSA_NAMEs for the same
	 expression during SCC iteration over and over (the
	 optimistic table gets cleared after each iteration).
	 We do not need to insert into the optimistic table, as
	 lookups there will fall back to the valid table.  */
      else
	{
	  unsigned int length = vn_nary_length_from_stmt (new_stmt);
	  vn_nary_op_t vno1
	    = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
	  vno1->value_id = result_info->value_id;
	  vno1->length = length;
	  vno1->predicated_values = 0;
	  vno1->u.result = result;
	  init_vn_nary_op_from_stmt (vno1, new_stmt);
	  vn_nary_op_insert_into (vno1, valid_info->nary, true);
	  /* Also do not link it into the undo chain.  */
	  last_inserted_nary = vno1->next;
	  vno1->next = (vn_nary_op_t)(void *)-1;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Inserting name ");
	  print_generic_expr (dump_file, result);
	  fprintf (dump_file, " for expression ");
	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
  return result;
}
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the simplified result or by inserting the operation.  */

static tree
vn_nary_build_or_lookup (gimple_match_op *res_op)
{
  return vn_nary_build_or_lookup_1 (res_op, true);
}

/* Try to simplify the expression RCODE OPS... of type TYPE and return
   its value if present.  */

tree
vn_nary_simplify (vn_nary_op_t nary)
{
  if (nary->length > gimple_match_op::MAX_NUM_OPS)
    return NULL_TREE;
  gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
		      nary->type, nary->length);
  memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
  return vn_nary_build_or_lookup_1 (&op, false);
}

/* Elimination engine.  */

class eliminate_dom_walker : public dom_walker
{
public:
  eliminate_dom_walker (cdi_direction, bitmap);
  ~eliminate_dom_walker ();

  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  virtual tree eliminate_avail (basic_block, tree op);
  virtual void eliminate_push_avail (basic_block, tree op);
  tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);

  void eliminate_stmt (basic_block, gimple_stmt_iterator *);

  unsigned eliminate_cleanup (bool region_p = false);

  bool do_pre;
  unsigned int el_todo;
  unsigned int eliminations;
  unsigned int insertions;

  /* SSA names that had their defs inserted by PRE if do_pre.  */
  bitmap inserted_exprs;

  /* Blocks with statements that have had their EH properties changed.  */
  bitmap need_eh_cleanup;

  /* Blocks with statements that have had their AB properties changed.  */
  bitmap need_ab_cleanup;

  /* Local state for the eliminate domwalk.  */
  auto_vec<gimple *> to_remove;
  auto_vec<gimple *> to_fixup;
  auto_vec<tree> avail;
  auto_vec<tree> avail_stack;
};

/* Adaptor to the elimination engine using RPO availability.  */

class rpo_elim : public eliminate_dom_walker
{
public:
  rpo_elim (basic_block entry_)
    : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
      m_avail_freelist (NULL) {}

  virtual tree eliminate_avail (basic_block, tree op);

  virtual void eliminate_push_avail (basic_block, tree);

  basic_block entry;
  /* Freelist of avail entries which are allocated from the vn_ssa_aux
     obstack.  */
  vn_avail *m_avail_freelist;
};

/* Global RPO state for access from hooks.  */
static rpo_elim *rpo_avail;
basic_block vn_context_bb;

/* Return true if BASE1 and BASE2 can be adjusted so they have the
   same address and adjust *OFFSET1 and *OFFSET2 accordingly.
   Otherwise return false.  */
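/* For example, given BASE1 = MEM_REF (p_1, 8) with *OFFSET1 = 0 and
   BASE2 = MEM_REF (p_1, 0) with *OFFSET2 = 64, both bases are rewritten
   to p_1 and both bit offsets become 64, so the addresses agree.  */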
static bool
adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
				       tree base2, poly_int64 *offset2)
{
  poly_int64 soff;
  if (TREE_CODE (base1) == MEM_REF
      && TREE_CODE (base2) == MEM_REF)
    {
      if (mem_ref_offset (base1).to_shwi (&soff))
	{
	  base1 = TREE_OPERAND (base1, 0);
	  *offset1 += soff * BITS_PER_UNIT;
	}
      if (mem_ref_offset (base2).to_shwi (&soff))
	{
	  base2 = TREE_OPERAND (base2, 0);
	  *offset2 += soff * BITS_PER_UNIT;
	}
      return operand_equal_p (base1, base2, 0);
    }
  return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REF and VR through an aggregate copy at the definition
   of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform translation
   of *REF and *VR.  If only disambiguation was performed then
   *DISAMBIGUATE_ONLY is set to true.  */
static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
		       translate_flags *disambiguate_only)
{
  vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
  vn_reference_t vr = data->vr;
  gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base = ao_ref_base (ref);
  HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
  static vec<vn_reference_op_s> lhs_ops;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;
  poly_int64 copy_size;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      basic_block saved_rpo_bb = vn_context_bb;
      vn_context_bb = gimple_bb (def_stmt);
      if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
	{
	  copy_reference_ops_from_ref (lhs, &lhs_ops);
	  lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
	}
      vn_context_bb = saved_rpo_bb;
      if (valueized_anything)
	{
	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
						      get_alias_set (lhs),
						      TREE_TYPE (lhs), lhs_ops);
	  if (lhs_ref_ok
	      && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
	    {
	      *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
	      return NULL;
	    }
	}
      else
	{
	  ao_ref_init (&lhs_ref, lhs);
	  lhs_ref_ok = true;
	}

      /* Besides valueizing the LHS we can also use access-path based
	 disambiguation on the original non-valueized ref.  */
      if (!ref->ref
	  && lhs_ref_ok
	  && data->orig_ref.ref)
	{
	  /* We want to use the non-valueized LHS for this, but avoid redundant
	     work.  */
	  ao_ref *lref = &lhs_ref;
	  ao_ref lref_alt;
	  if (valueized_anything)
	    {
	      ao_ref_init (&lref_alt, lhs);
	      lref = &lref_alt;
	    }
	  if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
	    {
	      *disambiguate_only = (valueized_anything
				    ? TR_VALUEIZE_AND_DISAMBIGUATE
				    : TR_DISAMBIGUATE);
	      return NULL;
	    }
	}

      /* If we reach a clobbering statement try to skip it and see if
	 we find a VN result with exactly the same value as the
	 possible clobber.  In this case we can ignore the clobber
	 and return the found value.  */
      if (is_gimple_reg_type (TREE_TYPE (lhs))
	  && types_compatible_p (TREE_TYPE (lhs), vr->type)
	  && data->partial_defs.is_empty ())
	{
	  tree *saved_last_vuse_ptr = data->last_vuse_ptr;
	  /* Do not update last_vuse_ptr in vn_reference_lookup_2.  */
	  data->last_vuse_ptr = NULL;
	  tree saved_vuse = vr->vuse;
	  hashval_t saved_hashcode = vr->hashcode;
	  void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
	  /* Need to restore vr->vuse and vr->hashcode.  */
	  vr->vuse = saved_vuse;
	  vr->hashcode = saved_hashcode;
	  data->last_vuse_ptr = saved_last_vuse_ptr;
	  if (res && res != (void *)-1)
	    {
	      vn_reference_t vnresult = (vn_reference_t) res;
	      tree rhs = gimple_assign_rhs1 (def_stmt);
	      if (TREE_CODE (rhs) == SSA_NAME)
		rhs = SSA_VAL (rhs);
	      if (vnresult->result
		  && operand_equal_p (vnresult->result, rhs, 0)
		  /* We have to honor our promise about union type punning
		     and also support arbitrary overlaps with
		     -fno-strict-aliasing.  So simply resort to alignment to
		     rule out overlaps.  Do this check last because it is
		     quite expensive compared to the hash-lookup above.  */
		  && multiple_p (get_object_alignment (ref->ref), ref->size)
		  && multiple_p (get_object_alignment (lhs), ref->size))
		return res;
	    }
	}
    }
  else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
	   && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
	   && gimple_call_num_args (def_stmt) <= 4)
    {
      /* For builtin calls valueize its arguments and call the
	 alias oracle again.  Valueization may improve points-to
	 info of pointers and constify size and position arguments.
	 Originally this was motivated by PR61034 which has
	 conditional calls to free falsely clobbering ref because
	 of imprecise points-to info of the argument.  */
      tree oldargs[4];
      bool valueized_anything = false;
      for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	{
	  oldargs[i] = gimple_call_arg (def_stmt, i);
	  tree val = vn_valueize (oldargs[i]);
	  if (val != oldargs[i])
	    {
	      gimple_call_set_arg (def_stmt, i, val);
	      valueized_anything = true;
	    }
	}
      if (valueized_anything)
	{
	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
					       ref);
	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
	  if (!res)
	    {
	      *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
	      return NULL;
	    }
	}
    }

  if (*disambiguate_only > TR_TRANSLATE)
    return (void *)-1;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (!ref->max_size_known_p ())
    return (void *)-1;

  poly_int64 offset = ref->offset;
  poly_int64 maxsize = ref->max_size;
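
  /* Each translation case below either derives a value for the read
     (via data->finish or data->push_partial_def) or rewrites *REF and
     VR so the walk continues at the definition's RHS.  */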
  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && (integer_zerop (gimple_call_arg (def_stmt, 1))
	  || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
	       || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
	      && CHAR_BIT == 8
	      && BITS_PER_UNIT == 8
	      && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
	      && offset.is_constant (&offseti)
	      && ref->size.is_constant (&sizei)
	      && (offseti % BITS_PER_UNIT == 0
		  || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
      && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
      && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	  || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
    {
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      bool reverse;
      tree ref2 = gimple_call_arg (def_stmt, 0);
      if (TREE_CODE (ref2) == SSA_NAME)
	{
	  ref2 = SSA_VAL (ref2);
	  if (TREE_CODE (ref2) == SSA_NAME
	      && (TREE_CODE (base) != MEM_REF
		  || TREE_OPERAND (base, 0) != ref2))
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
	      if (gimple_assign_single_p (def_stmt)
		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
		ref2 = gimple_assign_rhs1 (def_stmt);
	    }
	}
      if (TREE_CODE (ref2) == ADDR_EXPR)
	{
	  ref2 = TREE_OPERAND (ref2, 0);
	  base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
					   &reverse);
	  if (!known_size_p (maxsize2)
	      || !known_eq (maxsize2, size2)
	      || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
	    return (void *)-1;
	}
      else if (TREE_CODE (ref2) == SSA_NAME)
	{
	  poly_int64 soff;
	  if (TREE_CODE (base) != MEM_REF
	      || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
	    return (void *)-1;
	  offset += soff;
	  offset2 = 0;
	  if (TREE_OPERAND (base, 0) != ref2)
	    {
	      gimple *def = SSA_NAME_DEF_STMT (ref2);
	      if (is_gimple_assign (def)
		  && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
		  && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
		  && poly_int_tree_p (gimple_assign_rhs2 (def))
		  && (wi::to_poly_offset (gimple_assign_rhs2 (def))
		      << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
		{
		  ref2 = gimple_assign_rhs1 (def);
		  if (TREE_CODE (ref2) == SSA_NAME)
		    ref2 = SSA_VAL (ref2);
		}
	      else
		return (void *)-1;
	    }
	}
      else
	return (void *)-1;
      tree len = gimple_call_arg (def_stmt, 2);
      HOST_WIDE_INT leni, offset2i;
      /* Sometimes the above trickery is smarter than alias analysis.  Take
	 advantage of that.  */
      if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
				   (wi::to_poly_offset (len)
				    << LOG2_BITS_PER_UNIT)))
	return NULL;
      if (data->partial_defs.is_empty ()
	  && known_subrange_p (offset, maxsize, offset2,
			       wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
	{
	  tree val;
	  if (integer_zerop (gimple_call_arg (def_stmt, 1)))
	    val = build_zero_cst (vr->type);
	  else if (INTEGRAL_TYPE_P (vr->type)
		   && known_eq (ref->size, 8)
		   && offseti % BITS_PER_UNIT == 0)
	    {
	      gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
				      vr->type, gimple_call_arg (def_stmt, 1));
	      val = vn_nary_build_or_lookup (&res_op);
	      if (!val
		  || (TREE_CODE (val) == SSA_NAME
		      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
		return (void *)-1;
	    }
	  else
	    {
	      unsigned buflen = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
	      if (INTEGRAL_TYPE_P (vr->type))
		buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
	      unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
	      memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
		      buflen);
	      if (BYTES_BIG_ENDIAN)
		{
		  unsigned int amnt
		    = (((unsigned HOST_WIDE_INT) offseti + sizei)
		       % BITS_PER_UNIT);
		  if (amnt)
		    {
		      shift_bytes_in_array_right (buf, buflen,
						  BITS_PER_UNIT - amnt);
		      buf++;
		      buflen--;
		    }
		}
	      else if (offseti % BITS_PER_UNIT != 0)
		{
		  unsigned int amnt
		    = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
				       % BITS_PER_UNIT);
		  shift_bytes_in_array_left (buf, buflen, amnt);
		  buf++;
		  buflen--;
		}
	      val = native_interpret_expr (vr->type, buf, buflen);
	      if (!val)
		return (void *)-1;
	    }
	  return data->finish (0, val);
	}
      /* For now handle clearing memory with partial defs.  */
      else if (known_eq (ref->size, maxsize)
	       && integer_zerop (gimple_call_arg (def_stmt, 1))
	       && tree_fits_poly_int64_p (len)
	       && tree_to_poly_int64 (len).is_constant (&leni)
	       && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
	       && offset.is_constant (&offseti)
	       && offset2.is_constant (&offset2i)
	       && maxsize.is_constant (&maxsizei)
	       && ranges_known_overlap_p (offseti, maxsizei, offset2i,
					  leni << LOG2_BITS_PER_UNIT))
	{
	  pd_data pd;
	  pd.rhs = build_constructor (NULL_TREE, NULL);
	  pd.offset = offset2i - offseti;
	  pd.size = leni << LOG2_BITS_PER_UNIT;
	  return data->push_partial_def (pd, 0, maxsizei);
	}
    }
  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      HOST_WIDE_INT offset2i, size2i;
      bool reverse;
      if (lhs_ref_ok)
	{
	  base2 = ao_ref_base (&lhs_ref);
	  offset2 = lhs_ref.offset;
	  size2 = lhs_ref.size;
	  maxsize2 = lhs_ref.max_size;
	  reverse = reverse_storage_order_for_component_p (lhs);
	}
      else
	base2 = get_ref_base_and_extent (lhs,
					 &offset2, &size2, &maxsize2, &reverse);
      if (known_size_p (maxsize2)
	  && known_eq (maxsize2, size2)
	  && adjust_offsets_for_equal_base_address (base, &offset,
						    base2, &offset2))
	{
	  if (data->partial_defs.is_empty ()
	      && known_subrange_p (offset, maxsize, offset2, size2))
	    {
	      /* While technically undefined behavior do not optimize
		 a full read from a clobber.  */
	      if (gimple_clobber_p (def_stmt))
		return (void *)-1;
	      tree val = build_zero_cst (vr->type);
	      return data->finish (get_alias_set (lhs), val);
	    }
	  else if (known_eq (ref->size, maxsize)
		   && maxsize.is_constant (&maxsizei)
		   && offset.is_constant (&offseti)
		   && offset2.is_constant (&offset2i)
		   && size2.is_constant (&size2i)
		   && ranges_known_overlap_p (offseti, maxsizei,
					      offset2i, size2i))
	    {
	      /* Let clobbers be consumed by the partial-def tracker
		 which can choose to ignore them if they are shadowed
		 by a later def.  */
	      pd_data pd;
	      pd.rhs = gimple_assign_rhs1 (def_stmt);
	      pd.offset = offset2i - offseti;
	      pd.size = size2i;
	      return data->push_partial_def (pd, get_alias_set (lhs), maxsizei);
	    }
	}
    }
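  /* E.g. after a = {}; a read fully covered by the store folds to zero,
     while a partially overlapping read records a zero partial def.  */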
  /* 3) Assignment from a constant.  We can use folds native encode/interpret
     routines to extract the assigned bits.  */
  else if (known_eq (ref->size, maxsize)
	   && is_gimple_reg_type (vr->type)
	   && !contains_storage_order_barrier_p (vr->operands)
	   && gimple_assign_single_p (def_stmt)
	   && CHAR_BIT == 8
	   && BITS_PER_UNIT == 8
	   && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
	   /* native_encode and native_decode operate on arrays of bytes
	      and so fundamentally need a compile-time size and offset.  */
	   && maxsize.is_constant (&maxsizei)
	   && offset.is_constant (&offseti)
	   && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
	       || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
		   && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      HOST_WIDE_INT offset2i, size2i;
      bool reverse;
      if (lhs_ref_ok)
	{
	  base2 = ao_ref_base (&lhs_ref);
	  offset2 = lhs_ref.offset;
	  size2 = lhs_ref.size;
	  maxsize2 = lhs_ref.max_size;
	  reverse = reverse_storage_order_for_component_p (lhs);
	}
      else
	base2 = get_ref_base_and_extent (lhs,
					 &offset2, &size2, &maxsize2, &reverse);
      if (base2
	  && !reverse
	  && !storage_order_barrier_p (lhs)
	  && known_eq (maxsize2, size2)
	  && adjust_offsets_for_equal_base_address (base, &offset,
						    base2, &offset2)
	  && offset.is_constant (&offseti)
	  && offset2.is_constant (&offset2i)
	  && size2.is_constant (&size2i))
	{
	  if (data->partial_defs.is_empty ()
	      && known_subrange_p (offseti, maxsizei, offset2, size2))
	    {
	      /* We support up to 512-bit values (for V8DFmode).  */
	      unsigned char buffer[65];
	      int len;

	      tree rhs = gimple_assign_rhs1 (def_stmt);
	      if (TREE_CODE (rhs) == SSA_NAME)
		rhs = SSA_VAL (rhs);
	      len = native_encode_expr (rhs,
					buffer, sizeof (buffer) - 1,
					(offseti - offset2i) / BITS_PER_UNIT);
	      if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
		{
		  tree type = vr->type;
		  unsigned char *buf = buffer;
		  unsigned int amnt = 0;
		  /* Make sure to interpret in a type that has a range
		     covering the whole access size.  */
		  if (INTEGRAL_TYPE_P (vr->type)
		      && maxsizei != TYPE_PRECISION (vr->type))
		    type = build_nonstandard_integer_type (maxsizei,
							   TYPE_UNSIGNED (type));
		  if (BYTES_BIG_ENDIAN)
		    {
		      /* For big-endian native_encode_expr stored the rhs
			 such that the LSB of it is the LSB of buffer[len - 1].
			 That bit is stored into memory at position
			 offset2 + size2 - 1, i.e. in byte
			 base + (offset2 + size2 - 1) / BITS_PER_UNIT.
			 E.g. for offset2 1 and size2 14, rhs -1 and memory
			 previously cleared that is:
			 0        1
			 01111111|11111110
			 Now, if we want to extract offset 2 and size 12 from
			 it using native_interpret_expr (which actually works
			 for integral bitfield types in terms of byte size of
			 the mode), the native_encode_expr stored the value
			 into buffer as
			 XX111111|11111111
			 and returned len 2 (the X bits are outside of
			 precision).
			 Let sz be maxsize / BITS_PER_UNIT if not extracting
			 a bitfield, and GET_MODE_SIZE otherwise.
			 We need to align the LSB of the value we want to
			 extract as the LSB of buf[sz - 1].
			 The LSB from memory we need to read is at position
			 offset + maxsize - 1.  */
		      HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
		      if (INTEGRAL_TYPE_P (type))
			sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
		      amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
			      - offseti - maxsizei) % BITS_PER_UNIT;
		      if (amnt)
			shift_bytes_in_array_right (buffer, len, amnt);
		      amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
			      - offseti - maxsizei - amnt) / BITS_PER_UNIT;
		      if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
			return (void *)-1;
		      else
			{
			  buf = buffer + len - sz - amnt;
			  len -= (buf - buffer);
			}
		    }
		  else
		    {
		      amnt = ((unsigned HOST_WIDE_INT) offset2i
			      - offseti) % BITS_PER_UNIT;
		      if (amnt)
			{
			  buffer[len] = 0;
			  shift_bytes_in_array_left (buffer, len + 1, amnt);
			  buf = buffer + 1;
			}
		    }
		  tree val = native_interpret_expr (type, buf, len);
		  /* If we chop off bits because the types precision doesn't
		     match the memory access size this is ok when optimizing
		     reads but not when called from the DSE code during
		     elimination.  */
		  if (val
		      && type != vr->type)
		    {
		      if (! int_fits_type_p (val, vr->type))
			val = NULL_TREE;
		      else
			val = fold_convert (vr->type, val);
		    }

		  if (val)
		    return data->finish (get_alias_set (lhs), val);
		}
	    }
	  else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
					   size2i))
	    {
	      pd_data pd;
	      tree rhs = gimple_assign_rhs1 (def_stmt);
	      if (TREE_CODE (rhs) == SSA_NAME)
		rhs = SSA_VAL (rhs);
	      pd.rhs = rhs;
	      pd.offset = offset2i - offseti;
	      pd.size = size2i;
	      return data->push_partial_def (pd, get_alias_set (lhs), maxsizei);
	    }
	}
    }
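  /* E.g. a byte-sized read out of a word-sized constant store is folded
     by encoding the stored value with native_encode_expr and decoding
     the selected bytes with native_interpret_expr.  */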
  /* 4) Assignment from an SSA name which definition we may be able
     to access pieces from or we can combine to a larger entity.  */
  else if (known_eq (ref->size, maxsize)
	   && is_gimple_reg_type (vr->type)
	   && !contains_storage_order_barrier_p (vr->operands)
	   && gimple_assign_single_p (def_stmt)
	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      HOST_WIDE_INT offset2i, size2i, offseti;
      bool reverse;
      if (lhs_ref_ok)
	{
	  base2 = ao_ref_base (&lhs_ref);
	  offset2 = lhs_ref.offset;
	  size2 = lhs_ref.size;
	  maxsize2 = lhs_ref.max_size;
	  reverse = reverse_storage_order_for_component_p (lhs);
	}
      else
	base2 = get_ref_base_and_extent (lhs,
					 &offset2, &size2, &maxsize2, &reverse);
      tree def_rhs = gimple_assign_rhs1 (def_stmt);
      if (!reverse
	  && !storage_order_barrier_p (lhs)
	  && known_size_p (maxsize2)
	  && known_eq (maxsize2, size2)
	  && adjust_offsets_for_equal_base_address (base, &offset,
						    base2, &offset2))
	{
	  if (data->partial_defs.is_empty ()
	      && known_subrange_p (offset, maxsize, offset2, size2)
	      /* ??? We can't handle bitfield precision extracts without
		 either using an alternate type for the BIT_FIELD_REF and
		 then doing a conversion or possibly adjusting the offset
		 according to endianness.  */
	      && (! INTEGRAL_TYPE_P (vr->type)
		  || known_eq (ref->size, TYPE_PRECISION (vr->type)))
	      && multiple_p (ref->size, BITS_PER_UNIT))
	    {
	      tree val = NULL_TREE;
	      if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
		  || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
		{
		  gimple_match_op op (gimple_match_cond::UNCOND,
				      BIT_FIELD_REF, vr->type,
				      SSA_VAL (def_rhs),
				      bitsize_int (ref->size),
				      bitsize_int (offset - offset2));
		  val = vn_nary_build_or_lookup (&op);
		}
	      else if (known_eq (ref->size, size2))
		{
		  gimple_match_op op (gimple_match_cond::UNCOND,
				      VIEW_CONVERT_EXPR, vr->type,
				      SSA_VAL (def_rhs));
		  val = vn_nary_build_or_lookup (&op);
		}
	      if (val
		  && (TREE_CODE (val) != SSA_NAME
		      || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
		return data->finish (get_alias_set (lhs), val);
	    }
	  else if (maxsize.is_constant (&maxsizei)
		   && offset.is_constant (&offseti)
		   && offset2.is_constant (&offset2i)
		   && size2.is_constant (&size2i)
		   && ranges_known_overlap_p (offset, maxsize, offset2, size2))
	    {
	      pd_data pd;
	      pd.rhs = SSA_VAL (def_rhs);
	      pd.offset = offset2i - offseti;
	      pd.size = size2i;
	      return data->push_partial_def (pd, get_alias_set (lhs), maxsizei);
	    }
	}
    }
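  /* The BIT_FIELD_REF and VIEW_CONVERT_EXPR expressions built above are
     value-numbered like any other nary operation, so repeated partial
     reads from the same stored SSA name share one value number.  */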
  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (data->vn_walk_kind == VN_WALKREWRITE
	   && gimple_assign_single_p (def_stmt)
	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      tree base2;
      int i, j, k;
      auto_vec<vn_reference_op_s> rhs;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
	return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      if (!lhs_ref.max_size_known_p ()
	  || (base != base2
	      && (TREE_CODE (base) != MEM_REF
		  || TREE_CODE (base2) != MEM_REF
		  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
		  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
					  TREE_OPERAND (base2, 1))))
	  || !stmt_kills_ref_p (def_stmt, ref))
	return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
	 contains valueized operands for the lhs.  */
      i = vr->operands.length () - 1;
      j = lhs_ops.length () - 1;
      while (j >= 0 && i >= 0
	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
	{
	  i--;
	  j--;
	}

      /* ??? The innermost op should always be a MEM_REF and we already
	 checked that the assignment to the lhs kills vr.  Thus for
	 aggregate copies using char[] types the vn_reference_op_eq
	 may fail when comparing types for compatibility.  But we really
	 don't care here - further lookups with the rewritten operands
	 will simply fail if we messed up types too badly.  */
      poly_int64 extra_off = 0;
      if (j == 0 && i >= 0
	  && lhs_ops[0].opcode == MEM_REF
	  && maybe_ne (lhs_ops[0].off, -1))
	{
	  if (known_eq (lhs_ops[0].off, vr->operands[i].off))
	    i--, j--;
	  else if (vr->operands[i].opcode == MEM_REF
		   && maybe_ne (vr->operands[i].off, -1))
	    {
	      extra_off = vr->operands[i].off - lhs_ops[0].off;
	      i--, j--;
	    }
	}

      /* i now points to the first additional op.
	 ??? LHS may not be completely contained in VR, one or more
	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
	 try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
	return (void *)-1;

      /* Punt if the additional ops contain a storage order barrier.  */
      for (k = i; k >= 0; k--)
	{
	  vro = &vr->operands[k];
	  if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
	    return (void *)-1;
	}

      /* Now re-write REF to be based on the rhs of the assignment.  */
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      copy_reference_ops_from_ref (rhs1, &rhs);

      /* Apply an extra offset to the inner MEM_REF of the RHS.  */
      if (maybe_ne (extra_off, 0))
	{
	  if (rhs.length () < 2)
	    return (void *)-1;
	  int ix = rhs.length () - 2;
	  if (rhs[ix].opcode != MEM_REF
	      || known_eq (rhs[ix].off, -1))
	    return (void *)-1;
	  rhs[ix].off += extra_off;
	  rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
					 build_int_cst (TREE_TYPE (rhs[ix].op0),
							extra_off));
	}

      /* Save the operands since we need to use the original ones for
	 the hash entry we use.  */
      if (!data->saved_operands.exists ())
	data->saved_operands = vr->operands.copy ();

      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      vec<vn_reference_op_s> old = vr->operands;
      if (i + 1 + rhs.length () > vr->operands.length ())
	vr->operands.safe_grow (i + 1 + rhs.length ());
      else
	vr->operands.truncate (i + 1 + rhs.length ());
      FOR_EACH_VEC_ELT (rhs, j, vro)
	vr->operands[i + 1 + j] = *vro;
      vr->operands = valueize_refs (vr->operands);
      if (old == shared_lookup_references)
	shared_lookup_references = vr->operands;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
	{
	  if (data->partial_defs.is_empty ())
	    return data->finish (get_alias_set (lhs), val);
	  /* This is the only interesting case for partial-def handling
	     coming from targets that like to gimplify init-ctors as
	     aggregate copies from constant data like aarch64.  */
	  if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
	    {
	      pd_data pd;
	      pd.rhs = val;
	      pd.offset = 0;
	      pd.size = maxsizei;
	      return data->push_partial_def (pd, get_alias_set (lhs),
					     maxsizei);
	    }
	}

      /* Continuing with partial defs isn't easily possible here, we
	 have to find a full def from further lookups from here.  Probably
	 not worth the special-casing everywhere.  */
      if (!data->partial_defs.is_empty ())
	return (void *)-1;

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, get_alias_set (rhs1),
					  vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (maybe_ne (ref->size, r.size))
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      data->last_vuse_ptr = NULL;
      /* Invalidate the original access path since it now contains
	 the wrong base.  */
      data->orig_ref.ref = NULL_TREE;
      /* Use the alias-set of this LHS for recording an eventual result.  */
      if (data->first_set == -2)
	data->first_set = get_alias_set (lhs);

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }
  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (data->vn_walk_kind == VN_WALKREWRITE
	   && is_gimple_reg_type (vr->type)
	   /* ??? Handle BCOPY as well.  */
	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
	   && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
	   /* Handling this is more complicated, give up for now.  */
	   && data->partial_defs.is_empty ())
    {
      tree lhs, rhs;
      ao_ref r;
      poly_int64 rhs_offset, lhs_offset;
      vn_reference_op_s op;
      poly_uint64 mem_offset;
      poly_int64 at, byte_maxsize;

      /* Only handle non-variable, addressable refs.  */
      if (maybe_ne (ref->size, maxsize)
	  || !multiple_p (offset, BITS_PER_UNIT, &at)
	  || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
	return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
	{
	  lhs = vn_valueize (lhs);
	  if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
	      if (gimple_assign_single_p (def_stmt)
		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
		lhs = gimple_assign_rhs1 (def_stmt);
	    }
	}
      if (TREE_CODE (lhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
						    &lhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
	    {
	      lhs = TREE_OPERAND (tem, 0);
	      if (TREE_CODE (lhs) == SSA_NAME)
		lhs = vn_valueize (lhs);
	      lhs_offset += mem_offset;
	    }
	  else if (DECL_P (tem))
	    lhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (lhs) != SSA_NAME
	  && TREE_CODE (lhs) != ADDR_EXPR)
	return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = vn_valueize (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
						    &rhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
	    {
	      rhs = TREE_OPERAND (tem, 0);
	      rhs_offset += mem_offset;
	    }
	  else if (DECL_P (tem)
		   || TREE_CODE (tem) == STRING_CST)
	    rhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = SSA_VAL (rhs);
      else if (TREE_CODE (rhs) != ADDR_EXPR)
	return (void *)-1;

      /* The bases of the destination and the references have to agree.  */
      if (TREE_CODE (base) == MEM_REF)
	{
	  if (TREE_OPERAND (base, 0) != lhs
	      || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
	    return (void *) -1;
	  at += mem_offset;
	}
      else if (!DECL_P (base)
	       || TREE_CODE (lhs) != ADDR_EXPR
	       || TREE_OPERAND (lhs, 0) != base)
	return (void *)-1;

      /* If the access is completely outside of the memcpy destination
	 area there is no aliasing.  */
      if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
	return NULL;
      /* And the access has to be contained within the memcpy destination.  */
      if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
	return (void *)-1;

      /* Save the operands since we need to use the original ones for
	 the hash entry we use.  */
      if (!data->saved_operands.exists ())
	data->saved_operands = vr->operands.copy ();

      /* Make room for 2 operands in the new reference.  */
      if (vr->operands.length () < 2)
	{
	  vec<vn_reference_op_s> old = vr->operands;
	  vr->operands.safe_grow_cleared (2);
	  if (old == shared_lookup_references)
	    shared_lookup_references = vr->operands;
	}
      else
	vr->operands.truncate (2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      vr->operands[0] = op;
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      vr->operands[1] = op;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
	return data->finish (0, val);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, 0, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (maybe_ne (ref->size, r.size))
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      data->last_vuse_ptr = NULL;
      /* Invalidate the original access path since it now contains
	 the wrong base.  */
      data->orig_ref.ref = NULL_TREE;
      /* Use the alias-set of this stmt for recording an eventual result.  */
      if (data->first_set == -2)
	data->first_set = 0;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}

/* Return a reference op vector from OP that can be used for
   vn_reference_lookup_pieces.  The caller is responsible for releasing
   the vector.  */

vec<vn_reference_op_s>
vn_reference_operands_for_lookup (tree op)
{
  bool valueized;
  return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
}

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length ());
  memcpy (shared_lookup_references.address (),
	  operands.address (),
	  sizeof (vn_reference_op_s)
	  * operands.length ());
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      unsigned limit = param_sccvn_max_alias_queries_per_access;
      vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true);
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
	*vnresult
	  = (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, true,
						    vn_reference_lookup_2,
						    vn_reference_lookup_3,
						    vuse_valueize, limit, &data);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}

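/* This by-pieces entry point lets callers such as PRE look up references
   they have phi-translated into operand pieces without building a tree.  */
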
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  When TBAA_P is false assume
   we are looking up a store and treat it as having alias-set zero.
   *LAST_VUSE_PTR will be updated with the VUSE with which the value
   lookup succeeded.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
		     vn_reference_t *vnresult, bool tbaa_p, tree *last_vuse_ptr)
{
  vec<vn_reference_op_s> operands;
  struct vn_reference_s vr1;
  tree cst;
  bool valuezied_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      unsigned limit = param_sccvn_max_alias_queries_per_access;
      /* Make sure to use a valueized reference if we valueized anything.
	 Otherwise preserve the full reference for advanced TBAA.  */
      if (!valuezied_anything
	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
					     vr1.operands))
	ao_ref_init (&r, op);
      vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
			    last_vuse_ptr, kind, tbaa_p);
      wvnresult =
	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p,
						vn_reference_lookup_2,
						vn_reference_lookup_3,
						vuse_valueize, limit, &data);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
      if (wvnresult)
	{
	  if (vnresult)
	    *vnresult = wvnresult;
	  return wvnresult->result;
	}

      return NULL_TREE;
    }

  if (last_vuse_ptr)
    *last_vuse_ptr = vr1.vuse;
  return vn_reference_lookup_1 (&vr1, vnresult);
}

/* Lookup CALL in the current hash table and return the entry in
   *VNRESULT if found.  Populates *VR for the hashtable lookup.  */

void
vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
			  vn_reference_t vr)
{
  if (vnresult)
    *vnresult = NULL;

  tree vuse = gimple_vuse (call);

  vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr->operands = valueize_shared_reference_ops_from_call (call);
  vr->type = gimple_expr_type (call);
  vr->set = 0;
  vr->hashcode = vn_reference_compute_hash (vr);
  vn_reference_lookup_1 (vr, vnresult);
}

/* Insert OP into the current hash table with a value number of RESULT.  */

static void
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
{
  vn_reference_s **slot;
  vn_reference_t vr1;
  bool tem;

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse_ssa_val (vuse);
  vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  vr1->result_vdef = vdef;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
						      INSERT);

  /* Because IL walking on reference lookup can end up visiting
     a def that is only to be visited later in iteration order
     when we are about to make an irreducible region reducible
     the def can be effectively processed and its ref being inserted
     by vn_reference_lookup_3 already.  So we cannot assert (!*slot)
     but save a lookup if we deal with already inserted refs here.  */
  if (*slot)
    {
      /* We cannot assert that we have the same value either because
	 when disentangling an irreducible region we may end up visiting
	 a use before the corresponding def.  That's a missed optimization
	 only though.  See gcc.dg/tree-ssa/pr87126.c for example.  */
      if (dump_file && (dump_flags & TDF_DETAILS)
	  && !operand_equal_p ((*slot)->result, vr1->result, 0))
	{
	  fprintf (dump_file, "Keeping old value ");
	  print_generic_expr (dump_file, (*slot)->result);
	  fprintf (dump_file, " because of collision\n");
	}
      free_reference (vr1);
      obstack_free (&vn_tables_obstack, vr1);
      return;
    }

  *slot = vr1;
  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
}

/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    tree result, unsigned int value_id)
{
  vn_reference_s **slot;
  vn_reference_t vr1;

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  vr1->value_id = value_id;
  vr1->vuse = vuse_ssa_val (vuse);
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
						      INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  *slot = vr1;

  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
  return vr1;
}

/* Compute and return the hash value for nary operation VNO1.  */

static hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  inchash::hash hstate;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (((vno1->length == 2
	&& commutative_tree_code (vno1->opcode))
       || (vno1->length == 3
	   && commutative_ternary_tree_code (vno1->opcode)))
      && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    std::swap (vno1->op[0], vno1->op[1]);
  else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    {
      std::swap (vno1->op[0], vno1->op[1]);
      vno1->opcode = swap_tree_comparison (vno1->opcode);
    }

  hstate.add_int (vno1->opcode);
  for (i = 0; i < vno1->length; ++i)
    inchash::add_expr (vno1->op[i], hstate);

  return hstate.end ();
}

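/* Note the hash computation above canonicalizes operand order for
   commutative operations and swaps comparison operands in place, so
   hashing and vn_nary_op_eq agree on the canonical form.  */
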
/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

bool
vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
{
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->length != vno2->length)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  /* BIT_INSERT_EXPR has an implicit operand as the type precision
     of op1.  Need to check to make sure they are the same.  */
  if (vno1->opcode == BIT_INSERT_EXPR
      && TREE_CODE (vno1->op[1]) == INTEGER_CST
      && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
	 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
    return false;

  return true;
}

/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
			     enum tree_code code, tree type, tree *ops)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  memcpy (&vno->op[0], ops, sizeof (tree) * length);
}

/* Return the number of operands for a vn_nary ops structure from STMT.  */

static unsigned int
vn_nary_length_from_stmt (gimple *stmt)
{
  switch (gimple_assign_rhs_code (stmt))
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case BIT_FIELD_REF:
      return 3;

    case CONSTRUCTOR:
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

    default:
      return gimple_num_ops (stmt) - 1;
    }
}

/* Initialize VNO from STMT.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = gimple_expr_type (stmt);
  switch (vno->opcode)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      vno->length = 1;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      break;

    case BIT_FIELD_REF:
      vno->length = 3;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
      break;

    case CONSTRUCTOR:
      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
      break;

    default:
      gcc_checking_assert (!gimple_assign_single_p (stmt));
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = gimple_op (stmt, i + 1);
    }
}

/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  vn_nary_op_s **slot;

  if (vnresult)
    *vnresult = NULL;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = *slot;
  return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
}

/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
				  sizeof_vn_nary_op (length));
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}

/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->predicated_values = 0;
  vno1->u.result = result;

  return vno1;
}

/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
			bool compute_hash)
{
  vn_nary_op_s **slot;

  if (compute_hash)
    {
      vno->hashcode = vn_nary_op_compute_hash (vno);
      gcc_assert (! vno->predicated_values
		  || (! vno->u.values->next
		      && vno->u.values->n == 1));
    }

  slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  vno->unwind_to = *slot;
  if (*slot)
    {
      /* Prefer non-predicated values.
	 ??? Only if those are constant, otherwise, with constant predicated
	 value, turn them into predicated values with entry-block validity
	 (??? but we always find the first valid result currently).  */
      if ((*slot)->predicated_values
	  && ! vno->predicated_values)
	{
	  /* ??? We cannot remove *slot from the unwind stack list.
	     For the moment we deal with this by skipping not found
	     entries but this isn't ideal ...  */
	  *slot = vno;
	  /* ??? Maintain a stack of states we can unwind in
	     vn_nary_op_s?  But how far do we unwind?  In reality
	     we need to push change records somewhere...  Or not
	     unwind vn_nary_op_s and linking them but instead
	     unwind the results "list", linking that, which also
	     doesn't move on hashtable resize.  */
	  /* We can also have a ->unwind_to recording *slot there.
	     That way we can make u.values a fixed size array with
	     recording the number of entries but of course we then
	     have always N copies for each unwind_to-state.  Or we
	     make sure to only ever append and each unwinding will
	     pop off one entry (but how to deal with predicated
	     replaced with non-predicated here?)  */
	  vno->next = last_inserted_nary;
	  last_inserted_nary = vno;
	  return vno;
	}
      else if (vno->predicated_values
	       && ! (*slot)->predicated_values)
	return *slot;
      else if (vno->predicated_values
	       && (*slot)->predicated_values)
	{
	  /* ??? Factor this all into an insert_single_predicated_value
	     routine.  */
	  gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
	  basic_block vno_bb
	    = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
	  vn_pval *nval = vno->u.values;
	  vn_pval **next = &vno->u.values;
	  bool found = false;
	  for (vn_pval *val = (*slot)->u.values; val; val = val->next)
	    {
	      if (expressions_equal_p (val->result, vno->u.values->result))
		{
		  found = true;
		  for (unsigned i = 0; i < val->n; ++i)
		    {
		      basic_block val_bb
			= BASIC_BLOCK_FOR_FN (cfun,
					      val->valid_dominated_by_p[i]);
		      if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
			/* Value registered with more generic predicate.  */
			return *slot;
		      else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
			/* Shouldn't happen, we insert in RPO order.  */
			gcc_unreachable ();
		    }
		  /* Append value.  */
		  *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
						     sizeof (vn_pval)
						     + val->n * sizeof (int));
		  (*next)->next = NULL;
		  (*next)->result = val->result;
		  (*next)->n = val->n + 1;
		  memcpy ((*next)->valid_dominated_by_p,
			  val->valid_dominated_by_p,
			  val->n * sizeof (int));
		  (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
		  next = &(*next)->next;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Appending predicate to value.\n");
		  continue;
		}
	      /* Copy other predicated values.  */
	      *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
						 sizeof (vn_pval)
						 + (val->n-1) * sizeof (int));
	      memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
	      (*next)->next = NULL;
	      next = &(*next)->next;
	    }
	  if (!found)
	    *next = nval;

	  *slot = vno;
	  vno->next = last_inserted_nary;
	  last_inserted_nary = vno;
	  return vno;
	}

      /* While we do not want to insert things twice it's awkward to
	 avoid it in the case where visit_nary_op pattern-matches stuff
	 and ends up simplifying the replacement to itself.  We then
	 get two inserts, one from visit_nary_op and one from
	 vn_nary_build_or_lookup.
	 So allow inserts with the same value number.  */
      if ((*slot)->u.result == vno->u.result)
	return *slot;
    }

  /* ??? There's also optimistic vs. previous committed state merging
     that is problematic for the case of unwinding.  */

  /* ??? We should return NULL if we do not use 'vno' and have the
     caller release it.  */
  gcc_assert (!*slot);

  *slot = vno;
  vno->next = last_inserted_nary;
  last_inserted_nary = vno;
  return vno;
}

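/* Nodes inserted above are chained via ->next onto last_inserted_nary
   so the table can be unwound to an earlier state when value-numbering
   of a cycle has to be redone.  */
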
/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops,
			  tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_insert_into (vno1, valid_info->nary, true);
}

static vn_nary_op_t
vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
				     tree type, tree *ops,
				     tree result, unsigned int value_id,
				     edge pred_e)
{
  /* ??? Currently tracking BBs.  */
  if (! single_pred_p (pred_e->dest))
    {
      /* Never record for backedges.  */
      if (pred_e->flags & EDGE_DFS_BACK)
	return NULL;
      edge_iterator ei;
      edge e;
      int cnt = 0;
      /* Ignore backedges.  */
      FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
	if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
	  cnt++;
      if (cnt != 1)
	return NULL;
    }
  if (dump_file && (dump_flags & TDF_DETAILS)
      /* ??? Fix dumping, but currently we only get comparisons.  */
      && TREE_CODE_CLASS (code) == tcc_comparison)
    {
      fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
	       pred_e->dest->index);
      print_generic_expr (dump_file, ops[0], TDF_SLIM);
      fprintf (dump_file, " %s ", get_tree_code_name (code));
      print_generic_expr (dump_file, ops[1], TDF_SLIM);
      fprintf (dump_file, " == %s\n",
	       integer_zerop (result) ? "false" : "true");
    }
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  vno1->predicated_values = 1;
  vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
					      sizeof (vn_pval));
  vno1->u.values->next = NULL;
  vno1->u.values->result = result;
  vno1->u.values->n = 1;
  vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
  return vn_nary_op_insert_into (vno1, valid_info->nary, true);
}

, basic_block bb2
);
3981 vn_nary_op_get_predicated_value (vn_nary_op_t vno
, basic_block bb
)
3983 if (! vno
->predicated_values
)
3984 return vno
->u
.result
;
3985 for (vn_pval
*val
= vno
->u
.values
; val
; val
= val
->next
)
3986 for (unsigned i
= 0; i
< val
->n
; ++i
)
3987 if (dominated_by_p_w_unex (bb
,
3989 (cfun
, val
->valid_dominated_by_p
[i
])))
/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

static vn_nary_op_t
vn_nary_op_insert_stmt (gimple *stmt, tree result)
{
  vn_nary_op_t vno1
    = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
			result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_insert_into (vno1, valid_info->nary, true);
}

/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
			? vp1->block->index
			: EDGE_COUNT (vp1->block->preds));
  tree phi1op;
  tree type;
  edge e;
  edge_iterator ei;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = vp1->type;
  hstate.merge_hash (vn_hash_type (type));

  FOR_EACH_EDGE (e, ei, vp1->block->preds)
    {
      /* Don't hash backedge values they need to be handled as VN_TOP
	 for optimistic value-numbering.  */
      if (e->flags & EDGE_DFS_BACK)
	continue;

      phi1op = vp1->phiargs[e->dest_idx];
      if (phi1op == VN_TOP)
	continue;
      inchash::add_expr (phi1op, hstate);
    }

  return hstate.end ();
}

/* Return true if COND1 and COND2 represent the same condition, set
   *INVERTED_P if one needs to be inverted to make it the same as
   the other.  */

static bool
cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
{
  enum tree_code code1 = gimple_cond_code (cond1);
  enum tree_code code2 = gimple_cond_code (cond2);

  *inverted_p = false;
  if (code1 == code2)
    ;
  else if (code1 == swap_tree_comparison (code2))
    std::swap (lhs2, rhs2);
  else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
    *inverted_p = true;
  else if (code1 == invert_tree_comparison
	     (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
    {
      std::swap (lhs2, rhs2);
      *inverted_p = true;
    }
  else
    return false;

  return ((expressions_equal_p (lhs1, lhs2)
	   && expressions_equal_p (rhs1, rhs2))
	  || (commutative_tree_code (code1)
	      && expressions_equal_p (lhs1, rhs2)
	      && expressions_equal_p (rhs1, lhs2)));
}

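/* For example, a_1 < b_2 matches b_2 > a_1 directly and matches
   a_1 >= b_2 with *INVERTED_P set, modulo NaN considerations.  */
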
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
{
  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block != vp2->block)
    {
      if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
	return false;

      switch (EDGE_COUNT (vp1->block->preds))
	{
	case 1:
	  /* Single-arg PHIs are just copies.  */
	  break;

	case 2:
	  {
	    /* Rule out backedges into the PHI.  */
	    if (vp1->block->loop_father->header == vp1->block
		|| vp2->block->loop_father->header == vp2->block)
	      return false;

	    /* If the PHI nodes do not have compatible types
	       they are not the same.  */
	    if (!types_compatible_p (vp1->type, vp2->type))
	      return false;

	    basic_block idom1
	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
	    basic_block idom2
	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
	    /* If the immediate dominators end in switch stmts multiple
	       values may end up in the same PHI arg via intermediate
	       CFG merges.  */
	    if (EDGE_COUNT (idom1->succs) != 2
		|| EDGE_COUNT (idom2->succs) != 2)
	      return false;

	    /* Verify the controlling stmt is the same.  */
	    gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
	    gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
	    if (! last1 || ! last2)
	      return false;
	    bool inverted_p;
	    if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
				      last2, vp2->cclhs, vp2->ccrhs,
				      &inverted_p))
	      return false;

	    /* Get at true/false controlled edges into the PHI.  */
	    edge te1, te2, fe1, fe2;
	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
						       &te1, &fe1)
		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
							  &te2, &fe2))
	      return false;

	    /* Swap edges if the second condition is the inverted of the
	       first.  */
	    if (inverted_p)
	      std::swap (te2, fe2);

	    /* ??? Handle VN_TOP specially.  */
	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
				       vp2->phiargs[te2->dest_idx])
		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
					  vp2->phiargs[fe2->dest_idx]))
	      return false;

	    return true;
	  }

	default:
	  return false;
	}
    }

  /* If the PHI nodes do not have compatible types
     they are not the same.  */
  if (!types_compatible_p (vp1->type, vp2->type))
    return false;

  /* Any phi in the same block will have its arguments in the
     same edge order, because of how we store phi nodes.  */
  for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
    {
      tree phi1op = vp1->phiargs[i];
      tree phi2op = vp2->phiargs[i];
      if (phi1op == VN_TOP || phi2op == VN_TOP)
	continue;
      if (!expressions_equal_p (phi1op, phi2op))
	return false;
    }

  return true;
}

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple *phi, bool backedges_varying_p)
{
  vn_phi_s **slot;
  struct vn_phi_s *vp1;
  edge e;
  edge_iterator ei;

  vp1 = XALLOCAVAR (struct vn_phi_s,
		    sizeof (struct vn_phi_s)
		    + (gimple_phi_num_args (phi) - 1) * sizeof (tree));

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
	def = SSA_VAL (def);
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
  if (EDGE_COUNT (idom1->succs) == 2)
    if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
      {
	/* ??? We want to use SSA_VAL here.  But possibly not
	   allow VN_TOP.  */
	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
      }
  vp1->hashcode = vn_phi_compute_hash (vp1);
  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return (*slot)->result;
}


/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
{
  vn_phi_s **slot;
  vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
                                           sizeof (vn_phi_s)
                                           + ((gimple_phi_num_args (phi) - 1)
                                              * sizeof (tree)));
  edge e;
  edge_iterator ei;

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
          && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
        def = SSA_VAL (def);
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
  if (EDGE_COUNT (idom1->succs) == 2)
    if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
      {
        /* ??? We want to use SSA_VAL here.  But possibly not
           allow VN_TOP.  */
        vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
        vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
      }
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
  gcc_assert (!*slot);

  *slot = vp1;
  vp1->next = last_inserted_phi;
  last_inserted_phi = vp1;
  return vp1;
}

/* Return true if BB1 is dominated by BB2 taking into account edges
   that are not executable.  */

static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
{
  edge_iterator ei;
  edge e;

  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    return true;

  /* Before iterating we'd like to know if there exists a
     (executable) path from bb2 to bb1 at all, if not we can
     directly return false.  For now simply iterate once.  */

  /* Iterate to the single executable bb1 predecessor.  */
  if (EDGE_COUNT (bb1->preds) > 1)
    {
      edge prede = NULL;
      FOR_EACH_EDGE (e, ei, bb1->preds)
        if (e->flags & EDGE_EXECUTABLE)
          {
            if (prede)
              {
                prede = NULL;
                break;
              }
            prede = e;
          }
      if (! prede)
        return false;
      bb1 = prede->src;

      /* Re-do the dominance check with changed bb1.  */
      if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
        return true;
    }

  /* Iterate to the single executable bb2 successor.  */
  edge succe = NULL;
  FOR_EACH_EDGE (e, ei, bb2->succs)
    if (e->flags & EDGE_EXECUTABLE)
      {
        if (succe)
          {
            succe = NULL;
            break;
          }
        succe = e;
      }
  if (succe)
    {
      /* Verify the reached block is only reached through succe.
         If there is only one edge we can spare us the dominator
         check and iterate directly.  */
      if (EDGE_COUNT (succe->dest->preds) > 1)
        {
          FOR_EACH_EDGE (e, ei, succe->dest->preds)
            if (e != succe
                && (e->flags & EDGE_EXECUTABLE))
              {
                succe = NULL;
                break;
              }
        }
      if (succe)
        {
          bb2 = succe->dest;

          /* Re-do the dominance check with changed bb2.  */
          if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
            return true;
        }
    }

  /* We could now iterate updating bb1 / bb2.  */
  return false;
}
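
/* Illustrative case (not from the original sources): if bb2 ends in
   "if (0)" so that only one of its successor edges is executable, and
   that single executable successor dominates bb1, this answers true
   although plain dominated_by_p on the unmodified CFG would answer
   false.  */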

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  vn_ssa_aux_t from_info = VN_INFO (from);
  tree currval = from_info->valnum; // SSA_VAL (from)
  poly_int64 toff, coff;

  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     that.
     ??? Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
  if (to == VN_TOP)
    {
      /* ??? When iterating and visiting PHI <undef, backedge-value>
         for the first time we rightfully get VN_TOP and we need to
         preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
         With SCCVN we were simply lucky we iterated the other PHI
         cycles first and thus visited the backedge-value DEF.  */
      if (currval == VN_TOP)
        goto set_and_exit;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Forcing value number to varying on "
                 "receiving VN_TOP\n");
      to = from;
    }

  gcc_checking_assert (to != NULL_TREE
                       && ((TREE_CODE (to) == SSA_NAME
                            && (to == from || SSA_VAL (to) == to))
                           || is_gimple_min_invariant (to)));

  if (from != to)
    {
      if (currval == from)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Not changing value number of ");
              print_generic_expr (dump_file, from);
              fprintf (dump_file, " from VARYING to ");
              print_generic_expr (dump_file, to);
              fprintf (dump_file, "\n");
            }
          return false;
        }
      bool curr_invariant = is_gimple_min_invariant (currval);
      bool curr_undefined = (TREE_CODE (currval) == SSA_NAME
                             && ssa_undefined_value_p (currval, false));
      if (currval != VN_TOP
          && !curr_invariant
          && !curr_undefined
          && is_gimple_min_invariant (to))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Forcing VARYING instead of changing "
                       "value number of ");
              print_generic_expr (dump_file, from);
              fprintf (dump_file, " from ");
              print_generic_expr (dump_file, currval);
              fprintf (dump_file, " (non-constant) to ");
              print_generic_expr (dump_file, to);
              fprintf (dump_file, " (constant)\n");
            }
          to = from;
        }
      else if (currval != VN_TOP
               && !curr_undefined
               && TREE_CODE (to) == SSA_NAME
               && ssa_undefined_value_p (to, false))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Forcing VARYING instead of changing "
                       "value number of ");
              print_generic_expr (dump_file, from);
              fprintf (dump_file, " from ");
              print_generic_expr (dump_file, currval);
              fprintf (dump_file, " (non-undefined) to ");
              print_generic_expr (dump_file, to);
              fprintf (dump_file, " (undefined)\n");
            }
          to = from;
        }
      else if (TREE_CODE (to) == SSA_NAME
               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
        to = from;
    }

set_and_exit:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to);
    }

  if (currval != to
      && !operand_equal_p (currval, to, 0)
      /* Different undefined SSA names are not actually different.  See
         PR82320 for a testcase where we'd otherwise not terminate iteration.  */
      && !(TREE_CODE (currval) == SSA_NAME
           && TREE_CODE (to) == SSA_NAME
           && ssa_undefined_value_p (currval, false)
           && ssa_undefined_value_p (to, false))
      /* ??? For addresses involving volatile objects or types operand_equal_p
         does not reliably detect ADDR_EXPRs as equal.  We know we are only
         getting invariant gimple addresses here, so can use
         get_addr_base_and_unit_offset to do this comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
           && TREE_CODE (to) == ADDR_EXPR
           && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
               == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
           && known_eq (coff, toff)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      from_info->valnum = to;
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
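
/* The lattice transitions enforced above, as a sketch (an illustrative
   summary, not from the original comments):

     VN_TOP -> undefined -> constant -> SSA name (VARYING at worst)

   E.g. if x_1 currently has the SSA-name value y_2 and a later visit
   computes the constant 1 for it, the change is refused and x_1 is
   forced to value number itself instead; values may only move towards
   VARYING, which guarantees the iteration terminates.  */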

/* Value number all definitions in STMT to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple *stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Valueize.  */
  rhs = SSA_VAL (rhs);

  return set_ssa_val_to (lhs, rhs);
}

/* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
   is the same.  */

static tree
valueized_wider_op (tree wide_type, tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    op = vn_valueize (op);

  /* Either we have the op widened available.  */
  tree ops[3] = {};
  ops[0] = op;
  tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
                                       wide_type, ops, NULL);
  if (tem)
    return tem;

  /* Or the op is truncated from some existing value.  */
  if (TREE_CODE (op) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (op);
      if (is_gimple_assign (def)
          && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
        {
          tem = gimple_assign_rhs1 (def);
          if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
            {
              if (TREE_CODE (tem) == SSA_NAME)
                tem = vn_valueize (tem);
              return tem;
            }
        }
    }

  /* For constants simply extend it.  */
  if (TREE_CODE (op) == INTEGER_CST)
    return wide_int_to_tree (wide_type, wi::to_wide (op));

  return NULL_TREE;
}
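
/* Example use (illustrative, not from the original sources): asked for
   the int-typed value of the short operand s_1, this returns an
   existing w_2 with w_2 = (int) s_1 in the hash table, or the int
   value s_1 was itself truncated from, or, for an INTEGER_CST, the
   constant extended to int.  */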

/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gassign *stmt)
{
  vn_nary_op_t vnresult;
  tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
  if (! result && vnresult)
    result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
  if (result)
    return set_ssa_val_to (lhs, result);

  /* Do some special pattern matching for redundancies of operations
     in different types.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  switch (code)
    {
    CASE_CONVERT:
      /* Match arithmetic done in a different type where we can easily
         substitute the result from some earlier sign-changed or widened
         operation.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (rhs1) == SSA_NAME
          /* We only handle sign-changes, zero-extension -> & mask or
             sign-extension if we know the inner operation doesn't
             overflow.  */
          && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
                || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                    && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
               && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
              || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
        {
          gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
          if (def
              && (gimple_assign_rhs_code (def) == PLUS_EXPR
                  || gimple_assign_rhs_code (def) == MINUS_EXPR
                  || gimple_assign_rhs_code (def) == MULT_EXPR))
            {
              tree ops[3] = {};
              /* Either we have the op widened available.  */
              ops[0] = valueized_wider_op (type,
                                           gimple_assign_rhs1 (def));
              if (ops[0])
                ops[1] = valueized_wider_op (type,
                                             gimple_assign_rhs2 (def));
              if (ops[0] && ops[1])
                {
                  ops[0] = vn_nary_op_lookup_pieces
                      (2, gimple_assign_rhs_code (def), type, ops, NULL);
                  /* We have wider operation available.  */
                  if (ops[0]
                      /* If the leader is a wrapping operation we can
                         insert it for code hoisting w/o introducing
                         undefined overflow.  If it is not it has to
                         be available.  See PR86554.  */
                      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
                          || (rpo_avail && vn_context_bb
                              && rpo_avail->eliminate_avail (vn_context_bb,
                                                             ops[0]))))
                    {
                      unsigned lhs_prec = TYPE_PRECISION (type);
                      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
                      if (lhs_prec == rhs_prec
                          || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
                        {
                          gimple_match_op match_op (gimple_match_cond::UNCOND,
                                                    NOP_EXPR, type, ops[0]);
                          result = vn_nary_build_or_lookup (&match_op);
                          if (result)
                            {
                              bool changed = set_ssa_val_to (lhs, result);
                              vn_nary_op_insert_stmt (stmt, result);
                              return changed;
                            }
                        }
                      else
                        {
                          tree mask = wide_int_to_tree
                            (type, wi::mask (rhs_prec, false, lhs_prec));
                          gimple_match_op match_op (gimple_match_cond::UNCOND,
                                                    BIT_AND_EXPR,
                                                    TREE_TYPE (lhs),
                                                    ops[0], mask);
                          result = vn_nary_build_or_lookup (&match_op);
                          if (result)
                            {
                              bool changed = set_ssa_val_to (lhs, result);
                              vn_nary_op_insert_stmt (stmt, result);
                              return changed;
                            }
                        }
                    }
                }
            }
        }
      break;
    default:
      break;
    }

  bool changed = set_ssa_val_to (lhs, lhs);
  vn_nary_op_insert_stmt (stmt, lhs);
  return changed;
}
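
/* Illustrative example of the pattern match above (not from the
   original sources): with unsigned short operands

     t_1 = a_2 + b_3;               // done in unsigned short
     l_4 = (unsigned int) t_1;

   and the unsigned int addition of the widened operands already
   available as w_5, l_4 is value numbered to w_5 & 0xffff (the
   zero-extension case); for a same-precision sign-change the plain
   NOP_EXPR of w_5 is used instead.  */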

/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gcall *stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vdef = gimple_vdef (stmt);

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    lhs = NULL_TREE;

  vn_reference_lookup_call (stmt, &vnresult, &vr1);
  if (vnresult)
    {
      if (vnresult->result_vdef && vdef)
        changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
      else if (vdef)
        /* If the call was discovered to be pure or const reflect
           that as far as possible.  */
        changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));

      if (!vnresult->result && lhs)
        vnresult->result = lhs;

      if (vnresult->result && lhs)
        changed |= set_ssa_val_to (lhs, vnresult->result);
    }
  else
    {
      vn_reference_t vr2;
      vn_reference_s **slot;
      tree vdef_val = vdef;
      if (vdef)
        {
          /* If we value numbered an indirect call's function to
             one not clobbering memory value number its VDEF to its
             VUSE.  */
          tree fn = gimple_call_fn (stmt);
          if (fn && TREE_CODE (fn) == SSA_NAME)
            {
              fn = SSA_VAL (fn);
              if (TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
                      & (ECF_CONST | ECF_PURE)))
                vdef_val = vuse_ssa_val (gimple_vuse (stmt));
            }
          changed |= set_ssa_val_to (vdef, vdef_val);
        }
      if (lhs)
        changed |= set_ssa_val_to (lhs, lhs);
      vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
      vr2->vuse = vr1.vuse;
      /* As we are not walking the virtual operand chain we know the
         shared_lookup_references are still original so we can re-use
         them here.  */
      vr2->operands = vr1.operands.copy ();
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      vr2->result_vdef = vdef_val;
      vr2->value_id = 0;
      slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
                                                          INSERT);
      gcc_assert (!*slot);
      *slot = vr2;
      vr2->next = last_inserted_ref;
      last_inserted_ref = vr2;
    }

  return changed;
}
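
/* Illustrative example (not from the original sources): in

     x_1 = f (a_2);    // f pure, vuse .MEM_7
     ...
     y_3 = f (a_2);    // still vuse .MEM_7

   the second lookup hits the entry inserted for the first call, so y_3
   gets the value number of x_1 and the second call becomes a candidate
   for elimination.  */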

/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  result = vn_reference_lookup (op, gimple_vuse (stmt),
                                default_vn_walk_kind, NULL, true, &last_vuse);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
         of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
         So first simplify and lookup this expression to see if it
         is already available.  */
      gimple_match_op res_op (gimple_match_cond::UNCOND,
                              VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      result = vn_nary_build_or_lookup (&res_op);
      /* When building the conversion fails avoid inserting the reference
         again.  */
      if (!result)
        return set_ssa_val_to (lhs, lhs);
    }

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
    }

  return changed;
}
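
/* Type-punning sketch (illustrative, not from the original sources):
   for

     union { int i; float f; } u;
     u.i = 17;
     tem_1 = u.f;

   the lookup finds the stored value by offset and size, and the
   resulting type mismatch is handled by valuing tem_1 as
   VIEW_CONVERT_EXPR <float> (17).  */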

/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  if (TREE_CODE (op) == SSA_NAME)
    op = SSA_VAL (op);

  /* First we want to lookup using the *vuses* from the store and see
     whether the last store to this location with the same address had
     the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
  if (vnresult
      && vnresult->result)
    {
      tree result = vnresult->result;
      gcc_checking_assert (TREE_CODE (result) != SSA_NAME
                           || result == SSA_VAL (result));
      resultsame = expressions_equal_p (result, op);
      if (resultsame)
        {
          /* If the TBAA state isn't compatible for downstream reads
             we cannot value-number the VDEFs the same.  */
          alias_set_type set = get_alias_set (lhs);
          if (vnresult->set != set
              && ! alias_set_subset_of (set, vnresult->set))
            resultsame = false;
        }
    }

  if (!resultsame)
    {
      /* Only perform the following when being called from PRE
         which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
        {
          assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
          vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
          if (vnresult)
            {
              VN_INFO (vdef)->visited = true;
              return set_ssa_val_to (vdef, vnresult->result_vdef);
            }
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "No store match\n");
          fprintf (dump_file, "Value numbering store ");
          print_generic_expr (dump_file, lhs);
          fprintf (dump_file, " to ");
          print_generic_expr (dump_file, op);
          fprintf (dump_file, "\n");
        }
      /* Have to set value numbers before insert, since insert is
         going to valueize the references in-place.  */
      if (vdef)
        changed |= set_ssa_val_to (vdef, vdef);

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
          || is_gimple_reg (op))
        vn_reference_insert (lhs, op, vdef, NULL);

      /* Only perform the following when being called from PRE
         which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
        {
          assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
          vn_reference_insert (assign, lhs, vuse, vdef);
        }
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
         number of the vuse it came from.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Store matched earlier value, "
                 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    }

  return changed;
}
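
/* Illustrative example of the matching case (not from the original
   sources):

     # .MEM_2 = VDEF <.MEM_1>
     p_1->x = 4;
     ...
     # .MEM_3 = VDEF <.MEM_2>
     p_1->x = 4;    // same address, same value, same incoming state

   The second store creates no new memory state, so .MEM_3 is value
   numbered to .MEM_2 and the store can later be removed as
   redundant.  */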

/* Visit and value number PHI, return true if the value number
   changed.  When BACKEDGES_VARYING_P is true then assume all
   backedge values are varying.  When INSERTED is not NULL then
   this is just an ahead query for a possible iteration, set INSERTED
   to true if we'd insert into the hashtable.  */

static bool
visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
{
  tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
  tree backedge_val = NULL_TREE;
  bool seen_non_backedge = false;
  tree sameval_base = NULL_TREE;
  poly_int64 soff, doff;
  unsigned n_executable = 0;
  edge_iterator ei;
  edge e;

  /* TODO: We could check for this in initialization, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* We track whether a PHI was CSEd to avoid excessive iterations
     that would be necessary only because the PHI changed arguments
     but not value.  */
  if (!inserted)
    gimple_set_plf (phi, GF_PLF_1, false);

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    if (e->flags & EDGE_EXECUTABLE)
      {
        tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);

        ++n_executable;
        if (TREE_CODE (def) == SSA_NAME)
          {
            if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
              def = SSA_VAL (def);
            if (e->flags & EDGE_DFS_BACK)
              backedge_val = def;
          }
        if (!(e->flags & EDGE_DFS_BACK))
          seen_non_backedge = true;
        if (def == VN_TOP)
          ;
        /* Ignore undefined defs for sameval but record one.  */
        else if (TREE_CODE (def) == SSA_NAME
                 && ! virtual_operand_p (def)
                 && ssa_undefined_value_p (def, false))
          seen_undef = def;
        else if (sameval == VN_TOP)
          sameval = def;
        else if (!expressions_equal_p (def, sameval))
          {
            /* We know we're arriving only with invariant addresses here,
               try harder comparing them.  We can do some caching here
               which we cannot do in expressions_equal_p.  */
            if (TREE_CODE (def) == ADDR_EXPR
                && TREE_CODE (sameval) == ADDR_EXPR
                && sameval_base != (void *)-1)
              {
                if (!sameval_base)
                  sameval_base = get_addr_base_and_unit_offset
                                   (TREE_OPERAND (sameval, 0), &soff);
                if (!sameval_base)
                  sameval_base = (tree)(void *)-1;
                else if ((get_addr_base_and_unit_offset
                            (TREE_OPERAND (def, 0), &doff) == sameval_base)
                         && known_eq (soff, doff))
                  continue;
              }
            sameval = NULL_TREE;
            break;
          }
      }

  /* If the value we want to use is flowing over the backedge and we
     should take it as VARYING but it has a non-VARYING value drop to
     VARYING.
     If we value-number a virtual operand never value-number to the
     value from the backedge as that confuses the alias-walking code.
     See gcc.dg/torture/pr87176.c.  If the value is the same on a
     non-backedge everything is OK though.  */
  bool visited_p;
  if ((backedge_val
       && !seen_non_backedge
       && TREE_CODE (backedge_val) == SSA_NAME
       && sameval == backedge_val
       && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
           || SSA_VAL (backedge_val) != backedge_val))
      /* Do not value-number a virtual operand to sth not visited though
         given that allows us to escape a region in alias walking.  */
      || (sameval
          && TREE_CODE (sameval) == SSA_NAME
          && !SSA_NAME_IS_DEFAULT_DEF (sameval)
          && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
          && (SSA_VAL (sameval, &visited_p), !visited_p)))
    /* Note this just drops to VARYING without inserting the PHI into
       the hashes.  */
    result = PHI_RESULT (phi);
  /* If none of the edges was executable keep the value-number at VN_TOP,
     if only a single edge is executable use its value.  */
  else if (n_executable <= 1)
    result = seen_undef ? seen_undef : sameval;
  /* If we saw only undefined values and VN_TOP use one of the
     undefined values.  */
  else if (sameval == VN_TOP)
    result = seen_undef ? seen_undef : sameval;
  /* First see if it is equivalent to a phi node in this block.  We prefer
     this as it allows IV elimination - see PRs 66502 and 67167.  */
  else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
    {
      if (!inserted
          && TREE_CODE (result) == SSA_NAME
          && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
        {
          gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Marking CSEd to PHI node ");
              print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
                                 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }
        }
    }
  /* If all values are the same use that, unless we've seen undefined
     values as well and the value isn't constant.
     CCP/copyprop have the same restriction to not remove uninit warnings.  */
  else if (sameval
           && (! seen_undef || is_gimple_min_invariant (sameval)))
    result = sameval;
  else
    {
      result = PHI_RESULT (phi);
      /* Only insert PHIs that are varying, for constant value numbers
         we mess up equivalences otherwise as we are only comparing
         the immediate controlling predicates.  */
      vn_phi_insert (phi, result, backedges_varying_p);
      if (inserted)
        *inserted = true;
    }

  return set_ssa_val_to (PHI_RESULT (phi), result);
}
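
/* Illustrative example (not from the original sources): for

     # x_3 = PHI <a_1(e1), a_1(e2), u_4(e3)>

   where u_4 is undefined and both executable edges agree on a_1, the
   PHI is value numbered to a_1 only if a_1 is a constant, so as not to
   lose uninitialized-use warnings; if edge e3 was proven not
   executable its argument is ignored entirely.  */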

/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gassign *stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  mprts_hook = vn_lookup_simplify_result;
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
  mprts_hook = NULL;
  if (tem
      && (TREE_CODE (tem) == SSA_NAME
          || is_gimple_min_invariant (tem)))
    return tem;

  return NULL_TREE;
}

/* Visit and value number STMT, return true if the value number
   changed.  */

static bool
visit_stmt (gimple *stmt, bool backedges_varying_p = false)
{
  bool changed = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbering stmt = ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_code (stmt) == GIMPLE_PHI)
    changed = visit_phi (stmt, NULL, backedges_varying_p);
  else if (gimple_has_volatile_ops (stmt))
    changed = defs_to_varying (stmt);
  else if (gassign *ass = dyn_cast <gassign *> (stmt))
    {
      enum tree_code code = gimple_assign_rhs_code (ass);
      tree lhs = gimple_assign_lhs (ass);
      tree rhs1 = gimple_assign_rhs1 (ass);
      tree simplified;

      /* Shortcut for copies.  Simplifying copies is pointless,
         since we copy the expression and value they represent.  */
      if (code == SSA_NAME
          && TREE_CODE (lhs) == SSA_NAME)
        {
          changed = visit_copy (lhs, rhs1);
          goto done;
        }
      simplified = try_to_simplify (ass);
      if (simplified)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "RHS ");
              print_gimple_expr (dump_file, ass, 0);
              fprintf (dump_file, " simplified to ");
              print_generic_expr (dump_file, simplified);
              fprintf (dump_file, "\n");
            }
        }
      /* Setting value numbers to constants will occasionally
         screw up phi congruence because constants are not
         uniquely associated with a single ssa name that can be
         looked up.  */
      if (simplified
          && is_gimple_min_invariant (simplified)
          && TREE_CODE (lhs) == SSA_NAME)
        {
          changed = set_ssa_val_to (lhs, simplified);
          goto done;
        }
      else if (simplified
               && TREE_CODE (simplified) == SSA_NAME
               && TREE_CODE (lhs) == SSA_NAME)
        {
          changed = visit_copy (lhs, simplified);
          goto done;
        }

      if ((TREE_CODE (lhs) == SSA_NAME
           /* We can substitute SSA_NAMEs that are live over
              abnormal edges with their constant value.  */
           && !(gimple_assign_copy_p (ass)
                && is_gimple_min_invariant (rhs1))
           && !(simplified
                && is_gimple_min_invariant (simplified))
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
          /* Stores or copies from SSA_NAMEs that are live over
             abnormal edges are a problem.  */
          || (code == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
        changed = defs_to_varying (ass);
      else if (REFERENCE_CLASS_P (lhs)
               || DECL_P (lhs))
        changed = visit_reference_op_store (lhs, rhs1, ass);
      else if (TREE_CODE (lhs) == SSA_NAME)
        {
          if ((gimple_assign_copy_p (ass)
               && is_gimple_min_invariant (rhs1))
              || (simplified
                  && is_gimple_min_invariant (simplified)))
            {
              if (simplified)
                changed = set_ssa_val_to (lhs, simplified);
              else
                changed = set_ssa_val_to (lhs, rhs1);
            }
          else
            {
              /* Visit the original statement.  */
              switch (vn_get_stmt_kind (ass))
                {
                case VN_NARY:
                  changed = visit_nary_op (lhs, ass);
                  break;
                case VN_REFERENCE:
                  changed = visit_reference_op_load (lhs, rhs1, ass);
                  break;
                default:
                  changed = defs_to_varying (ass);
                  break;
                }
            }
        }
      else
        changed = defs_to_varying (ass);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call_stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
        {
          /* Try constant folding based on our current lattice.  */
          tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
                                                            vn_valueize);
          if (simplified)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "call ");
                  print_gimple_expr (dump_file, call_stmt, 0);
                  fprintf (dump_file, " simplified to ");
                  print_generic_expr (dump_file, simplified);
                  fprintf (dump_file, "\n");
                }
            }
          /* Setting value numbers to constants will occasionally
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
             looked up.  */
          if (simplified
              && is_gimple_min_invariant (simplified))
            {
              changed = set_ssa_val_to (lhs, simplified);
              if (gimple_vdef (call_stmt))
                changed |= set_ssa_val_to (gimple_vdef (call_stmt),
                                           SSA_VAL (gimple_vuse (call_stmt)));
              goto done;
            }
          else if (simplified
                   && TREE_CODE (simplified) == SSA_NAME)
            {
              changed = visit_copy (lhs, simplified);
              if (gimple_vdef (call_stmt))
                changed |= set_ssa_val_to (gimple_vdef (call_stmt),
                                           SSA_VAL (gimple_vuse (call_stmt)));
              goto done;
            }
          else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            {
              changed = defs_to_varying (call_stmt);
              goto done;
            }
        }

      /* Pick up flags from a devirtualization target.  */
      tree fn = gimple_call_fn (stmt);
      int extra_fnflags = 0;
      if (fn && TREE_CODE (fn) == SSA_NAME)
        {
          fn = SSA_VAL (fn);
          if (TREE_CODE (fn) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
            extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
        }
      if (!gimple_call_internal_p (call_stmt)
          && (/* Calls to the same function with the same vuse
                 and the same operands do not necessarily return the same
                 value, unless they're pure or const.  */
              ((gimple_call_flags (call_stmt) | extra_fnflags)
               & (ECF_PURE | ECF_CONST))
              /* If calls have a vdef, subsequent calls won't have
                 the same incoming vuse.  So, if 2 calls with vdef have the
                 same vuse, we know they're not subsequent.
                 We can value number 2 calls to the same function with the
                 same vuse and the same operands which are not subsequent
                 the same, because there is no code in the program that can
                 compare the 2 values...  */
              || (gimple_vdef (call_stmt)
                  /* ... unless the call returns a pointer which does
                     not alias with anything else.  In which case the
                     information that the values are distinct are encoded
                     in the IL.  */
                  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
                  /* Only perform the following when being called from PRE
                     which embeds tail merging.  */
                  && default_vn_walk_kind == VN_WALK)))
        changed = visit_reference_op_call (lhs, call_stmt);
      else
        changed = defs_to_varying (call_stmt);
    }
  else
    changed = defs_to_varying (stmt);
done:
  return changed;
}

/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table, unsigned size)
{
  table->phis = new vn_phi_table_type (size);
  table->nary = new vn_nary_op_table_type (size);
  table->references = new vn_reference_table_type (size);
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  /* Walk over elements and release vectors.  */
  vn_reference_iterator_type hir;
  vn_reference_t vr;
  FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
    vr->operands.release ();
  delete table->phis;
  table->phis = NULL;
  delete table->nary;
  table->nary = NULL;
  delete table->references;
  table->references = NULL;
}

/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    if (! vno->predicated_values)
      set_value_id_for_result (vno->u.result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
                               hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If either one is VN_TOP consider them equal.  */
  if (e1 == VN_TOP || e2 == VN_TOP)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv, honor_nans, honor_snans,
                                       rhs2, &handled);
  if (handled && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}

/* Return true if the reference operation REF may trap.  */

bool
vn_reference_may_trap (vn_reference_t ref)
{
  switch (ref->operands[0].opcode)
    {
    case MODIFY_EXPR:
    case CALL_EXPR:
      /* We do not handle calls.  */
    case ADDR_EXPR:
      /* And toplevel address computations never trap.  */
      return false;
    default:;
    }

  vn_reference_op_t op;
  unsigned i;
  FOR_EACH_VEC_ELT (ref->operands, i, op)
    {
      switch (op->opcode)
        {
        case WITH_SIZE_EXPR:
        case TARGET_MEM_REF:
          /* Always variable.  */
          return true;
        case COMPONENT_REF:
          if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
            return true;
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          if (TREE_CODE (op->op0) == SSA_NAME)
            return true;
          break;
        case MEM_REF:
          /* Nothing interesting in itself, the base is separate.  */
          break;
        /* The following are the address bases.  */
        case SSA_NAME:
          return true;
        case ADDR_EXPR:
          if (op->op0)
            return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
          return false;
        default:;
        }
    }
  return false;
}

eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
                                            bitmap inserted_exprs_)
  : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
    el_todo (0), eliminations (0), insertions (0),
    inserted_exprs (inserted_exprs_)
{
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
}

eliminate_dom_walker::~eliminate_dom_walker ()
{
  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);
}

/* Return a leader for OP that is available at the current point of the
   eliminate domwalk.  */

tree
eliminate_dom_walker::eliminate_avail (basic_block, tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
        return valnum;
      if (avail.length () > SSA_NAME_VERSION (valnum))
        return avail[SSA_NAME_VERSION (valnum)];
    }
  else if (is_gimple_min_invariant (valnum))
    return valnum;
  return NULL_TREE;
}

/* At the current point of the eliminate domwalk make OP available.  */

void
eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (avail.length () <= SSA_NAME_VERSION (valnum))
        avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
      tree pushop = op;
      if (avail[SSA_NAME_VERSION (valnum)])
        pushop = avail[SSA_NAME_VERSION (valnum)];
      avail_stack.safe_push (pushop);
      avail[SSA_NAME_VERSION (valnum)] = op;
    }
}
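
/* A note on the discipline used here (a summary, not in the original
   comments): before_dom_children pushes a NULL_TREE marker per block
   and after_dom_children pops back to it, undoing the avail[] updates
   made above; together they implement the usual scoped availability
   table of a dominator walk.  */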

/* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
   the leader for the expression if insertion was successful.  */

tree
eliminate_dom_walker::eliminate_insert (basic_block bb,
                                        gimple_stmt_iterator *gsi, tree val)
{
  /* We can insert a sequence with a single assignment only.  */
  gimple_seq stmts = VN_INFO (val)->expr;
  if (!gimple_seq_singleton_p (stmts))
    return NULL_TREE;
  gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
  if (!stmt
      || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
          && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
          && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
          && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
              || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
    return NULL_TREE;

  tree op = gimple_assign_rhs1 (stmt);
  if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
      || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    op = TREE_OPERAND (op, 0);
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
  if (!leader)
    return NULL_TREE;

  tree res;
  stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    res = gimple_build (&stmts, BIT_FIELD_REF,
                        TREE_TYPE (val), leader,
                        TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
                        TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
  else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
    res = gimple_build (&stmts, BIT_AND_EXPR,
                        TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
  else
    res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
                        TREE_TYPE (val), leader);
  if (TREE_CODE (res) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (res)
      || gimple_bb (SSA_NAME_DEF_STMT (res)))
    {
      gimple_seq_discard (stmts);

      /* During propagation we have to treat SSA info conservatively
         and thus we can end up simplifying the inserted expression
         at elimination time to sth not defined in stmts.  */
      /* But then this is a redundancy we failed to detect.  Which means
         res now has two values.  That doesn't play well with how
         we track availability here, so give up.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          if (TREE_CODE (res) == SSA_NAME)
            res = eliminate_avail (bb, res);
          if (res)
            {
              fprintf (dump_file, "Failed to insert expression for value ");
              print_generic_expr (dump_file, val);
              fprintf (dump_file, " which is really fully redundant to ");
              print_generic_expr (dump_file, res);
              fprintf (dump_file, "\n");
            }
        }

      return NULL_TREE;
    }
  else
    {
      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
      VN_INFO (res)->valnum = val;
      VN_INFO (res)->visited = true;
    }

  insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
    }

  return res;
}

void
eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
{
  tree sprime = NULL_TREE;
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_get_lhs (stmt);
  if (lhs && TREE_CODE (lhs) == SSA_NAME
      && !gimple_has_volatile_ops (stmt)
      /* See PR43491.  Do not replace a global register variable when
         it is the RHS of an assignment.  Do replace local register
         variables since gcc does not guarantee a local variable will
         be allocated in register.
         ??? The fix isn't effective here.  This should instead
         be ensured by not value-numbering them the same but treating
         them like volatiles?  */
      && !(gimple_assign_single_p (stmt)
           && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
               && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
               && is_global_var (gimple_assign_rhs1 (stmt)))))
    {
      sprime = eliminate_avail (b, lhs);
      if (!sprime)
        {
          /* If there is no existing usable leader but SCCVN thinks
             it has an expression it wants to use as replacement,
             insert that.  */
          tree val = VN_INFO (lhs)->valnum;
          if (val != VN_TOP
              && TREE_CODE (val) == SSA_NAME
              && VN_INFO (val)->needs_insertion
              && VN_INFO (val)->expr != NULL
              && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
            eliminate_push_avail (b, sprime);
        }

      /* If this now constitutes a copy duplicate points-to
         and range info appropriately.  This is especially
         important for inserted code.  See tree-ssa-copy.c
         for similar code.  */
      if (sprime
          && TREE_CODE (sprime) == SSA_NAME)
        {
          basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
          if (POINTER_TYPE_P (TREE_TYPE (lhs))
              && SSA_NAME_PTR_INFO (lhs)
              && ! SSA_NAME_PTR_INFO (sprime))
            {
              duplicate_ssa_name_ptr_info (sprime,
                                           SSA_NAME_PTR_INFO (lhs));
              if (b != sprime_b)
                mark_ptr_info_alignment_unknown
                    (SSA_NAME_PTR_INFO (sprime));
            }
          else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                   && SSA_NAME_RANGE_INFO (lhs)
                   && ! SSA_NAME_RANGE_INFO (sprime)
                   && b == sprime_b)
            duplicate_ssa_name_range_info (sprime,
                                           SSA_NAME_RANGE_TYPE (lhs),
                                           SSA_NAME_RANGE_INFO (lhs));
        }

      /* Inhibit the use of an inserted PHI on a loop header when
         the address of the memory reference is a simple induction
         variable.  In other cases the vectorizer won't do anything
         anyway (either it's loop invariant or a complicated
         expression).  */
      if (sprime
          && TREE_CODE (sprime) == SSA_NAME
          && do_pre
          && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
          && loop_outer (b->loop_father)
          && has_zero_uses (sprime)
          && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
          && gimple_assign_load_p (stmt))
        {
          gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
          basic_block def_bb = gimple_bb (def_stmt);
          if (gimple_code (def_stmt) == GIMPLE_PHI
              && def_bb->loop_father->header == def_bb)
            {
              loop_p loop = def_bb->loop_father;
              ssa_op_iter iter;
              tree op;
              FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
                {
                  affine_iv iv;
                  def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
                  if (def_bb
                      && flow_bb_inside_loop_p (loop, def_bb)
                      && simple_iv (loop, loop, op, &iv, true))
                    break;
                }
              if (op != NULL_TREE)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Not replacing ");
                      print_gimple_expr (dump_file, stmt, 0);
                      fprintf (dump_file, " with ");
                      print_generic_expr (dump_file, sprime);
                      fprintf (dump_file, " which would add a loop"
                               " carried dependence to loop %d\n",
                               loop->num);
                    }
                  /* Don't keep sprime available.  */
                  sprime = NULL_TREE;
                }
            }
        }

      if (sprime)
        {
          /* If we can propagate the value computed for LHS into
             all uses don't bother doing anything with this stmt.  */
          if (may_propagate_copy (lhs, sprime))
            {
              /* Mark it for removal.  */
              to_remove.safe_push (stmt);

              /* ??? Don't count copy/constant propagations.  */
              if (gimple_assign_single_p (stmt)
                  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
                      || gimple_assign_rhs1 (stmt) == sprime))
                return;

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Replaced ");
                  print_gimple_expr (dump_file, stmt, 0);
                  fprintf (dump_file, " with ");
                  print_generic_expr (dump_file, sprime);
                  fprintf (dump_file, " in all uses of ");
                  print_gimple_stmt (dump_file, stmt, 0);
                }

              eliminations++;
              return;
            }

          /* If this is an assignment from our leader (which
             happens in the case the value-number is a constant)
             then there is nothing to do.  Likewise if we run into
             inserted code that needed a conversion because of
             our type-agnostic value-numbering of loads.  */
          if ((gimple_assign_single_p (stmt)
               || (is_gimple_assign (stmt)
                   && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                       || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)))
              && sprime == gimple_assign_rhs1 (stmt))
            return;

          /* Else replace its RHS.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Replaced ");
              print_gimple_expr (dump_file, stmt, 0);
              fprintf (dump_file, " with ");
              print_generic_expr (dump_file, sprime);
              fprintf (dump_file, " in ");
              print_gimple_stmt (dump_file, stmt, 0);
            }
          eliminations++;

          bool can_make_abnormal_goto = (is_gimple_call (stmt)
                                         && stmt_can_make_abnormal_goto (stmt));
          gimple *orig_stmt = stmt;
          if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                          TREE_TYPE (sprime)))
            {
              /* We preserve conversions to but not from function or method
                 types.  This asymmetry makes it necessary to re-instantiate
                 conversions here.  */
              if (POINTER_TYPE_P (TREE_TYPE (lhs))
                  && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
                sprime = fold_convert (TREE_TYPE (lhs), sprime);
              else
                gcc_unreachable ();
            }
          tree vdef = gimple_vdef (stmt);
          tree vuse = gimple_vuse (stmt);
          propagate_tree_value_into_stmt (gsi, sprime);
          stmt = gsi_stmt (*gsi);
          update_stmt (stmt);
          /* In case the VDEF on the original stmt was released, value-number
             it to the VUSE.  This is to make vuse_ssa_val able to skip
             released virtual operands.  */
          if (vdef != gimple_vdef (stmt))
            {
              gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
              VN_INFO (vdef)->valnum = vuse;
            }

          /* If we removed EH side-effects from the statement, clean
             its EH information.  */
          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            {
              bitmap_set_bit (need_eh_cleanup,
                              gimple_bb (stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "  Removed EH side-effects.\n");
            }

          /* Likewise for AB side-effects.  */
          if (can_make_abnormal_goto
              && !stmt_can_make_abnormal_goto (stmt))
            {
              bitmap_set_bit (need_ab_cleanup,
                              gimple_bb (stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "  Removed AB side-effects.\n");
            }

          return;
        }
    }

  /* If the statement is a scalar store, see if the expression
     has the same value number as its rhs.  If so, the store is
     redundant.  */
  if (gimple_assign_single_p (stmt)
      && !gimple_has_volatile_ops (stmt)
      && !is_gimple_reg (gimple_assign_lhs (stmt))
      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
          || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      vn_reference_t vnresult;
      /* ??? gcc.dg/torture/pr91445.c shows that we lookup a boolean
         typed load of a byte known to be 0x11 as 1 so a store of
         a boolean 1 is detected as redundant.  Because of this we
         have to make sure to lookup with a ref where its size
         matches the precision.  */
      tree lookup_lhs = lhs;
      if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
          && (TREE_CODE (lhs) != COMPONENT_REF
              || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
          && !type_has_mode_precision_p (TREE_TYPE (lhs)))
        {
          if (TREE_CODE (lhs) == COMPONENT_REF
              || TREE_CODE (lhs) == MEM_REF)
            {
              tree ltype = build_nonstandard_integer_type
                             (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
                              TYPE_UNSIGNED (TREE_TYPE (lhs)));
              if (TREE_CODE (lhs) == COMPONENT_REF)
                {
                  tree foff = component_ref_field_offset (lhs);
                  tree f = TREE_OPERAND (lhs, 1);
                  if (!poly_int_tree_p (foff))
                    lookup_lhs = NULL_TREE;
                  else
                    lookup_lhs = build3 (BIT_FIELD_REF, ltype,
                                         TREE_OPERAND (lhs, 0),
                                         TYPE_SIZE (TREE_TYPE (lhs)),
                                         bit_from_pos
                                           (foff, DECL_FIELD_BIT_OFFSET (f)));
                }
              else
                lookup_lhs = build2 (MEM_REF, ltype,
                                     TREE_OPERAND (lhs, 0),
                                     TREE_OPERAND (lhs, 1));
            }
          else
            lookup_lhs = NULL_TREE;
        }
      tree val = NULL_TREE;
      if (lookup_lhs)
        val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
                                   VN_WALKREWRITE, &vnresult, false);
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = VN_INFO (rhs)->valnum;
      if (val
          && (operand_equal_p (val, rhs, 0)
              /* Due to the bitfield lookups above we can get bit
                 interpretations of the same RHS as values here.  Those
                 are redundant as well.  */
              || (TREE_CODE (val) == SSA_NAME
                  && gimple_assign_single_p (SSA_NAME_DEF_STMT (val))
                  && (val = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val)))
                  && TREE_CODE (val) == VIEW_CONVERT_EXPR
                  && TREE_OPERAND (val, 0) == rhs)))
        {
          /* We can only remove the later store if the former aliases
             at least all accesses the later one does or if the store
             was to readonly memory storing the same value.  */
          alias_set_type set = get_alias_set (lhs);
          if (! vnresult
              || vnresult->set == set
              || alias_set_subset_of (set, vnresult->set))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Deleted redundant store ");
                  print_gimple_stmt (dump_file, stmt, 0);
                }

              /* Queue stmt for removal.  */
              to_remove.safe_push (stmt);
              return;
            }
        }
    }

  /* If this is a control statement value numbering left edges
     unexecuted on force the condition in a way consistent with
     that.  */
  if (gcond *cond = dyn_cast <gcond *> (stmt))
    {
      if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
          ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Removing unexecutable edge from ");
              print_gimple_stmt (dump_file, stmt, 0);
            }
          if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
              == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
            gimple_cond_make_true (cond);
          else
            gimple_cond_make_false (cond);
          update_stmt (cond);
          el_todo |= TODO_cleanup_cfg;
          return;
        }
    }

  bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
  bool was_noreturn = (is_gimple_call (stmt)
                       && gimple_call_noreturn_p (stmt));
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  /* If we didn't replace the whole stmt (or propagate the result
     into all uses), replace all uses on this stmt with their
     leaders.  */
  bool modified = false;
  use_operand_p use_p;
  ssa_op_iter iter;
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree use = USE_FROM_PTR (use_p);
      /* ??? The call code above leaves stmt operands un-updated.  */
      if (TREE_CODE (use) != SSA_NAME)
        continue;
      tree sprime;
      if (SSA_NAME_IS_DEFAULT_DEF (use))
        /* ??? For default defs BB shouldn't matter, but we have to
           solve the inconsistency between rpo eliminate and
           dom eliminate avail valueization first.  */
        sprime = eliminate_avail (b, use);
      else
        /* Look for sth available at the definition block of the argument.
           This avoids inconsistencies between availability there which
           decides if the stmt can be removed and availability at the
           use site.  The SSA property ensures that things available
           at the definition are also available at uses.  */
        sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
      if (sprime && sprime != use
          && may_propagate_copy (use, sprime)
          /* We substitute into debug stmts to avoid excessive
             debug temporaries created by removed stmts, but we need
             to avoid doing so for inserted sprimes as we never want
             to create debug temporaries for them.  */
          && (!inserted_exprs
              || TREE_CODE (sprime) != SSA_NAME
              || !is_gimple_debug (stmt)
              || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
        {
          propagate_value (use_p, sprime);
          modified = true;
        }
    }

  /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
     into which is a requirement for the IPA devirt machinery.  */
  gimple *old_stmt = stmt;
  if (modified)
    {
      /* If a formerly non-invariant ADDR_EXPR is turned into an
         invariant one it was on a separate stmt.  */
      if (gimple_assign_single_p (stmt)
          && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
        recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
      gimple_stmt_iterator prev = *gsi;
      gsi_prev (&prev);
      if (fold_stmt (gsi))
        {
          /* fold_stmt may have created new stmts inbetween
             the previous stmt and the folded stmt.  Mark
             all defs created there as varying to not confuse
             the SCCVN machinery as we're using that even during
             elimination.  */
          if (gsi_end_p (prev))
            prev = gsi_start_bb (b);
          else
            gsi_next (&prev);
          if (gsi_stmt (prev) != gsi_stmt (*gsi))
            do
              {
                tree def;
                ssa_op_iter dit;
                FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
                                           dit, SSA_OP_ALL_DEFS)
                  /* As existing DEFs may move between stmts
                     only process new ones.  */
                  if (! has_VN_INFO (def))
                    {
                      VN_INFO (def)->valnum = def;
                      VN_INFO (def)->visited = true;
                    }
                if (gsi_stmt (prev) == gsi_stmt (*gsi))
                  break;
                gsi_next (&prev);
              }
            while (1);
        }
      stmt = gsi_stmt (*gsi);
      /* In case we folded the stmt away schedule the NOP for removal.  */
      if (gimple_nop_p (stmt))
        to_remove.safe_push (stmt);
    }

  /* Visit indirect calls and turn them into direct calls if
     possible using the devirtualization machinery.  Do this before
     checking for required EH/abnormal/noreturn cleanup as devirt
     may expose more of those.  */
  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree fn = gimple_call_fn (call_stmt);
      if (fn
          && flag_devirtualize
          && virtual_method_call_p (fn))
        {
          tree otr_type = obj_type_ref_class (fn);
          unsigned HOST_WIDE_INT otr_tok
            = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
          tree instance;
          ipa_polymorphic_call_context context (current_function_decl,
                                                fn, stmt, &instance);
          context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
                                    otr_type, stmt, NULL);
          bool final;
          vec <cgraph_node *> targets
            = possible_polymorphic_call_targets (obj_type_ref_class (fn),
                                                 otr_tok, context, &final);
          if (dump_file)
            dump_possible_polymorphic_call_targets (dump_file,
                                                    obj_type_ref_class (fn),
                                                    otr_tok, context);
          if (final && targets.length () <= 1 && dbg_cnt (devirt))
            {
              tree fn;
              if (targets.length () == 1)
                fn = targets[0]->decl;
              else
                fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
              if (dump_enabled_p ())
                {
                  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
                                   "converting indirect call to "
                                   "function %s\n",
                                   lang_hooks.decl_printable_name (fn, 2));
                }
              gimple_call_set_fndecl (call_stmt, fn);
              /* If changing the call to __builtin_unreachable
                 or similar noreturn function, adjust gimple_call_fntype
                 too.  */
              if (gimple_call_noreturn_p (call_stmt)
                  && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
                  && TYPE_ARG_TYPES (TREE_TYPE (fn))
                  && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
                      == void_type_node))
                gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
              maybe_remove_unused_call_args (cfun, call_stmt);
              modified = true;
            }
        }
    }

  if (modified)
    {
      /* When changing a call into a noreturn call, cfg cleanup
         is needed to fix up the noreturn call.  */
      if (!was_noreturn
          && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
        to_fixup.safe_push (stmt);
      /* When changing a condition or switch into one we know what
         edge will be executed, schedule a cfg cleanup.  */
      if ((gimple_code (stmt) == GIMPLE_COND
           && (gimple_cond_true_p (as_a <gcond *> (stmt))
               || gimple_cond_false_p (as_a <gcond *> (stmt))))
          || (gimple_code (stmt) == GIMPLE_SWITCH
              && TREE_CODE (gimple_switch_index
                              (as_a <gswitch *> (stmt))) == INTEGER_CST))
        el_todo |= TODO_cleanup_cfg;
      /* If we removed EH side-effects from the statement, clean
         its EH information.  */
      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
        {
          bitmap_set_bit (need_eh_cleanup,
                          gimple_bb (stmt)->index);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  Removed EH side-effects.\n");
        }
      /* Likewise for AB side-effects.  */
      if (can_make_abnormal_goto
          && !stmt_can_make_abnormal_goto (stmt))
        {
          bitmap_set_bit (need_ab_cleanup,
                          gimple_bb (stmt)->index);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  Removed AB side-effects.\n");
        }
      update_stmt (stmt);
      /* In case the VDEF on the original stmt was released, value-number
         it to the VUSE.  This is to make vuse_ssa_val able to skip
         released virtual operands.  */
      if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
        VN_INFO (vdef)->valnum = vuse;
    }

  /* Make new values available - for fully redundant LHS we
     continue with the next stmt above and skip this.  */
  def_operand_p defp;
  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
    eliminate_push_avail (b, DEF_FROM_PTR (defp));
}
/* Perform elimination for the basic-block B during the domwalk.  */

edge
eliminate_dom_walker::before_dom_children (basic_block b)
{
  /* Mark new bb.  */
  avail_stack.safe_push (NULL_TREE);

  /* Skip unreachable blocks marked unreachable during the SCCVN domwalk.  */
  if (!(b->flags & BB_EXECUTABLE))
    return NULL;

  vn_context_bb = b;

  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);

      if (virtual_operand_p (res))
        {
          gsi_next (&gsi);
          continue;
        }

      tree sprime = eliminate_avail (b, res);
      if (sprime
          && sprime != res)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Replaced redundant PHI node defining ");
              print_generic_expr (dump_file, res);
              fprintf (dump_file, " with ");
              print_generic_expr (dump_file, sprime);
              fprintf (dump_file, "\n");
            }

          /* If we inserted this PHI node ourself, it's not an elimination.  */
          if (! inserted_exprs
              || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
            eliminations++;

          /* If we will propagate into all uses don't bother to do
             anything.  */
          if (may_propagate_copy (res, sprime))
            {
              /* Mark the PHI for removal.  */
              to_remove.safe_push (phi);
              gsi_next (&gsi);
              continue;
            }

          remove_phi_node (&gsi, false);

          if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
            sprime = fold_convert (TREE_TYPE (res), sprime);
          gimple *stmt = gimple_build_assign (res, sprime);
          gimple_stmt_iterator gsi2 = gsi_after_labels (b);
          gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
          continue;
        }

      eliminate_push_avail (b, res);
      gsi_next (&gsi);
    }
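  /* For example (illustrative SSA names), a PHI merging the same value
     on all incoming edges

       # x_3 = PHI <y_1(2), y_1(3)>

     value-numbers to y_1; if the copy can be propagated into all uses
     the PHI is just queued for removal, otherwise it is rewritten to
     the assignment x_3 = y_1 after the labels of the block.  */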
  for (gimple_stmt_iterator gsi = gsi_start_bb (b);
       !gsi_end_p (gsi); gsi_next (&gsi))
    eliminate_stmt (b, &gsi);
  /* Replace destination PHI arguments.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, b->succs)
    if (e->flags & EDGE_EXECUTABLE)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
           !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
          tree arg = USE_FROM_PTR (use_p);
          if (TREE_CODE (arg) != SSA_NAME
              || virtual_operand_p (arg))
            continue;
          tree sprime = eliminate_avail (b, arg);
          if (sprime && may_propagate_copy (arg, sprime))
            propagate_value (use_p, sprime);
        }

  vn_context_bb = NULL;

  return NULL;
}
/* Make no longer available leaders no longer available.  */

void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = avail_stack.pop ()) != NULL_TREE)
    {
      tree valnum = VN_INFO (entry)->valnum;
      tree old = avail[SSA_NAME_VERSION (valnum)];
      if (old == entry)
        avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
      else
        avail[SSA_NAME_VERSION (valnum)] = entry;
    }
}
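/* The avail_stack thus implements dominator-scoped availability: each
   before_dom_children pushes a NULL_TREE marker followed by the leaders
   it makes available, and popping back to the marker here either clears
   an avail slot or restores the leader the popped entry shadowed.  */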
/* Remove queued stmts and perform delayed cleanups.  */

unsigned
eliminate_dom_walker::eliminate_cleanup (bool region_p)
{
  statistics_counter_event (cfun, "Eliminated", eliminations);
  statistics_counter_event (cfun, "Insertions", insertions);

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!to_remove.is_empty ())
    {
      bool do_release_defs = true;
      gimple *stmt = to_remove.pop ();

      /* When we are value-numbering a region we do not require exit PHIs to
         be present so we have to make sure to deal with uses outside of the
         region of stmts that we thought are eliminated.
         ??? Note we may be confused by uses in dead regions we didn't run
         elimination on.  Rather than checking individual uses we accept
         dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
         contains such example).  */
      if (region_p)
        {
          if (gphi *phi = dyn_cast <gphi *> (stmt))
            {
              tree lhs = gimple_phi_result (phi);
              if (!has_zero_uses (lhs))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Keeping eliminated stmt live "
                             "as copy because of out-of-region uses\n");
                  tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
                  gimple *copy = gimple_build_assign (lhs, sprime);
                  gimple_stmt_iterator gsi
                    = gsi_after_labels (gimple_bb (stmt));
                  gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
                  do_release_defs = false;
                }
            }
          else if (tree lhs = gimple_get_lhs (stmt))
            if (TREE_CODE (lhs) == SSA_NAME
                && !has_zero_uses (lhs))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  fprintf (dump_file, "Keeping eliminated stmt live "
                           "as copy because of out-of-region uses\n");
                tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
                gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
                if (is_gimple_assign (stmt))
                  {
                    gimple_assign_set_rhs_from_tree (&gsi, sprime);
                    stmt = gsi_stmt (gsi);
                    update_stmt (stmt);
                    if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
                      bitmap_set_bit (need_eh_cleanup,
                                      gimple_bb (stmt)->index);
                    continue;
                  }
                else
                  {
                    gimple *copy = gimple_build_assign (lhs, sprime);
                    gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
                    do_release_defs = false;
                  }
              }
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
        }

      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
        remove_phi_node (&gsi, do_release_defs);
      else
        {
          basic_block bb = gimple_bb (stmt);
          unlink_stmt_vdef (stmt);
          if (gsi_remove (&gsi, true))
            bitmap_set_bit (need_eh_cleanup, bb->index);
          if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
            bitmap_set_bit (need_ab_cleanup, bb->index);
          if (do_release_defs)
            release_defs (stmt);
        }

      /* Removing a stmt may expose a forwarder block.  */
      el_todo |= TODO_cleanup_cfg;
    }
  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!to_fixup.is_empty ())
    {
      gimple *stmt = to_fixup.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Fixing up noreturn call ");
          print_gimple_stmt (dump_file, stmt, 0);
        }

      if (fixup_noreturn_call (stmt))
        el_todo |= TODO_cleanup_cfg;
    }

  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);
  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    el_todo |= TODO_cleanup_cfg;

  return el_todo;
}
/* Eliminate fully redundant computations.  */

unsigned
eliminate_with_rpo_vn (bitmap inserted_exprs)
{
  eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);

  walker.walk (cfun->cfg->x_entry_block_ptr);
  return walker.eliminate_cleanup ();
}
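/* This entry is used for example by PRE, which passes the bitmap of
   SSA names it inserted itself so that, as seen in before_dom_children,
   replacing those PHIs is not counted as an elimination.  */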
static unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
           bool iterate, bool eliminate);
void
run_rpo_vn (vn_lookup_kind kind)
{
  default_vn_walk_kind = kind;
  do_rpo_vn (cfun, NULL, NULL, true, false);

  /* ??? Prune requirement of these.  */
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);
  constant_value_ids = BITMAP_ALLOC (NULL);

  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  unsigned i;
  tree name;
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (!info->visited
          || info->valnum == VN_TOP)
        info->valnum = name;
      if (info->valnum == name)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
          && info->valnum != name
          && info->value_id != VN_INFO (info->valnum)->value_id)
        info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      FOR_EACH_SSA_NAME (i, name, cfun)
        {
          if (VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name));
              fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
            }
        }
    }
}
/* Free VN associated data structures.  */

void
free_rpo_vn (void)
{
  free_vn_table (valid_info);
  XDELETE (valid_info);
  obstack_free (&vn_tables_obstack, NULL);
  obstack_free (&vn_tables_insert_obstack, NULL);

  vn_ssa_aux_iterator_type it;
  vn_ssa_aux_t info;
  FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
    if (info->needs_insertion)
      release_ssa_name (info->name);
  obstack_free (&vn_ssa_aux_obstack, NULL);
  delete vn_ssa_aux_hash;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
  BITMAP_FREE (constant_value_ids);
}
/* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */

static tree
vn_lookup_simplify_result (gimple_match_op *res_op)
{
  if (!res_op->code.is_tree_code ())
    return NULL_TREE;
  tree *ops = res_op->ops;
  unsigned int length = res_op->num_ops;
  if (res_op->code == CONSTRUCTOR
      /* ??? We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
         and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree.  */
      && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
    {
      length = CONSTRUCTOR_NELTS (res_op->ops[0]);
      ops = XALLOCAVEC (tree, length);
      for (unsigned i = 0; i < length; ++i)
        ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
    }
  vn_nary_op_t vnresult = NULL;
  tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
                                       res_op->type, ops, &vnresult);
  /* If this is used from expression simplification make sure to
     return an available expression.  */
  if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
    res = rpo_avail->eliminate_avail (vn_context_bb, res);
  return res;
}
/* Return a leader for OPs value that is valid at BB.  */

tree
rpo_elim::eliminate_avail (basic_block bb, tree op)
{
  bool visited;
  tree valnum = SSA_VAL (op, &visited);
  /* If we didn't visit OP then it must be defined outside of the
     region we process and also dominate it.  So it is available.  */
  if (!visited)
    return op;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
        return valnum;
      vn_avail *av = VN_INFO (valnum)->avail;
      if (!av)
        return NULL_TREE;
      if (av->location == bb->index)
        /* On tramp3d 90% of the cases are here.  */
        return ssa_name (av->leader);
      do
        {
          basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
          /* ??? During elimination we have to use availability at the
             definition site of a use we try to replace.  This
             is required to not run into inconsistencies because
             of dominated_by_p_w_unex behavior and removing a definition
             while not replacing all uses.
             ??? We could try to consistently walk dominators
             ignoring non-executable regions.  The nearest common
             dominator of bb and abb is where we can stop walking.  We
             may also be able to "pre-compute" (bits of) the next immediate
             (non-)dominator during the RPO walk when marking edges as
             executable.  */
          if (dominated_by_p_w_unex (bb, abb))
            {
              tree leader = ssa_name (av->leader);
              /* Prevent eliminations that break loop-closed SSA.  */
              if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
                  && ! SSA_NAME_IS_DEFAULT_DEF (leader)
                  && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
                                                           (leader))->loop_father,
                                              bb))
                return NULL_TREE;
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  print_generic_expr (dump_file, leader);
                  fprintf (dump_file, " is available for ");
                  print_generic_expr (dump_file, valnum);
                  fprintf (dump_file, "\n");
                }
              /* On tramp3d 99% of the _remaining_ cases succeed at
                 the first entry.  */
              return leader;
            }
          /* ??? Can we somehow skip to the immediate dominator
             RPO index (bb_to_rpo)?  Again, maybe not worth, on
             tramp3d the worst number of elements in the vector is 9.  */
          av = av->next;
        }
      while (av);
    }
  else if (valnum != VN_TOP)
    /* valnum is is_gimple_min_invariant.  */
    return valnum;
  return NULL_TREE;
}
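/* Illustrative lookup: the avail chain of a value records (block,
   leader) pairs, most recent first, e.g.

     value of x_7:  (BB9, x_7) -> (BB4, y_2)

   a request at BB9 hits the first entry directly; a request at a block
   dominated by BB4 (but not at BB9) walks the chain and returns y_2
   (SSA names and block numbers made up).  */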
/* Make LEADER a leader for its value at BB.  */

void
rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
{
  tree valnum = VN_INFO (leader)->valnum;
  if (valnum == VN_TOP
      || is_gimple_min_invariant (valnum))
    return;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Making available beyond BB%d ", bb->index);
      print_generic_expr (dump_file, leader);
      fprintf (dump_file, " for value ");
      print_generic_expr (dump_file, valnum);
      fprintf (dump_file, "\n");
    }
  vn_ssa_aux_t value = VN_INFO (valnum);
  vn_avail *av;
  if (m_avail_freelist)
    {
      av = m_avail_freelist;
      m_avail_freelist = m_avail_freelist->next;
    }
  else
    av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
  av->location = bb->index;
  av->leader = SSA_NAME_VERSION (leader);
  av->next = value->avail;
  value->avail = av;
}
/* Valueization hook for RPO VN plus required state.  */

tree
rpo_vn_valueize (tree name)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      vn_ssa_aux_t val = VN_INFO (name);
      if (val)
        {
          tree tem = val->valnum;
          if (tem != VN_TOP && tem != name)
            {
              if (TREE_CODE (tem) != SSA_NAME)
                return tem;
              /* For all values we only valueize to an available leader
                 which means we can use SSA name info without restriction.  */
              tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
              if (tem)
                return tem;
            }
        }
    }
  return name;
}
/* Insert on PRED_E predicates derived from CODE OPS being true besides the
   inverted condition.  */

static void
insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
{
  switch (code)
    {
    case LT_EXPR:
      /* a < b -> a {!,<}= b */
      vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
                                           ops, boolean_true_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
                                           ops, boolean_true_node, 0, pred_e);
      /* a < b -> ! a {>,=} b */
      vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      break;
    case GT_EXPR:
      /* a > b -> a {!,>}= b */
      vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
                                           ops, boolean_true_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
                                           ops, boolean_true_node, 0, pred_e);
      /* a > b -> ! a {<,=} b */
      vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      break;
    case EQ_EXPR:
      /* a == b -> ! a {<,>} b */
      vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      break;
    default:
      /* Nothing besides inverted condition.  */
      break;
    }
}
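/* For example, recording the predicates for a_1 < b_2 being true on
   its edge makes these later lookups succeed there (illustrative SSA
   names):

     a_1 != b_2  -> true
     a_1 <= b_2  -> true
     a_1 >  b_2  -> false
     a_1 == b_2  -> false  */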
/* Main stmt worker for RPO VN, process BB.  */

static unsigned
process_bb (rpo_elim &avail, basic_block bb,
            bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
            bool do_region, bitmap exit_bbs, bool skip_phis)
{
  unsigned todo = 0;
  edge_iterator ei;
  edge e;

  vn_context_bb = bb;

  /* If we are in loop-closed SSA preserve this state.  This is
     relevant when called on regions from outside of FRE/PRE.  */
  bool lc_phi_nodes = false;
  if (!skip_phis
      && loops_state_satisfies_p (LOOP_CLOSED_SSA))
    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src->loop_father != e->dest->loop_father
          && flow_loop_nested_p (e->dest->loop_father,
                                 e->src->loop_father))
        {
          lc_phi_nodes = true;
          break;
        }

  /* When we visit a loop header substitute into loop info.  */
  if (!iterate && eliminate && bb->loop_father->header == bb)
    {
      /* Keep fields in sync with substitute_in_loop_info.  */
      if (bb->loop_father->nb_iterations)
        bb->loop_father->nb_iterations
          = simplify_replace_tree (bb->loop_father->nb_iterations,
                                   NULL_TREE, NULL_TREE, &vn_valueize_wrapper);
    }
  /* Value-number all defs in the basic-block.  */
  if (!skip_phis)
    for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
         gsi_next (&gsi))
      {
        gphi *phi = gsi.phi ();
        tree res = PHI_RESULT (phi);
        vn_ssa_aux_t res_info = VN_INFO (res);
        if (!bb_visited)
          {
            gcc_assert (!res_info->visited);
            res_info->valnum = VN_TOP;
            res_info->visited = true;
          }

        /* When not iterating force backedge values to varying.  */
        visit_stmt (phi, !iterate_phis);
        if (virtual_operand_p (res))
          continue;

        /* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
           how we handle backedges and availability.
           And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization.  */
        tree val = res_info->valnum;
        if (res != val && !iterate && eliminate)
          {
            if (tree leader = avail.eliminate_avail (bb, res))
              {
                if (leader != res
                    /* Preserve loop-closed SSA form.  */
                    && (! lc_phi_nodes
                        || is_gimple_min_invariant (leader)))
                  {
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      {
                        fprintf (dump_file, "Replaced redundant PHI node "
                                 "defining ");
                        print_generic_expr (dump_file, res);
                        fprintf (dump_file, " with ");
                        print_generic_expr (dump_file, leader);
                        fprintf (dump_file, "\n");
                      }
                    avail.eliminations++;

                    if (may_propagate_copy (res, leader))
                      {
                        /* Schedule for removal.  */
                        avail.to_remove.safe_push (phi);
                        continue;
                      }
                    /* ??? Else generate a copy stmt.  */
                  }
              }
          }
        /* Only make defs available that not already are.  But make
           sure loop-closed SSA PHI node defs are picked up for
           downstream uses.  */
        if (lc_phi_nodes
            || res == val
            || ! avail.eliminate_avail (bb, res))
          avail.eliminate_push_avail (bb, res);
      }
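  /* Note the difference to the iterating mode: when not iterating, a
     loop PHI like

       # i_1 = PHI <0(preheader), i_4(latch)>

     has its backedge value i_4 treated as varying (illustrative SSA
     names), while with iteration the backedge is first optimistically
     ignored and the PHI may still value-number to an invariant if the
     cycle turns out not to change it.  */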
  /* For empty BBs mark outgoing edges executable.  For non-empty BBs
     we do this when processing the last stmt as we have to do this
     before elimination which otherwise forces GIMPLE_CONDs to
     if (1 != 0) style when seeing non-executable edges.  */
  if (gsi_end_p (gsi_start_bb (bb)))
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          if (!(e->flags & EDGE_EXECUTABLE))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "marking outgoing edge %d -> %d executable\n",
                         e->src->index, e->dest->index);
              e->flags |= EDGE_EXECUTABLE;
              e->dest->flags |= BB_EXECUTABLE;
            }
          else if (!(e->dest->flags & BB_EXECUTABLE))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "marking destination block %d reachable\n",
                         e->dest->index);
              e->dest->flags |= BB_EXECUTABLE;
            }
        }
    }
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      ssa_op_iter i;
      tree op;
      if (!bb_visited)
        {
          FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
            {
              vn_ssa_aux_t op_info = VN_INFO (op);
              gcc_assert (!op_info->visited);
              op_info->valnum = VN_TOP;
              op_info->visited = true;
            }

          /* We somehow have to deal with uses that are not defined
             in the processed region.  Forcing unvisited uses to
             varying here doesn't play well with def-use following during
             expression simplification, so we deal with this by checking
             the visited flag in SSA_VAL.  */
        }

      visit_stmt (gsi_stmt (gsi));
      gimple *last = gsi_stmt (gsi);
      e = NULL;
      switch (gimple_code (last))
        {
        case GIMPLE_SWITCH:
          e = find_taken_edge (bb, vn_valueize (gimple_switch_index
                                                  (as_a <gswitch *> (last))));
          break;
        case GIMPLE_COND:
          {
            tree lhs = vn_valueize (gimple_cond_lhs (last));
            tree rhs = vn_valueize (gimple_cond_rhs (last));
            tree val = gimple_simplify (gimple_cond_code (last),
                                        boolean_type_node, lhs, rhs,
                                        NULL, vn_valueize);
            /* If the condition didn't simplify see if we have recorded
               an expression from so far taken edges.  */
            if (! val || TREE_CODE (val) != INTEGER_CST)
              {
                vn_nary_op_t vnresult;
                tree ops[2];
                ops[0] = lhs;
                ops[1] = rhs;
                val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
                                                boolean_type_node, ops,
                                                &vnresult);
                /* Did we get a predicated value?  */
                if (! val && vnresult && vnresult->predicated_values)
                  {
                    val = vn_nary_op_get_predicated_value (vnresult, bb);
                    if (val && dump_file && (dump_flags & TDF_DETAILS))
                      {
                        fprintf (dump_file, "Got predicated value ");
                        print_generic_expr (dump_file, val, TDF_NONE);
                        fprintf (dump_file, " for ");
                        print_gimple_stmt (dump_file, last, TDF_SLIM);
                      }
                  }
              }
            if (val)
              e = find_taken_edge (bb, val);
            if (! e)
              {
                /* If we didn't manage to compute the taken edge then
                   push predicated expressions for the condition itself
                   and related conditions to the hashtables.  This allows
                   simplification of redundant conditions which is
                   important as early cleanup.  */
                edge true_e, false_e;
                extract_true_false_edges_from_block (bb, &true_e, &false_e);
                enum tree_code code = gimple_cond_code (last);
                enum tree_code icode
                  = invert_tree_comparison (code, HONOR_NANS (lhs));
                tree ops[2];
                ops[0] = lhs;
                ops[1] = rhs;
                if (do_region
                    && bitmap_bit_p (exit_bbs, true_e->dest->index))
                  true_e = NULL;
                if (do_region
                    && bitmap_bit_p (exit_bbs, false_e->dest->index))
                  false_e = NULL;
                if (true_e)
                  vn_nary_op_insert_pieces_predicated
                    (2, code, boolean_type_node, ops,
                     boolean_true_node, 0, true_e);
                if (false_e)
                  vn_nary_op_insert_pieces_predicated
                    (2, code, boolean_type_node, ops,
                     boolean_false_node, 0, false_e);
                if (icode != ERROR_MARK)
                  {
                    if (true_e)
                      vn_nary_op_insert_pieces_predicated
                        (2, icode, boolean_type_node, ops,
                         boolean_false_node, 0, true_e);
                    if (false_e)
                      vn_nary_op_insert_pieces_predicated
                        (2, icode, boolean_type_node, ops,
                         boolean_true_node, 0, false_e);
                  }
                /* Relax for non-integers, inverted condition handled
                   above.  */
                if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
                  {
                    if (true_e)
                      insert_related_predicates_on_edge (code, ops, true_e);
                    if (false_e)
                      insert_related_predicates_on_edge (icode, ops, false_e);
                  }
              }
            break;
          }
        case GIMPLE_GOTO:
          e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
          break;
        default:
          break;
        }
      if (e)
        {
          todo = TODO_cleanup_cfg;
          if (!(e->flags & EDGE_EXECUTABLE))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "marking known outgoing %sedge %d -> %d executable\n",
                         e->flags & EDGE_DFS_BACK ? "back-" : "",
                         e->src->index, e->dest->index);
              e->flags |= EDGE_EXECUTABLE;
              e->dest->flags |= BB_EXECUTABLE;
            }
          else if (!(e->dest->flags & BB_EXECUTABLE))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "marking destination block %d reachable\n",
                         e->dest->index);
              e->dest->flags |= BB_EXECUTABLE;
            }
        }
      else if (gsi_one_before_end_p (gsi))
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              if (!(e->flags & EDGE_EXECUTABLE))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file,
                             "marking outgoing edge %d -> %d executable\n",
                             e->src->index, e->dest->index);
                  e->flags |= EDGE_EXECUTABLE;
                  e->dest->flags |= BB_EXECUTABLE;
                }
              else if (!(e->dest->flags & BB_EXECUTABLE))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file,
                             "marking destination block %d reachable\n",
                             e->dest->index);
                  e->dest->flags |= BB_EXECUTABLE;
                }
            }
        }
      /* Eliminate.  That also pushes to avail.  */
      if (eliminate && ! iterate)
        avail.eliminate_stmt (bb, &gsi);
      else
        /* If not eliminating, make all not already available defs
           available.  */
        FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
          if (! avail.eliminate_avail (bb, op))
            avail.eliminate_push_avail (bb, op);
    }
  /* Eliminate in destination PHI arguments.  Always substitute in dest
     PHIs, even for non-executable edges.  This handles region
     exit PHIs.  */
  if (!iterate && eliminate)
    FOR_EACH_EDGE (e, ei, bb->succs)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
           !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
          tree arg = USE_FROM_PTR (use_p);
          if (TREE_CODE (arg) != SSA_NAME
              || virtual_operand_p (arg))
            continue;
          tree sprime;
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              sprime = SSA_VAL (arg);
              gcc_assert (TREE_CODE (sprime) != SSA_NAME
                          || SSA_NAME_IS_DEFAULT_DEF (sprime));
            }
          else
            /* Look for sth available at the definition block of the argument.
               This avoids inconsistencies between availability there which
               decides if the stmt can be removed and availability at the
               use site.  The SSA property ensures that things available
               at the definition are also available at uses.  */
            sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
                                            arg);
          if (sprime
              && may_propagate_copy (arg, sprime))
            propagate_value (use_p, sprime);
        }

  vn_context_bb = NULL;
  return todo;
}
/* Unwind state per basic-block.  */

struct unwind_state
{
  /* Times this block has been visited.  */
  unsigned visited;
  /* Whether to handle this as iteration point or whether to treat
     incoming backedge PHI values as varying.  */
  bool iterate;
  /* Maximum RPO index this block is reachable from.  */
  int max_rpo;
  /* Unwind state.  */
  void *ob_top;
  vn_reference_t ref_top;
  vn_phi_t phi_top;
  vn_nary_op_t nary_top;
};
/* Unwind the RPO VN state for iteration.  */

static void
do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
{
  gcc_assert (to->iterate);
  for (; last_inserted_nary != to->nary_top;
       last_inserted_nary = last_inserted_nary->next)
    {
      vn_nary_op_t *slot;
      slot = valid_info->nary->find_slot_with_hash
        (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
      /* Predication causes the need to restore previous state.  */
      if ((*slot)->unwind_to)
        *slot = (*slot)->unwind_to;
      else
        valid_info->nary->clear_slot (slot);
    }
  for (; last_inserted_phi != to->phi_top;
       last_inserted_phi = last_inserted_phi->next)
    {
      vn_phi_t *slot;
      slot = valid_info->phis->find_slot_with_hash
        (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
      valid_info->phis->clear_slot (slot);
    }
  for (; last_inserted_ref != to->ref_top;
       last_inserted_ref = last_inserted_ref->next)
    {
      vn_reference_t *slot;
      slot = valid_info->references->find_slot_with_hash
        (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
      (*slot)->operands.release ();
      valid_info->references->clear_slot (slot);
    }
  obstack_free (&vn_tables_obstack, to->ob_top);
  /* Prune [rpo_idx, ] from avail.  */
  /* ??? This is O(number-of-values-in-region) which is
     O(region-size) rather than O(iteration-piece).  */
  for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
       i != vn_ssa_aux_hash->end (); ++i)
    {
      while ((*i)->avail)
        {
          if (bb_to_rpo[(*i)->avail->location] < rpo_idx)
            break;
          vn_avail *av = (*i)->avail;
          (*i)->avail = (*i)->avail->next;
          av->next = avail.m_avail_freelist;
          avail.m_avail_freelist = av;
        }
    }
}
/* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
   If ITERATE is true then treat backedges optimistically as not
   executed and iterate.  If ELIMINATE is true then perform
   elimination, otherwise leave that to the caller.  */

static unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
           bool iterate, bool eliminate)
{
  unsigned todo = 0;

  /* We currently do not support region-based iteration when
     elimination is requested.  */
  gcc_assert (!entry || !iterate || !eliminate);
  /* When iterating we need loop info up-to-date.  */
  gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));

  bool do_region = entry != NULL;
  if (!do_region)
    {
      entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
      exit_bbs = BITMAP_ALLOC (NULL);
      bitmap_set_bit (exit_bbs, EXIT_BLOCK);
    }

  /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
     re-mark those that are contained in the region.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, entry->dest->preds)
    e->flags &= ~EDGE_DFS_BACK;

  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
  int n = rev_post_order_and_mark_dfs_back_seme
    (fn, entry, exit_bbs, !loops_state_satisfies_p (LOOPS_NEED_FIXUP), rpo);
  /* rev_post_order_and_mark_dfs_back_seme fills RPO in reverse order.  */
  for (int i = 0; i < n / 2; ++i)
    std::swap (rpo[i], rpo[n-i-1]);
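  /* E.g. for region blocks at RPO positions 2, 3, 4 the helper hands
     back rpo[] = { 4, 3, 2 } and the swap above turns that into the
     { 2, 3, 4 } reverse postorder the walk below consumes
     (illustrative block indices).  */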
  if (!do_region)
    BITMAP_FREE (exit_bbs);

  /* If there are any non-DFS_BACK edges into entry->dest skip
     processing PHI nodes for that block.  This supports
     value-numbering loop bodies w/o the actual loop.  */
  FOR_EACH_EDGE (e, ei, entry->dest->preds)
    if (e != entry
        && !(e->flags & EDGE_DFS_BACK))
      break;
  bool skip_entry_phis = e != NULL;
  if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Region does not contain all edges into "
             "the entry block, skipping its PHIs.\n");

  int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
  for (int i = 0; i < n; ++i)
    bb_to_rpo[rpo[i]] = i;

  unwind_state *rpo_state = XNEWVEC (unwind_state, n);

  rpo_elim avail (entry->dest);
  rpo_avail = &avail;
  /* Verify we have no extra entries into the region.  */
  if (flag_checking && do_region)
    {
      auto_bb_flag bb_in_region (fn);
      for (int i = 0; i < n; ++i)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          bb->flags |= bb_in_region;
        }
      /* We can't merge the first two loops because we cannot rely
         on EDGE_DFS_BACK for edges not within the region.  But if
         we decide to always have the bb_in_region flag we can
         do the checking during the RPO walk itself (but then it's
         also easy to handle MEME conservatively).  */
      for (int i = 0; i < n; ++i)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          FOR_EACH_EDGE (e, ei, bb->preds)
            gcc_assert (e == entry
                        || (skip_entry_phis && bb == entry->dest)
                        || (e->src->flags & bb_in_region));
        }
      for (int i = 0; i < n; ++i)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          bb->flags &= ~bb_in_region;
        }
    }
  /* Create the VN state.  For the initial size of the various hashtables
     use a heuristic based on region size and number of SSA names.  */
  unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
                          / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
  next_value_id = 1;

  vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  gcc_obstack_init (&vn_tables_obstack);
  gcc_obstack_init (&vn_tables_insert_obstack);
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info, region_size);
  last_inserted_ref = NULL;
  last_inserted_phi = NULL;
  last_inserted_nary = NULL;

  vn_valueize = rpo_vn_valueize;
  /* Initialize the unwind state and edge/BB executable state.  */
  bool need_max_rpo_iterate = false;
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      rpo_state[i].visited = 0;
      rpo_state[i].max_rpo = i;
      bb->flags &= ~BB_EXECUTABLE;
      bool has_backedges = false;
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->flags & EDGE_DFS_BACK)
            has_backedges = true;
          e->flags &= ~EDGE_EXECUTABLE;
          if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
            continue;
          if (bb_to_rpo[e->src->index] > i)
            {
              rpo_state[i].max_rpo = MAX (rpo_state[i].max_rpo,
                                          bb_to_rpo[e->src->index]);
              need_max_rpo_iterate = true;
            }
          else
            rpo_state[i].max_rpo
              = MAX (rpo_state[i].max_rpo,
                     rpo_state[bb_to_rpo[e->src->index]].max_rpo);
        }
      rpo_state[i].iterate = iterate && has_backedges;
    }
  entry->flags |= EDGE_EXECUTABLE;
  entry->dest->flags |= BB_EXECUTABLE;
  /* When there are irreducible regions the simplistic max_rpo computation
     above for the case of backedges doesn't work and we need to iterate
     until there are no more changes.  */
  unsigned nit = 0;
  while (need_max_rpo_iterate)
    {
      nit++;
      need_max_rpo_iterate = false;
      for (int i = 0; i < n; ++i)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          FOR_EACH_EDGE (e, ei, bb->preds)
            {
              if (e == entry || (skip_entry_phis && bb == entry->dest))
                continue;
              int max_rpo = MAX (rpo_state[i].max_rpo,
                                 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
              if (rpo_state[i].max_rpo != max_rpo)
                {
                  rpo_state[i].max_rpo = max_rpo;
                  need_max_rpo_iterate = true;
                }
            }
        }
    }
  statistics_histogram_event (cfun, "RPO max_rpo iterations", nit);
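  /* max_rpo answers up to which RPO index processing may still change
     the entry state of a block.  E.g. with blocks at RPO indexes 1..3
     and an edge from 3 back into 2, block 2 ends up with max_rpo = 3
     (illustrative indices); the non-iterating walk below uses this to
     decide when a predecessor edge's executable state can be trusted.  */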
  /* As heuristic to improve compile-time we handle only the N innermost
     loops and the outermost one optimistically.  */
  if (iterate)
    {
      loop_p loop;
      unsigned max_depth = param_rpo_vn_max_loop_depth;
      FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
        if (loop_depth (loop) > max_depth)
          for (unsigned i = 2;
               i < loop_depth (loop) - max_depth; ++i)
            {
              basic_block header = superloop_at_depth (loop, i)->header;
              bool non_latch_backedge = false;
              FOR_EACH_EDGE (e, ei, header->preds)
                if (e->flags & EDGE_DFS_BACK)
                  {
                    /* There can be a non-latch backedge into the header
                       which is part of an outer irreducible region.  We
                       cannot avoid iterating this block then.  */
                    if (!dominated_by_p (CDI_DOMINATORS,
                                         e->src, e->dest))
                      {
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          fprintf (dump_file, "non-latch backedge %d -> %d "
                                   "forces iteration of loop %d\n",
                                   e->src->index, e->dest->index, loop->num);
                        non_latch_backedge = true;
                      }
                    else
                      e->flags |= EDGE_EXECUTABLE;
                  }
              rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
            }
    }
  uint64_t nblk = 0;
  int idx = 0;
  if (iterate)
    /* Go and process all blocks, iterating as necessary.  */
    do
      {
        basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);

        /* If the block has incoming backedges remember unwind state.  This
           is required even for non-executable blocks since in irreducible
           regions we might reach them via the backedge and re-start iterating
           from there.
           Note we can individually mark blocks with incoming backedges to
           not iterate where we then handle PHIs conservatively.  We do that
           heuristically to reduce compile-time for degenerate cases.  */
        if (rpo_state[idx].iterate)
          {
            rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
            rpo_state[idx].ref_top = last_inserted_ref;
            rpo_state[idx].phi_top = last_inserted_phi;
            rpo_state[idx].nary_top = last_inserted_nary;
          }

        if (!(bb->flags & BB_EXECUTABLE))
          {
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Block %d: BB%d found not executable\n",
                       idx, bb->index);
            idx++;
            continue;
          }

        if (dump_file && (dump_flags & TDF_DETAILS))
          fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
        nblk++;
        todo |= process_bb (avail, bb,
                            rpo_state[idx].visited != 0,
                            rpo_state[idx].iterate,
                            iterate, eliminate, do_region, exit_bbs, false);
        rpo_state[idx].visited++;

        /* Verify if changed values flow over executable outgoing backedges
           and those change destination PHI values (that's the thing we
           can easily verify).  Reduce over all such edges to the farthest
           away PHI.  */
        int iterate_to = -1;
        FOR_EACH_EDGE (e, ei, bb->succs)
          if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
              == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
              && rpo_state[bb_to_rpo[e->dest->index]].iterate)
            {
              int destidx = bb_to_rpo[e->dest->index];
              if (!rpo_state[destidx].visited)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Unvisited destination %d\n",
                             e->dest->index);
                  if (iterate_to == -1 || destidx < iterate_to)
                    iterate_to = destidx;
                  continue;
                }
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Looking for changed values of backedge"
                         " %d->%d destination PHIs\n",
                         e->src->index, e->dest->index);
              vn_context_bb = e->dest;
              gphi_iterator gsi;
              for (gsi = gsi_start_phis (e->dest);
                   !gsi_end_p (gsi); gsi_next (&gsi))
                {
                  bool inserted = false;
                  /* While we'd ideally just iterate on value changes
                     we CSE PHIs and do that even across basic-block
                     boundaries.  So even hashtable state changes can
                     be important (which is roughly equivalent to
                     PHI argument value changes).  To not excessively
                     iterate because of that we track whether a PHI
                     was CSEd to with GF_PLF_1.  */
                  bool phival_changed;
                  if ((phival_changed = visit_phi (gsi.phi (),
                                                   &inserted, false))
                      || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
                    {
                      if (!phival_changed
                          && dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file, "PHI was CSEd and hashtable "
                                 "state (changed)\n");
                      if (iterate_to == -1 || destidx < iterate_to)
                        iterate_to = destidx;
                      break;
                    }
                }
              vn_context_bb = NULL;
            }
        if (iterate_to != -1)
          {
            do_unwind (&rpo_state[iterate_to], iterate_to, avail, bb_to_rpo);
            idx = iterate_to;
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Iterating to %d BB%d\n",
                       iterate_to, rpo[iterate_to]);
            continue;
          }

        idx++;
      }
    while (idx < n);
  else /* !iterate */
    {
      /* Process all blocks greedily with a worklist that enforces RPO
         processing of reachable blocks.  */
      auto_bitmap worklist;
      bitmap_set_bit (worklist, 0);
      while (!bitmap_empty_p (worklist))
        {
          int idx = bitmap_first_set_bit (worklist);
          bitmap_clear_bit (worklist, idx);
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
          gcc_assert ((bb->flags & BB_EXECUTABLE)
                      && !rpo_state[idx].visited);

          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);

          /* When we run into predecessor edges where we cannot trust their
             executable state mark them executable so PHI processing will
             be conservative.
             ??? Do we need to force arguments flowing over that edge
             to be varying or will they even always be?  */
          FOR_EACH_EDGE (e, ei, bb->preds)
            if (!(e->flags & EDGE_EXECUTABLE)
                && (bb == entry->dest
                    || (!rpo_state[bb_to_rpo[e->src->index]].visited
                        && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
                            >= (int)idx))))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  fprintf (dump_file, "Cannot trust state of predecessor "
                           "edge %d -> %d, marking executable\n",
                           e->src->index, e->dest->index);
                e->flags |= EDGE_EXECUTABLE;
              }

          nblk++;
          todo |= process_bb (avail, bb, false, false, false, eliminate,
                              do_region, exit_bbs,
                              skip_entry_phis && bb == entry->dest);
          rpo_state[idx].visited++;

          FOR_EACH_EDGE (e, ei, bb->succs)
            if ((e->flags & EDGE_EXECUTABLE)
                && e->dest->index != EXIT_BLOCK
                && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
                && !rpo_state[bb_to_rpo[e->dest->index]].visited)
              bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
        }
    }
  /* If statistics or dump file active.  */
  int nex = 0;
  unsigned max_visited = 1;
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      if (bb->flags & BB_EXECUTABLE)
        nex++;
      statistics_histogram_event (cfun, "RPO block visited times",
                                  rpo_state[i].visited);
      if (rpo_state[i].visited > max_visited)
        max_visited = rpo_state[i].visited;
    }
  unsigned nvalues = 0, navail = 0;
  for (hash_table <vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
       i != vn_ssa_aux_hash->end (); ++i)
    {
      nvalues++;
      vn_avail *av = (*i)->avail;
      while (av)
        {
          navail++;
          av = av->next;
        }
    }
  statistics_counter_event (cfun, "RPO blocks", n);
  statistics_counter_event (cfun, "RPO blocks visited", nblk);
  statistics_counter_event (cfun, "RPO blocks executable", nex);
  statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
  statistics_histogram_event (cfun, "RPO num values", nvalues);
  statistics_histogram_event (cfun, "RPO num avail", navail);
  statistics_histogram_event (cfun, "RPO num lattice",
                              vn_ssa_aux_hash->elements ());
  if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
    {
      fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
               " blocks in total discovering %d executable blocks iterating "
               "%d.%d times, a block was visited max. %u times\n",
               n, nblk, nex,
               (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
               max_visited);
      fprintf (dump_file, "RPO tracked %d values available at %d locations "
               "and %" PRIu64 " lattice elements\n",
               nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
    }
  if (eliminate)
    {
      /* When !iterate we already performed elimination during the RPO
         walk.  */
      if (iterate)
        {
          /* Elimination for region-based VN needs to be done within the
             RPO walk.  */
          gcc_assert (! do_region);
          /* Note we can't use avail.walk here because that gets confused
             by the existing availability and it will be less efficient
             anyway.  */
          todo |= eliminate_with_rpo_vn (NULL);
        }
      else
        todo |= avail.eliminate_cleanup (do_region);
    }

  vn_valueize = NULL;
  rpo_avail = NULL;

  XDELETEVEC (bb_to_rpo);
  XDELETEVEC (rpo);
  XDELETEVEC (rpo_state);

  return todo;
}
/* Region-based entry for RPO VN.  Performs value-numbering and elimination
   on the SEME region specified by ENTRY and EXIT_BBS.  If ENTRY is not
   the only edge into the region at ENTRY->dest PHI nodes in ENTRY->dest
   are not considered.  */

unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
{
  default_vn_walk_kind = VN_WALKREWRITE;
  unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
  free_rpo_vn ();
  return todo;
}
namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      may_iterate = param;
    }
  virtual bool gate (function *)
    {
      return flag_tree_fre != 0 && (may_iterate || optimize > 1);
    }
  virtual unsigned int execute (function *);

private:
  bool may_iterate;
}; // class pass_fre
unsigned int
pass_fre::execute (function *fun)
{
  unsigned todo = 0;

  /* At -O[1g] use the cheap non-iterating mode.  */
  bool iterate_p = may_iterate && (optimize > 1);
  calculate_dominance_info (CDI_DOMINATORS);
  if (iterate_p)
    loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  default_vn_walk_kind = VN_WALKREWRITE;
  todo = do_rpo_vn (fun, NULL, NULL, iterate_p, true);
  free_rpo_vn ();

  if (iterate_p)
    loop_optimizer_finalize ();

  /* For late FRE after IVOPTs and unrolling, see if we can
     remove some TREE_ADDRESSABLE and rewrite stuff into SSA.  */
  if (!may_iterate)
    todo |= TODO_update_address_taken;

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}
7648 #undef BB_EXECUTABLE