1 /* SCC value numbering for trees
2 Copyright (C) 2006-2020 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
24 #include "splay-tree.h"
31 #include "insn-config.h"
35 #include "gimple-pretty-print.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
56 #include "tree-ssa-propagate.h"
59 #include "gimple-iterator.h"
60 #include "gimple-match.h"
61 #include "stringpool.h"
63 #include "tree-pass.h"
64 #include "statistics.h"
65 #include "langhooks.h"
66 #include "ipa-utils.h"
68 #include "tree-cfgcleanup.h"
69 #include "tree-ssa-loop.h"
70 #include "tree-scalar-evolution.h"
71 #include "tree-ssa-loop-niter.h"
73 #include "tree-ssa-sccvn.h"
75 /* This algorithm is based on the SCC algorithm presented by Keith
76 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
77 (http://citeseer.ist.psu.edu/41805.html). In
78 straight line code, it is equivalent to a regular hash based value
79 numbering that is performed in reverse postorder.
81 For code with cycles, there are two alternatives, both of which
82 require keeping the hashtables separate from the actual list of
83 value numbers for SSA names.
85 1. Iterate value numbering in an RPO walk of the blocks, removing
86 all the entries from the hashtable after each iteration (but
87 keeping the SSA name->value number mapping between iterations).
88 Iterate until it does not change.
90 2. Perform value numbering as part of an SCC walk on the SSA graph,
91 iterating only the cycles in the SSA graph until they do not change
92 (using a separate, optimistic hashtable for value numbering the SCC
95 The second is not just faster in practice (because most SSA graph
96 cycles do not involve all the variables in the graph), it also has
99 One of these nice properties is that when we pop an SCC off the
100 stack, we are guaranteed to have processed all the operands coming from
101 *outside of that SCC*, so we do not need to do anything special to
102 ensure they have value numbers.
104 Another nice property is that the SCC walk is done as part of a DFS
105 of the SSA graph, which makes it easy to perform combining and
106 simplifying operations at the same time.
108 The code below is deliberately written in a way that makes it easy
109 to separate the SCC walk from the other work it does.
111 In order to propagate constants through the code, we track which
112 expressions contain constants, and use those while folding. In
113 theory, we could also track expressions whose value numbers are
114 replaced, in case we end up folding based on expression
117 In order to value number memory, we assign value numbers to vuses.
118 This enables us to note that, for example, stores to the same
119 address of the same value from the same starting memory states are
123 1. We can iterate only the changing portions of the SCC's, but
124 I have not seen an SCC big enough for this to be a win.
125 2. If you differentiate between phi nodes for loops and phi nodes
126 for if-then-else, you can properly consider phi nodes in different
127 blocks for equivalence.
128 3. We could value number vuses in more cases, particularly, whole
132 /* There's no BB_EXECUTABLE but we can use BB_VISITED. */
133 #define BB_EXECUTABLE BB_VISITED
135 static vn_lookup_kind default_vn_walk_kind
;
137 /* vn_nary_op hashtable helpers. */
139 struct vn_nary_op_hasher
: nofree_ptr_hash
<vn_nary_op_s
>
141 typedef vn_nary_op_s
*compare_type
;
142 static inline hashval_t
hash (const vn_nary_op_s
*);
143 static inline bool equal (const vn_nary_op_s
*, const vn_nary_op_s
*);
146 /* Return the computed hashcode for nary operation P1. */
149 vn_nary_op_hasher::hash (const vn_nary_op_s
*vno1
)
151 return vno1
->hashcode
;
154 /* Compare nary operations P1 and P2 and return true if they are
158 vn_nary_op_hasher::equal (const vn_nary_op_s
*vno1
, const vn_nary_op_s
*vno2
)
160 return vno1
== vno2
|| vn_nary_op_eq (vno1
, vno2
);
163 typedef hash_table
<vn_nary_op_hasher
> vn_nary_op_table_type
;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type
;
167 /* vn_phi hashtable helpers. */
170 vn_phi_eq (const_vn_phi_t
const vp1
, const_vn_phi_t
const vp2
);
172 struct vn_phi_hasher
: nofree_ptr_hash
<vn_phi_s
>
174 static inline hashval_t
hash (const vn_phi_s
*);
175 static inline bool equal (const vn_phi_s
*, const vn_phi_s
*);
178 /* Return the computed hashcode for phi operation P1. */
181 vn_phi_hasher::hash (const vn_phi_s
*vp1
)
183 return vp1
->hashcode
;
186 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
189 vn_phi_hasher::equal (const vn_phi_s
*vp1
, const vn_phi_s
*vp2
)
191 return vp1
== vp2
|| vn_phi_eq (vp1
, vp2
);
194 typedef hash_table
<vn_phi_hasher
> vn_phi_table_type
;
195 typedef vn_phi_table_type::iterator vn_phi_iterator_type
;
198 /* Compare two reference operands P1 and P2 for equality. Return true if
199 they are equal, and false otherwise. */
202 vn_reference_op_eq (const void *p1
, const void *p2
)
204 const_vn_reference_op_t
const vro1
= (const_vn_reference_op_t
) p1
;
205 const_vn_reference_op_t
const vro2
= (const_vn_reference_op_t
) p2
;
207 return (vro1
->opcode
== vro2
->opcode
208 /* We do not care for differences in type qualification. */
209 && (vro1
->type
== vro2
->type
210 || (vro1
->type
&& vro2
->type
211 && types_compatible_p (TYPE_MAIN_VARIANT (vro1
->type
),
212 TYPE_MAIN_VARIANT (vro2
->type
))))
213 && expressions_equal_p (vro1
->op0
, vro2
->op0
)
214 && expressions_equal_p (vro1
->op1
, vro2
->op1
)
215 && expressions_equal_p (vro1
->op2
, vro2
->op2
));
218 /* Free a reference operation structure VP. */
221 free_reference (vn_reference_s
*vr
)
223 vr
->operands
.release ();
227 /* vn_reference hashtable helpers. */
229 struct vn_reference_hasher
: nofree_ptr_hash
<vn_reference_s
>
231 static inline hashval_t
hash (const vn_reference_s
*);
232 static inline bool equal (const vn_reference_s
*, const vn_reference_s
*);
235 /* Return the hashcode for a given reference operation P1. */
238 vn_reference_hasher::hash (const vn_reference_s
*vr1
)
240 return vr1
->hashcode
;
244 vn_reference_hasher::equal (const vn_reference_s
*v
, const vn_reference_s
*c
)
246 return v
== c
|| vn_reference_eq (v
, c
);
249 typedef hash_table
<vn_reference_hasher
> vn_reference_table_type
;
250 typedef vn_reference_table_type::iterator vn_reference_iterator_type
;
253 /* The set of VN hashtables. */
255 typedef struct vn_tables_s
257 vn_nary_op_table_type
*nary
;
258 vn_phi_table_type
*phis
;
259 vn_reference_table_type
*references
;
263 /* vn_constant hashtable helpers. */
265 struct vn_constant_hasher
: free_ptr_hash
<vn_constant_s
>
267 static inline hashval_t
hash (const vn_constant_s
*);
268 static inline bool equal (const vn_constant_s
*, const vn_constant_s
*);
271 /* Hash table hash function for vn_constant_t. */
274 vn_constant_hasher::hash (const vn_constant_s
*vc1
)
276 return vc1
->hashcode
;
279 /* Hash table equality function for vn_constant_t. */
282 vn_constant_hasher::equal (const vn_constant_s
*vc1
, const vn_constant_s
*vc2
)
284 if (vc1
->hashcode
!= vc2
->hashcode
)
287 return vn_constant_eq_with_type (vc1
->constant
, vc2
->constant
);
290 static hash_table
<vn_constant_hasher
> *constant_to_value_id
;
291 static bitmap constant_value_ids
;
294 /* Obstack we allocate the vn-tables elements from. */
295 static obstack vn_tables_obstack
;
296 /* Special obstack we never unwind. */
297 static obstack vn_tables_insert_obstack
;
299 static vn_reference_t last_inserted_ref
;
300 static vn_phi_t last_inserted_phi
;
301 static vn_nary_op_t last_inserted_nary
;
303 /* Valid hashtables storing information we have proven to be
305 static vn_tables_t valid_info
;
308 /* Valueization hook. Valueize NAME if it is an SSA name, otherwise
310 tree (*vn_valueize
) (tree
);
311 tree
vn_valueize_wrapper (tree t
, void* context ATTRIBUTE_UNUSED
)
313 return vn_valueize (t
);
317 /* This represents the top of the VN lattice, which is the universal
322 /* Unique counter for our value ids. */
324 static unsigned int next_value_id
;
327 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
328 are allocated on an obstack for locality reasons, and to free them
329 without looping over the vec. */
331 struct vn_ssa_aux_hasher
: typed_noop_remove
<vn_ssa_aux_t
>
333 typedef vn_ssa_aux_t value_type
;
334 typedef tree compare_type
;
335 static inline hashval_t
hash (const value_type
&);
336 static inline bool equal (const value_type
&, const compare_type
&);
337 static inline void mark_deleted (value_type
&) {}
338 static const bool empty_zero_p
= true;
339 static inline void mark_empty (value_type
&e
) { e
= NULL
; }
340 static inline bool is_deleted (value_type
&) { return false; }
341 static inline bool is_empty (value_type
&e
) { return e
== NULL
; }
345 vn_ssa_aux_hasher::hash (const value_type
&entry
)
347 return SSA_NAME_VERSION (entry
->name
);
351 vn_ssa_aux_hasher::equal (const value_type
&entry
, const compare_type
&name
)
353 return name
== entry
->name
;
356 static hash_table
<vn_ssa_aux_hasher
> *vn_ssa_aux_hash
;
357 typedef hash_table
<vn_ssa_aux_hasher
>::iterator vn_ssa_aux_iterator_type
;
358 static struct obstack vn_ssa_aux_obstack
;
360 static vn_nary_op_t
vn_nary_op_insert_stmt (gimple
*, tree
);
361 static unsigned int vn_nary_length_from_stmt (gimple
*);
362 static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int, obstack
*);
363 static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t
,
364 vn_nary_op_table_type
*, bool);
365 static void init_vn_nary_op_from_stmt (vn_nary_op_t
, gimple
*);
366 static void init_vn_nary_op_from_pieces (vn_nary_op_t
, unsigned int,
367 enum tree_code
, tree
, tree
*);
368 static tree
vn_lookup_simplify_result (gimple_match_op
*);
369 static vn_reference_t vn_reference_lookup_or_insert_for_pieces
370 (tree
, alias_set_type
, tree
, vec
<vn_reference_op_s
, va_heap
>, tree
);
372 /* Return whether there is value numbering information for a given SSA name. */
375 has_VN_INFO (tree name
)
377 return vn_ssa_aux_hash
->find_with_hash (name
, SSA_NAME_VERSION (name
));
384 = vn_ssa_aux_hash
->find_slot_with_hash (name
, SSA_NAME_VERSION (name
),
389 vn_ssa_aux_t newinfo
= *res
= XOBNEW (&vn_ssa_aux_obstack
, struct vn_ssa_aux
);
390 memset (newinfo
, 0, sizeof (struct vn_ssa_aux
));
391 newinfo
->name
= name
;
392 newinfo
->valnum
= VN_TOP
;
393 /* We are using the visited flag to handle uses with defs not within the
394 region being value-numbered. */
395 newinfo
->visited
= false;
397 /* Given we create the VN_INFOs on-demand now we have to do initialization
398 different than VN_TOP here. */
399 if (SSA_NAME_IS_DEFAULT_DEF (name
))
400 switch (TREE_CODE (SSA_NAME_VAR (name
)))
403 /* All undefined vars are VARYING. */
404 newinfo
->valnum
= name
;
405 newinfo
->visited
= true;
409 /* Parameters are VARYING but we can record a condition
410 if we know it is a non-NULL pointer. */
411 newinfo
->visited
= true;
412 newinfo
->valnum
= name
;
413 if (POINTER_TYPE_P (TREE_TYPE (name
))
414 && nonnull_arg_p (SSA_NAME_VAR (name
)))
418 ops
[1] = build_int_cst (TREE_TYPE (name
), 0);
420 /* Allocate from non-unwinding stack. */
421 nary
= alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack
);
422 init_vn_nary_op_from_pieces (nary
, 2, NE_EXPR
,
423 boolean_type_node
, ops
);
424 nary
->predicated_values
= 0;
425 nary
->u
.result
= boolean_true_node
;
426 vn_nary_op_insert_into (nary
, valid_info
->nary
, true);
427 gcc_assert (nary
->unwind_to
== NULL
);
428 /* Also do not link it into the undo chain. */
429 last_inserted_nary
= nary
->next
;
430 nary
->next
= (vn_nary_op_t
)(void *)-1;
431 nary
= alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack
);
432 init_vn_nary_op_from_pieces (nary
, 2, EQ_EXPR
,
433 boolean_type_node
, ops
);
434 nary
->predicated_values
= 0;
435 nary
->u
.result
= boolean_false_node
;
436 vn_nary_op_insert_into (nary
, valid_info
->nary
, true);
437 gcc_assert (nary
->unwind_to
== NULL
);
438 last_inserted_nary
= nary
->next
;
439 nary
->next
= (vn_nary_op_t
)(void *)-1;
440 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
442 fprintf (dump_file
, "Recording ");
443 print_generic_expr (dump_file
, name
, TDF_SLIM
);
444 fprintf (dump_file
, " != 0\n");
450 /* If the result is passed by invisible reference the default
451 def is initialized, otherwise it's uninitialized. Still
452 undefined is varying. */
453 newinfo
->visited
= true;
454 newinfo
->valnum
= name
;
463 /* Return the SSA value of X. */
466 SSA_VAL (tree x
, bool *visited
= NULL
)
468 vn_ssa_aux_t tem
= vn_ssa_aux_hash
->find_with_hash (x
, SSA_NAME_VERSION (x
));
470 *visited
= tem
&& tem
->visited
;
471 return tem
&& tem
->visited
? tem
->valnum
: x
;
474 /* Return the SSA value of the VUSE x, supporting released VDEFs
475 during elimination which will value-number the VDEF to the
476 associated VUSE (but not substitute in the whole lattice). */
479 vuse_ssa_val (tree x
)
487 gcc_assert (x
!= VN_TOP
);
489 while (SSA_NAME_IN_FREE_LIST (x
));
494 /* Similar to the above but used as callback for walk_non_aliases_vuses
495 and thus should stop at unvisited VUSE to not walk across region
499 vuse_valueize (tree vuse
)
504 vuse
= SSA_VAL (vuse
, &visited
);
507 gcc_assert (vuse
!= VN_TOP
);
509 while (SSA_NAME_IN_FREE_LIST (vuse
));
514 /* Return the vn_kind the expression computed by the stmt should be
518 vn_get_stmt_kind (gimple
*stmt
)
520 switch (gimple_code (stmt
))
528 enum tree_code code
= gimple_assign_rhs_code (stmt
);
529 tree rhs1
= gimple_assign_rhs1 (stmt
);
530 switch (get_gimple_rhs_class (code
))
532 case GIMPLE_UNARY_RHS
:
533 case GIMPLE_BINARY_RHS
:
534 case GIMPLE_TERNARY_RHS
:
536 case GIMPLE_SINGLE_RHS
:
537 switch (TREE_CODE_CLASS (code
))
540 /* VOP-less references can go through unary case. */
541 if ((code
== REALPART_EXPR
542 || code
== IMAGPART_EXPR
543 || code
== VIEW_CONVERT_EXPR
544 || code
== BIT_FIELD_REF
)
545 && TREE_CODE (TREE_OPERAND (rhs1
, 0)) == SSA_NAME
)
549 case tcc_declaration
:
556 if (code
== ADDR_EXPR
)
557 return (is_gimple_min_invariant (rhs1
)
558 ? VN_CONSTANT
: VN_REFERENCE
);
559 else if (code
== CONSTRUCTOR
)
572 /* Lookup a value id for CONSTANT and return it. If it does not
576 get_constant_value_id (tree constant
)
578 vn_constant_s
**slot
;
579 struct vn_constant_s vc
;
581 vc
.hashcode
= vn_hash_constant_with_type (constant
);
582 vc
.constant
= constant
;
583 slot
= constant_to_value_id
->find_slot (&vc
, NO_INSERT
);
585 return (*slot
)->value_id
;
589 /* Lookup a value id for CONSTANT, and if it does not exist, create a
590 new one and return it. If it does exist, return it. */
593 get_or_alloc_constant_value_id (tree constant
)
595 vn_constant_s
**slot
;
596 struct vn_constant_s vc
;
599 /* If the hashtable isn't initialized we're not running from PRE and thus
600 do not need value-ids. */
601 if (!constant_to_value_id
)
604 vc
.hashcode
= vn_hash_constant_with_type (constant
);
605 vc
.constant
= constant
;
606 slot
= constant_to_value_id
->find_slot (&vc
, INSERT
);
608 return (*slot
)->value_id
;
610 vcp
= XNEW (struct vn_constant_s
);
611 vcp
->hashcode
= vc
.hashcode
;
612 vcp
->constant
= constant
;
613 vcp
->value_id
= get_next_value_id ();
615 bitmap_set_bit (constant_value_ids
, vcp
->value_id
);
616 return vcp
->value_id
;
619 /* Return true if V is a value id for a constant. */
622 value_id_constant_p (unsigned int v
)
624 return bitmap_bit_p (constant_value_ids
, v
);
627 /* Compute the hash for a reference operand VRO1. */
630 vn_reference_op_compute_hash (const vn_reference_op_t vro1
, inchash::hash
&hstate
)
632 hstate
.add_int (vro1
->opcode
);
634 inchash::add_expr (vro1
->op0
, hstate
);
636 inchash::add_expr (vro1
->op1
, hstate
);
638 inchash::add_expr (vro1
->op2
, hstate
);
641 /* Compute a hash for the reference operation VR1 and return it. */
644 vn_reference_compute_hash (const vn_reference_t vr1
)
646 inchash::hash hstate
;
649 vn_reference_op_t vro
;
653 FOR_EACH_VEC_ELT (vr1
->operands
, i
, vro
)
655 if (vro
->opcode
== MEM_REF
)
657 else if (vro
->opcode
!= ADDR_EXPR
)
659 if (maybe_ne (vro
->off
, -1))
661 if (known_eq (off
, -1))
667 if (maybe_ne (off
, -1)
668 && maybe_ne (off
, 0))
669 hstate
.add_poly_int (off
);
672 && vro
->opcode
== ADDR_EXPR
)
676 tree op
= TREE_OPERAND (vro
->op0
, 0);
677 hstate
.add_int (TREE_CODE (op
));
678 inchash::add_expr (op
, hstate
);
682 vn_reference_op_compute_hash (vro
, hstate
);
685 result
= hstate
.end ();
686 /* ??? We would ICE later if we hash instead of adding that in. */
688 result
+= SSA_NAME_VERSION (vr1
->vuse
);
693 /* Return true if reference operations VR1 and VR2 are equivalent. This
694 means they have the same set of operands and vuses. */
697 vn_reference_eq (const_vn_reference_t
const vr1
, const_vn_reference_t
const vr2
)
701 /* Early out if this is not a hash collision. */
702 if (vr1
->hashcode
!= vr2
->hashcode
)
705 /* The VOP needs to be the same. */
706 if (vr1
->vuse
!= vr2
->vuse
)
709 /* If the operands are the same we are done. */
710 if (vr1
->operands
== vr2
->operands
)
713 if (!expressions_equal_p (TYPE_SIZE (vr1
->type
), TYPE_SIZE (vr2
->type
)))
716 if (INTEGRAL_TYPE_P (vr1
->type
)
717 && INTEGRAL_TYPE_P (vr2
->type
))
719 if (TYPE_PRECISION (vr1
->type
) != TYPE_PRECISION (vr2
->type
))
722 else if (INTEGRAL_TYPE_P (vr1
->type
)
723 && (TYPE_PRECISION (vr1
->type
)
724 != TREE_INT_CST_LOW (TYPE_SIZE (vr1
->type
))))
726 else if (INTEGRAL_TYPE_P (vr2
->type
)
727 && (TYPE_PRECISION (vr2
->type
)
728 != TREE_INT_CST_LOW (TYPE_SIZE (vr2
->type
))))
735 poly_int64 off1
= 0, off2
= 0;
736 vn_reference_op_t vro1
, vro2
;
737 vn_reference_op_s tem1
, tem2
;
738 bool deref1
= false, deref2
= false;
739 for (; vr1
->operands
.iterate (i
, &vro1
); i
++)
741 if (vro1
->opcode
== MEM_REF
)
743 /* Do not look through a storage order barrier. */
744 else if (vro1
->opcode
== VIEW_CONVERT_EXPR
&& vro1
->reverse
)
746 if (known_eq (vro1
->off
, -1))
750 for (; vr2
->operands
.iterate (j
, &vro2
); j
++)
752 if (vro2
->opcode
== MEM_REF
)
754 /* Do not look through a storage order barrier. */
755 else if (vro2
->opcode
== VIEW_CONVERT_EXPR
&& vro2
->reverse
)
757 if (known_eq (vro2
->off
, -1))
761 if (maybe_ne (off1
, off2
))
763 if (deref1
&& vro1
->opcode
== ADDR_EXPR
)
765 memset (&tem1
, 0, sizeof (tem1
));
766 tem1
.op0
= TREE_OPERAND (vro1
->op0
, 0);
767 tem1
.type
= TREE_TYPE (tem1
.op0
);
768 tem1
.opcode
= TREE_CODE (tem1
.op0
);
772 if (deref2
&& vro2
->opcode
== ADDR_EXPR
)
774 memset (&tem2
, 0, sizeof (tem2
));
775 tem2
.op0
= TREE_OPERAND (vro2
->op0
, 0);
776 tem2
.type
= TREE_TYPE (tem2
.op0
);
777 tem2
.opcode
= TREE_CODE (tem2
.op0
);
781 if (deref1
!= deref2
)
783 if (!vn_reference_op_eq (vro1
, vro2
))
788 while (vr1
->operands
.length () != i
789 || vr2
->operands
.length () != j
);
794 /* Copy the operations present in load/store REF into RESULT, a vector of
795 vn_reference_op_s's. */
798 copy_reference_ops_from_ref (tree ref
, vec
<vn_reference_op_s
> *result
)
800 /* For non-calls, store the information that makes up the address. */
804 vn_reference_op_s temp
;
806 memset (&temp
, 0, sizeof (temp
));
807 temp
.type
= TREE_TYPE (ref
);
808 temp
.opcode
= TREE_CODE (ref
);
814 temp
.op0
= TREE_OPERAND (ref
, 1);
817 temp
.op0
= TREE_OPERAND (ref
, 1);
821 /* The base address gets its own vn_reference_op_s structure. */
822 temp
.op0
= TREE_OPERAND (ref
, 1);
823 if (!mem_ref_offset (ref
).to_shwi (&temp
.off
))
825 temp
.clique
= MR_DEPENDENCE_CLIQUE (ref
);
826 temp
.base
= MR_DEPENDENCE_BASE (ref
);
827 temp
.reverse
= REF_REVERSE_STORAGE_ORDER (ref
);
830 /* The base address gets its own vn_reference_op_s structure. */
831 temp
.op0
= TMR_INDEX (ref
);
832 temp
.op1
= TMR_STEP (ref
);
833 temp
.op2
= TMR_OFFSET (ref
);
834 temp
.clique
= MR_DEPENDENCE_CLIQUE (ref
);
835 temp
.base
= MR_DEPENDENCE_BASE (ref
);
836 result
->safe_push (temp
);
837 memset (&temp
, 0, sizeof (temp
));
838 temp
.type
= NULL_TREE
;
839 temp
.opcode
= ERROR_MARK
;
840 temp
.op0
= TMR_INDEX2 (ref
);
844 /* Record bits, position and storage order. */
845 temp
.op0
= TREE_OPERAND (ref
, 1);
846 temp
.op1
= TREE_OPERAND (ref
, 2);
847 if (!multiple_p (bit_field_offset (ref
), BITS_PER_UNIT
, &temp
.off
))
849 temp
.reverse
= REF_REVERSE_STORAGE_ORDER (ref
);
852 /* The field decl is enough to unambiguously specify the field,
853 a matching type is not necessary and a mismatching type
854 is always a spurious difference. */
855 temp
.type
= NULL_TREE
;
856 temp
.op0
= TREE_OPERAND (ref
, 1);
857 temp
.op1
= TREE_OPERAND (ref
, 2);
859 tree this_offset
= component_ref_field_offset (ref
);
861 && poly_int_tree_p (this_offset
))
863 tree bit_offset
= DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref
, 1));
864 if (TREE_INT_CST_LOW (bit_offset
) % BITS_PER_UNIT
== 0)
867 = (wi::to_poly_offset (this_offset
)
868 + (wi::to_offset (bit_offset
) >> LOG2_BITS_PER_UNIT
));
869 /* Probibit value-numbering zero offset components
870 of addresses the same before the pass folding
871 __builtin_object_size had a chance to run
872 (checking cfun->after_inlining does the
874 if (TREE_CODE (orig
) != ADDR_EXPR
876 || cfun
->after_inlining
)
877 off
.to_shwi (&temp
.off
);
882 case ARRAY_RANGE_REF
:
885 tree eltype
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref
, 0)));
886 /* Record index as operand. */
887 temp
.op0
= TREE_OPERAND (ref
, 1);
888 /* Always record lower bounds and element size. */
889 temp
.op1
= array_ref_low_bound (ref
);
890 /* But record element size in units of the type alignment. */
891 temp
.op2
= TREE_OPERAND (ref
, 3);
892 temp
.align
= eltype
->type_common
.align
;
894 temp
.op2
= size_binop (EXACT_DIV_EXPR
, TYPE_SIZE_UNIT (eltype
),
895 size_int (TYPE_ALIGN_UNIT (eltype
)));
896 if (poly_int_tree_p (temp
.op0
)
897 && poly_int_tree_p (temp
.op1
)
898 && TREE_CODE (temp
.op2
) == INTEGER_CST
)
900 poly_offset_int off
= ((wi::to_poly_offset (temp
.op0
)
901 - wi::to_poly_offset (temp
.op1
))
902 * wi::to_offset (temp
.op2
)
903 * vn_ref_op_align_unit (&temp
));
904 off
.to_shwi (&temp
.off
);
909 if (DECL_HARD_REGISTER (ref
))
918 /* Canonicalize decls to MEM[&decl] which is what we end up with
919 when valueizing MEM[ptr] with ptr = &decl. */
920 temp
.opcode
= MEM_REF
;
921 temp
.op0
= build_int_cst (build_pointer_type (TREE_TYPE (ref
)), 0);
923 result
->safe_push (temp
);
924 temp
.opcode
= ADDR_EXPR
;
925 temp
.op0
= build1 (ADDR_EXPR
, TREE_TYPE (temp
.op0
), ref
);
926 temp
.type
= TREE_TYPE (temp
.op0
);
941 if (is_gimple_min_invariant (ref
))
947 /* These are only interesting for their operands, their
948 existence, and their type. They will never be the last
949 ref in the chain of references (IE they require an
950 operand), so we don't have to put anything
951 for op* as it will be handled by the iteration */
955 case VIEW_CONVERT_EXPR
:
957 temp
.reverse
= storage_order_barrier_p (ref
);
960 /* This is only interesting for its constant offset. */
961 temp
.off
= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref
)));
966 result
->safe_push (temp
);
968 if (REFERENCE_CLASS_P (ref
)
969 || TREE_CODE (ref
) == MODIFY_EXPR
970 || TREE_CODE (ref
) == WITH_SIZE_EXPR
971 || (TREE_CODE (ref
) == ADDR_EXPR
972 && !is_gimple_min_invariant (ref
)))
973 ref
= TREE_OPERAND (ref
, 0);
979 /* Build a alias-oracle reference abstraction in *REF from the vn_reference
980 operands in *OPS, the reference alias set SET and the reference type TYPE.
981 Return true if something useful was produced. */
984 ao_ref_init_from_vn_reference (ao_ref
*ref
,
985 alias_set_type set
, tree type
,
986 vec
<vn_reference_op_s
> ops
)
988 vn_reference_op_t op
;
990 tree base
= NULL_TREE
;
992 poly_offset_int offset
= 0;
993 poly_offset_int max_size
;
994 poly_offset_int size
= -1;
995 tree size_tree
= NULL_TREE
;
996 alias_set_type base_alias_set
= -1;
998 /* First get the final access size from just the outermost expression. */
1000 if (op
->opcode
== COMPONENT_REF
)
1001 size_tree
= DECL_SIZE (op
->op0
);
1002 else if (op
->opcode
== BIT_FIELD_REF
)
1003 size_tree
= op
->op0
;
1006 machine_mode mode
= TYPE_MODE (type
);
1007 if (mode
== BLKmode
)
1008 size_tree
= TYPE_SIZE (type
);
1010 size
= GET_MODE_BITSIZE (mode
);
1012 if (size_tree
!= NULL_TREE
1013 && poly_int_tree_p (size_tree
))
1014 size
= wi::to_poly_offset (size_tree
);
1016 /* Initially, maxsize is the same as the accessed element size.
1017 In the following it will only grow (or become -1). */
1020 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1021 and find the ultimate containing object. */
1022 FOR_EACH_VEC_ELT (ops
, i
, op
)
1026 /* These may be in the reference ops, but we cannot do anything
1027 sensible with them here. */
1029 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1030 if (base
!= NULL_TREE
1031 && TREE_CODE (base
) == MEM_REF
1033 && DECL_P (TREE_OPERAND (op
->op0
, 0)))
1035 vn_reference_op_t pop
= &ops
[i
-1];
1036 base
= TREE_OPERAND (op
->op0
, 0);
1037 if (known_eq (pop
->off
, -1))
1043 offset
+= pop
->off
* BITS_PER_UNIT
;
1051 /* Record the base objects. */
1053 base_alias_set
= get_deref_alias_set (op
->op0
);
1054 *op0_p
= build2 (MEM_REF
, op
->type
,
1055 NULL_TREE
, op
->op0
);
1056 MR_DEPENDENCE_CLIQUE (*op0_p
) = op
->clique
;
1057 MR_DEPENDENCE_BASE (*op0_p
) = op
->base
;
1058 op0_p
= &TREE_OPERAND (*op0_p
, 0);
1069 /* And now the usual component-reference style ops. */
1071 offset
+= wi::to_poly_offset (op
->op1
);
1076 tree field
= op
->op0
;
1077 /* We do not have a complete COMPONENT_REF tree here so we
1078 cannot use component_ref_field_offset. Do the interesting
1080 tree this_offset
= DECL_FIELD_OFFSET (field
);
1082 if (op
->op1
|| !poly_int_tree_p (this_offset
))
1086 poly_offset_int woffset
= (wi::to_poly_offset (this_offset
)
1087 << LOG2_BITS_PER_UNIT
);
1088 woffset
+= wi::to_offset (DECL_FIELD_BIT_OFFSET (field
));
1094 case ARRAY_RANGE_REF
:
1096 /* We recorded the lower bound and the element size. */
1097 if (!poly_int_tree_p (op
->op0
)
1098 || !poly_int_tree_p (op
->op1
)
1099 || TREE_CODE (op
->op2
) != INTEGER_CST
)
1103 poly_offset_int woffset
1104 = wi::sext (wi::to_poly_offset (op
->op0
)
1105 - wi::to_poly_offset (op
->op1
),
1106 TYPE_PRECISION (TREE_TYPE (op
->op0
)));
1107 woffset
*= wi::to_offset (op
->op2
) * vn_ref_op_align_unit (op
);
1108 woffset
<<= LOG2_BITS_PER_UNIT
;
1120 case VIEW_CONVERT_EXPR
:
1137 if (base
== NULL_TREE
)
1140 ref
->ref
= NULL_TREE
;
1142 ref
->ref_alias_set
= set
;
1143 if (base_alias_set
!= -1)
1144 ref
->base_alias_set
= base_alias_set
;
1146 ref
->base_alias_set
= get_alias_set (base
);
1147 /* We discount volatiles from value-numbering elsewhere. */
1148 ref
->volatile_p
= false;
1150 if (!size
.to_shwi (&ref
->size
) || maybe_lt (ref
->size
, 0))
1158 if (!offset
.to_shwi (&ref
->offset
))
1165 if (!max_size
.to_shwi (&ref
->max_size
) || maybe_lt (ref
->max_size
, 0))
1171 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1172 vn_reference_op_s's. */
1175 copy_reference_ops_from_call (gcall
*call
,
1176 vec
<vn_reference_op_s
> *result
)
1178 vn_reference_op_s temp
;
1180 tree lhs
= gimple_call_lhs (call
);
1183 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1184 different. By adding the lhs here in the vector, we ensure that the
1185 hashcode is different, guaranteeing a different value number. */
1186 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
1188 memset (&temp
, 0, sizeof (temp
));
1189 temp
.opcode
= MODIFY_EXPR
;
1190 temp
.type
= TREE_TYPE (lhs
);
1193 result
->safe_push (temp
);
1196 /* Copy the type, opcode, function, static chain and EH region, if any. */
1197 memset (&temp
, 0, sizeof (temp
));
1198 temp
.type
= gimple_call_fntype (call
);
1199 temp
.opcode
= CALL_EXPR
;
1200 temp
.op0
= gimple_call_fn (call
);
1201 temp
.op1
= gimple_call_chain (call
);
1202 if (stmt_could_throw_p (cfun
, call
) && (lr
= lookup_stmt_eh_lp (call
)) > 0)
1203 temp
.op2
= size_int (lr
);
1205 result
->safe_push (temp
);
1207 /* Copy the call arguments. As they can be references as well,
1208 just chain them together. */
1209 for (i
= 0; i
< gimple_call_num_args (call
); ++i
)
1211 tree callarg
= gimple_call_arg (call
, i
);
1212 copy_reference_ops_from_ref (callarg
, result
);
1216 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1217 *I_P to point to the last element of the replacement. */
1219 vn_reference_fold_indirect (vec
<vn_reference_op_s
> *ops
,
1222 unsigned int i
= *i_p
;
1223 vn_reference_op_t op
= &(*ops
)[i
];
1224 vn_reference_op_t mem_op
= &(*ops
)[i
- 1];
1226 poly_int64 addr_offset
= 0;
1228 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1229 from .foo.bar to the preceding MEM_REF offset and replace the
1230 address with &OBJ. */
1231 addr_base
= get_addr_base_and_unit_offset (TREE_OPERAND (op
->op0
, 0),
1233 gcc_checking_assert (addr_base
&& TREE_CODE (addr_base
) != MEM_REF
);
1234 if (addr_base
!= TREE_OPERAND (op
->op0
, 0))
1237 = (poly_offset_int::from (wi::to_poly_wide (mem_op
->op0
),
1240 mem_op
->op0
= wide_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
1241 op
->op0
= build_fold_addr_expr (addr_base
);
1242 if (tree_fits_shwi_p (mem_op
->op0
))
1243 mem_op
->off
= tree_to_shwi (mem_op
->op0
);
1251 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1252 *I_P to point to the last element of the replacement. */
1254 vn_reference_maybe_forwprop_address (vec
<vn_reference_op_s
> *ops
,
1257 bool changed
= false;
1258 vn_reference_op_t op
;
1262 unsigned int i
= *i_p
;
1264 vn_reference_op_t mem_op
= &(*ops
)[i
- 1];
1266 enum tree_code code
;
1267 poly_offset_int off
;
1269 def_stmt
= SSA_NAME_DEF_STMT (op
->op0
);
1270 if (!is_gimple_assign (def_stmt
))
1273 code
= gimple_assign_rhs_code (def_stmt
);
1274 if (code
!= ADDR_EXPR
1275 && code
!= POINTER_PLUS_EXPR
)
1278 off
= poly_offset_int::from (wi::to_poly_wide (mem_op
->op0
), SIGNED
);
1280 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1281 from .foo.bar to the preceding MEM_REF offset and replace the
1282 address with &OBJ. */
1283 if (code
== ADDR_EXPR
)
1285 tree addr
, addr_base
;
1286 poly_int64 addr_offset
;
1288 addr
= gimple_assign_rhs1 (def_stmt
);
1289 addr_base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
1291 /* If that didn't work because the address isn't invariant propagate
1292 the reference tree from the address operation in case the current
1293 dereference isn't offsetted. */
1295 && *i_p
== ops
->length () - 1
1296 && known_eq (off
, 0)
1297 /* This makes us disable this transform for PRE where the
1298 reference ops might be also used for code insertion which
1300 && default_vn_walk_kind
== VN_WALKREWRITE
)
1302 auto_vec
<vn_reference_op_s
, 32> tem
;
1303 copy_reference_ops_from_ref (TREE_OPERAND (addr
, 0), &tem
);
1304 /* Make sure to preserve TBAA info. The only objects not
1305 wrapped in MEM_REFs that can have their address taken are
1307 if (tem
.length () >= 2
1308 && tem
[tem
.length () - 2].opcode
== MEM_REF
)
1310 vn_reference_op_t new_mem_op
= &tem
[tem
.length () - 2];
1312 = wide_int_to_tree (TREE_TYPE (mem_op
->op0
),
1313 wi::to_poly_wide (new_mem_op
->op0
));
1316 gcc_assert (tem
.last ().opcode
== STRING_CST
);
1319 ops
->safe_splice (tem
);
1324 || TREE_CODE (addr_base
) != MEM_REF
1325 || (TREE_CODE (TREE_OPERAND (addr_base
, 0)) == SSA_NAME
1326 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base
,
1331 off
+= mem_ref_offset (addr_base
);
1332 op
->op0
= TREE_OPERAND (addr_base
, 0);
1337 ptr
= gimple_assign_rhs1 (def_stmt
);
1338 ptroff
= gimple_assign_rhs2 (def_stmt
);
1339 if (TREE_CODE (ptr
) != SSA_NAME
1340 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr
)
1341 /* Make sure to not endlessly recurse.
1342 See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily
1343 happen when we value-number a PHI to its backedge value. */
1344 || SSA_VAL (ptr
) == op
->op0
1345 || !poly_int_tree_p (ptroff
))
1348 off
+= wi::to_poly_offset (ptroff
);
1352 mem_op
->op0
= wide_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
1353 if (tree_fits_shwi_p (mem_op
->op0
))
1354 mem_op
->off
= tree_to_shwi (mem_op
->op0
);
1357 /* ??? Can end up with endless recursion here!?
1358 gcc.c-torture/execute/strcmp-1.c */
1359 if (TREE_CODE (op
->op0
) == SSA_NAME
)
1360 op
->op0
= SSA_VAL (op
->op0
);
1361 if (TREE_CODE (op
->op0
) != SSA_NAME
)
1362 op
->opcode
= TREE_CODE (op
->op0
);
1367 while (TREE_CODE (op
->op0
) == SSA_NAME
);
1369 /* Fold a remaining *&. */
1370 if (TREE_CODE (op
->op0
) == ADDR_EXPR
)
1371 vn_reference_fold_indirect (ops
, i_p
);
1376 /* Optimize the reference REF to a constant if possible or return
1377 NULL_TREE if not. */
1380 fully_constant_vn_reference_p (vn_reference_t ref
)
1382 vec
<vn_reference_op_s
> operands
= ref
->operands
;
1383 vn_reference_op_t op
;
1385 /* Try to simplify the translated expression if it is
1386 a call to a builtin function with at most two arguments. */
1388 if (op
->opcode
== CALL_EXPR
1389 && TREE_CODE (op
->op0
) == ADDR_EXPR
1390 && TREE_CODE (TREE_OPERAND (op
->op0
, 0)) == FUNCTION_DECL
1391 && fndecl_built_in_p (TREE_OPERAND (op
->op0
, 0))
1392 && operands
.length () >= 2
1393 && operands
.length () <= 3)
1395 vn_reference_op_t arg0
, arg1
= NULL
;
1396 bool anyconst
= false;
1397 arg0
= &operands
[1];
1398 if (operands
.length () > 2)
1399 arg1
= &operands
[2];
1400 if (TREE_CODE_CLASS (arg0
->opcode
) == tcc_constant
1401 || (arg0
->opcode
== ADDR_EXPR
1402 && is_gimple_min_invariant (arg0
->op0
)))
1405 && (TREE_CODE_CLASS (arg1
->opcode
) == tcc_constant
1406 || (arg1
->opcode
== ADDR_EXPR
1407 && is_gimple_min_invariant (arg1
->op0
))))
1411 tree folded
= build_call_expr (TREE_OPERAND (op
->op0
, 0),
1414 arg1
? arg1
->op0
: NULL
);
1416 && TREE_CODE (folded
) == NOP_EXPR
)
1417 folded
= TREE_OPERAND (folded
, 0);
1419 && is_gimple_min_invariant (folded
))
1424 /* Simplify reads from constants or constant initializers. */
1425 else if (BITS_PER_UNIT
== 8
1426 && COMPLETE_TYPE_P (ref
->type
)
1427 && is_gimple_reg_type (ref
->type
))
1431 if (INTEGRAL_TYPE_P (ref
->type
))
1432 size
= TYPE_PRECISION (ref
->type
);
1433 else if (tree_fits_shwi_p (TYPE_SIZE (ref
->type
)))
1434 size
= tree_to_shwi (TYPE_SIZE (ref
->type
));
1437 if (size
% BITS_PER_UNIT
!= 0
1438 || size
> MAX_BITSIZE_MODE_ANY_MODE
)
1440 size
/= BITS_PER_UNIT
;
1442 for (i
= 0; i
< operands
.length (); ++i
)
1444 if (TREE_CODE_CLASS (operands
[i
].opcode
) == tcc_constant
)
1449 if (known_eq (operands
[i
].off
, -1))
1451 off
+= operands
[i
].off
;
1452 if (operands
[i
].opcode
== MEM_REF
)
1458 vn_reference_op_t base
= &operands
[--i
];
1459 tree ctor
= error_mark_node
;
1460 tree decl
= NULL_TREE
;
1461 if (TREE_CODE_CLASS (base
->opcode
) == tcc_constant
)
1463 else if (base
->opcode
== MEM_REF
1464 && base
[1].opcode
== ADDR_EXPR
1465 && (TREE_CODE (TREE_OPERAND (base
[1].op0
, 0)) == VAR_DECL
1466 || TREE_CODE (TREE_OPERAND (base
[1].op0
, 0)) == CONST_DECL
1467 || TREE_CODE (TREE_OPERAND (base
[1].op0
, 0)) == STRING_CST
))
1469 decl
= TREE_OPERAND (base
[1].op0
, 0);
1470 if (TREE_CODE (decl
) == STRING_CST
)
1473 ctor
= ctor_for_folding (decl
);
1475 if (ctor
== NULL_TREE
)
1476 return build_zero_cst (ref
->type
);
1477 else if (ctor
!= error_mark_node
)
1479 HOST_WIDE_INT const_off
;
1482 tree res
= fold_ctor_reference (ref
->type
, ctor
,
1483 off
* BITS_PER_UNIT
,
1484 size
* BITS_PER_UNIT
, decl
);
1487 STRIP_USELESS_TYPE_CONVERSION (res
);
1488 if (is_gimple_min_invariant (res
))
1492 else if (off
.is_constant (&const_off
))
1494 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
1495 int len
= native_encode_expr (ctor
, buf
, size
, const_off
);
1497 return native_interpret_expr (ref
->type
, buf
, len
);
1505 /* Return true if OPS contain a storage order barrier. */
1508 contains_storage_order_barrier_p (vec
<vn_reference_op_s
> ops
)
1510 vn_reference_op_t op
;
1513 FOR_EACH_VEC_ELT (ops
, i
, op
)
1514 if (op
->opcode
== VIEW_CONVERT_EXPR
&& op
->reverse
)
1520 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1521 structures into their value numbers. This is done in-place, and
1522 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1523 whether any operands were valueized. */
1525 static vec
<vn_reference_op_s
>
1526 valueize_refs_1 (vec
<vn_reference_op_s
> orig
, bool *valueized_anything
,
1527 bool with_avail
= false)
1529 vn_reference_op_t vro
;
1532 *valueized_anything
= false;
1534 FOR_EACH_VEC_ELT (orig
, i
, vro
)
1536 if (vro
->opcode
== SSA_NAME
1537 || (vro
->op0
&& TREE_CODE (vro
->op0
) == SSA_NAME
))
1539 tree tem
= with_avail
? vn_valueize (vro
->op0
) : SSA_VAL (vro
->op0
);
1540 if (tem
!= vro
->op0
)
1542 *valueized_anything
= true;
1545 /* If it transforms from an SSA_NAME to a constant, update
1547 if (TREE_CODE (vro
->op0
) != SSA_NAME
&& vro
->opcode
== SSA_NAME
)
1548 vro
->opcode
= TREE_CODE (vro
->op0
);
1550 if (vro
->op1
&& TREE_CODE (vro
->op1
) == SSA_NAME
)
1552 tree tem
= with_avail
? vn_valueize (vro
->op1
) : SSA_VAL (vro
->op1
);
1553 if (tem
!= vro
->op1
)
1555 *valueized_anything
= true;
1559 if (vro
->op2
&& TREE_CODE (vro
->op2
) == SSA_NAME
)
1561 tree tem
= with_avail
? vn_valueize (vro
->op2
) : SSA_VAL (vro
->op2
);
1562 if (tem
!= vro
->op2
)
1564 *valueized_anything
= true;
1568 /* If it transforms from an SSA_NAME to an address, fold with
1569 a preceding indirect reference. */
1572 && TREE_CODE (vro
->op0
) == ADDR_EXPR
1573 && orig
[i
- 1].opcode
== MEM_REF
)
1575 if (vn_reference_fold_indirect (&orig
, &i
))
1576 *valueized_anything
= true;
1579 && vro
->opcode
== SSA_NAME
1580 && orig
[i
- 1].opcode
== MEM_REF
)
1582 if (vn_reference_maybe_forwprop_address (&orig
, &i
))
1583 *valueized_anything
= true;
1585 /* If it transforms a non-constant ARRAY_REF into a constant
1586 one, adjust the constant offset. */
1587 else if (vro
->opcode
== ARRAY_REF
1588 && known_eq (vro
->off
, -1)
1589 && poly_int_tree_p (vro
->op0
)
1590 && poly_int_tree_p (vro
->op1
)
1591 && TREE_CODE (vro
->op2
) == INTEGER_CST
)
1593 poly_offset_int off
= ((wi::to_poly_offset (vro
->op0
)
1594 - wi::to_poly_offset (vro
->op1
))
1595 * wi::to_offset (vro
->op2
)
1596 * vn_ref_op_align_unit (vro
));
1597 off
.to_shwi (&vro
->off
);
1604 static vec
<vn_reference_op_s
>
1605 valueize_refs (vec
<vn_reference_op_s
> orig
)
1608 return valueize_refs_1 (orig
, &tem
);
1611 static vec
<vn_reference_op_s
> shared_lookup_references
;
1613 /* Create a vector of vn_reference_op_s structures from REF, a
1614 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1615 this function. *VALUEIZED_ANYTHING will specify whether any
1616 operands were valueized. */
1618 static vec
<vn_reference_op_s
>
1619 valueize_shared_reference_ops_from_ref (tree ref
, bool *valueized_anything
)
1623 shared_lookup_references
.truncate (0);
1624 copy_reference_ops_from_ref (ref
, &shared_lookup_references
);
1625 shared_lookup_references
= valueize_refs_1 (shared_lookup_references
,
1626 valueized_anything
);
1627 return shared_lookup_references
;
1630 /* Create a vector of vn_reference_op_s structures from CALL, a
1631 call statement. The vector is shared among all callers of
1634 static vec
<vn_reference_op_s
>
1635 valueize_shared_reference_ops_from_call (gcall
*call
)
1639 shared_lookup_references
.truncate (0);
1640 copy_reference_ops_from_call (call
, &shared_lookup_references
);
1641 shared_lookup_references
= valueize_refs (shared_lookup_references
);
1642 return shared_lookup_references
;
1645 /* Lookup a SCCVN reference operation VR in the current hash table.
1646 Returns the resulting value number if it exists in the hash table,
1647 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1648 vn_reference_t stored in the hashtable if something is found. */
1651 vn_reference_lookup_1 (vn_reference_t vr
, vn_reference_t
*vnresult
)
1653 vn_reference_s
**slot
;
1656 hash
= vr
->hashcode
;
1657 slot
= valid_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1661 *vnresult
= (vn_reference_t
)*slot
;
1662 return ((vn_reference_t
)*slot
)->result
;
1669 /* Partial definition tracking support. */
1673 HOST_WIDE_INT offset
;
1680 HOST_WIDE_INT offset
;
1684 /* Context for alias walking. */
1686 struct vn_walk_cb_data
1688 vn_walk_cb_data (vn_reference_t vr_
, tree orig_ref_
, tree
*last_vuse_ptr_
,
1689 vn_lookup_kind vn_walk_kind_
, bool tbaa_p_
)
1690 : vr (vr_
), last_vuse_ptr (last_vuse_ptr_
), last_vuse (NULL_TREE
),
1691 vn_walk_kind (vn_walk_kind_
), tbaa_p (tbaa_p_
),
1692 saved_operands (vNULL
), first_set (-2), known_ranges (NULL
)
1695 last_vuse_ptr
= &last_vuse
;
1696 ao_ref_init (&orig_ref
, orig_ref_
);
1698 ~vn_walk_cb_data ();
1699 void *finish (alias_set_type
, tree
);
1700 void *push_partial_def (const pd_data
& pd
, alias_set_type
, HOST_WIDE_INT
);
1704 tree
*last_vuse_ptr
;
1706 vn_lookup_kind vn_walk_kind
;
1708 vec
<vn_reference_op_s
> saved_operands
;
1710 /* The VDEFs of partial defs we come along. */
1711 auto_vec
<pd_data
, 2> partial_defs
;
1712 /* The first defs range to avoid splay tree setup in most cases. */
1713 pd_range first_range
;
1714 alias_set_type first_set
;
1715 splay_tree known_ranges
;
1716 obstack ranges_obstack
;
1719 vn_walk_cb_data::~vn_walk_cb_data ()
1723 splay_tree_delete (known_ranges
);
1724 obstack_free (&ranges_obstack
, NULL
);
1726 saved_operands
.release ();
1730 vn_walk_cb_data::finish (alias_set_type set
, tree val
)
1732 if (first_set
!= -2)
1734 return vn_reference_lookup_or_insert_for_pieces
1735 (last_vuse
, set
, vr
->type
,
1736 saved_operands
.exists () ? saved_operands
: vr
->operands
, val
);
1739 /* pd_range splay-tree helpers. */
1742 pd_range_compare (splay_tree_key offset1p
, splay_tree_key offset2p
)
1744 HOST_WIDE_INT offset1
= *(HOST_WIDE_INT
*)offset1p
;
1745 HOST_WIDE_INT offset2
= *(HOST_WIDE_INT
*)offset2p
;
1746 if (offset1
< offset2
)
1748 else if (offset1
> offset2
)
1754 pd_tree_alloc (int size
, void *data_
)
1756 vn_walk_cb_data
*data
= (vn_walk_cb_data
*)data_
;
1757 return obstack_alloc (&data
->ranges_obstack
, size
);
1761 pd_tree_dealloc (void *, void *)
1765 /* Push PD to the vector of partial definitions returning a
1766 value when we are ready to combine things with VUSE, SET and MAXSIZEI,
1767 NULL when we want to continue looking for partial defs or -1
1771 vn_walk_cb_data::push_partial_def (const pd_data
&pd
,
1772 alias_set_type set
, HOST_WIDE_INT maxsizei
)
1774 const HOST_WIDE_INT bufsize
= 64;
1775 /* We're using a fixed buffer for encoding so fail early if the object
1776 we want to interpret is bigger. */
1777 if (maxsizei
> bufsize
* BITS_PER_UNIT
1779 || BITS_PER_UNIT
!= 8
1780 /* Not prepared to handle PDP endian. */
1781 || BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
)
1784 bool pd_constant_p
= (TREE_CODE (pd
.rhs
) == CONSTRUCTOR
1785 || CONSTANT_CLASS_P (pd
.rhs
));
1786 if (partial_defs
.is_empty ())
1788 /* If we get a clobber upfront, fail. */
1789 if (TREE_CLOBBER_P (pd
.rhs
))
1793 partial_defs
.safe_push (pd
);
1794 first_range
.offset
= pd
.offset
;
1795 first_range
.size
= pd
.size
;
1797 last_vuse_ptr
= NULL
;
1798 /* Continue looking for partial defs. */
1804 /* ??? Optimize the case where the 2nd partial def completes things. */
1805 gcc_obstack_init (&ranges_obstack
);
1806 known_ranges
= splay_tree_new_with_allocator (pd_range_compare
, 0, 0,
1808 pd_tree_dealloc
, this);
1809 splay_tree_insert (known_ranges
,
1810 (splay_tree_key
)&first_range
.offset
,
1811 (splay_tree_value
)&first_range
);
1814 pd_range newr
= { pd
.offset
, pd
.size
};
1817 /* Lookup the predecessor of offset + 1 and see if we need to merge. */
1818 HOST_WIDE_INT loffset
= newr
.offset
+ 1;
1819 if ((n
= splay_tree_predecessor (known_ranges
, (splay_tree_key
)&loffset
))
1820 && ((r
= (pd_range
*)n
->value
), true)
1821 && ranges_known_overlap_p (r
->offset
, r
->size
+ 1,
1822 newr
.offset
, newr
.size
))
1824 /* Ignore partial defs already covered. Here we also drop shadowed
1825 clobbers arriving here at the floor. */
1826 if (known_subrange_p (newr
.offset
, newr
.size
, r
->offset
, r
->size
))
1828 r
->size
= MAX (r
->offset
+ r
->size
, newr
.offset
+ newr
.size
) - r
->offset
;
1832 /* newr.offset wasn't covered yet, insert the range. */
1833 r
= XOBNEW (&ranges_obstack
, pd_range
);
1835 splay_tree_insert (known_ranges
, (splay_tree_key
)&r
->offset
,
1836 (splay_tree_value
)r
);
1838 /* Merge r which now contains newr and is a member of the splay tree with
1839 adjacent overlapping ranges. */
1841 while ((n
= splay_tree_successor (known_ranges
, (splay_tree_key
)&r
->offset
))
1842 && ((rafter
= (pd_range
*)n
->value
), true)
1843 && ranges_known_overlap_p (r
->offset
, r
->size
+ 1,
1844 rafter
->offset
, rafter
->size
))
1846 r
->size
= MAX (r
->offset
+ r
->size
,
1847 rafter
->offset
+ rafter
->size
) - r
->offset
;
1848 splay_tree_remove (known_ranges
, (splay_tree_key
)&rafter
->offset
);
1850 /* If we get a clobber, fail. */
1851 if (TREE_CLOBBER_P (pd
.rhs
))
1853 /* Non-constants are OK as long as they are shadowed by a constant. */
1856 partial_defs
.safe_push (pd
);
1858 /* Now we have merged newr into the range tree. When we have covered
1859 [offseti, sizei] then the tree will contain exactly one node which has
1860 the desired properties and it will be 'r'. */
1861 if (!known_subrange_p (0, maxsizei
, r
->offset
, r
->size
))
1862 /* Continue looking for partial defs. */
1865 /* Now simply native encode all partial defs in reverse order. */
1866 unsigned ndefs
= partial_defs
.length ();
1867 /* We support up to 512-bit values (for V8DFmode). */
1868 unsigned char buffer
[bufsize
+ 1];
1869 unsigned char this_buffer
[bufsize
+ 1];
1872 memset (buffer
, 0, bufsize
+ 1);
1873 unsigned needed_len
= ROUND_UP (maxsizei
, BITS_PER_UNIT
) / BITS_PER_UNIT
;
1874 while (!partial_defs
.is_empty ())
1876 pd_data pd
= partial_defs
.pop ();
1878 if (TREE_CODE (pd
.rhs
) == CONSTRUCTOR
)
1880 /* Empty CONSTRUCTOR. */
1881 if (pd
.size
>= needed_len
* BITS_PER_UNIT
)
1884 len
= ROUND_UP (pd
.size
, BITS_PER_UNIT
) / BITS_PER_UNIT
;
1885 memset (this_buffer
, 0, len
);
1889 len
= native_encode_expr (pd
.rhs
, this_buffer
, bufsize
,
1890 MAX (0, -pd
.offset
) / BITS_PER_UNIT
);
1892 || len
< (ROUND_UP (pd
.size
, BITS_PER_UNIT
) / BITS_PER_UNIT
1893 - MAX (0, -pd
.offset
) / BITS_PER_UNIT
))
1895 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1896 fprintf (dump_file
, "Failed to encode %u "
1897 "partial definitions\n", ndefs
);
1902 unsigned char *p
= buffer
;
1903 HOST_WIDE_INT size
= pd
.size
;
1905 size
-= ROUND_DOWN (-pd
.offset
, BITS_PER_UNIT
);
1906 this_buffer
[len
] = 0;
1907 if (BYTES_BIG_ENDIAN
)
1909 /* LSB of this_buffer[len - 1] byte should be at
1910 pd.offset + pd.size - 1 bits in buffer. */
1911 amnt
= ((unsigned HOST_WIDE_INT
) pd
.offset
1912 + pd
.size
) % BITS_PER_UNIT
;
1914 shift_bytes_in_array_right (this_buffer
, len
+ 1, amnt
);
1915 unsigned char *q
= this_buffer
;
1916 unsigned int off
= 0;
1920 off
= pd
.offset
/ BITS_PER_UNIT
;
1921 gcc_assert (off
< needed_len
);
1925 msk
= ((1 << size
) - 1) << (BITS_PER_UNIT
- amnt
);
1926 *p
= (*p
& ~msk
) | (this_buffer
[len
] & msk
);
1931 if (TREE_CODE (pd
.rhs
) != CONSTRUCTOR
)
1932 q
= (this_buffer
+ len
1933 - (ROUND_UP (size
- amnt
, BITS_PER_UNIT
)
1935 if (pd
.offset
% BITS_PER_UNIT
)
1937 msk
= -1U << (BITS_PER_UNIT
1938 - (pd
.offset
% BITS_PER_UNIT
));
1939 *p
= (*p
& msk
) | (*q
& ~msk
);
1943 size
-= BITS_PER_UNIT
- (pd
.offset
% BITS_PER_UNIT
);
1944 gcc_assert (size
>= 0);
1948 else if (TREE_CODE (pd
.rhs
) != CONSTRUCTOR
)
1950 q
= (this_buffer
+ len
1951 - (ROUND_UP (size
- amnt
, BITS_PER_UNIT
)
1953 if (pd
.offset
% BITS_PER_UNIT
)
1956 size
-= BITS_PER_UNIT
- ((unsigned HOST_WIDE_INT
) pd
.offset
1958 gcc_assert (size
>= 0);
1961 if ((unsigned HOST_WIDE_INT
) size
/ BITS_PER_UNIT
+ off
1963 size
= (needed_len
- off
) * BITS_PER_UNIT
;
1964 memcpy (p
, q
, size
/ BITS_PER_UNIT
);
1965 if (size
% BITS_PER_UNIT
)
1968 = -1U << (BITS_PER_UNIT
- (size
% BITS_PER_UNIT
));
1969 p
+= size
/ BITS_PER_UNIT
;
1970 q
+= size
/ BITS_PER_UNIT
;
1971 *p
= (*q
& msk
) | (*p
& ~msk
);
1976 size
= MIN (size
, (HOST_WIDE_INT
) needed_len
* BITS_PER_UNIT
);
1979 /* LSB of this_buffer[0] byte should be at pd.offset bits
1982 amnt
= pd
.offset
% BITS_PER_UNIT
;
1984 shift_bytes_in_array_left (this_buffer
, len
+ 1, amnt
);
1985 unsigned int off
= pd
.offset
/ BITS_PER_UNIT
;
1986 gcc_assert (off
< needed_len
);
1988 if (amnt
+ size
< BITS_PER_UNIT
)
1990 /* Low amnt bits come from *p, then size bits
1991 from this_buffer[0] and the remaining again from
1993 msk
= ((1 << size
) - 1) << amnt
;
1994 *p
= (*p
& ~msk
) | (this_buffer
[0] & msk
);
2000 *p
= (*p
& ~msk
) | (this_buffer
[0] & msk
);
2002 size
-= (BITS_PER_UNIT
- amnt
);
2007 amnt
= (unsigned HOST_WIDE_INT
) pd
.offset
% BITS_PER_UNIT
;
2009 shift_bytes_in_array_left (this_buffer
, len
+ 1, amnt
);
2011 memcpy (p
, this_buffer
+ (amnt
!= 0), size
/ BITS_PER_UNIT
);
2012 p
+= size
/ BITS_PER_UNIT
;
2013 if (size
% BITS_PER_UNIT
)
2015 unsigned int msk
= -1U << (size
% BITS_PER_UNIT
);
2016 *p
= (this_buffer
[(amnt
!= 0) + size
/ BITS_PER_UNIT
]
2017 & ~msk
) | (*p
& msk
);
2022 tree type
= vr
->type
;
2023 /* Make sure to interpret in a type that has a range covering the whole
2025 if (INTEGRAL_TYPE_P (vr
->type
) && maxsizei
!= TYPE_PRECISION (vr
->type
))
2026 type
= build_nonstandard_integer_type (maxsizei
, TYPE_UNSIGNED (type
));
2028 if (BYTES_BIG_ENDIAN
)
2030 unsigned sz
= needed_len
;
2031 if (maxsizei
% BITS_PER_UNIT
)
2032 shift_bytes_in_array_right (buffer
, needed_len
,
2034 - (maxsizei
% BITS_PER_UNIT
));
2035 if (INTEGRAL_TYPE_P (type
))
2036 sz
= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type
));
2037 if (sz
> needed_len
)
2039 memcpy (this_buffer
+ (sz
- needed_len
), buffer
, needed_len
);
2040 val
= native_interpret_expr (type
, this_buffer
, sz
);
2043 val
= native_interpret_expr (type
, buffer
, needed_len
);
2046 val
= native_interpret_expr (type
, buffer
, bufsize
);
2047 /* If we chop off bits because the types precision doesn't match the memory
2048 access size this is ok when optimizing reads but not when called from
2049 the DSE code during elimination. */
2050 if (val
&& type
!= vr
->type
)
2052 if (! int_fits_type_p (val
, vr
->type
))
2055 val
= fold_convert (vr
->type
, val
);
2060 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2062 "Successfully combined %u partial definitions\n", ndefs
);
2063 /* We are using the alias-set of the first store we encounter which
2064 should be appropriate here. */
2065 return finish (first_set
, val
);
2069 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2071 "Failed to interpret %u encoded partial definitions\n", ndefs
);
2076 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
2077 with the current VUSE and performs the expression lookup. */
2080 vn_reference_lookup_2 (ao_ref
*op ATTRIBUTE_UNUSED
, tree vuse
, void *data_
)
2082 vn_walk_cb_data
*data
= (vn_walk_cb_data
*)data_
;
2083 vn_reference_t vr
= data
->vr
;
2084 vn_reference_s
**slot
;
2087 /* If we have partial definitions recorded we have to go through
2088 vn_reference_lookup_3. */
2089 if (!data
->partial_defs
.is_empty ())
2092 if (data
->last_vuse_ptr
)
2094 *data
->last_vuse_ptr
= vuse
;
2095 data
->last_vuse
= vuse
;
2098 /* Fixup vuse and hash. */
2100 vr
->hashcode
= vr
->hashcode
- SSA_NAME_VERSION (vr
->vuse
);
2101 vr
->vuse
= vuse_ssa_val (vuse
);
2103 vr
->hashcode
= vr
->hashcode
+ SSA_NAME_VERSION (vr
->vuse
);
2105 hash
= vr
->hashcode
;
2106 slot
= valid_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
2109 if ((*slot
)->result
&& data
->saved_operands
.exists ())
2110 return data
->finish (vr
->set
, (*slot
)->result
);
2117 /* Lookup an existing or insert a new vn_reference entry into the
2118 value table for the VUSE, SET, TYPE, OPERANDS reference which
2119 has the value VALUE which is either a constant or an SSA name. */
2121 static vn_reference_t
2122 vn_reference_lookup_or_insert_for_pieces (tree vuse
,
2125 vec
<vn_reference_op_s
,
2130 vn_reference_t result
;
2132 vr1
.vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
2133 vr1
.operands
= operands
;
2136 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
2137 if (vn_reference_lookup_1 (&vr1
, &result
))
2139 if (TREE_CODE (value
) == SSA_NAME
)
2140 value_id
= VN_INFO (value
)->value_id
;
2142 value_id
= get_or_alloc_constant_value_id (value
);
2143 return vn_reference_insert_pieces (vuse
, set
, type
,
2144 operands
.copy (), value
, value_id
);
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the simplified result or by inserting the operation if
   INSERT is true.  */

static tree
vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
{
  tree result = NULL_TREE;
  /* We will be creating a value number for
       RCODE (OPS...).
     So first simplify and lookup this expression to see if it
     is already available.  */
  /* For simplification valueize.  */
  unsigned i = 0;
  for (i = 0; i < res_op->num_ops; ++i)
    if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
      {
	tree tem = vn_valueize (res_op->ops[i]);
	if (!tem)
	  break;
	res_op->ops[i] = tem;
      }
  /* If valueization of an operand fails (it is not available), skip
     simplification.  */
  bool res = false;
  if (i == res_op->num_ops)
    {
      mprts_hook = vn_lookup_simplify_result;
      res = res_op->resimplify (NULL, vn_valueize);
      mprts_hook = NULL;
    }
  gimple *new_stmt = NULL;
  if (res
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      /* The expression is already available.  */
      result = res_op->ops[0];
      /* Valueize it, simplification returns sth in AVAIL only.  */
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
    }
  else
    {
      tree val = vn_lookup_simplify_result (res_op);
      if (!val && insert)
	{
	  gimple_seq stmts = NULL;
	  result = maybe_push_res_to_seq (res_op, &stmts);
	  if (result)
	    {
	      gcc_assert (gimple_seq_singleton_p (stmts));
	      new_stmt = gimple_seq_first_stmt (stmts);
	    }
	}
      else
	/* The expression is already available.  */
	result = val;
    }
  if (new_stmt)
    {
      /* The expression is not yet available, value-number lhs to
	 the new SSA_NAME we created.  */
      /* Initialize value-number information properly.  */
      vn_ssa_aux_t result_info = VN_INFO (result);
      result_info->valnum = result;
      result_info->value_id = get_next_value_id ();
      result_info->visited = 1;
      gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
					  new_stmt);
      result_info->needs_insertion = true;
      /* ??? PRE phi-translation inserts NARYs without corresponding
	 SSA name result.  Re-use those but set their result according
	 to the stmt we just built.  */
      vn_nary_op_t nary = NULL;
      vn_nary_op_lookup_stmt (new_stmt, &nary);
      if (nary)
	{
	  gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
	  nary->u.result = gimple_assign_lhs (new_stmt);
	}
      /* As all "inserted" statements are singleton SCCs, insert
	 to the valid table.  This is strictly needed to
	 avoid re-generating new value SSA_NAMEs for the same
	 expression during SCC iteration over and over (the
	 optimistic table gets cleared after each iteration).
	 We do not need to insert into the optimistic table, as
	 lookups there will fall back to the valid table.  */
      else
	{
	  unsigned int length = vn_nary_length_from_stmt (new_stmt);
	  vn_nary_op_t vno1
	    = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
	  vno1->value_id = result_info->value_id;
	  vno1->length = length;
	  vno1->predicated_values = 0;
	  vno1->u.result = result;
	  init_vn_nary_op_from_stmt (vno1, new_stmt);
	  vn_nary_op_insert_into (vno1, valid_info->nary, true);
	  /* Also do not link it into the undo chain.  */
	  last_inserted_nary = vno1->next;
	  vno1->next = (vn_nary_op_t)(void *)-1;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Inserting name ");
	  print_generic_expr (dump_file, result);
	  fprintf (dump_file, " for expression ");
	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
  return result;
}
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the simplified result or by inserting the operation.  */

static tree
vn_nary_build_or_lookup (gimple_match_op *res_op)
{
  return vn_nary_build_or_lookup_1 (res_op, true);
}
/* Try to simplify the expression RCODE OPS... of type TYPE and return
   its value if present.  */

tree
vn_nary_simplify (vn_nary_op_t nary)
{
  if (nary->length > gimple_match_op::MAX_NUM_OPS)
    return NULL_TREE;
  gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
		      nary->type, nary->length);
  memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
  return vn_nary_build_or_lookup_1 (&op, false);
}
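/* Illustrative sketch, not part of the original sources: a caller that
   wants a value number for a hypothetical unary conversion of SSA name
   name_1 to TYPE would phrase the query roughly as

     gimple_match_op op (gimple_match_cond::UNCOND, NOP_EXPR, type, name_1);
     tree val = vn_nary_build_or_lookup (&op);

   A non-NULL VAL is either an already available SSA name or constant
   with the same value, or a fresh SSA name whose defining statement was
   recorded for later insertion.  */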
/* Elimination engine.  */

class eliminate_dom_walker : public dom_walker
{
public:
  eliminate_dom_walker (cdi_direction, bitmap);
  ~eliminate_dom_walker ();

  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  virtual tree eliminate_avail (basic_block, tree op);
  virtual void eliminate_push_avail (basic_block, tree op);
  tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);

  void eliminate_stmt (basic_block, gimple_stmt_iterator *);

  unsigned eliminate_cleanup (bool region_p = false);

  bool do_pre;
  unsigned int el_todo;
  unsigned int eliminations;
  unsigned int insertions;

  /* SSA names that had their defs inserted by PRE if do_pre.  */
  bitmap inserted_exprs;

  /* Blocks with statements that have had their EH properties changed.  */
  bitmap need_eh_cleanup;

  /* Blocks with statements that have had their AB properties changed.  */
  bitmap need_ab_cleanup;

  /* Local state for the eliminate domwalk.  */
  auto_vec<gimple *> to_remove;
  auto_vec<gimple *> to_fixup;
  auto_vec<tree> avail;
  auto_vec<tree> avail_stack;
};
/* Adaptor to the elimination engine using RPO availability.  */

class rpo_elim : public eliminate_dom_walker
{
public:
  rpo_elim(basic_block entry_)
    : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
      m_avail_freelist (NULL) {}

  virtual tree eliminate_avail (basic_block, tree op);

  virtual void eliminate_push_avail (basic_block, tree);

  basic_block entry;

  /* Freelist of avail entries which are allocated from the vn_ssa_aux
     obstack.  */
  vn_avail *m_avail_freelist;
};

/* Global RPO state for access from hooks.  */
static rpo_elim *rpo_avail;
basic_block vn_context_bb;
/* Return true if BASE1 and BASE2 can be adjusted so they have the
   same address and adjust *OFFSET1 and *OFFSET2 accordingly.
   Otherwise return false.  */

static bool
adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
				       tree base2, poly_int64 *offset2)
{
  HOST_WIDE_INT soff;
  if (TREE_CODE (base1) == MEM_REF
      && TREE_CODE (base2) == MEM_REF)
    {
      if (mem_ref_offset (base1).to_shwi (&soff))
	{
	  base1 = TREE_OPERAND (base1, 0);
	  *offset1 += soff * BITS_PER_UNIT;
	}
      if (mem_ref_offset (base2).to_shwi (&soff))
	{
	  base2 = TREE_OPERAND (base2, 0);
	  *offset2 += soff * BITS_PER_UNIT;
	}
      return operand_equal_p (base1, base2, 0);
    }
  return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
}
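/* Worked example, for illustration only: with BASE1 == MEM_REF[p_1 + 4],
   BASE2 == MEM_REF[p_1 + 8] and *OFFSET1 == *OFFSET2 == 0, both constant
   offsets are folded into the bit offsets, leaving the common base p_1
   with *OFFSET1 == 4 * BITS_PER_UNIT and *OFFSET2 == 8 * BITS_PER_UNIT,
   and the function returns true.  */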
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform translation
   of *REF and *VR.  If only disambiguation was performed then
   *DISAMBIGUATE_ONLY is set to true.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
		       translate_flags *disambiguate_only)
{
  vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
  vn_reference_t vr = data->vr;
  gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base = ao_ref_base (ref);
  HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
  static vec<vn_reference_op_s> lhs_ops;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;
  poly_int64 copy_size;

  /* First try to disambiguate after value-replacing in the definition's LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      basic_block saved_rpo_bb = vn_context_bb;
      vn_context_bb = gimple_bb (def_stmt);
      if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
	{
	  copy_reference_ops_from_ref (lhs, &lhs_ops);
	  lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
	}
      vn_context_bb = saved_rpo_bb;
      if (valueized_anything)
	{
	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
						      get_alias_set (lhs),
						      TREE_TYPE (lhs), lhs_ops);
	  if (lhs_ref_ok
	      && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
	    {
	      *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
	      return NULL;
	    }
	}
      else
	{
	  ao_ref_init (&lhs_ref, lhs);
	  lhs_ref_ok = true;
	}

      /* Besides valueizing the LHS we can also use access-path based
	 disambiguation on the original non-valueized ref.  */
      if (!ref->ref
	  && lhs_ref_ok
	  && data->orig_ref.ref)
	{
	  /* We want to use the non-valueized LHS for this, but avoid redundant
	     work.  */
	  ao_ref *lref = &lhs_ref;
	  ao_ref lref_alt;
	  if (valueized_anything)
	    {
	      ao_ref_init (&lref_alt, lhs);
	      lref = &lref_alt;
	    }
	  if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
	    {
	      *disambiguate_only = (valueized_anything
				    ? TR_VALUEIZE_AND_DISAMBIGUATE
				    : TR_DISAMBIGUATE);
	      return NULL;
	    }
	}

      /* If we reach a clobbering statement try to skip it and see if
	 we find a VN result with exactly the same value as the
	 possible clobber.  In this case we can ignore the clobber
	 and return the found value.  */
      if (is_gimple_reg_type (TREE_TYPE (lhs))
	  && types_compatible_p (TREE_TYPE (lhs), vr->type)
	  && ref->ref)
	{
	  tree *saved_last_vuse_ptr = data->last_vuse_ptr;
	  /* Do not update last_vuse_ptr in vn_reference_lookup_2.  */
	  data->last_vuse_ptr = NULL;
	  tree saved_vuse = vr->vuse;
	  hashval_t saved_hashcode = vr->hashcode;
	  void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
	  /* Need to restore vr->vuse and vr->hashcode.  */
	  vr->vuse = saved_vuse;
	  vr->hashcode = saved_hashcode;
	  data->last_vuse_ptr = saved_last_vuse_ptr;
	  if (res && res != (void *)-1)
	    {
	      vn_reference_t vnresult = (vn_reference_t) res;
	      tree rhs = gimple_assign_rhs1 (def_stmt);
	      if (TREE_CODE (rhs) == SSA_NAME)
		rhs = SSA_VAL (rhs);
	      if (vnresult->result
		  && operand_equal_p (vnresult->result, rhs, 0)
		  /* We have to honor our promise about union type punning
		     and also support arbitrary overlaps with
		     -fno-strict-aliasing.  So simply resort to alignment to
		     rule out overlaps.  Do this check last because it is
		     quite expensive compared to the hash-lookup above.  */
		  && multiple_p (get_object_alignment (ref->ref), ref->size)
		  && multiple_p (get_object_alignment (lhs), ref->size))
		return res;
	    }
	}
    }
  else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
	   && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
	   && gimple_call_num_args (def_stmt) <= 4)
    {
      /* For builtin calls valueize its arguments and call the
	 alias oracle again.  Valueization may improve points-to
	 info of pointers and constify size and position arguments.
	 Originally this was motivated by PR61034 which has
	 conditional calls to free falsely clobbering ref because
	 of imprecise points-to info of the argument.  */
      tree oldargs[4];
      bool valueized_anything = false;
      for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	{
	  oldargs[i] = gimple_call_arg (def_stmt, i);
	  tree val = vn_valueize (oldargs[i]);
	  if (val != oldargs[i])
	    {
	      gimple_call_set_arg (def_stmt, i, val);
	      valueized_anything = true;
	    }
	}
      if (valueized_anything)
	{
	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
					       ref);
	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
	  if (!res)
	    {
	      *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
	      return NULL;
	    }
	}
    }

  if (*disambiguate_only > TR_TRANSLATE)
    return (void *)-1;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (!ref->max_size_known_p ())
    return (void *)-1;

  poly_int64 offset = ref->offset;
  poly_int64 maxsize = ref->max_size;
  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
	  || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK))
      && (integer_zerop (gimple_call_arg (def_stmt, 1))
	  || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
	       || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
	      && CHAR_BIT == 8
	      && BITS_PER_UNIT == 8
	      && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
	      && offset.is_constant (&offseti)
	      && ref->size.is_constant (&sizei)
	      && (offseti % BITS_PER_UNIT == 0
		  || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
      && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
	  || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
	      && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
      && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	  || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
    {
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      bool reverse;
      tree ref2 = gimple_call_arg (def_stmt, 0);
      if (TREE_CODE (ref2) == SSA_NAME)
	{
	  ref2 = SSA_VAL (ref2);
	  if (TREE_CODE (ref2) == SSA_NAME
	      && (TREE_CODE (base) != MEM_REF
		  || TREE_OPERAND (base, 0) != ref2))
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
	      if (gimple_assign_single_p (def_stmt)
		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
		ref2 = gimple_assign_rhs1 (def_stmt);
	    }
	}
      if (TREE_CODE (ref2) == ADDR_EXPR)
	{
	  ref2 = TREE_OPERAND (ref2, 0);
	  base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
					   &reverse);
	  if (!known_size_p (maxsize2)
	      || !known_eq (maxsize2, size2)
	      || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
	    return (void *)-1;
	}
      else if (TREE_CODE (ref2) == SSA_NAME)
	{
	  poly_int64 soff;
	  if (TREE_CODE (base) != MEM_REF
	      || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
	    return (void *)-1;
	  offset += soff;
	  offset2 = 0;
	  if (TREE_OPERAND (base, 0) != ref2)
	    {
	      gimple *def = SSA_NAME_DEF_STMT (ref2);
	      if (is_gimple_assign (def)
		  && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
		  && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
		  && poly_int_tree_p (gimple_assign_rhs2 (def))
		  && (wi::to_poly_offset (gimple_assign_rhs2 (def))
		      << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
		{
		  ref2 = gimple_assign_rhs1 (def);
		  if (TREE_CODE (ref2) == SSA_NAME)
		    ref2 = SSA_VAL (ref2);
		}
	      else
		return (void *)-1;
	    }
	}
      else
	return (void *)-1;
      tree len = gimple_call_arg (def_stmt, 2);
      HOST_WIDE_INT leni, offset2i;
      if (TREE_CODE (len) == SSA_NAME)
	len = SSA_VAL (len);
      /* Sometimes the above trickery is smarter than alias analysis.  Take
	 advantage of that.  */
      if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
				   (wi::to_poly_offset (len)
				    << LOG2_BITS_PER_UNIT)))
	return NULL;
      if (data->partial_defs.is_empty ()
	  && known_subrange_p (offset, maxsize, offset2,
			       wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
	{
	  tree val;
	  if (integer_zerop (gimple_call_arg (def_stmt, 1)))
	    val = build_zero_cst (vr->type);
	  else if (INTEGRAL_TYPE_P (vr->type)
		   && known_eq (ref->size, 8)
		   && offseti % BITS_PER_UNIT == 0)
	    {
	      gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
				      vr->type, gimple_call_arg (def_stmt, 1));
	      val = vn_nary_build_or_lookup (&res_op);
	      if (!val
		  || (TREE_CODE (val) == SSA_NAME
		      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
		return (void *)-1;
	    }
	  else
	    {
	      unsigned buflen
		= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
	      if (INTEGRAL_TYPE_P (vr->type))
		buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
	      unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
	      memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
		      buflen);
	      if (BYTES_BIG_ENDIAN)
		{
		  unsigned int amnt
		    = (((unsigned HOST_WIDE_INT) offseti + sizei)
		       % BITS_PER_UNIT);
		  if (amnt)
		    {
		      shift_bytes_in_array_right (buf, buflen,
						  BITS_PER_UNIT - amnt);
		      buf++;
		      buflen--;
		    }
		}
	      else if (offseti % BITS_PER_UNIT != 0)
		{
		  unsigned int amnt
		    = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
				       % BITS_PER_UNIT);
		  shift_bytes_in_array_left (buf, buflen, amnt);
		  buf++;
		  buflen--;
		}
	      val = native_interpret_expr (vr->type, buf, buflen);
	      if (!val)
		return (void *)-1;
	    }
	  return data->finish (0, val);
	}
      /* For now handle clearing memory with partial defs.  */
      else if (known_eq (ref->size, maxsize)
	       && integer_zerop (gimple_call_arg (def_stmt, 1))
	       && tree_fits_poly_int64_p (len)
	       && tree_to_poly_int64 (len).is_constant (&leni)
	       && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
	       && offset.is_constant (&offseti)
	       && offset2.is_constant (&offset2i)
	       && maxsize.is_constant (&maxsizei)
	       && ranges_known_overlap_p (offseti, maxsizei, offset2i,
					  leni << LOG2_BITS_PER_UNIT))
	{
	  pd_data pd;
	  pd.rhs = build_constructor (NULL_TREE, NULL);
	  pd.offset = offset2i - offseti;
	  pd.size = leni << LOG2_BITS_PER_UNIT;
	  return data->push_partial_def (pd, 0, maxsizei);
	}
    }
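  /* Illustration with hypothetical GIMPLE, not from the sources: for

       memset (&a, 0, 16);
       x_1 = a.f;	// a.f lies within the cleared 16 bytes

     the read of a.f is answered with the zero constant of its type
     without walking further through the VUSE chain; a read only
     partially covered by the memset is instead queued as a zero
     partial definition.  */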
  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      HOST_WIDE_INT offset2i, size2i;
      bool reverse;
      if (lhs_ref_ok)
	{
	  base2 = ao_ref_base (&lhs_ref);
	  offset2 = lhs_ref.offset;
	  size2 = lhs_ref.size;
	  maxsize2 = lhs_ref.max_size;
	  reverse = reverse_storage_order_for_component_p (lhs);
	}
      else
	base2 = get_ref_base_and_extent (lhs,
					 &offset2, &size2, &maxsize2, &reverse);
      if (known_size_p (maxsize2)
	  && known_eq (maxsize2, size2)
	  && adjust_offsets_for_equal_base_address (base, &offset,
						    base2, &offset2))
	{
	  if (data->partial_defs.is_empty ()
	      && known_subrange_p (offset, maxsize, offset2, size2))
	    {
	      /* While technically undefined behavior do not optimize
		 a full read from a clobber.  */
	      if (gimple_clobber_p (def_stmt))
		return (void *)-1;
	      tree val = build_zero_cst (vr->type);
	      return data->finish (get_alias_set (lhs), val);
	    }
	  else if (known_eq (ref->size, maxsize)
		   && maxsize.is_constant (&maxsizei)
		   && offset.is_constant (&offseti)
		   && offset2.is_constant (&offset2i)
		   && size2.is_constant (&size2i)
		   && ranges_known_overlap_p (offseti, maxsizei,
					      offset2i, size2i))
	    {
	      /* Let clobbers be consumed by the partial-def tracker
		 which can choose to ignore them if they are shadowed
		 by a later def.  */
	      pd_data pd;
	      pd.rhs = gimple_assign_rhs1 (def_stmt);
	      pd.offset = offset2i - offseti;
	      pd.size = size2i;
	      return data->push_partial_def (pd, get_alias_set (lhs), maxsizei);
	    }
	}
    }
  /* 3) Assignment from a constant.  We can use fold's native encode/interpret
     routines to extract the assigned bits.  */
  else if (known_eq (ref->size, maxsize)
	   && is_gimple_reg_type (vr->type)
	   && !contains_storage_order_barrier_p (vr->operands)
	   && gimple_assign_single_p (def_stmt)
	   && CHAR_BIT == 8
	   && BITS_PER_UNIT == 8
	   && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
	   /* native_encode and native_decode operate on arrays of bytes
	      and so fundamentally need a compile-time size and offset.  */
	   && maxsize.is_constant (&maxsizei)
	   && offset.is_constant (&offseti)
	   && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
	       || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
		   && is_gimple_min_invariant
			(SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      HOST_WIDE_INT offset2i, size2i;
      bool reverse;
      if (lhs_ref_ok)
	{
	  base2 = ao_ref_base (&lhs_ref);
	  offset2 = lhs_ref.offset;
	  size2 = lhs_ref.size;
	  maxsize2 = lhs_ref.max_size;
	  reverse = reverse_storage_order_for_component_p (lhs);
	}
      else
	base2 = get_ref_base_and_extent (lhs,
					 &offset2, &size2, &maxsize2, &reverse);
      if (base2
	  && !reverse
	  && !storage_order_barrier_p (lhs)
	  && known_eq (maxsize2, size2)
	  && adjust_offsets_for_equal_base_address (base, &offset,
						    base2, &offset2)
	  && offset.is_constant (&offseti)
	  && offset2.is_constant (&offset2i)
	  && size2.is_constant (&size2i))
	{
	  if (data->partial_defs.is_empty ()
	      && known_subrange_p (offseti, maxsizei, offset2, size2))
	    {
	      /* We support up to 512-bit values (for V8DFmode).  */
	      unsigned char buffer[65];
	      int len;

	      tree rhs = gimple_assign_rhs1 (def_stmt);
	      if (TREE_CODE (rhs) == SSA_NAME)
		rhs = SSA_VAL (rhs);
	      len = native_encode_expr (rhs,
					buffer, sizeof (buffer) - 1,
					(offseti - offset2i) / BITS_PER_UNIT);
	      if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
		{
		  tree type = vr->type;
		  unsigned char *buf = buffer;
		  unsigned int amnt = 0;
		  /* Make sure to interpret in a type that has a range
		     covering the whole access size.  */
		  if (INTEGRAL_TYPE_P (vr->type)
		      && maxsizei != TYPE_PRECISION (vr->type))
		    type = build_nonstandard_integer_type (maxsizei,
							   TYPE_UNSIGNED (type));
		  if (BYTES_BIG_ENDIAN)
		    {
		      /* For big-endian native_encode_expr stored the rhs
			 such that the LSB of it is the LSB of buffer[len - 1].
			 That bit is stored into memory at position
			 offset2 + size2 - 1, i.e. in byte
			 base + (offset2 + size2 - 1) / BITS_PER_UNIT.
			 E.g. for offset2 1 and size2 14, rhs -1 and memory
			 previously cleared that is:
			 0        1
			 01111111|11111110
			 Now, if we want to extract offset 2 and size 12 from
			 it using native_interpret_expr (which actually works
			 for integral bitfield types in terms of byte size of
			 the mode), the native_encode_expr stored the value
			 into buffer as
			 XX111111|11111111
			 and returned len 2 (the X bits are outside of
			 precision).
			 Let sz be maxsize / BITS_PER_UNIT if not extracting
			 a bitfield, and GET_MODE_SIZE otherwise.
			 We need to align the LSB of the value we want to
			 extract as the LSB of buf[sz - 1].
			 The LSB from memory we need to read is at position
			 offset + maxsize - 1.  */
		      HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
		      if (INTEGRAL_TYPE_P (type))
			sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
		      amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
			      - offseti - maxsizei) % BITS_PER_UNIT;
		      if (amnt)
			shift_bytes_in_array_right (buffer, len, amnt);
		      amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
			      - offseti - maxsizei - amnt) / BITS_PER_UNIT;
		      if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
			return (void *)-1;
		      else
			{
			  buf = buffer + len - sz - amnt;
			  len -= (buf - buffer);
			}
		    }
		  else
		    {
		      amnt = ((unsigned HOST_WIDE_INT) offset2i
			      - offseti) % BITS_PER_UNIT;
		      if (amnt)
			{
			  buffer[len] = 0;
			  shift_bytes_in_array_left (buffer, len + 1, amnt);
			  buf = buffer + 1;
			}
		    }
		  tree val = native_interpret_expr (type, buf, len);
		  /* If we chop off bits because the type's precision doesn't
		     match the memory access size this is ok when optimizing
		     reads but not when called from the DSE code during
		     elimination.  */
		  if (val
		      && type != vr->type)
		    {
		      if (! int_fits_type_p (val, vr->type))
			val = NULL_TREE;
		      else
			val = fold_convert (vr->type, val);
		    }

		  if (val)
		    return data->finish (get_alias_set (lhs), val);
		}
	    }
	  else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
					   size2i))
	    {
	      pd_data pd;
	      tree rhs = gimple_assign_rhs1 (def_stmt);
	      if (TREE_CODE (rhs) == SSA_NAME)
		rhs = SSA_VAL (rhs);
	      pd.rhs = rhs;
	      pd.offset = offset2i - offseti;
	      pd.size = size2i;
	      return data->push_partial_def (pd, get_alias_set (lhs), maxsizei);
	    }
	}
    }
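  /* Illustration, hypothetical and not from the sources: after

       a.i = 0x01020304;	// constant store to an int field
       x_1 = a.c2;		// read of one byte within a.i

     the constant rhs is native_encode_expr'd into a byte buffer and the
     read is answered by native_interpret_expr on the covered sub-buffer,
     yielding a constant without walking further through the VUSE chain.  */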
  /* 4) Assignment from an SSA name whose definition we may be able
     to access pieces from or we can combine to a larger entity.  */
  else if (known_eq (ref->size, maxsize)
	   && is_gimple_reg_type (vr->type)
	   && !contains_storage_order_barrier_p (vr->operands)
	   && gimple_assign_single_p (def_stmt)
	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      HOST_WIDE_INT offset2i, size2i, offseti;
      bool reverse;
      if (lhs_ref_ok)
	{
	  base2 = ao_ref_base (&lhs_ref);
	  offset2 = lhs_ref.offset;
	  size2 = lhs_ref.size;
	  maxsize2 = lhs_ref.max_size;
	  reverse = reverse_storage_order_for_component_p (lhs);
	}
      else
	base2 = get_ref_base_and_extent (lhs,
					 &offset2, &size2, &maxsize2, &reverse);
      tree def_rhs = gimple_assign_rhs1 (def_stmt);
      if (!reverse
	  && !storage_order_barrier_p (lhs)
	  && known_size_p (maxsize2)
	  && known_eq (maxsize2, size2)
	  && adjust_offsets_for_equal_base_address (base, &offset,
						    base2, &offset2))
	{
	  if (data->partial_defs.is_empty ()
	      && known_subrange_p (offset, maxsize, offset2, size2)
	      /* ??? We can't handle bitfield precision extracts without
		 either using an alternate type for the BIT_FIELD_REF and
		 then doing a conversion or possibly adjusting the offset
		 according to endianness.  */
	      && (! INTEGRAL_TYPE_P (vr->type)
		  || known_eq (ref->size, TYPE_PRECISION (vr->type)))
	      && multiple_p (ref->size, BITS_PER_UNIT))
	    {
	      tree val = NULL_TREE;
	      if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
		  || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
		{
		  gimple_match_op op (gimple_match_cond::UNCOND,
				      BIT_FIELD_REF, vr->type,
				      SSA_VAL (def_rhs),
				      bitsize_int (ref->size),
				      bitsize_int (offset - offset2));
		  val = vn_nary_build_or_lookup (&op);
		}
	      else if (known_eq (ref->size, size2))
		{
		  gimple_match_op op (gimple_match_cond::UNCOND,
				      VIEW_CONVERT_EXPR, vr->type,
				      SSA_VAL (def_rhs));
		  val = vn_nary_build_or_lookup (&op);
		}
	      if (val
		  && (TREE_CODE (val) != SSA_NAME
		      || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
		return data->finish (get_alias_set (lhs), val);
	    }
	  else if (maxsize.is_constant (&maxsizei)
		   && offset.is_constant (&offseti)
		   && offset2.is_constant (&offset2i)
		   && size2.is_constant (&size2i)
		   && ranges_known_overlap_p (offset, maxsize, offset2, size2))
	    {
	      pd_data pd;
	      pd.rhs = SSA_VAL (def_rhs);
	      pd.offset = offset2i - offseti;
	      pd.size = size2i;
	      return data->push_partial_def (pd, get_alias_set (lhs), maxsizei);
	    }
	}
    }
  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (data->vn_walk_kind == VN_WALKREWRITE
	   && gimple_assign_single_p (def_stmt)
	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      tree base2;
      int i, j, k;
      auto_vec<vn_reference_op_s> rhs;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
	return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      if (!lhs_ref.max_size_known_p ()
	  || (base != base2
	      && (TREE_CODE (base) != MEM_REF
		  || TREE_CODE (base2) != MEM_REF
		  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
		  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
					  TREE_OPERAND (base2, 1))))
	  || !stmt_kills_ref_p (def_stmt, ref))
	return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
	 contains valueized operands for the lhs.  */
      i = vr->operands.length () - 1;
      j = lhs_ops.length () - 1;
      while (j >= 0 && i >= 0
	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
	{
	  i--;
	  j--;
	}

      /* ??? The innermost op should always be a MEM_REF and we already
	 checked that the assignment to the lhs kills vr.  Thus for
	 aggregate copies using char[] types the vn_reference_op_eq
	 may fail when comparing types for compatibility.  But we really
	 don't care here - further lookups with the rewritten operands
	 will simply fail if we messed up types too badly.  */
      poly_int64 extra_off = 0;
      if (j == 0 && i >= 0
	  && lhs_ops[0].opcode == MEM_REF
	  && maybe_ne (lhs_ops[0].off, -1))
	{
	  if (known_eq (lhs_ops[0].off, vr->operands[i].off))
	    i--, j--;
	  else if (vr->operands[i].opcode == MEM_REF
		   && maybe_ne (vr->operands[i].off, -1))
	    {
	      extra_off = vr->operands[i].off - lhs_ops[0].off;
	      i--, j--;
	    }
	}

      /* i now points to the first additional op.
	 ??? LHS may not be completely contained in VR, one or more
	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
	 try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
	return (void *)-1;

      /* Punt if the additional ops contain a storage order barrier.  */
      for (k = i; k >= 0; k--)
	{
	  vro = &vr->operands[k];
	  if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
	    return (void *)-1;
	}

      /* Now re-write REF to be based on the rhs of the assignment.  */
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      copy_reference_ops_from_ref (rhs1, &rhs);

      /* Apply an extra offset to the inner MEM_REF of the RHS.  */
      if (maybe_ne (extra_off, 0))
	{
	  if (rhs.length () < 2)
	    return (void *)-1;
	  int ix = rhs.length () - 2;
	  if (rhs[ix].opcode != MEM_REF
	      || known_eq (rhs[ix].off, -1))
	    return (void *)-1;
	  rhs[ix].off += extra_off;
	  rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
					 build_int_cst (TREE_TYPE (rhs[ix].op0),
							extra_off));
	}

      /* Save the operands since we need to use the original ones for
	 the hash entry we use.  */
      if (!data->saved_operands.exists ())
	data->saved_operands = vr->operands.copy ();

      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      vec<vn_reference_op_s> old = vr->operands;
      if (i + 1 + rhs.length () > vr->operands.length ())
	vr->operands.safe_grow (i + 1 + rhs.length ());
      else
	vr->operands.truncate (i + 1 + rhs.length ());
      FOR_EACH_VEC_ELT (rhs, j, vro)
	vr->operands[i + 1 + j] = *vro;
      vr->operands = valueize_refs (vr->operands);
      if (old == shared_lookup_references)
	shared_lookup_references = vr->operands;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
	{
	  if (data->partial_defs.is_empty ())
	    return data->finish (get_alias_set (lhs), val);
	  /* This is the only interesting case for partial-def handling
	     coming from targets that like to gimplify init-ctors as
	     aggregate copies from constant data like aarch64 for
	     PR83518.  */
	  if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
	    {
	      pd_data pd;
	      pd.rhs = val;
	      pd.offset = 0;
	      pd.size = maxsizei;
	      return data->push_partial_def (pd, get_alias_set (lhs),
					     maxsizei);
	    }
	}

      /* Continuing with partial defs isn't easily possible here, we
	 have to find a full def from further lookups from here.  Probably
	 not worth the special-casing everywhere.  */
      if (!data->partial_defs.is_empty ())
	return (void *)-1;

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, get_alias_set (rhs1),
					  vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (maybe_ne (ref->size, r.size))
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      data->last_vuse_ptr = NULL;
      /* Invalidate the original access path since it now contains
	 the wrong base.  */
      data->orig_ref.ref = NULL_TREE;
      /* Use the alias-set of this LHS for recording an eventual result.  */
      if (data->first_set == -2)
	data->first_set = get_alias_set (lhs);

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }
  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (data->vn_walk_kind == VN_WALKREWRITE
	   && is_gimple_reg_type (vr->type)
	   /* ??? Handle BCOPY as well.  */
	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK))
	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
	   && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
	       || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
		   && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
				       &copy_size)))
	   /* Handling this is more complicated, give up for now.  */
	   && data->partial_defs.is_empty ())
    {
      tree lhs, rhs;
      ao_ref r;
      poly_int64 rhs_offset, lhs_offset;
      vn_reference_op_s op;
      poly_uint64 mem_offset;
      poly_int64 at, byte_maxsize;

      /* Only handle non-variable, addressable refs.  */
      if (maybe_ne (ref->size, maxsize)
	  || !multiple_p (offset, BITS_PER_UNIT, &at)
	  || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
	return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
	{
	  lhs = vn_valueize (lhs);
	  if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
	      if (gimple_assign_single_p (def_stmt)
		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
		lhs = gimple_assign_rhs1 (def_stmt);
	    }
	}
      if (TREE_CODE (lhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
						    &lhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
	    {
	      lhs = TREE_OPERAND (tem, 0);
	      if (TREE_CODE (lhs) == SSA_NAME)
		lhs = vn_valueize (lhs);
	      lhs_offset += mem_offset;
	    }
	  else if (DECL_P (tem))
	    lhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (lhs) != SSA_NAME
	  && TREE_CODE (lhs) != ADDR_EXPR)
	return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = vn_valueize (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
						    &rhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
	    {
	      rhs = TREE_OPERAND (tem, 0);
	      rhs_offset += mem_offset;
	    }
	  else if (DECL_P (tem)
		   || TREE_CODE (tem) == STRING_CST)
	    rhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = SSA_VAL (rhs);
      else if (TREE_CODE (rhs) != ADDR_EXPR)
	return (void *)-1;

      /* The bases of the destination and the references have to agree.  */
      if (TREE_CODE (base) == MEM_REF)
	{
	  if (TREE_OPERAND (base, 0) != lhs
	      || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
	    return (void *) -1;
	  at += mem_offset;
	}
      else if (!DECL_P (base)
	       || TREE_CODE (lhs) != ADDR_EXPR
	       || TREE_OPERAND (lhs, 0) != base)
	return (void *)-1;

      /* If the access is completely outside of the memcpy destination
	 area there is no aliasing.  */
      if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
	return NULL;
      /* And the access has to be contained within the memcpy destination.  */
      if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
	return (void *)-1;

      /* Save the operands since we need to use the original ones for
	 the hash entry we use.  */
      if (!data->saved_operands.exists ())
	data->saved_operands = vr->operands.copy ();

      /* Make room for 2 operands in the new reference.  */
      if (vr->operands.length () < 2)
	{
	  vec<vn_reference_op_s> old = vr->operands;
	  vr->operands.safe_grow_cleared (2);
	  if (old == shared_lookup_references)
	    shared_lookup_references = vr->operands;
	}
      else
	vr->operands.truncate (2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      vr->operands[0] = op;
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      vr->operands[1] = op;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
	return data->finish (0, val);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, 0, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (maybe_ne (ref->size, r.size))
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      data->last_vuse_ptr = NULL;
      /* Invalidate the original access path since it now contains
	 the wrong base.  */
      data->orig_ref.ref = NULL_TREE;
      /* Use the alias-set of this stmt for recording an eventual result.  */
      if (data->first_set == -2)
	data->first_set = 0;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
/* Return a reference op vector from OP that can be used for
   vn_reference_lookup_pieces.  The caller is responsible for releasing
   the vector.  */

vec<vn_reference_op_s>
vn_reference_operands_for_lookup (tree op)
{
  bool valueized;
  return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
}
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length ());
  memcpy (shared_lookup_references.address (),
	  operands.address (),
	  sizeof (vn_reference_op_s)
	  * operands.length ());
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      unsigned limit = param_sccvn_max_alias_queries_per_access;
      vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true);
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
	*vnresult
	  = (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, true,
						    vn_reference_lookup_2,
						    vn_reference_lookup_3,
						    vuse_valueize, limit, &data);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
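/* Illustrative use, not from the original sources: a caller such as PRE
   phi-translation could look up a translated reference roughly as

     vec<vn_reference_op_s> ops = vn_reference_operands_for_lookup (expr);
     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (vuse, set, type, ops, &res,
					    VN_WALK);
     ops.release ();

   where a non-NULL VAL means an equivalent reference was found with an
   unclobbered VUSE chain up to the lookup point.  */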
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  When TBAA_P is false assume
   we are looking up a store and treat it as having alias-set zero.
   *LAST_VUSE_PTR will be updated with the VUSE the value lookup
   succeeded with.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
		     vn_reference_t *vnresult, bool tbaa_p, tree *last_vuse_ptr)
{
  vec<vn_reference_op_s> operands;
  struct vn_reference_s vr1;
  tree cst;
  bool valuezied_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      unsigned limit = param_sccvn_max_alias_queries_per_access;
      /* Make sure to use a valueized reference if we valueized anything.
         Otherwise preserve the full reference for advanced TBAA.  */
      if (!valuezied_anything
	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
					     vr1.operands))
	ao_ref_init (&r, op);
      vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
			    last_vuse_ptr, kind, tbaa_p);
      wvnresult
	= (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p,
						  vn_reference_lookup_2,
						  vn_reference_lookup_3,
						  vuse_valueize, limit, &data);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
      if (wvnresult)
	{
	  if (vnresult)
	    *vnresult = wvnresult;
	  return wvnresult->result;
	}

      return NULL_TREE;
    }

  if (last_vuse_ptr)
    *last_vuse_ptr = vr1.vuse;
  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Lookup CALL in the current hash table and return the entry in
   *VNRESULT if found.  Populates *VR for the hashtable lookup.  */

void
vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
			  vn_reference_t vr)
{
  if (vnresult)
    *vnresult = NULL;

  tree vuse = gimple_vuse (call);

  vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr->operands = valueize_shared_reference_ops_from_call (call);
  vr->type = gimple_expr_type (call);
  vr->set = 0;
  vr->hashcode = vn_reference_compute_hash (vr);
  vn_reference_lookup_1 (vr, vnresult);
}
/* Insert OP into the current hash table with a value number of RESULT.  */

static void
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
{
  vn_reference_s **slot;
  vn_reference_t vr1;
  bool tem;

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse_ssa_val (vuse);
  vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  vr1->result_vdef = vdef;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
						      INSERT);

  /* Because IL walking on reference lookup can end up visiting
     a def that is only to be visited later in iteration order
     when we are about to make an irreducible region reducible
     the def can be effectively processed and its ref being inserted
     by vn_reference_lookup_3 already.  So we cannot assert (!*slot)
     but save a lookup if we deal with already inserted refs here.  */
  if (*slot)
    {
      /* We cannot assert that we have the same value either because
         when disentangling an irreducible region we may end up visiting
	 a use before the corresponding def.  That's a missed optimization
	 only though.  See gcc.dg/tree-ssa/pr87126.c for example.  */
      if (dump_file && (dump_flags & TDF_DETAILS)
	  && !operand_equal_p ((*slot)->result, vr1->result, 0))
	{
	  fprintf (dump_file, "Keeping old value ");
	  print_generic_expr (dump_file, (*slot)->result);
	  fprintf (dump_file, " because of collision\n");
	}
      free_reference (vr1);
      obstack_free (&vn_tables_obstack, vr1);
      return;
    }

  *slot = vr1;
  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    tree result, unsigned int value_id)
{
  vn_reference_s **slot;
  vn_reference_t vr1;

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  vr1->value_id = value_id;
  vr1->vuse = vuse_ssa_val (vuse);
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
						      INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);

  *slot = vr1;
  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

static hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  inchash::hash hstate;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (((vno1->length == 2
	&& commutative_tree_code (vno1->opcode))
       || (vno1->length == 3
	   && commutative_ternary_tree_code (vno1->opcode)))
      && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    std::swap (vno1->op[0], vno1->op[1]);
  else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    {
      std::swap (vno1->op[0], vno1->op[1]);
      vno1->opcode = swap_tree_comparison (vno1->opcode);
    }
  hstate.add_int (vno1->opcode);
  for (i = 0; i < vno1->length; ++i)
    inchash::add_expr (vno1->op[i], hstate);

  return hstate.end ();
}
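/* Note the canonicalization above makes commutative expressions hash
   identically regardless of the written operand order; e.g. (for
   illustration only) a_1 + b_2 and b_2 + a_1 receive one hash, and a
   comparison like b_2 < a_1 may be hashed as the swapped form
   a_1 > b_2 when tree_swap_operands_p prefers that order.  */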
/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

bool
vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
{
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->length != vno2->length)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  /* BIT_INSERT_EXPR has an implicit operand as the type precision
     of op1.  Need to check to make sure they are the same.  */
  if (vno1->opcode == BIT_INSERT_EXPR
      && TREE_CODE (vno1->op[1]) == INTEGER_CST
      && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
	 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
    return false;

  return true;
}
3643 init_vn_nary_op_from_pieces (vn_nary_op_t vno
, unsigned int length
,
3644 enum tree_code code
, tree type
, tree
*ops
)
3647 vno
->length
= length
;
3649 memcpy (&vno
->op
[0], ops
, sizeof (tree
) * length
);
/* Return the number of operands for a vn_nary ops structure from STMT.  */

static unsigned int
vn_nary_length_from_stmt (gimple *stmt)
{
  switch (gimple_assign_rhs_code (stmt))
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case BIT_FIELD_REF:
      return 3;

    case CONSTRUCTOR:
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

    default:
      return gimple_num_ops (stmt) - 1;
    }
}
/* Initialize VNO from STMT.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = gimple_expr_type (stmt);
  switch (vno->opcode)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      vno->length = 1;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      break;

    case BIT_FIELD_REF:
      vno->length = 3;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
      break;

    case CONSTRUCTOR:
      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
      break;

    default:
      gcc_checking_assert (!gimple_assign_single_p (stmt));
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = gimple_op (stmt, i + 1);
    }
}
/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  vn_nary_op_s **slot;

  if (vnresult)
    *vnresult = NULL;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = *slot;
  return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
}
/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
				  sizeof_vn_nary_op (length));
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}
/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->predicated_values = 0;
  vno1->u.result = result;

  return vno1;
}
/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
			bool compute_hash)
{
  vn_nary_op_s **slot;

  if (compute_hash)
    {
      vno->hashcode = vn_nary_op_compute_hash (vno);
      gcc_assert (! vno->predicated_values
		  || (! vno->u.values->next
		      && vno->u.values->n == 1));
    }

  slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  vno->unwind_to = *slot;
  if (*slot)
    {
      /* Prefer non-predicated values.
         ??? Only if those are constant, otherwise, with constant predicated
	 value, turn them into predicated values with entry-block validity
	 (??? but we always find the first valid result currently).  */
      if ((*slot)->predicated_values
	  && ! vno->predicated_values)
	{
	  /* ??? We cannot remove *slot from the unwind stack list.
	     For the moment we deal with this by skipping not found
	     entries but this isn't ideal ...  */
	  *slot = vno;
	  /* ??? Maintain a stack of states we can unwind in
	     vn_nary_op_s?  But how far do we unwind?  In reality
	     we need to push change records somewhere...  Or not
	     unwind vn_nary_op_s and linking them but instead
	     unwind the results "list", linking that, which also
	     doesn't move on hashtable resize.  */
	  /* We can also have a ->unwind_to recording *slot there.
	     That way we can make u.values a fixed size array with
	     recording the number of entries but of course we then
	     have always N copies for each unwind_to-state.  Or we
	     make sure to only ever append and each unwinding will
	     pop off one entry (but how to deal with predicated
	     replaced with non-predicated here?)  */
	  vno->next = last_inserted_nary;
	  last_inserted_nary = vno;
	  return vno;
	}
      else if (vno->predicated_values
	       && ! (*slot)->predicated_values)
	return *slot;
      else if (vno->predicated_values
	       && (*slot)->predicated_values)
	{
	  /* ??? Factor this all into an insert_single_predicated_value
	     routine.  */
	  gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
	  basic_block vno_bb
	    = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
	  vn_pval *nval = vno->u.values;
	  vn_pval **next = &vno->u.values;
	  bool found = false;
	  for (vn_pval *val = (*slot)->u.values; val; val = val->next)
	    {
	      if (expressions_equal_p (val->result, vno->u.values->result))
		{
		  found = true;
		  for (unsigned i = 0; i < val->n; ++i)
		    {
		      basic_block val_bb
			= BASIC_BLOCK_FOR_FN (cfun,
					      val->valid_dominated_by_p[i]);
		      if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
			/* Value registered with more generic predicate.  */
			return *slot;
		      else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
			/* Shouldn't happen, we insert in RPO order.  */
			gcc_unreachable ();
		    }
		  /* Append value.  */
		  *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
						     sizeof (vn_pval)
						     + val->n * sizeof (int));
		  (*next)->next = NULL;
		  (*next)->result = val->result;
		  (*next)->n = val->n + 1;
		  memcpy ((*next)->valid_dominated_by_p,
			  val->valid_dominated_by_p,
			  val->n * sizeof (int));
		  (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
		  next = &(*next)->next;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Appending predicate to value.\n");
		  continue;
		}
	      /* Copy other predicated values.  */
	      *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
						 sizeof (vn_pval)
						 + (val->n-1) * sizeof (int));
	      memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
	      (*next)->next = NULL;
	      next = &(*next)->next;
	    }
	  if (!found)
	    *next = nval;

	  *slot = vno;
	  vno->next = last_inserted_nary;
	  last_inserted_nary = vno;
	  return vno;
	}

      /* While we do not want to insert things twice it's awkward to
	 avoid it in the case where visit_nary_op pattern-matches stuff
	 and ends up simplifying the replacement to itself.  We then
	 get two inserts, one from visit_nary_op and one from
	 vn_nary_build_or_lookup.
	 So allow inserts with the same value number.  */
      if ((*slot)->u.result == vno->u.result)
	return *slot;
    }

  /* ??? There's also optimistic vs. previous committed state merging
     that is problematic for the case of unwinding.  */

  /* ??? We should return NULL if we do not use 'vno' and have the
     caller release it.  */
  gcc_assert (!*slot);

  *slot = vno;
  vno->next = last_inserted_nary;
  last_inserted_nary = vno;
  return vno;
}
/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops,
			  tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_insert_into (vno1, valid_info->nary, true);
}
static vn_nary_op_t
vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
				     tree type, tree *ops,
				     tree result, unsigned int value_id,
				     edge pred_e)
{
  /* ??? Currently tracking BBs.  */
  if (! single_pred_p (pred_e->dest))
    {
      /* Never record for backedges.  */
      if (pred_e->flags & EDGE_DFS_BACK)
	return NULL;
      edge_iterator ei;
      edge e;
      int cnt = 0;
      /* Ignore backedges.  */
      FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
	if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
	  cnt++;
      if (cnt != 1)
	return NULL;
    }
  if (dump_file && (dump_flags & TDF_DETAILS)
      /* ??? Fix dumping, but currently we only get comparisons.  */
      && TREE_CODE_CLASS (code) == tcc_comparison)
    {
      fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
	       pred_e->dest->index);
      print_generic_expr (dump_file, ops[0], TDF_SLIM);
      fprintf (dump_file, " %s ", get_tree_code_name (code));
      print_generic_expr (dump_file, ops[1], TDF_SLIM);
      fprintf (dump_file, " == %s\n",
	       integer_zerop (result) ? "false" : "true");
    }
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  vno1->predicated_values = 1;
  vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
					      sizeof (vn_pval));
  vno1->u.values->next = NULL;
  vno1->u.values->result = result;
  vno1->u.values->n = 1;
  vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
  return vn_nary_op_insert_into (vno1, valid_info->nary, true);
}
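/* Illustration with a hypothetical CFG, not from the sources: for

     if (a_1 == 0)	// condition ending bb2
       ...		// bb3, reached only via the true edge

   a caller can record the nary "a_1 == 0" with result true valid in
   bb3 (and, via a second call, result false valid on the false
   destination), so later lookups dominated by either destination can
   fold uses of the predicate.  */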
static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2);

static tree
vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
{
  if (! vno->predicated_values)
    return vno->u.result;
  for (vn_pval *val = vno->u.values; val; val = val->next)
    for (unsigned i = 0; i < val->n; ++i)
      if (dominated_by_p_w_unex (bb,
				 BASIC_BLOCK_FOR_FN
				   (cfun, val->valid_dominated_by_p[i])))
	return val->result;
  return NULL_TREE;
}
/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

static vn_nary_op_t
vn_nary_op_insert_stmt (gimple *stmt, tree result)
{
  vn_nary_op_t vno1
    = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
			result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_insert_into (vno1, valid_info->nary, true);
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
			? vp1->block->index
			: EDGE_COUNT (vp1->block->preds));
  tree phi1op;
  tree type;
  edge e;
  edge_iterator ei;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = vp1->type;
  hstate.merge_hash (vn_hash_type (type));

  FOR_EACH_EDGE (e, ei, vp1->block->preds)
    {
      /* Don't hash backedge values, they need to be handled as VN_TOP
         for optimistic value-numbering.  */
      if (e->flags & EDGE_DFS_BACK)
	continue;

      phi1op = vp1->phiargs[e->dest_idx];
      if (phi1op == VN_TOP)
	continue;
      inchash::add_expr (phi1op, hstate);
    }

  return hstate.end ();
}
/* Return true if COND1 and COND2 represent the same condition, set
   *INVERTED_P if one needs to be inverted to make it the same as
   the other.  */

static bool
cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
{
  enum tree_code code1 = gimple_cond_code (cond1);
  enum tree_code code2 = gimple_cond_code (cond2);

  *inverted_p = false;
  if (code1 == code2)
    ;
  else if (code1 == swap_tree_comparison (code2))
    std::swap (lhs2, rhs2);
  else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
    *inverted_p = true;
  else if (code1 == invert_tree_comparison
	     (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
    {
      std::swap (lhs2, rhs2);
      *inverted_p = true;
    }
  else
    return false;

  return ((expressions_equal_p (lhs1, lhs2)
	   && expressions_equal_p (rhs1, rhs2))
	  || (commutative_tree_code (code1)
	      && expressions_equal_p (lhs1, rhs2)
	      && expressions_equal_p (rhs1, lhs2)));
}
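/* For illustration, not from the sources: a_1 < b_2 matches b_2 > a_1
   via the swapped comparison, and matches a_1 >= b_2 with *INVERTED_P
   set; for floating point operands the inverted forms are only used
   when the inversion is valid in the presence of NaNs, as checked via
   HONOR_NANS above.  */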
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
{
  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block != vp2->block)
    {
      if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
        return false;

      switch (EDGE_COUNT (vp1->block->preds))
        {
        case 1:
          /* Single-arg PHIs are just copies.  */
          break;

        case 2:
          {
            /* Rule out backedges into the PHI.  */
            if (vp1->block->loop_father->header == vp1->block
                || vp2->block->loop_father->header == vp2->block)
              return false;

            /* If the PHI nodes do not have compatible types
               they are not the same.  */
            if (!types_compatible_p (vp1->type, vp2->type))
              return false;

            basic_block idom1
              = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
            basic_block idom2
              = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
            /* If the immediate dominators end in switch stmts multiple
               values may end up in the same PHI arg via intermediate
               CFG merges.  */
            if (EDGE_COUNT (idom1->succs) != 2
                || EDGE_COUNT (idom2->succs) != 2)
              return false;

            /* Verify the controlling stmt is the same.  */
            gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
            gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
            if (! last1 || ! last2)
              return false;
            bool inverted_p;
            if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
                                      last2, vp2->cclhs, vp2->ccrhs,
                                      &inverted_p))
              return false;

            /* Get at true/false controlled edges into the PHI.  */
            edge te1, te2, fe1, fe2;
            if (! extract_true_false_controlled_edges (idom1, vp1->block,
                                                       &te1, &fe1)
                || ! extract_true_false_controlled_edges (idom2, vp2->block,
                                                          &te2, &fe2))
              return false;

            /* Swap edges if the second condition is the inverted of the
               first.  */
            if (inverted_p)
              std::swap (te2, fe2);

            /* ???  Handle VN_TOP specially.  */
            if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
                                       vp2->phiargs[te2->dest_idx])
                || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
                                          vp2->phiargs[fe2->dest_idx]))
              return false;

            return true;
          }

        default:
          return false;
        }
    }

  /* If the PHI nodes do not have compatible types
     they are not the same.  */
  if (!types_compatible_p (vp1->type, vp2->type))
    return false;

  /* Any phi in the same block will have its arguments in the
     same edge order, because of how we store phi nodes.  */
  for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
    {
      tree phi1op = vp1->phiargs[i];
      tree phi2op = vp2->phiargs[i];
      if (phi1op == VN_TOP || phi2op == VN_TOP)
        continue;
      if (!expressions_equal_p (phi1op, phi2op))
        return false;
    }

  return true;
}

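/* Illustrative example (not from the original sources): the
   two-predecessor case above allows treating

     bb5: # x_3 = PHI <a_1(3), b_2(4)>
     bb8: # y_4 = PHI <b_2(6), a_1(7)>

   as equal when both join blocks are controlled by equivalent
   (possibly inverted) conditions, by matching the arguments arriving
   via the true- and false-controlled edges instead of by predecessor
   order.  */
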
/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple *phi, bool backedges_varying_p)
{
  vn_phi_s **slot;
  struct vn_phi_s *vp1;
  edge e;
  edge_iterator ei;

  vp1 = XALLOCAVAR (struct vn_phi_s,
                    sizeof (struct vn_phi_s)
                    + (gimple_phi_num_args (phi) - 1) * sizeof (tree));

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
          && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
        def = SSA_VAL (def);
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
  if (EDGE_COUNT (idom1->succs) == 2)
    if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
      {
        /* ???  We want to use SSA_VAL here.  But possibly not
           allow VN_TOP.  */
        vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
        vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
      }
  vp1->hashcode = vn_phi_compute_hash (vp1);
  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return (*slot)->result;
}

/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
{
  vn_phi_s **slot;
  vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
                                           sizeof (vn_phi_s)
                                           + ((gimple_phi_num_args (phi) - 1)
                                              * sizeof (tree)));
  edge e;
  edge_iterator ei;

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
          && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
        def = SSA_VAL (def);
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
  if (EDGE_COUNT (idom1->succs) == 2)
    if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
      {
        /* ???  We want to use SSA_VAL here.  But possibly not
           allow VN_TOP.  */
        vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
        vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
      }
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
  gcc_assert (!*slot);

  *slot = vp1;
  vp1->next = last_inserted_phi;
  last_inserted_phi = vp1;
  return vp1;
}

/* Return true if BB1 is dominated by BB2 taking into account edges
   that are not executable.  */

static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
{
  edge_iterator ei;
  edge e;

  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    return true;

  /* Before iterating we'd like to know if there exists an
     (executable) path from bb2 to bb1 at all, if not we can
     directly return false.  For now simply iterate once.  */

  /* Iterate to the single executable bb1 predecessor.  */
  if (EDGE_COUNT (bb1->preds) > 1)
    {
      edge prede = NULL;
      FOR_EACH_EDGE (e, ei, bb1->preds)
        if (e->flags & EDGE_EXECUTABLE)
          {
            if (prede)
              {
                prede = NULL;
                break;
              }
            prede = e;
          }
      if (! prede)
        return false;
      bb1 = prede->src;

      /* Re-do the dominance check with changed bb1.  */
      if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
        return true;
    }

  /* Iterate to the single executable bb2 successor.  */
  edge succe = NULL;
  FOR_EACH_EDGE (e, ei, bb2->succs)
    if (e->flags & EDGE_EXECUTABLE)
      {
        if (succe)
          {
            succe = NULL;
            break;
          }
        succe = e;
      }
  if (succe)
    {
      /* Verify the reached block is only reached through succe.
         If there is only one edge we can spare us the dominator
         check and iterate directly.  */
      if (EDGE_COUNT (succe->dest->preds) > 1)
        {
          FOR_EACH_EDGE (e, ei, succe->dest->preds)
            if (e != succe
                && (e->flags & EDGE_EXECUTABLE))
              {
                succe = NULL;
                break;
              }
        }
      if (succe)
        {
          bb2 = succe->dest;

          /* Re-do the dominance check with changed bb2.  */
          if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
            return true;
        }
    }

  /* We could now iterate updating bb1 / bb2.  */
  return false;
}

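/* Illustrative example (not from the original sources): with edges
   marked not executable by value numbering,

     bb2 -> bb3 (executable)     -> bb1
     bb2 -> bb4 (not executable) -> bb1

   bb2 does not dominate bb1 in the raw CFG, but following the single
   executable predecessor of bb1 (or the single executable successor of
   bb2) re-establishes the dominance relation.  */
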
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  vn_ssa_aux_t from_info = VN_INFO (from);
  tree currval = from_info->valnum; // SSA_VAL (from)
  poly_int64 toff, coff;

  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     that.
     ???  Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
  if (to == VN_TOP)
    {
      /* ???  When iterating and visiting PHI <undef, backedge-value>
         for the first time we rightfully get VN_TOP and we need to
         preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
         With SCCVN we were simply lucky we iterated the other PHI
         cycles first and thus visited the backedge-value DEF.  */
      if (currval == VN_TOP)
        return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Forcing value number to varying on "
                 "receiving VN_TOP\n");
      to = from;
    }

  gcc_checking_assert (to != NULL_TREE
                       && ((TREE_CODE (to) == SSA_NAME
                            && (to == from || SSA_VAL (to) == to))
                           || is_gimple_min_invariant (to)));

  if (currval == from)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Not changing value number of ");
          print_generic_expr (dump_file, from);
          fprintf (dump_file, " from VARYING to ");
          print_generic_expr (dump_file, to);
          fprintf (dump_file, "\n");
        }
      return false;
    }
  bool curr_invariant = is_gimple_min_invariant (currval);
  bool curr_undefined = (TREE_CODE (currval) == SSA_NAME
                         && ssa_undefined_value_p (currval, false));
  if (currval != VN_TOP
      && !curr_invariant
      && !curr_undefined
      && is_gimple_min_invariant (to))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Forcing VARYING instead of changing "
                   "value number of ");
          print_generic_expr (dump_file, from);
          fprintf (dump_file, " from ");
          print_generic_expr (dump_file, currval);
          fprintf (dump_file, " (non-constant) to ");
          print_generic_expr (dump_file, to);
          fprintf (dump_file, " (constant)\n");
        }
      to = from;
    }
  else if (currval != VN_TOP
           && !curr_undefined
           && TREE_CODE (to) == SSA_NAME
           && ssa_undefined_value_p (to, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Forcing VARYING instead of changing "
                   "value number of ");
          print_generic_expr (dump_file, from);
          fprintf (dump_file, " from ");
          print_generic_expr (dump_file, currval);
          fprintf (dump_file, " (non-undefined) to ");
          print_generic_expr (dump_file, to);
          fprintf (dump_file, " (undefined)\n");
        }
      to = from;
    }
  else if (TREE_CODE (to) == SSA_NAME
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to);
    }

  if (currval != to
      && !operand_equal_p (currval, to, 0)
      /* Different undefined SSA names are not actually different.  See
         PR82320 for a testcase where we'd otherwise not terminate iteration.  */
      && !(TREE_CODE (currval) == SSA_NAME
           && TREE_CODE (to) == SSA_NAME
           && ssa_undefined_value_p (currval, false)
           && ssa_undefined_value_p (to, false))
      /* ???  For addresses involving volatile objects or types operand_equal_p
         does not reliably detect ADDR_EXPRs as equal.  We know we are only
         getting invariant gimple addresses here, so can use
         get_addr_base_and_unit_offset to do this comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
           && TREE_CODE (to) == ADDR_EXPR
           && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
               == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
           && known_eq (coff, toff)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      from_info->valnum = to;
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}

/* Value number all definitions in STMT to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple *stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Valueize.  */
  rhs = SSA_VAL (rhs);

  return set_ssa_val_to (lhs, rhs);
}

/* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
   is the same.  */

static tree
valueized_wider_op (tree wide_type, tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    op = vn_valueize (op);

  /* Either we have the op widened available.  */
  tree ops[3] = {};
  ops[0] = op;
  tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
                                       wide_type, ops, NULL);
  if (tem)
    return tem;

  /* Or the op is truncated from some existing value.  */
  if (TREE_CODE (op) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (op);
      if (is_gimple_assign (def)
          && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
        {
          tem = gimple_assign_rhs1 (def);
          if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
            {
              if (TREE_CODE (tem) == SSA_NAME)
                tem = vn_valueize (tem);
              return tem;
            }
        }
    }

  /* For constants simply extend it.  */
  if (TREE_CODE (op) == INTEGER_CST)
    return wide_int_to_tree (wide_type, wi::to_wide (op));

  return NULL_TREE;
}

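/* Illustrative example (not from the original sources): asking for the
   value of a short s_1 in type int either finds an existing widening
   i_2 = (int) s_1 in the hash tables, strips a recorded truncation
   s_1 = (short) i_3 when the source type matches, or extends an
   INTEGER_CST directly.  */
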
/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gassign *stmt)
{
  vn_nary_op_t vnresult;
  tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
  if (! result && vnresult)
    result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
  if (result)
    return set_ssa_val_to (lhs, result);

  /* Do some special pattern matching for redundancies of operations
     in different types.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  switch (code)
    {
    CASE_CONVERT:
      /* Match arithmetic done in a different type where we can easily
         substitute the result from some earlier sign-changed or widened
         operation.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (rhs1) == SSA_NAME
          /* We only handle sign-changes, zero-extension -> & mask or
             sign-extension if we know the inner operation doesn't
             overflow.  */
          && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
                || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                    && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
               && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
              || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
        {
          gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
          if (def
              && (gimple_assign_rhs_code (def) == PLUS_EXPR
                  || gimple_assign_rhs_code (def) == MINUS_EXPR
                  || gimple_assign_rhs_code (def) == MULT_EXPR))
            {
              tree ops[3] = {};
              /* Either we have the op widened available.  */
              ops[0] = valueized_wider_op (type,
                                           gimple_assign_rhs1 (def));
              if (ops[0])
                ops[1] = valueized_wider_op (type,
                                             gimple_assign_rhs2 (def));
              if (ops[0] && ops[1])
                {
                  ops[0] = vn_nary_op_lookup_pieces
                      (2, gimple_assign_rhs_code (def), type, ops, NULL);
                  /* We have wider operation available.  */
                  if (ops[0]
                      /* If the leader is a wrapping operation we can
                         insert it for code hoisting w/o introducing
                         undefined overflow.  If it is not it has to
                         be available.  See PR86554.  */
                      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
                          || (rpo_avail && vn_context_bb
                              && rpo_avail->eliminate_avail (vn_context_bb,
                                                             ops[0]))))
                    {
                      unsigned lhs_prec = TYPE_PRECISION (type);
                      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
                      if (lhs_prec == rhs_prec
                          || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
                        {
                          gimple_match_op match_op (gimple_match_cond::UNCOND,
                                                    NOP_EXPR, type, ops[0]);
                          result = vn_nary_build_or_lookup (&match_op);
                          if (result)
                            {
                              bool changed = set_ssa_val_to (lhs, result);
                              vn_nary_op_insert_stmt (stmt, result);
                              return changed;
                            }
                        }
                      else
                        {
                          tree mask = wide_int_to_tree
                            (type, wi::mask (rhs_prec, false, lhs_prec));
                          gimple_match_op match_op (gimple_match_cond::UNCOND,
                                                    BIT_AND_EXPR,
                                                    TREE_TYPE (lhs),
                                                    ops[0], mask);
                          result = vn_nary_build_or_lookup (&match_op);
                          if (result)
                            {
                              bool changed = set_ssa_val_to (lhs, result);
                              vn_nary_op_insert_stmt (stmt, result);
                              return changed;
                            }
                        }
                    }
                }
            }
        }
      break;
    default:
      break;
    }

  bool changed = set_ssa_val_to (lhs, lhs);
  vn_nary_op_insert_stmt (stmt, lhs);
  return changed;
}

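/* Illustrative example (not from the original sources): given

     s_3 = s_1 + s_2;     /* narrow unsigned arithmetic */
     i_4 = (int) s_3;

   when the widened operands (int) s_1 and (int) s_2 and their sum are
   already value-numbered, i_4 is valued as that wider sum, masked with
   wi::mask over the narrow precision for the zero-extension case.  */
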
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gcall *stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vdef = gimple_vdef (stmt);

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    lhs = NULL_TREE;

  vn_reference_lookup_call (stmt, &vnresult, &vr1);
  if (vnresult)
    {
      if (vnresult->result_vdef && vdef)
        changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
      else if (vdef)
        /* If the call was discovered to be pure or const reflect
           that as far as possible.  */
        changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));

      if (!vnresult->result && lhs)
        vnresult->result = lhs;

      if (vnresult->result && lhs)
        changed |= set_ssa_val_to (lhs, vnresult->result);
    }
  else
    {
      vn_reference_t vr2;
      vn_reference_s **slot;
      tree vdef_val = vdef;
      if (vdef)
        {
          /* If we value-numbered an indirect function's callee to
             one not clobbering memory, value-number its VDEF to its
             VUSE.  */
          tree fn = gimple_call_fn (stmt);
          if (fn && TREE_CODE (fn) == SSA_NAME)
            {
              fn = SSA_VAL (fn);
              if (TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
                      & (ECF_CONST | ECF_PURE)))
                vdef_val = vuse_ssa_val (gimple_vuse (stmt));
            }
          changed |= set_ssa_val_to (vdef, vdef_val);
        }
      if (lhs)
        changed |= set_ssa_val_to (lhs, lhs);
      vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
      vr2->vuse = vr1.vuse;
      /* As we are not walking the virtual operand chain we know the
         shared_lookup_references are still original so we can re-use
         them here.  */
      vr2->operands = vr1.operands.copy ();
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      vr2->result_vdef = vdef_val;
      vr2->value_id = 0;
      slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
                                                          INSERT);
      gcc_assert (!*slot);
      *slot = vr2;
      vr2->next = last_inserted_ref;
      last_inserted_ref = vr2;
    }

  return changed;
}

/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  result = vn_reference_lookup (op, gimple_vuse (stmt),
                                default_vn_walk_kind, NULL, true, &last_vuse);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
         of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
         So first simplify and lookup this expression to see if it
         is already available.  */
      gimple_match_op res_op (gimple_match_cond::UNCOND,
                              VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      result = vn_nary_build_or_lookup (&res_op);
      /* When building the conversion fails avoid inserting the reference
         again.  */
      if (!result)
        return set_ssa_val_to (lhs, lhs);
    }

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
    }

  return changed;
}

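/* Illustrative example (not from the original sources): for

     union { int i; float f; } u;
     u.i = i_1;
     x_2 = u.f;

   the load of u.f finds the store by matching offset and size only,
   and the resulting type mismatch is repaired by valuing x_2 as
   VIEW_CONVERT_EXPR <float> (i_1).  */
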
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  if (TREE_CODE (op) == SSA_NAME)
    op = SSA_VAL (op);

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
  if (vnresult
      && vnresult->result)
    {
      tree result = vnresult->result;
      gcc_checking_assert (TREE_CODE (result) != SSA_NAME
                           || result == SSA_VAL (result));
      resultsame = expressions_equal_p (result, op);
      if (resultsame)
        {
          /* If the TBAA state isn't compatible for downstream reads
             we cannot value-number the VDEFs the same.  */
          alias_set_type set = get_alias_set (lhs);
          if (vnresult->set != set
              && ! alias_set_subset_of (set, vnresult->set))
            resultsame = false;
        }
    }

  if (!resultsame)
    {
      /* Only perform the following when being called from PRE
         which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
        {
          assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
          vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
          if (vnresult)
            {
              VN_INFO (vdef)->visited = true;
              return set_ssa_val_to (vdef, vnresult->result_vdef);
            }
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "No store match\n");
          fprintf (dump_file, "Value numbering store ");
          print_generic_expr (dump_file, lhs);
          fprintf (dump_file, " to ");
          print_generic_expr (dump_file, op);
          fprintf (dump_file, "\n");
        }
      /* Have to set value numbers before insert, since insert is
         going to valueize the references in-place.  */
      if (vdef)
        changed |= set_ssa_val_to (vdef, vdef);

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
          || is_gimple_reg (op))
        vn_reference_insert (lhs, op, vdef, NULL);

      /* Only perform the following when being called from PRE
         which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
        {
          assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
          vn_reference_insert (assign, lhs, vuse, vdef);
        }
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
         number of the vuse it came from.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Store matched earlier value, "
                 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    }

  return changed;
}

/* Visit and value number PHI, return true if the value number
   changed.  When BACKEDGES_VARYING_P is true then assume all
   backedge values are varying.  When INSERTED is not NULL then
   this is just an ahead query for a possible iteration, set INSERTED
   to true if we'd insert into the hashtable.  */

static bool
visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
{
  tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
  tree backedge_val = NULL_TREE;
  bool seen_non_backedge = false;
  tree sameval_base = NULL_TREE;
  poly_int64 soff, doff;
  unsigned n_executable = 0;
  edge_iterator ei;
  edge e;

  /* TODO: We could check for this in initialization, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* We track whether a PHI was CSEd to, to avoid excessive iterations
     that would be necessary only because the PHI changed arguments
     but not value.  */
  if (!inserted)
    gimple_set_plf (phi, GF_PLF_1, false);

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    if (e->flags & EDGE_EXECUTABLE)
      {
        tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);

        ++n_executable;
        if (TREE_CODE (def) == SSA_NAME)
          {
            if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
              def = SSA_VAL (def);
            if (e->flags & EDGE_DFS_BACK)
              backedge_val = def;
          }
        if (!(e->flags & EDGE_DFS_BACK))
          seen_non_backedge = true;
        if (def == VN_TOP)
          ;
        /* Ignore undefined defs for sameval but record one.  */
        else if (TREE_CODE (def) == SSA_NAME
                 && ! virtual_operand_p (def)
                 && ssa_undefined_value_p (def, false))
          seen_undef = def;
        else if (sameval == VN_TOP)
          sameval = def;
        else if (!expressions_equal_p (def, sameval))
          {
            /* We know we're arriving only with invariant addresses here,
               try harder comparing them.  We can do some caching here
               which we cannot do in expressions_equal_p.  */
            if (TREE_CODE (def) == ADDR_EXPR
                && TREE_CODE (sameval) == ADDR_EXPR
                && sameval_base != (void *)-1)
              {
                if (!sameval_base)
                  sameval_base = get_addr_base_and_unit_offset
                                   (TREE_OPERAND (sameval, 0), &soff);
                if (!sameval_base)
                  sameval_base = (tree)(void *)-1;
                else if ((get_addr_base_and_unit_offset
                            (TREE_OPERAND (def, 0), &doff) == sameval_base)
                         && known_eq (soff, doff))
                  continue;
              }
            sameval = NULL_TREE;
            break;
          }
      }

  /* If the value we want to use is flowing over the backedge and we
     should take it as VARYING but it has a non-VARYING value drop to
     VARYING.
     If we value-number a virtual operand never value-number to the
     value from the backedge as that confuses the alias-walking code.
     See gcc.dg/torture/pr87176.c.  If the value is the same on a
     non-backedge everything is OK though.  */
  bool visited_p;
  if ((backedge_val
       && !seen_non_backedge
       && TREE_CODE (backedge_val) == SSA_NAME
       && sameval == backedge_val
       && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
           || SSA_VAL (backedge_val) != backedge_val))
      /* Do not value-number a virtual operand to sth not visited though
         given that allows us to escape a region in alias walking.  */
      || (sameval
          && TREE_CODE (sameval) == SSA_NAME
          && !SSA_NAME_IS_DEFAULT_DEF (sameval)
          && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
          && (SSA_VAL (sameval, &visited_p), !visited_p)))
    /* Note this just drops to VARYING without inserting the PHI into
       the hashes.  */
    result = PHI_RESULT (phi);
  /* If none of the edges was executable keep the value-number at VN_TOP,
     if only a single edge is executable use its value.  */
  else if (n_executable <= 1)
    result = seen_undef ? seen_undef : sameval;
  /* If we saw only undefined values and VN_TOP use one of the
     undefined values.  */
  else if (sameval == VN_TOP)
    result = seen_undef ? seen_undef : sameval;
  /* First see if it is equivalent to a phi node in this block.  We prefer
     this as it allows IV elimination - see PRs 66502 and 67167.  */
  else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
    {
      if (!inserted
          && TREE_CODE (result) == SSA_NAME
          && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
        {
          gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Marking CSEd to PHI node ");
              print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
                                 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }
        }
    }
  /* If all values are the same use that, unless we've seen undefined
     values as well and the value isn't constant.
     CCP/copyprop have the same restriction to not remove uninit warnings.  */
  else if (sameval
           && (! seen_undef || is_gimple_min_invariant (sameval)))
    result = sameval;
  else
    {
      result = PHI_RESULT (phi);
      /* Only insert PHIs that are varying, for constant value numbers
         we mess up equivalences otherwise as we are only comparing
         the immediate controlling predicates.  */
      vn_phi_insert (phi, result, backedges_varying_p);
      if (inserted)
        *inserted = true;
    }

  return set_ssa_val_to (PHI_RESULT (phi), result);
}

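/* Illustrative examples (not from the original sources):

     x_3 = PHI <a_1(2), a_1(3)>   is valued as a_1, and

     x_4 = PHI <a_1(2), b_5(4)>   where b_5 has not been visited yet
                                  (so its value is still VN_TOP)

   is optimistically valued as a_1, because VN_TOP is equivalent to
   everything.  */
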
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gassign *stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  mprts_hook = vn_lookup_simplify_result;
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
  mprts_hook = NULL;
  if (tem
      && (TREE_CODE (tem) == SSA_NAME
          || is_gimple_min_invariant (tem)))
    return tem;

  return NULL_TREE;
}

/* Visit and value number STMT, return true if the value number
   changed.  */

static bool
visit_stmt (gimple *stmt, bool backedges_varying_p = false)
{
  bool changed = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbering stmt = ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_code (stmt) == GIMPLE_PHI)
    changed = visit_phi (stmt, NULL, backedges_varying_p);
  else if (gimple_has_volatile_ops (stmt))
    changed = defs_to_varying (stmt);
  else if (gassign *ass = dyn_cast <gassign *> (stmt))
    {
      enum tree_code code = gimple_assign_rhs_code (ass);
      tree lhs = gimple_assign_lhs (ass);
      tree rhs1 = gimple_assign_rhs1 (ass);
      tree simplified;

      /* Shortcut for copies.  Simplifying copies is pointless,
         since we copy the expression and value they represent.  */
      if (code == SSA_NAME
          && TREE_CODE (lhs) == SSA_NAME)
        {
          changed = visit_copy (lhs, rhs1);
          goto done;
        }
      simplified = try_to_simplify (ass);
      if (simplified)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "RHS ");
              print_gimple_expr (dump_file, ass, 0);
              fprintf (dump_file, " simplified to ");
              print_generic_expr (dump_file, simplified);
              fprintf (dump_file, "\n");
            }
        }
      /* Setting value numbers to constants will occasionally
         screw up phi congruence because constants are not
         uniquely associated with a single ssa name that can be
         looked up.  */
      if (simplified
          && is_gimple_min_invariant (simplified)
          && TREE_CODE (lhs) == SSA_NAME)
        {
          changed = set_ssa_val_to (lhs, simplified);
          goto done;
        }
      else if (simplified
               && TREE_CODE (simplified) == SSA_NAME
               && TREE_CODE (lhs) == SSA_NAME)
        {
          changed = visit_copy (lhs, simplified);
          goto done;
        }

      if ((TREE_CODE (lhs) == SSA_NAME
           /* We can substitute SSA_NAMEs that are live over
              abnormal edges with their constant value.  */
           && !(gimple_assign_copy_p (ass)
                && is_gimple_min_invariant (rhs1))
           && !(simplified
                && is_gimple_min_invariant (simplified))
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
          /* Stores or copies from SSA_NAMEs that are live over
             abnormal edges are a problem.  */
          || (code == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
        changed = defs_to_varying (ass);
      else if (REFERENCE_CLASS_P (lhs)
               || DECL_P (lhs))
        changed = visit_reference_op_store (lhs, rhs1, ass);
      else if (TREE_CODE (lhs) == SSA_NAME)
        {
          if ((gimple_assign_copy_p (ass)
               && is_gimple_min_invariant (rhs1))
              || (simplified
                  && is_gimple_min_invariant (simplified)))
            {
              if (simplified)
                changed = set_ssa_val_to (lhs, simplified);
              else
                changed = set_ssa_val_to (lhs, rhs1);
            }
          else
            {
              /* Visit the original statement.  */
              switch (vn_get_stmt_kind (ass))
                {
                case VN_NARY:
                  changed = visit_nary_op (lhs, ass);
                  break;
                case VN_REFERENCE:
                  changed = visit_reference_op_load (lhs, rhs1, ass);
                  break;
                default:
                  changed = defs_to_varying (ass);
                  break;
                }
            }
        }
      else
        changed = defs_to_varying (ass);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call_stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
        {
          /* Try constant folding based on our current lattice.  */
          tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
                                                            vn_valueize);
          if (simplified)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "call ");
                  print_gimple_expr (dump_file, call_stmt, 0);
                  fprintf (dump_file, " simplified to ");
                  print_generic_expr (dump_file, simplified);
                  fprintf (dump_file, "\n");
                }
            }
          /* Setting value numbers to constants will occasionally
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
             looked up.  */
          if (simplified
              && is_gimple_min_invariant (simplified))
            {
              changed = set_ssa_val_to (lhs, simplified);
              if (gimple_vdef (call_stmt))
                changed |= set_ssa_val_to (gimple_vdef (call_stmt),
                                           SSA_VAL (gimple_vuse (call_stmt)));
              goto done;
            }
          else if (simplified
                   && TREE_CODE (simplified) == SSA_NAME)
            {
              changed = visit_copy (lhs, simplified);
              if (gimple_vdef (call_stmt))
                changed |= set_ssa_val_to (gimple_vdef (call_stmt),
                                           SSA_VAL (gimple_vuse (call_stmt)));
              goto done;
            }
          else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            {
              changed = defs_to_varying (call_stmt);
              goto done;
            }
        }

      /* Pick up flags from a devirtualization target.  */
      tree fn = gimple_call_fn (stmt);
      int extra_fnflags = 0;
      if (fn && TREE_CODE (fn) == SSA_NAME)
        {
          fn = SSA_VAL (fn);
          if (TREE_CODE (fn) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
            extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
        }
      if (!gimple_call_internal_p (call_stmt)
          && (/* Calls to the same function with the same vuse
                 and the same operands do not necessarily return the same
                 value, unless they're pure or const.  */
              ((gimple_call_flags (call_stmt) | extra_fnflags)
               & (ECF_PURE | ECF_CONST))
              /* If calls have a vdef, subsequent calls won't have
                 the same incoming vuse.  So, if 2 calls with vdef have the
                 same vuse, we know they're not subsequent.
                 We can value number 2 calls to the same function with the
                 same vuse and the same operands which are not subsequent
                 the same, because there is no code in the program that can
                 compare the 2 values...  */
              || (gimple_vdef (call_stmt)
                  /* ... unless the call returns a pointer which does
                     not alias with anything else.  In which case the
                     information that the values are distinct is encoded
                     in the IL.  */
                  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
                  /* Only perform the following when being called from PRE
                     which embeds tail merging.  */
                  && default_vn_walk_kind == VN_WALK)))
        changed = visit_reference_op_call (lhs, call_stmt);
      else
        changed = defs_to_varying (call_stmt);
    }
  else
    changed = defs_to_varying (stmt);
done:
  return changed;
}

/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table, unsigned size)
{
  table->phis = new vn_phi_table_type (size);
  table->nary = new vn_nary_op_table_type (size);
  table->references = new vn_reference_table_type (size);
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  /* Walk over elements and release vectors.  */
  vn_reference_iterator_type hir;
  vn_reference_t vr;
  FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
    vr->operands.release ();
  delete table->phis;
  table->phis = NULL;
  delete table->nary;
  table->nary = NULL;
  delete table->references;
  table->references = NULL;
}

/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    if (! vno->predicated_values)
      set_value_id_for_result (vno->u.result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
                               hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If either one is VN_TOP consider them equal.  */
  if (e1 == VN_TOP || e2 == VN_TOP)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv, honor_nans, honor_snans,
                                       rhs2, &handled);
  if (handled && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}

/* Return true if the reference operation REF may trap.  */

bool
vn_reference_may_trap (vn_reference_t ref)
{
  switch (ref->operands[0].opcode)
    {
    case MODIFY_EXPR:
    case CALL_EXPR:
      /* We do not handle calls.  */
    case ADDR_EXPR:
      /* And toplevel address computations never trap.  */
      return false;
    default:;
    }

  vn_reference_op_t op;
  unsigned i;
  FOR_EACH_VEC_ELT (ref->operands, i, op)
    {
      switch (op->opcode)
        {
        case WITH_SIZE_EXPR:
        case TARGET_MEM_REF:
          /* Always variable.  */
          return true;
        case COMPONENT_REF:
          if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
            return true;
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          if (TREE_CODE (op->op0) == SSA_NAME)
            return true;
          break;
        case MEM_REF:
          /* Nothing interesting in itself, the base is separate.  */
          break;
        /* The following are the address bases.  */
        case SSA_NAME:
          return true;
        case ADDR_EXPR:
          if (op->op0)
            return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
          return false;
        default:;
        }
    }
  return false;
}

eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
                                            bitmap inserted_exprs_)
  : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
    el_todo (0), eliminations (0), insertions (0),
    inserted_exprs (inserted_exprs_)
{
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
}

eliminate_dom_walker::~eliminate_dom_walker ()
{
  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);
}

/* Return a leader for OP that is available at the current point of the
   eliminate domwalk.  */

tree
eliminate_dom_walker::eliminate_avail (basic_block, tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
        return valnum;
      if (avail.length () > SSA_NAME_VERSION (valnum))
        return avail[SSA_NAME_VERSION (valnum)];
    }
  else if (is_gimple_min_invariant (valnum))
    return valnum;
  return NULL_TREE;
}

/* At the current point of the eliminate domwalk make OP available.  */

void
eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (avail.length () <= SSA_NAME_VERSION (valnum))
        avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
      tree pushop = op;
      if (avail[SSA_NAME_VERSION (valnum)])
        pushop = avail[SSA_NAME_VERSION (valnum)];
      avail_stack.safe_push (pushop);
      avail[SSA_NAME_VERSION (valnum)] = op;
    }
}

/* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
   the leader for the expression if insertion was successful.  */

tree
eliminate_dom_walker::eliminate_insert (basic_block bb,
                                        gimple_stmt_iterator *gsi, tree val)
{
  /* We can insert a sequence with a single assignment only.  */
  gimple_seq stmts = VN_INFO (val)->expr;
  if (!gimple_seq_singleton_p (stmts))
    return NULL_TREE;
  gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
  if (!stmt
      || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
          && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
          && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
          && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
              || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
    return NULL_TREE;

  tree op = gimple_assign_rhs1 (stmt);
  if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
      || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    op = TREE_OPERAND (op, 0);
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
  if (!leader)
    return NULL_TREE;

  tree res;
  stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    res = gimple_build (&stmts, BIT_FIELD_REF,
                        TREE_TYPE (val), leader,
                        TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
                        TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
  else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
    res = gimple_build (&stmts, BIT_AND_EXPR,
                        TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
  else
    res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
                        TREE_TYPE (val), leader);
  if (TREE_CODE (res) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (res)
      || gimple_bb (SSA_NAME_DEF_STMT (res)))
    {
      gimple_seq_discard (stmts);

      /* During propagation we have to treat SSA info conservatively
         and thus we can end up simplifying the inserted expression
         at elimination time to sth not defined in stmts.  */
      /* But then this is a redundancy we failed to detect.  Which means
         res now has two values.  That doesn't play well with how
         we track availability here, so give up.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          if (TREE_CODE (res) == SSA_NAME)
            res = eliminate_avail (bb, res);
          if (res)
            {
              fprintf (dump_file, "Failed to insert expression for value ");
              print_generic_expr (dump_file, val);
              fprintf (dump_file, " which is really fully redundant to ");
              print_generic_expr (dump_file, res);
              fprintf (dump_file, "\n");
            }
        }

      return NULL_TREE;
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  VN_INFO (res)->valnum = val;
  VN_INFO (res)->visited = true;

  insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
    }

  return res;
}

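/* Illustrative example (not from the original sources): if VN recorded
   for value val_1 the single-assignment expression (int) s_2 and a
   leader for s_2 is available, a new statement

     tem_3 = (int) s_2;

   is inserted before *GSI and tem_3, value-numbered to val_1, becomes
   the leader used when replacing fully redundant computations.  */
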
void
eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
{
  tree sprime = NULL_TREE;
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_get_lhs (stmt);
  if (lhs && TREE_CODE (lhs) == SSA_NAME
      && !gimple_has_volatile_ops (stmt)
      /* See PR43491.  Do not replace a global register variable when
         it is the RHS of an assignment.  Do replace local register
         variables since gcc does not guarantee a local variable will
         be allocated in register.
         ???  The fix isn't effective here.  This should instead
         be ensured by not value-numbering them the same but treating
         them like volatiles?  */
      && !(gimple_assign_single_p (stmt)
           && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
               && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
               && is_global_var (gimple_assign_rhs1 (stmt)))))
    {
      sprime = eliminate_avail (b, lhs);
      if (!sprime)
        {
          /* If there is no existing usable leader but SCCVN thinks
             it has an expression it wants to use as replacement,
             insert that.  */
          tree val = VN_INFO (lhs)->valnum;
          if (val != VN_TOP
              && TREE_CODE (val) == SSA_NAME
              && VN_INFO (val)->needs_insertion
              && VN_INFO (val)->expr != NULL
              && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
            eliminate_push_avail (b, sprime);
        }

      /* If this now constitutes a copy duplicate points-to
         and range info appropriately.  This is especially
         important for inserted code.  See tree-ssa-copy.c
         for similar code.  */
      if (sprime
          && TREE_CODE (sprime) == SSA_NAME)
        {
          basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
          if (POINTER_TYPE_P (TREE_TYPE (lhs))
              && SSA_NAME_PTR_INFO (lhs)
              && ! SSA_NAME_PTR_INFO (sprime))
            {
              duplicate_ssa_name_ptr_info (sprime,
                                           SSA_NAME_PTR_INFO (lhs));
              if (b != sprime_b)
                mark_ptr_info_alignment_unknown
                  (SSA_NAME_PTR_INFO (sprime));
            }
          else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                   && SSA_NAME_RANGE_INFO (lhs)
                   && ! SSA_NAME_RANGE_INFO (sprime)
                   && b == sprime_b)
            duplicate_ssa_name_range_info (sprime,
                                           SSA_NAME_RANGE_TYPE (lhs),
                                           SSA_NAME_RANGE_INFO (lhs));
        }

      /* Inhibit the use of an inserted PHI on a loop header when
         the address of the memory reference is a simple induction
         variable.  In other cases the vectorizer won't do anything
         anyway (either it's loop invariant or a complicated
         expression).  */
      if (sprime
          && TREE_CODE (sprime) == SSA_NAME
          && do_pre
          && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
          && loop_outer (b->loop_father)
          && has_zero_uses (sprime)
          && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
          && gimple_assign_load_p (stmt))
        {
          gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
          basic_block def_bb = gimple_bb (def_stmt);
          if (gimple_code (def_stmt) == GIMPLE_PHI
              && def_bb->loop_father->header == def_bb)
            {
              loop_p loop = def_bb->loop_father;
              ssa_op_iter iter;
              tree op;
              FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
                {
                  affine_iv iv;
                  def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
                  if (def_bb
                      && flow_bb_inside_loop_p (loop, def_bb)
                      && simple_iv (loop, loop, op, &iv, true))
                    break;
                }
              if (op != NULL_TREE)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Not replacing ");
                      print_gimple_expr (dump_file, stmt, 0);
                      fprintf (dump_file, " with ");
                      print_generic_expr (dump_file, sprime);
                      fprintf (dump_file, " which would add a loop"
                               " carried dependence to loop %d\n",
                               loop->num);
                    }
                  /* Don't keep sprime available.  */
                  sprime = NULL_TREE;
                }
            }
        }

      if (sprime)
        {
          /* If we can propagate the value computed for LHS into
             all uses don't bother doing anything with this stmt.  */
          if (may_propagate_copy (lhs, sprime))
            {
              /* Mark it for removal.  */
              to_remove.safe_push (stmt);

              /* ???  Don't count copy/constant propagations.  */
              if (gimple_assign_single_p (stmt)
                  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
                      || gimple_assign_rhs1 (stmt) == sprime))
                return;

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Replaced ");
                  print_gimple_expr (dump_file, stmt, 0);
                  fprintf (dump_file, " with ");
                  print_generic_expr (dump_file, sprime);
                  fprintf (dump_file, " in all uses of ");
                  print_gimple_stmt (dump_file, stmt, 0);
                }

              eliminations++;
              return;
            }

          /* If this is an assignment from our leader (which
             happens in the case the value-number is a constant)
             then there is nothing to do.  Likewise if we run into
             inserted code that needed a conversion because of
             our type-agnostic value-numbering of loads.  */
          if ((gimple_assign_single_p (stmt)
               || (is_gimple_assign (stmt)
                   && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                       || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)))
              && sprime == gimple_assign_rhs1 (stmt))
            return;

          /* Else replace its RHS.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Replaced ");
              print_gimple_expr (dump_file, stmt, 0);
              fprintf (dump_file, " with ");
              print_generic_expr (dump_file, sprime);
              fprintf (dump_file, " in ");
              print_gimple_stmt (dump_file, stmt, 0);
            }
          eliminations++;

          bool can_make_abnormal_goto = (is_gimple_call (stmt)
                                         && stmt_can_make_abnormal_goto (stmt));
          gimple *orig_stmt = stmt;
          if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                          TREE_TYPE (sprime)))
            {
              /* We preserve conversions to but not from function or method
                 types.  This asymmetry makes it necessary to re-instantiate
                 conversions here.  */
              if (POINTER_TYPE_P (TREE_TYPE (lhs))
                  && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
                sprime = fold_convert (TREE_TYPE (lhs), sprime);
              else
                gcc_unreachable ();
            }
          tree vdef = gimple_vdef (stmt);
          tree vuse = gimple_vuse (stmt);
          propagate_tree_value_into_stmt (gsi, sprime);
          stmt = gsi_stmt (*gsi);
          update_stmt (stmt);
          /* In case the VDEF on the original stmt was released, value-number
             it to the VUSE.  This is to make vuse_ssa_val able to skip
             released virtual operands.  */
          if (vdef != gimple_vdef (stmt))
            {
              gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
              VN_INFO (vdef)->valnum = vuse;
            }

          /* If we removed EH side-effects from the statement, clean
             its EH information.  */
          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            {
              bitmap_set_bit (need_eh_cleanup,
                              gimple_bb (stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "  Removed EH side-effects.\n");
            }

          /* Likewise for AB side-effects.  */
          if (can_make_abnormal_goto
              && !stmt_can_make_abnormal_goto (stmt))
            {
              bitmap_set_bit (need_ab_cleanup,
                              gimple_bb (stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "  Removed AB side-effects.\n");
            }

          return;
        }
    }

  /* If the statement is a scalar store, see if the expression
     has the same value number as its rhs.  If so, the store is
     dead.  */
  if (gimple_assign_single_p (stmt)
      && !gimple_has_volatile_ops (stmt)
      && !is_gimple_reg (gimple_assign_lhs (stmt))
      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
          || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      vn_reference_t vnresult;
      /* ???  gcc.dg/torture/pr91445.c shows that we lookup a boolean
         typed load of a byte known to be 0x11 as 1 so a store of
         a boolean 1 is detected as redundant.  Because of this we
         have to make sure to lookup with a ref where its size
         matches the precision.  */
      tree lookup_lhs = lhs;
      if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
          && (TREE_CODE (lhs) != COMPONENT_REF
              || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
          && !type_has_mode_precision_p (TREE_TYPE (lhs)))
        {
          if (TREE_CODE (lhs) == COMPONENT_REF
              || TREE_CODE (lhs) == MEM_REF)
            {
              tree ltype = build_nonstandard_integer_type
                             (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
                              TYPE_UNSIGNED (TREE_TYPE (lhs)));
              if (TREE_CODE (lhs) == COMPONENT_REF)
                {
                  tree foff = component_ref_field_offset (lhs);
                  tree f = TREE_OPERAND (lhs, 1);
                  if (!poly_int_tree_p (foff))
                    lookup_lhs = NULL_TREE;
                  else
                    lookup_lhs = build3 (BIT_FIELD_REF, ltype,
                                         TREE_OPERAND (lhs, 0),
                                         TYPE_SIZE (TREE_TYPE (lhs)),
                                         bit_from_pos
                                           (foff, DECL_FIELD_BIT_OFFSET (f)));
                }
              else
                lookup_lhs = build2 (MEM_REF, ltype,
                                     TREE_OPERAND (lhs, 0),
                                     TREE_OPERAND (lhs, 1));
            }
          else
            lookup_lhs = NULL_TREE;
        }
      tree val = NULL_TREE;
      if (lookup_lhs)
        val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
                                   VN_WALKREWRITE, &vnresult, false);
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = VN_INFO (rhs)->valnum;
      if (val
          && (operand_equal_p (val, rhs, 0)
              /* Due to the bitfield lookups above we can get bit
                 interpretations of the same RHS as values here.  Those
                 are redundant as well.  */
              || (TREE_CODE (val) == SSA_NAME
                  && gimple_assign_single_p (SSA_NAME_DEF_STMT (val))
                  && (val = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val)))
                  && TREE_CODE (val) == VIEW_CONVERT_EXPR
                  && TREE_OPERAND (val, 0) == rhs)))
        {
          /* We can only remove the later store if the former aliases
             at least all accesses the later one does or if the store
             was to readonly memory storing the same value.  */
          alias_set_type set = get_alias_set (lhs);
          if (! vnresult
              || vnresult->set == set
              || alias_set_subset_of (set, vnresult->set))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Deleted redundant store ");
                  print_gimple_stmt (dump_file, stmt, 0);
                }

              /* Queue stmt for removal.  */
              to_remove.safe_push (stmt);
              return;
            }
        }
    }

  /* If this is a control statement and value numbering left some of its
     edges unexecuted, force the condition to be consistent with that.  */
  if (gcond *cond = dyn_cast <gcond *> (stmt))
    {
      if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
          ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Removing unexecutable edge from ");
              print_gimple_stmt (dump_file, stmt, 0);
            }
          if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
              == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
            gimple_cond_make_true (cond);
          else
            gimple_cond_make_false (cond);
          update_stmt (cond);
          el_todo |= TODO_cleanup_cfg;
          return;
        }
    }

  bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
  bool was_noreturn = (is_gimple_call (stmt)
                       && gimple_call_noreturn_p (stmt));
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  /* If we didn't replace the whole stmt (or propagate the result
     into all uses), replace all uses on this stmt with their
     leaders.  */
  bool modified = false;
  use_operand_p use_p;
  ssa_op_iter iter;
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree use = USE_FROM_PTR (use_p);
      /* ???  The call code above leaves stmt operands un-updated.  */
      if (TREE_CODE (use) != SSA_NAME)
        continue;
      tree sprime;
      if (SSA_NAME_IS_DEFAULT_DEF (use))
        /* ???  For default defs BB shouldn't matter, but we have to
           solve the inconsistency between rpo eliminate and
           dom eliminate avail valueization first.  */
        sprime = eliminate_avail (b, use);
      else
        /* Look for sth available at the definition block of the argument.
           This avoids inconsistencies between availability there which
           decides if the stmt can be removed and availability at the
           use site.  The SSA property ensures that things available
           at the definition are also available at uses.  */
        sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
      if (sprime && sprime != use
          && may_propagate_copy (use, sprime)
          /* We substitute into debug stmts to avoid excessive
             debug temporaries created by removed stmts, but we need
             to avoid doing so for inserted sprimes as we never want
             to create debug temporaries for them.  */
          && (!inserted_exprs
              || TREE_CODE (sprime) != SSA_NAME
              || !is_gimple_debug (stmt)
              || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
        {
          propagate_value (use_p, sprime);
          modified = true;
        }
    }

  /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
     into which is a requirement for the IPA devirt machinery.  */
  gimple *old_stmt = stmt;
  if (modified)
    {
      /* If a formerly non-invariant ADDR_EXPR is turned into an
         invariant one it was on a separate stmt.  */
      if (gimple_assign_single_p (stmt)
          && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
        recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
      gimple_stmt_iterator prev = *gsi;
      gsi_prev (&prev);
      if (fold_stmt (gsi))
        {
          /* fold_stmt may have created new stmts inbetween
             the previous stmt and the folded stmt.  Mark
             all defs created there as varying to not confuse
             the SCCVN machinery as we're using that even during
             elimination.  */
          if (gsi_end_p (prev))
            prev = gsi_start_bb (b);
          else
            gsi_next (&prev);
          if (gsi_stmt (prev) != gsi_stmt (*gsi))
            do
              {
                tree def;
                ssa_op_iter dit;
                FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
                                           dit, SSA_OP_ALL_DEFS)
                  /* As existing DEFs may move between stmts
                     only process new ones.  */
                  if (! has_VN_INFO (def))
                    {
                      VN_INFO (def)->valnum = def;
                      VN_INFO (def)->visited = true;
                    }
                if (gsi_stmt (prev) == gsi_stmt (*gsi))
                  break;
                gsi_next (&prev);
              }
            while (1);
        }
      stmt = gsi_stmt (*gsi);
      /* In case we folded the stmt away schedule the NOP for removal.  */
      if (gimple_nop_p (stmt))
        to_remove.safe_push (stmt);
    }

  /* Visit indirect calls and turn them into direct calls if
     possible using the devirtualization machinery.  Do this before
     checking for required EH/abnormal/noreturn cleanup as devirt
     may expose more of those.  */
  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree fn = gimple_call_fn (call_stmt);
      if (fn
          && flag_devirtualize
          && virtual_method_call_p (fn))
        {
          tree otr_type = obj_type_ref_class (fn);
          unsigned HOST_WIDE_INT otr_tok
            = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
          tree instance;
          ipa_polymorphic_call_context context (current_function_decl,
                                                fn, stmt, &instance);
          context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
                                    otr_type, stmt, NULL);
          bool final;
          vec <cgraph_node *> targets
            = possible_polymorphic_call_targets (obj_type_ref_class (fn),
                                                 otr_tok, context, &final);
          if (dump_file)
            dump_possible_polymorphic_call_targets (dump_file,
                                                    obj_type_ref_class (fn),
                                                    otr_tok, context);
          if (final && targets.length () <= 1 && dbg_cnt (devirt))
            {
              tree fn;
              if (targets.length () == 1)
                fn = targets[0]->decl;
              else
                fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
              if (dump_enabled_p ())
                {
                  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
                                   "converting indirect call to "
                                   "function %s\n",
                                   lang_hooks.decl_printable_name (fn, 2));
                }
              gimple_call_set_fndecl (call_stmt, fn);
              /* If changing the call to __builtin_unreachable
                 or similar noreturn function, adjust gimple_call_fntype
                 too.  */
              if (gimple_call_noreturn_p (call_stmt)
                  && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
                  && TYPE_ARG_TYPES (TREE_TYPE (fn))
                  && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
                      == void_type_node))
                gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
              maybe_remove_unused_call_args (cfun, call_stmt);
              modified = true;
            }
        }
    }

  if (modified)
    {
      /* When changing a call into a noreturn call, cfg cleanup
         is needed to fix up the noreturn call.  */
      if (!was_noreturn
          && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
        to_fixup.safe_push (stmt);
      /* When changing a condition or switch into one we know what
         edge will be executed, schedule a cfg cleanup.  */
      if ((gimple_code (stmt) == GIMPLE_COND
           && (gimple_cond_true_p (as_a <gcond *> (stmt))
               || gimple_cond_false_p (as_a <gcond *> (stmt))))
          || (gimple_code (stmt) == GIMPLE_SWITCH
              && TREE_CODE (gimple_switch_index
                              (as_a <gswitch *> (stmt))) == INTEGER_CST))
        el_todo |= TODO_cleanup_cfg;
      /* If we removed EH side-effects from the statement, clean
         its EH information.  */
      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
        {
          bitmap_set_bit (need_eh_cleanup,
                          gimple_bb (stmt)->index);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  Removed EH side-effects.\n");
        }
      /* Likewise for AB side-effects.  */
      if (can_make_abnormal_goto
          && !stmt_can_make_abnormal_goto (stmt))
        {
          bitmap_set_bit (need_ab_cleanup,
                          gimple_bb (stmt)->index);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  Removed AB side-effects.\n");
        }
      update_stmt (stmt);
      /* In case the VDEF on the original stmt was released, value-number
         it to the VUSE.  This is to make vuse_ssa_val able to skip
         released virtual operands.  */
      if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
        VN_INFO (vdef)->valnum = vuse;
    }

  /* Make new values available - for fully redundant LHS we
     continue with the next stmt above and skip this.  */
  def_operand_p defp;
  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
    eliminate_push_avail (b, DEF_FROM_PTR (defp));
}

/* Perform elimination for the basic-block B during the domwalk.  */

edge
eliminate_dom_walker::before_dom_children (basic_block b)
{
  /* Mark new bb.  */
  avail_stack.safe_push (NULL_TREE);

  /* Skip unreachable blocks marked unreachable during the SCCVN domwalk.  */
  if (!(b->flags & BB_EXECUTABLE))
    return NULL;

  vn_context_bb = b;

  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);

      if (virtual_operand_p (res))
	{
	  gsi_next (&gsi);
	  continue;
	}

      tree sprime = eliminate_avail (b, res);
      if (sprime
	  && sprime != res)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Replaced redundant PHI node defining ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, " with ");
	      print_generic_expr (dump_file, sprime);
	      fprintf (dump_file, "\n");
	    }

	  /* If we inserted this PHI node ourselves, it's not an
	     elimination.  */
	  if (! inserted_exprs
	      || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
	    eliminations++;

	  /* If we will propagate into all uses don't bother to do
	     anything.  */
	  if (may_propagate_copy (res, sprime))
	    {
	      /* Mark the PHI for removal.  */
	      to_remove.safe_push (phi);
	      gsi_next (&gsi);
	      continue;
	    }

	  remove_phi_node (&gsi, false);

	  if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
	    sprime = fold_convert (TREE_TYPE (res), sprime);
	  gimple *stmt = gimple_build_assign (res, sprime);
	  gimple_stmt_iterator gsi2 = gsi_after_labels (b);
	  gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
	  continue;
	}

      eliminate_push_avail (b, res);
      gsi_next (&gsi);
    }

  for (gimple_stmt_iterator gsi = gsi_start_bb (b);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    eliminate_stmt (b, &gsi);

  /* Replace destination PHI arguments.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, b->succs)
    if (e->flags & EDGE_EXECUTABLE)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
	  tree arg = USE_FROM_PTR (use_p);
	  if (TREE_CODE (arg) != SSA_NAME
	      || virtual_operand_p (arg))
	    continue;
	  tree sprime = eliminate_avail (b, arg);
	  if (sprime && may_propagate_copy (arg, sprime))
	    propagate_value (use_p, sprime);
	}

  vn_context_bb = NULL;

  return NULL;
}
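/* For example, given

     # x_3 = PHI <a_1(2), b_2(3)>
     # y_4 = PHI <a_1(2), b_2(3)>

   the second PHI value-numbers the same as the first, eliminate_avail
   returns x_3 as its leader, and y_4 is queued in to_remove with all
   its uses rewritten to x_3.  */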
/* Make no longer available leaders no longer available.  */

void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = avail_stack.pop ()) != NULL_TREE)
    {
      tree valnum = VN_INFO (entry)->valnum;
      tree old = avail[SSA_NAME_VERSION (valnum)];
      if (old == entry)
	avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
      else
	avail[SSA_NAME_VERSION (valnum)] = entry;
    }
}
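/* The NULL_TREE pushed in before_dom_children acts as a per-block marker
   on avail_stack: popping up to it here undoes the leader registrations
   of the dominator subtree just left, so e.g. a leader recorded in one
   arm of a diamond never leaks into the other arm.  */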
/* Remove queued stmts and perform delayed cleanups.  */

unsigned
eliminate_dom_walker::eliminate_cleanup (bool region_p)
{
  statistics_counter_event (cfun, "Eliminated", eliminations);
  statistics_counter_event (cfun, "Insertions", insertions);

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!to_remove.is_empty ())
    {
      bool do_release_defs = true;
      gimple *stmt = to_remove.pop ();

      /* When we are value-numbering a region we do not require exit PHIs to
	 be present so we have to make sure to deal with uses outside of the
	 region of stmts that we thought are eliminated.
	 ??? Note we may be confused by uses in dead regions we didn't run
	 elimination on.  Rather than checking individual uses we accept
	 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
	 contains such an example).  */
      if (region_p)
	{
	  if (gphi *phi = dyn_cast <gphi *> (stmt))
	    {
	      tree lhs = gimple_phi_result (phi);
	      if (!has_zero_uses (lhs))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Keeping eliminated stmt live "
			     "as copy because of out-of-region uses\n");
		  tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
		  gimple *copy = gimple_build_assign (lhs, sprime);
		  gimple_stmt_iterator gsi
		    = gsi_after_labels (gimple_bb (stmt));
		  gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
		  do_release_defs = false;
		}
	    }
	  else if (tree lhs = gimple_get_lhs (stmt))
	    if (TREE_CODE (lhs) == SSA_NAME
		&& !has_zero_uses (lhs))
	      {
		if (dump_file && (dump_flags & TDF_DETAILS))
		  fprintf (dump_file, "Keeping eliminated stmt live "
			   "as copy because of out-of-region uses\n");
		tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
		gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
		if (is_gimple_assign (stmt))
		  {
		    gimple_assign_set_rhs_from_tree (&gsi, sprime);
		    stmt = gsi_stmt (gsi);
		    update_stmt (stmt);
		    if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
		      bitmap_set_bit (need_eh_cleanup,
				      gimple_bb (stmt)->index);
		    continue;
		  }
		else
		  {
		    gimple *copy = gimple_build_assign (lhs, sprime);
		    gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
		    do_release_defs = false;
		  }
	      }
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
	}

      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, do_release_defs);
      else
	{
	  basic_block bb = gimple_bb (stmt);
	  unlink_stmt_vdef (stmt);
	  if (gsi_remove (&gsi, true))
	    bitmap_set_bit (need_eh_cleanup, bb->index);
	  if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
	    bitmap_set_bit (need_ab_cleanup, bb->index);
	  if (do_release_defs)
	    release_defs (stmt);
	}

      /* Removing a stmt may expose a forwarder block.  */
      el_todo |= TODO_cleanup_cfg;
    }

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!to_fixup.is_empty ())
    {
      gimple *stmt = to_fixup.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}

      if (fixup_noreturn_call (stmt))
	el_todo |= TODO_cleanup_cfg;
    }

  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    el_todo |= TODO_cleanup_cfg;

  return el_todo;
}
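/* Note the two-phase scheme: the domwalk above only queues stmts in
   to_remove and to_fixup; the actual removal, noreturn call splitting
   and EH/AB edge purging happen here, where releasing SSA names can no
   longer confuse the VN machinery.  */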
/* Eliminate fully redundant computations.  */

unsigned
eliminate_with_rpo_vn (bitmap inserted_exprs)
{
  eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);

  walker.walk (cfun->cfg->x_entry_block_ptr);
  return walker.eliminate_cleanup ();
}
static unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
	   bool iterate, bool eliminate);

void
run_rpo_vn (vn_lookup_kind kind)
{
  default_vn_walk_kind = kind;
  do_rpo_vn (cfun, NULL, NULL, true, false);
  /* ??? Prune requirement of these.  */
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);
  constant_value_ids = BITMAP_ALLOC (NULL);

  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  unsigned i;
  tree name;
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (!info->visited
	  || info->valnum == VN_TOP)
	info->valnum = name;
      if (info->valnum == name)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      FOR_EACH_SSA_NAME (i, name, cfun)
	{
	  if (VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name));
	      fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
	    }
	}
    }
}
/* Free VN associated data structures.  */

void
free_rpo_vn (void)
{
  free_vn_table (valid_info);
  XDELETE (valid_info);
  obstack_free (&vn_tables_obstack, NULL);
  obstack_free (&vn_tables_insert_obstack, NULL);

  vn_ssa_aux_iterator_type it;
  vn_ssa_aux_t info;
  FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
    if (info->needs_insertion)
      release_ssa_name (info->name);
  obstack_free (&vn_ssa_aux_obstack, NULL);
  delete vn_ssa_aux_hash;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
  BITMAP_FREE (constant_value_ids);
}
/* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */

static tree
vn_lookup_simplify_result (gimple_match_op *res_op)
{
  if (!res_op->code.is_tree_code ())
    return NULL_TREE;
  tree *ops = res_op->ops;
  unsigned int length = res_op->num_ops;
  if (res_op->code == CONSTRUCTOR
      /* ??? We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
	 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree.  */
      && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
    {
      length = CONSTRUCTOR_NELTS (res_op->ops[0]);
      ops = XALLOCAVEC (tree, length);
      for (unsigned i = 0; i < length; ++i)
	ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
    }
  vn_nary_op_t vnresult = NULL;
  tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
				       res_op->type, ops, &vnresult);
  /* If this is used from expression simplification make sure to
     return an available expression.  */
  if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
    res = rpo_avail->eliminate_avail (vn_context_bb, res);
  return res;
}
/* Return a leader for OPs value that is valid at BB.  */

tree
rpo_elim::eliminate_avail (basic_block bb, tree op)
{
  bool visited;
  tree valnum = SSA_VAL (op, &visited);
  /* If we didn't visit OP then it must be defined outside of the
     region we process and also dominate it.  So it is available.  */
  if (!visited)
    return op;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
	return valnum;
      vn_avail *av = VN_INFO (valnum)->avail;
      if (!av)
	return NULL_TREE;
      if (av->location == bb->index)
	/* On tramp3d 90% of the cases are here.  */
	return ssa_name (av->leader);
      do
	{
	  basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
	  /* ??? During elimination we have to use availability at the
	     definition site of a use we try to replace.  This
	     is required to not run into inconsistencies because
	     of dominated_by_p_w_unex behavior and removing a definition
	     while not replacing all uses.
	     ??? We could try to consistently walk dominators
	     ignoring non-executable regions.  The nearest common
	     dominator of bb and abb is where we can stop walking.  We
	     may also be able to "pre-compute" (bits of) the next immediate
	     (non-)dominator during the RPO walk when marking edges as
	     executable.  */
	  if (dominated_by_p_w_unex (bb, abb))
	    {
	      tree leader = ssa_name (av->leader);
	      /* Prevent eliminations that break loop-closed SSA.  */
	      if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
		  && ! SSA_NAME_IS_DEFAULT_DEF (leader)
		  && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
							   (leader))->loop_father,
					      bb))
		return NULL_TREE;
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  print_generic_expr (dump_file, leader);
		  fprintf (dump_file, " is available for ");
		  print_generic_expr (dump_file, valnum);
		  fprintf (dump_file, "\n");
		}
	      /* On tramp3d 99% of the _remaining_ cases succeed at
		 the first entry.  */
	      return leader;
	    }
	  /* ??? Can we somehow skip to the immediate dominator
	     RPO index (bb_to_rpo)?  Again, maybe not worth it, on
	     tramp3d the worst number of elements in the vector is 9.  */
	  av = av->next;
	}
      while (av);
    }
  else if (valnum != VN_TOP)
    /* valnum is is_gimple_min_invariant.  */
    return valnum;
  return NULL_TREE;
}
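/* For example, if x_1 and x_7 value-number the same with leaders
   recorded in BB2 and BB4 respectively, a query from a BB5 that is
   dominated only by BB2 walks the avail chain past the BB4 entry and
   returns x_1; if no recording block dominates BB5, NULL_TREE is
   returned and no elimination happens.  */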
/* Make LEADER a leader for its value at BB.  */

void
rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
{
  tree valnum = VN_INFO (leader)->valnum;
  if (valnum == VN_TOP
      || is_gimple_min_invariant (valnum))
    return;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Making available beyond BB%d ", bb->index);
      print_generic_expr (dump_file, leader);
      fprintf (dump_file, " for value ");
      print_generic_expr (dump_file, valnum);
      fprintf (dump_file, "\n");
    }
  vn_ssa_aux_t value = VN_INFO (valnum);
  vn_avail *av;
  if (m_avail_freelist)
    {
      av = m_avail_freelist;
      m_avail_freelist = m_avail_freelist->next;
    }
  else
    av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
  av->location = bb->index;
  av->leader = SSA_NAME_VERSION (leader);
  av->next = value->avail;
  value->avail = av;
}
/* Valueization hook for RPO VN plus required state.  */

static tree
rpo_vn_valueize (tree name)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      vn_ssa_aux_t val = VN_INFO (name);
      if (val)
	{
	  tree tem = val->valnum;
	  if (tem != VN_TOP && tem != name)
	    {
	      if (TREE_CODE (tem) != SSA_NAME)
		return tem;
	      /* For all values we only valueize to an available leader
		 which means we can use SSA name info without restriction.  */
	      tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
	      if (tem)
		return tem;
	    }
	}
    }
  return name;
}
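/* This hook is installed as vn_valueize during the RPO walk, so
   match-and-simplify sees value numbers instead of the original SSA
   names; e.g. simplifying _3 = _1 - _2 when _2 valueizes to _1 can
   directly yield 0 without waiting for copy propagation.  */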
/* Insert on PRED_E predicates derived from CODE OPS being true besides the
   inverted condition.  */

static void
insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
{
  switch (code)
    {
    case LT_EXPR:
      /* a < b -> a {!,<}= b */
      vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      /* a < b -> ! a {>,=} b */
      vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      break;
    case GT_EXPR:
      /* a > b -> a {!,>}= b */
      vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      /* a > b -> ! a {<,=} b */
      vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      break;
    case EQ_EXPR:
      /* a == b -> ! a {<,>} b */
      vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      break;
    case LE_EXPR:
    case GE_EXPR:
    case NE_EXPR:
      /* Nothing besides inverted condition.  */
      break;
    default:;
    }
}
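/* For example, on the true edge of if (a_1 < b_2) the calls above record
   a_1 != b_2 == true, a_1 <= b_2 == true, a_1 > b_2 == false and
   a_1 == b_2 == false, so a dominated if (a_1 == b_2) is simplified via
   the predicated value lookup without any extra propagation.  */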
/* Main stmt worker for RPO VN, process BB.  */

static unsigned
process_bb (rpo_elim &avail, basic_block bb,
	    bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
	    bool do_region, bitmap exit_bbs, bool skip_phis)
{
  unsigned todo = 0;
  edge_iterator ei;
  edge e;

  vn_context_bb = bb;

  /* If we are in loop-closed SSA preserve this state.  This is
     relevant when called on regions from outside of FRE/PRE.  */
  bool lc_phi_nodes = false;
  if (!skip_phis
      && loops_state_satisfies_p (LOOP_CLOSED_SSA))
    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src->loop_father != e->dest->loop_father
	  && flow_loop_nested_p (e->dest->loop_father,
				 e->src->loop_father))
	{
	  lc_phi_nodes = true;
	  break;
	}

  /* When we visit a loop header substitute into loop info.  */
  if (!iterate && eliminate && bb->loop_father->header == bb)
    {
      /* Keep fields in sync with substitute_in_loop_info.  */
      if (bb->loop_father->nb_iterations)
	bb->loop_father->nb_iterations
	  = simplify_replace_tree (bb->loop_father->nb_iterations,
				   NULL_TREE, NULL_TREE, &vn_valueize_wrapper);
    }
  /* Value-number all defs in the basic-block.  */
  if (!skip_phis)
    for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	 gsi_next (&gsi))
      {
	gphi *phi = gsi.phi ();
	tree res = PHI_RESULT (phi);
	vn_ssa_aux_t res_info = VN_INFO (res);
	if (!bb_visited)
	  {
	    gcc_assert (!res_info->visited);
	    res_info->valnum = VN_TOP;
	    res_info->visited = true;
	  }

	/* When not iterating force backedge values to varying.  */
	visit_stmt (phi, !iterate_phis);
	if (virtual_operand_p (res))
	  continue;

	/* Eliminate */
	/* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
	   how we handle backedges and availability.
	   And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization.  */
	tree val = res_info->valnum;
	if (res != val && !iterate && eliminate)
	  {
	    if (tree leader = avail.eliminate_avail (bb, res))
	      if (leader != res
		  /* Preserve loop-closed SSA form.  */
		  && (! lc_phi_nodes
		      || is_gimple_min_invariant (leader)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Replaced redundant PHI node "
			       "defining ");
		      print_generic_expr (dump_file, res);
		      fprintf (dump_file, " with ");
		      print_generic_expr (dump_file, leader);
		      fprintf (dump_file, "\n");
		    }
		  avail.eliminations++;

		  if (may_propagate_copy (res, leader))
		    {
		      /* Schedule for removal.  */
		      avail.to_remove.safe_push (phi);
		      continue;
		    }
		  /* ??? Else generate a copy stmt.  */
		}
	  }
	/* Only make defs available that are not already.  But make
	   sure loop-closed SSA PHI node defs are picked up for
	   downstream uses.  */
	if (lc_phi_nodes
	    || res == val
	    || ! avail.eliminate_avail (bb, res))
	  avail.eliminate_push_avail (bb, res);
      }
  /* For empty BBs mark outgoing edges executable.  For non-empty BBs
     we do this when processing the last stmt as we have to do this
     before elimination which otherwise forces GIMPLE_CONDs to
     if (1 != 0) style when seeing non-executable edges.  */
  if (gsi_end_p (gsi_start_bb (bb)))
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (!(e->flags & EDGE_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "marking outgoing edge %d -> %d executable\n",
			 e->src->index, e->dest->index);
	      e->flags |= EDGE_EXECUTABLE;
	      e->dest->flags |= BB_EXECUTABLE;
	    }
	  else if (!(e->dest->flags & BB_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "marking destination block %d reachable\n",
			 e->dest->index);
	      e->dest->flags |= BB_EXECUTABLE;
	    }
	}
    }
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      ssa_op_iter i;
      tree op;
      if (!bb_visited)
	{
	  FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
	    {
	      vn_ssa_aux_t op_info = VN_INFO (op);
	      gcc_assert (!op_info->visited);
	      op_info->valnum = VN_TOP;
	      op_info->visited = true;
	    }

	  /* We somehow have to deal with uses that are not defined
	     in the processed region.  Forcing unvisited uses to
	     varying here doesn't play well with def-use following during
	     expression simplification, so we deal with this by checking
	     the visited flag in SSA_VAL.  */
	}

      visit_stmt (gsi_stmt (gsi));

      gimple *last = gsi_stmt (gsi);
      e = NULL;
      switch (gimple_code (last))
	{
	case GIMPLE_SWITCH:
	  e = find_taken_edge (bb, vn_valueize (gimple_switch_index
						  (as_a <gswitch *> (last))));
	  break;
	case GIMPLE_COND:
	  {
	    tree lhs = vn_valueize (gimple_cond_lhs (last));
	    tree rhs = vn_valueize (gimple_cond_rhs (last));
	    tree val = gimple_simplify (gimple_cond_code (last),
					boolean_type_node, lhs, rhs,
					NULL, vn_valueize);
	    /* If the condition didn't simplify see if we have recorded
	       an expression from so far taken edges.  */
	    if (! val || TREE_CODE (val) != INTEGER_CST)
	      {
		vn_nary_op_t vnresult;
		tree ops[2];
		ops[0] = lhs;
		ops[1] = rhs;
		val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
						boolean_type_node, ops,
						&vnresult);
		/* Did we get a predicated value?  */
		if (! val && vnresult && vnresult->predicated_values)
		  {
		    val = vn_nary_op_get_predicated_value (vnresult, bb);
		    if (val && dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Got predicated value ");
			print_generic_expr (dump_file, val, TDF_NONE);
			fprintf (dump_file, " for ");
			print_gimple_stmt (dump_file, last, TDF_SLIM);
		      }
		  }
	      }
	    if (val)
	      e = find_taken_edge (bb, val);
	    if (! e)
	      {
		/* If we didn't manage to compute the taken edge then
		   push predicated expressions for the condition itself
		   and related conditions to the hashtables.  This allows
		   simplification of redundant conditions which is
		   important as early cleanup.  */
		edge true_e, false_e;
		extract_true_false_edges_from_block (bb, &true_e, &false_e);
		enum tree_code code = gimple_cond_code (last);
		enum tree_code icode
		  = invert_tree_comparison (code, HONOR_NANS (lhs));
		tree ops[2];
		ops[0] = lhs;
		ops[1] = rhs;
		if (do_region
		    && bitmap_bit_p (exit_bbs, true_e->dest->index))
		  true_e = NULL;
		if (do_region
		    && bitmap_bit_p (exit_bbs, false_e->dest->index))
		  false_e = NULL;
		if (true_e)
		  vn_nary_op_insert_pieces_predicated
		    (2, code, boolean_type_node, ops,
		     boolean_true_node, 0, true_e);
		if (false_e)
		  vn_nary_op_insert_pieces_predicated
		    (2, code, boolean_type_node, ops,
		     boolean_false_node, 0, false_e);
		if (icode != ERROR_MARK)
		  {
		    if (true_e)
		      vn_nary_op_insert_pieces_predicated
			(2, icode, boolean_type_node, ops,
			 boolean_false_node, 0, true_e);
		    if (false_e)
		      vn_nary_op_insert_pieces_predicated
			(2, icode, boolean_type_node, ops,
			 boolean_true_node, 0, false_e);
		  }
		/* Relax for non-integers, inverted condition handled
		   above.  */
		if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
		  {
		    if (true_e)
		      insert_related_predicates_on_edge (code, ops, true_e);
		    if (false_e)
		      insert_related_predicates_on_edge (icode, ops, false_e);
		  }
	      }
	    break;
	  }
	case GIMPLE_GOTO:
	  e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
	  break;
	default:
	  e = NULL;
	}
      if (e)
	{
	  todo = TODO_cleanup_cfg;
	  if (!(e->flags & EDGE_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "marking known outgoing %sedge %d -> %d executable\n",
			 e->flags & EDGE_DFS_BACK ? "back-" : "",
			 e->src->index, e->dest->index);
	      e->flags |= EDGE_EXECUTABLE;
	      e->dest->flags |= BB_EXECUTABLE;
	    }
	  else if (!(e->dest->flags & BB_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "marking destination block %d reachable\n",
			 e->dest->index);
	      e->dest->flags |= BB_EXECUTABLE;
	    }
	}
      else if (gsi_one_before_end_p (gsi))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      if (!(e->flags & EDGE_EXECUTABLE))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file,
			     "marking outgoing edge %d -> %d executable\n",
			     e->src->index, e->dest->index);
		  e->flags |= EDGE_EXECUTABLE;
		  e->dest->flags |= BB_EXECUTABLE;
		}
	      else if (!(e->dest->flags & BB_EXECUTABLE))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file,
			     "marking destination block %d reachable\n",
			     e->dest->index);
		  e->dest->flags |= BB_EXECUTABLE;
		}
	    }
	}

      /* Eliminate.  That also pushes to avail.  */
      if (eliminate && ! iterate)
	avail.eliminate_stmt (bb, &gsi);
      else
	/* If not eliminating, make all not already available defs
	   available.  */
	FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
	  if (! avail.eliminate_avail (bb, op))
	    avail.eliminate_push_avail (bb, op);
    }
  /* Eliminate in destination PHI arguments.  Always substitute in dest
     PHIs, even for non-executable edges.  This handles region
     exit PHIs.  */
  if (!iterate && eliminate)
    FOR_EACH_EDGE (e, ei, bb->succs)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
	   !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
	  tree arg = USE_FROM_PTR (use_p);
	  if (TREE_CODE (arg) != SSA_NAME
	      || virtual_operand_p (arg))
	    continue;
	  tree sprime;
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      sprime = SSA_VAL (arg);
	      gcc_assert (TREE_CODE (sprime) != SSA_NAME
			  || SSA_NAME_IS_DEFAULT_DEF (sprime));
	    }
	  else
	    /* Look for sth available at the definition block of the argument.
	       This avoids inconsistencies between availability there which
	       decides if the stmt can be removed and availability at the
	       use site.  The SSA property ensures that things available
	       at the definition are also available at uses.  */
	    sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
					    arg);
	  if (sprime
	      && may_propagate_copy (arg, sprime))
	    propagate_value (use_p, sprime);
	}

  vn_context_bb = NULL;
  return todo;
}
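/* A concrete example of the edge marking above: when a block ends in
   if (i_1 > 0) and i_1 value-numbers to the constant 1, find_taken_edge
   yields the true edge, only that edge becomes EDGE_EXECUTABLE and the
   false destination can stay !BB_EXECUTABLE, which in turn lets PHI
   visiting ignore arguments flowing in over the dead edge.  */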
/* Unwind state per basic-block.  */

struct unwind_state
{
  /* Times this block has been visited.  */
  unsigned visited;
  /* Whether to handle this as iteration point or whether to treat
     incoming backedge PHI values as varying.  */
  bool iterate;
  /* Maximum RPO index this block is reachable from.  */
  int max_rpo;
  /* Unwind state.  */
  void *ob_top;
  vn_reference_t ref_top;
  vn_phi_t phi_top;
  vn_nary_op_t nary_top;
};
/* Unwind the RPO VN state for iteration.  */

static void
do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
{
  gcc_assert (to->iterate);
  for (; last_inserted_nary != to->nary_top;
       last_inserted_nary = last_inserted_nary->next)
    {
      vn_nary_op_t *slot;
      slot = valid_info->nary->find_slot_with_hash
	(last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
      /* Predication causes the need to restore previous state.  */
      if ((*slot)->unwind_to)
	*slot = (*slot)->unwind_to;
      else
	valid_info->nary->clear_slot (slot);
    }
  for (; last_inserted_phi != to->phi_top;
       last_inserted_phi = last_inserted_phi->next)
    {
      vn_phi_t *slot;
      slot = valid_info->phis->find_slot_with_hash
	(last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
      valid_info->phis->clear_slot (slot);
    }
  for (; last_inserted_ref != to->ref_top;
       last_inserted_ref = last_inserted_ref->next)
    {
      vn_reference_t *slot;
      slot = valid_info->references->find_slot_with_hash
	(last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
      (*slot)->operands.release ();
      valid_info->references->clear_slot (slot);
    }
  obstack_free (&vn_tables_obstack, to->ob_top);

  /* Prune [rpo_idx, ] from avail.  */
  /* ??? This is O(number-of-values-in-region) which is
     O(region-size) rather than O(iteration-piece).  */
  for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
       i != vn_ssa_aux_hash->end (); ++i)
    {
      while ((*i)->avail)
	{
	  if (bb_to_rpo[(*i)->avail->location] < rpo_idx)
	    break;
	  vn_avail *av = (*i)->avail;
	  (*i)->avail = (*i)->avail->next;
	  av->next = avail.m_avail_freelist;
	  avail.m_avail_freelist = av;
	}
    }
}
/* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
   If ITERATE is true then treat backedges optimistically as not
   executed and iterate.  If ELIMINATE is true then perform
   elimination, otherwise leave that to the caller.  */

static unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
	   bool iterate, bool eliminate)
{
  unsigned todo = 0;

  /* We currently do not support region-based iteration when
     elimination is requested.  */
  gcc_assert (!entry || !iterate || !eliminate);
  /* When iterating we need loop info up-to-date.  */
  gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));

  bool do_region = entry != NULL;
  if (!do_region)
    {
      entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
      exit_bbs = BITMAP_ALLOC (NULL);
      bitmap_set_bit (exit_bbs, EXIT_BLOCK);
    }
  /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
     re-mark those that are contained in the region.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, entry->dest->preds)
    e->flags &= ~EDGE_DFS_BACK;

  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
  int n = rev_post_order_and_mark_dfs_back_seme
    (fn, entry, exit_bbs, !loops_state_satisfies_p (LOOPS_NEED_FIXUP), rpo);
  /* rev_post_order_and_mark_dfs_back_seme fills RPO in reverse order.  */
  for (int i = 0; i < n / 2; ++i)
    std::swap (rpo[i], rpo[n-i-1]);

  if (!do_region)
    BITMAP_FREE (exit_bbs);

  /* If there are any non-DFS_BACK edges into entry->dest skip
     processing PHI nodes for that block.  This supports
     value-numbering loop bodies w/o the actual loop.  */
  FOR_EACH_EDGE (e, ei, entry->dest->preds)
    if (e != entry
	&& !(e->flags & EDGE_DFS_BACK))
      break;
  bool skip_entry_phis = e != NULL;
  if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Region does not contain all edges into "
	     "the entry block, skipping its PHIs.\n");

  int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
  for (int i = 0; i < n; ++i)
    bb_to_rpo[rpo[i]] = i;
  unwind_state *rpo_state = XNEWVEC (unwind_state, n);

  rpo_elim avail (entry->dest);
  rpo_avail = &avail;

  /* Verify we have no extra entries into the region.  */
  if (flag_checking && do_region)
    {
      auto_bb_flag bb_in_region (fn);
      for (int i = 0; i < n; ++i)
	{
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
	  bb->flags |= bb_in_region;
	}
      /* We can't merge the first two loops because we cannot rely
	 on EDGE_DFS_BACK for edges not within the region.  But if
	 we decide to always have the bb_in_region flag we can
	 do the checking during the RPO walk itself (but then it's
	 also easy to handle MEME conservatively).  */
      for (int i = 0; i < n; ++i)
	{
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
	  edge e;
	  edge_iterator ei;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    gcc_assert (e == entry
			|| (skip_entry_phis && bb == entry->dest)
			|| (e->src->flags & bb_in_region));
	}
      for (int i = 0; i < n; ++i)
	{
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
	  bb->flags &= ~bb_in_region;
	}
    }
  /* Create the VN state.  For the initial size of the various hashtables
     use a heuristic based on region size and number of SSA names.  */
  unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
			  / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
  next_value_id = 1;

  vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  gcc_obstack_init (&vn_tables_obstack);
  gcc_obstack_init (&vn_tables_insert_obstack);
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info, region_size);
  last_inserted_ref = NULL;
  last_inserted_phi = NULL;
  last_inserted_nary = NULL;

  vn_valueize = rpo_vn_valueize;
  /* Initialize the unwind state and edge/BB executable state.  */
  bool need_max_rpo_iterate = false;
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      rpo_state[i].visited = 0;
      rpo_state[i].max_rpo = i;
      bb->flags &= ~BB_EXECUTABLE;
      bool has_backedges = false;
      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->flags & EDGE_DFS_BACK)
	    has_backedges = true;
	  e->flags &= ~EDGE_EXECUTABLE;
	  if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
	    continue;
	  if (bb_to_rpo[e->src->index] > i)
	    {
	      rpo_state[i].max_rpo = MAX (rpo_state[i].max_rpo,
					  bb_to_rpo[e->src->index]);
	      need_max_rpo_iterate = true;
	    }
	  else
	    rpo_state[i].max_rpo
	      = MAX (rpo_state[i].max_rpo,
		     rpo_state[bb_to_rpo[e->src->index]].max_rpo);
	}
      rpo_state[i].iterate = iterate && has_backedges;
    }
  entry->flags |= EDGE_EXECUTABLE;
  entry->dest->flags |= BB_EXECUTABLE;
  /* When there are irreducible regions the simplistic max_rpo computation
     above for the case of backedges doesn't work and we need to iterate
     until there are no more changes.  */
  unsigned nit = 0;
  while (need_max_rpo_iterate)
    {
      nit++;
      need_max_rpo_iterate = false;
      for (int i = 0; i < n; ++i)
	{
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
	  edge e;
	  edge_iterator ei;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    {
	      if (e == entry || (skip_entry_phis && bb == entry->dest))
		continue;
	      int max_rpo = MAX (rpo_state[i].max_rpo,
				 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
	      if (rpo_state[i].max_rpo != max_rpo)
		{
		  rpo_state[i].max_rpo = max_rpo;
		  need_max_rpo_iterate = true;
		}
	    }
	}
    }
  statistics_histogram_event (cfun, "RPO max_rpo iterations", nit);
  /* As heuristic to improve compile-time we handle only the N innermost
     loops and the outermost one optimistically.  */
  if (iterate)
    {
      loop_p loop;
      unsigned max_depth = param_rpo_vn_max_loop_depth;
      FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
	if (loop_depth (loop) > max_depth)
	  for (unsigned i = 2;
	       i < loop_depth (loop) - max_depth; ++i)
	    {
	      basic_block header = superloop_at_depth (loop, i)->header;
	      bool non_latch_backedge = false;
	      edge e;
	      edge_iterator ei;
	      FOR_EACH_EDGE (e, ei, header->preds)
		if (e->flags & EDGE_DFS_BACK)
		  {
		    /* There can be a non-latch backedge into the header
		       which is part of an outer irreducible region.  We
		       cannot avoid iterating this block then.  */
		    if (!dominated_by_p (CDI_DOMINATORS,
					 e->src, e->dest))
		      {
			if (dump_file && (dump_flags & TDF_DETAILS))
			  fprintf (dump_file, "non-latch backedge %d -> %d "
				   "forces iteration of loop %d\n",
				   e->src->index, e->dest->index, loop->num);
			non_latch_backedge = true;
		      }
		    else
		      e->flags |= EDGE_EXECUTABLE;
		  }
	      rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
	    }
    }
  uint64_t nblk = 0;
  int idx = 0;
  if (iterate)
    /* Go and process all blocks, iterating as necessary.  */
    do
      {
	basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);

	/* If the block has incoming backedges remember unwind state.  This
	   is required even for non-executable blocks since in irreducible
	   regions we might reach them via the backedge and re-start iterating
	   from there.
	   Note we can individually mark blocks with incoming backedges to
	   not iterate where we then handle PHIs conservatively.  We do that
	   heuristically to reduce compile-time for degenerate cases.  */
	if (rpo_state[idx].iterate)
	  {
	    rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
	    rpo_state[idx].ref_top = last_inserted_ref;
	    rpo_state[idx].phi_top = last_inserted_phi;
	    rpo_state[idx].nary_top = last_inserted_nary;
	  }

	if (!(bb->flags & BB_EXECUTABLE))
	  {
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Block %d: BB%d found not executable\n",
		       idx, bb->index);
	    idx++;
	    continue;
	  }

	if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
	nblk++;
	todo |= process_bb (avail, bb,
			    rpo_state[idx].visited != 0,
			    rpo_state[idx].iterate,
			    iterate, eliminate, do_region, exit_bbs, false);
	rpo_state[idx].visited++;

	/* Verify if changed values flow over executable outgoing backedges
	   and those change destination PHI values (that's the thing we
	   can easily verify).  Reduce over all such edges to the farthest
	   away PHI.  */
	int iterate_to = -1;
	edge_iterator ei;
	edge e;
	FOR_EACH_EDGE (e, ei, bb->succs)
	  if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
	      == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
	      && rpo_state[bb_to_rpo[e->dest->index]].iterate)
	    {
	      int destidx = bb_to_rpo[e->dest->index];
	      if (!rpo_state[destidx].visited)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Unvisited destination %d\n",
			     e->dest->index);
		  if (iterate_to == -1 || destidx < iterate_to)
		    iterate_to = destidx;
		  continue;
		}
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "Looking for changed values of backedge"
			 " %d->%d destination PHIs\n",
			 e->src->index, e->dest->index);
	      vn_context_bb = e->dest;
	      gphi_iterator gsi;
	      for (gsi = gsi_start_phis (e->dest);
		   !gsi_end_p (gsi); gsi_next (&gsi))
		{
		  bool inserted = false;
		  /* While we'd ideally just iterate on value changes
		     we CSE PHIs and do that even across basic-block
		     boundaries.  So even hashtable state changes can
		     be important (which is roughly equivalent to
		     PHI argument value changes).  To not excessively
		     iterate because of that we track whether a PHI
		     was CSEd to with GF_PLF_1.  */
		  bool phival_changed;
		  if ((phival_changed = visit_phi (gsi.phi (),
						   &inserted, false))
		      || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
		    {
		      if (!phival_changed
			  && dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file, "PHI was CSEd and hashtable "
				 "state (changed)\n");
		      if (iterate_to == -1 || destidx < iterate_to)
			iterate_to = destidx;
		      break;
		    }
		}
	      vn_context_bb = NULL;
	    }
	if (iterate_to != -1)
	  {
	    do_unwind (&rpo_state[iterate_to], iterate_to, avail, bb_to_rpo);
	    idx = iterate_to;
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Iterating to %d BB%d\n",
		       iterate_to, rpo[iterate_to]);
	    continue;
	  }

	idx++;
      }
    while (idx < n);
  else /* !iterate */
    {
      /* Process all blocks greedily with a worklist that enforces RPO
	 processing of reachable blocks.  */
      auto_bitmap worklist;
      bitmap_set_bit (worklist, 0);
      while (!bitmap_empty_p (worklist))
	{
	  int idx = bitmap_first_set_bit (worklist);
	  bitmap_clear_bit (worklist, idx);
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
	  gcc_assert ((bb->flags & BB_EXECUTABLE)
		      && !rpo_state[idx].visited);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);

	  /* When we run into predecessor edges whose executable state
	     we cannot trust, mark them executable so PHI processing will
	     be conservative.
	     ??? Do we need to force arguments flowing over that edge
	     to be varying or will they even always be?  */
	  edge_iterator ei;
	  edge e;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (!(e->flags & EDGE_EXECUTABLE)
		&& (bb == entry->dest
		    || (!rpo_state[bb_to_rpo[e->src->index]].visited
			&& (rpo_state[bb_to_rpo[e->src->index]].max_rpo
			    >= (int)idx))))
	      {
		if (dump_file && (dump_flags & TDF_DETAILS))
		  fprintf (dump_file, "Cannot trust state of predecessor "
			   "edge %d -> %d, marking executable\n",
			   e->src->index, e->dest->index);
		e->flags |= EDGE_EXECUTABLE;
	      }

	  nblk++;
	  todo |= process_bb (avail, bb, false, false, false, eliminate,
			      do_region, exit_bbs,
			      skip_entry_phis && bb == entry->dest);
	  rpo_state[idx].visited++;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if ((e->flags & EDGE_EXECUTABLE)
		&& e->dest->index != EXIT_BLOCK
		&& (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
		&& !rpo_state[bb_to_rpo[e->dest->index]].visited)
	      bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
	}
    }
  /* If statistics or dump file active.  */
  int nex = 0;
  unsigned max_visited = 1;
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      if (bb->flags & BB_EXECUTABLE)
	nex++;
      statistics_histogram_event (cfun, "RPO block visited times",
				  rpo_state[i].visited);
      if (rpo_state[i].visited > max_visited)
	max_visited = rpo_state[i].visited;
    }
  unsigned nvalues = 0, navail = 0;
  for (hash_table <vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
       i != vn_ssa_aux_hash->end (); ++i)
    {
      nvalues++;
      vn_avail *av = (*i)->avail;
      while (av)
	{
	  navail++;
	  av = av->next;
	}
    }
  statistics_counter_event (cfun, "RPO blocks", n);
  statistics_counter_event (cfun, "RPO blocks visited", nblk);
  statistics_counter_event (cfun, "RPO blocks executable", nex);
  statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
  statistics_histogram_event (cfun, "RPO num values", nvalues);
  statistics_histogram_event (cfun, "RPO num avail", navail);
  statistics_histogram_event (cfun, "RPO num lattice",
			      vn_ssa_aux_hash->elements ());
  if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
    {
      fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
	       " blocks in total discovering %d executable blocks iterating "
	       "%d.%d times, a block was visited max. %u times\n",
	       n, nblk, nex,
	       (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
	       max_visited);
      fprintf (dump_file, "RPO tracked %d values available at %d locations "
	       "and %" PRIu64 " lattice elements\n",
	       nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
    }
  if (eliminate)
    {
      /* When !iterate we already performed elimination during the RPO
	 walk.  */
      if (iterate)
	{
	  /* Elimination for region-based VN needs to be done within the
	     RPO walk.  */
	  gcc_assert (! do_region);
	  /* Note we can't use avail.walk here because that gets confused
	     by the existing availability and it will be less efficient
	     overall.  */
	  todo |= eliminate_with_rpo_vn (NULL);
	}
      else
	todo |= avail.eliminate_cleanup (do_region);
    }

  vn_valueize = NULL;
  rpo_avail = NULL;

  XDELETEVEC (bb_to_rpo);
  XDELETEVEC (rpo);
  XDELETEVEC (rpo_state);

  return todo;
}
/* Region-based entry for RPO VN.  Performs value-numbering and elimination
   on the SEME region specified by ENTRY and EXIT_BBS.  If ENTRY is not
   the only edge into the region, PHI nodes in ENTRY->dest
   are not considered.  */

unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
{
  default_vn_walk_kind = VN_WALKREWRITE;
  unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
  free_rpo_vn ();
  return todo;
}
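/* A minimal usage sketch (hypothetical caller, not from this file):
   value-number just a loop body by using the preheader edge as entry
   and the exit destinations as region exits:

     vec<edge> exits = get_loop_exit_edges (loop);
     auto_bitmap exit_bbs;
     edge exit;
     unsigned i;
     FOR_EACH_VEC_ELT (exits, i, exit)
       bitmap_set_bit (exit_bbs, exit->dest->index);
     exits.release ();
     todo |= do_rpo_vn (cfun, loop_preheader_edge (loop), exit_bbs);
*/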
namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      may_iterate = param;
    }
  virtual bool gate (function *)
    {
      return flag_tree_fre != 0 && (may_iterate || optimize > 1);
    }
  virtual unsigned int execute (function *);

private:
  bool may_iterate;
}; // class pass_fre
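/* The pass is instantiated more than once from passes.def and the bool
   parameter routed through set_pass_param selects between them, e.g.
   an entry along the lines of

     NEXT_PASS (pass_fre, true);

   for the early, iterating FRE and a 'false' instance for the cheap
   late FRE (illustrative; see passes.def for the actual entries).  */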
unsigned int
pass_fre::execute (function *fun)
{
  unsigned todo = 0;

  /* At -O[1g] use the cheap non-iterating mode.  */
  bool iterate_p = may_iterate && (optimize > 1);
  calculate_dominance_info (CDI_DOMINATORS);
  if (iterate_p)
    loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  default_vn_walk_kind = VN_WALKREWRITE;
  todo = do_rpo_vn (fun, NULL, NULL, iterate_p, true);
  free_rpo_vn ();

  if (iterate_p)
    loop_optimizer_finalize ();

  /* For late FRE after IVOPTs and unrolling, see if we can
     remove some TREE_ADDRESSABLE and rewrite stuff into SSA.  */
  if (!may_iterate)
    todo |= TODO_update_address_taken;

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}

#undef BB_EXECUTABLE