/* Alias analysis for trees.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "langhooks.h"
#include "ipa-reference.h"
/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
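
/* A minimal usage sketch (illustrative only, not part of the API
   documentation above): a pass that wants to move a load REF across a
   potentially clobbering statement STMT would typically ask

     if (!stmt_may_clobber_ref_p (stmt, ref)
	 && !ref_maybe_used_by_stmt_p (stmt, ref))
       ... STMT neither writes nor reads the memory, reordering is safe ...

   and similarly refs_may_alias_p (ref1, ref2) to disambiguate two
   reference trees directly.  */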
static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
static bool nonoverlapping_component_refs_p (const_tree, const_tree);
/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
} alias_stats;
void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  fprintf (s, "  nonoverlapping_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_component_refs_p_no_alias,
	   alias_stats.nonoverlapping_component_refs_p_no_alias
	   + alias_stats.nonoverlapping_component_refs_p_may_alias);
  fprintf (s, "  nonoverlapping_refs_since_match_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" must overlaps, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias,
	   alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_may_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
  fprintf (s, "  aliasing_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.aliasing_component_refs_p_no_alias,
	   alias_stats.aliasing_component_refs_p_no_alias
	   + alias_stats.aliasing_component_refs_p_may_alias);
  dump_alias_stats_in_alias_c (s);
}
/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here that may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}
/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}
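
/* An illustrative sketch of the points-to based disambiguation above
   (hypothetical names): given

     int a, b;
     int *p_1 = &a;

   the flow-insensitive points-to set of p_1 is { a }, so
   ptr_deref_may_alias_decl_p (p_1, b) returns false via
   pt_solution_includes, while the same query against 'a' stays true.  */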
/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two identical
     pointers.  In this case we still want to say both pointers alias, so
     shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
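
/* An illustrative sketch of the intersection test above: with

     int a, b;
     int *p_1 = &a, *q_2 = &b;

   the points-to sets { a } and { b } do not intersect, so
   ptr_derefs_may_alias_p (p_1, q_2) is false and the accesses *p_1
   and *q_2 may be reordered freely.  */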
/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}
/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to a SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which need points-to adjustments to track them
     in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (!tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (!tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that restrict-pointed-to
	 may be in fact obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */

  return false;
}
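
/* An illustrative sketch (hypothetical caller): with

     static int a;
     int *p_1 = foo ();

   a caller folding the comparison p_1 == &a can use this predicate;
   when the points-to set of p_1 provably excludes 'a', and 'a' binds
   to the current definition with a nonzero address, the comparison
   folds to false.  */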
/* Returns whether reference REF to BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}
/* Return true whether STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}
/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  tree var;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}

/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  || pt->vars_contains_escaped
	  || pt->vars_contains_escaped_heap
	  || pt->vars_contains_restrict)
	{
	  const char *comma = "";
	  fprintf (file, " (");
	  if (pt->vars_contains_nonlocal)
	    {
	      fprintf (file, "nonlocal");
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped)
	    {
	      fprintf (file, "%sescaped", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped_heap)
	    {
	      fprintf (file, "%sescaped heap", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_restrict)
	    {
	      fprintf (file, "%srestrict", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_interposable)
	    fprintf (file, "%sinterposable", comma);
	  fprintf (file, ")");
	}
    }
}
/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}

/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}
/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}
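
/* A typical (sketched) use of the representation: initialize it once
   and let the accessors below fill the lazy fields on demand.

     ao_ref r;
     ao_ref_init (&r, ref);
     tree base = ao_ref_base (&r);
     alias_set_type set = ao_ref_alias_set (&r);

   Repeated oracle queries against R then reuse the cached base,
   offset, size and alias-set information.  */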
/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}
/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}
/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
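
/* Sketch of intended use (hypothetical caller code): for a builtin
   call like memset (p, 0, n) the written memory can be modeled as

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, gimple_call_arg (stmt, 0),
				    gimple_call_arg (stmt, 2));

   which yields an ao_ref with unknown size whenever N is not a
   compile-time constant.  */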
/* S1 and S2 are TYPE_SIZE or DECL_SIZE.  Compare them:
   Return -1 if S1 < S2
   Return 1 if S1 > S2
   Return 0 if equal or incomparable.  */

static int
compare_sizes (tree s1, tree s2)
{
  if (!s1 || !s2)
    return 0;

  poly_uint64 size1;
  poly_uint64 size2;

  if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
    return 0;
  if (known_lt (size1, size2))
    return -1;
  if (known_lt (size2, size1))
    return 1;
  return 0;
}
/* Compare TYPE1 and TYPE2 by their size.
   Return -1 if size of TYPE1 < size of TYPE2
   Return 1 if size of TYPE1 > size of TYPE2
   Return 0 if types are of equal sizes or we cannot compare them.  */

static int
compare_type_sizes (tree type1, tree type2)
{
  /* Be conservative for arrays and vectors.  We want to support partial
     overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c.  */
  while (TREE_CODE (type1) == ARRAY_TYPE
	 || TREE_CODE (type1) == VECTOR_TYPE)
    type1 = TREE_TYPE (type1);
  while (TREE_CODE (type2) == ARRAY_TYPE
	 || TREE_CODE (type2) == VECTOR_TYPE)
    type2 = TREE_TYPE (type2);
  return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
}
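
/* Illustrative sketch: for

     int a[3];
     short b[8];

   both sides are reduced to their element types, so TYPE_SIZE (int)
   is compared against TYPE_SIZE (short) and the result is 1, even
   though 'b' as a whole is larger than 'a'.  This conservatism is
   what lets callers support partial overlap of arrays with equal
   element size.  */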
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* Handle the most common case first.  */
  if (type1 == type2)
    return 1;

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
/* Return true if TYPE is a composite type (i.e. we may apply one of handled
   components on it).  */

static bool
type_has_components_p (tree type)
{
  return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
	 || TREE_CODE (type) == COMPLEX_TYPE;
}
/* MATCH1 and MATCH2 which are part of access path of REF1 and REF2
   respectively are either pointing to same address or are completely
   disjoint.  If PARTIAL_OVERLAP is true, assume that outermost arrays may
   actually overlap.

   Try to disambiguate using the access path starting from the match
   and return false if there is no conflict.

   Helper for aliasing_component_refs_p.  */

static bool
aliasing_matching_component_refs_p (tree match1, tree ref1,
				    poly_int64 offset1, poly_int64 max_size1,
				    tree match2, tree ref2,
				    poly_int64 offset2, poly_int64 max_size2,
				    bool partial_overlap)
{
  poly_int64 offadj, sztmp, msztmp;
  bool reverse;

  if (!partial_overlap)
    {
      get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	{
	  ++alias_stats.aliasing_component_refs_p_no_alias;
	  return false;
	}
    }

  int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
					       partial_overlap);
  if (cmp == 1
      || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
    {
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }
  ++alias_stats.aliasing_component_refs_p_may_alias;
  return true;
}
/* Return true if REF is reference to zero sized trailing array.  I.e.

     struct foo {int bar; int array[0];} *fooptr;
     fooptr->array;  */

static bool
component_ref_to_zero_sized_trailing_array_p (tree ref)
{
  return (TREE_CODE (ref) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
	      || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
	  && array_at_struct_end_p (ref));
}
/* Worker for aliasing_component_refs_p.  Most parameters match parameters of
   aliasing_component_refs_p.

   Walk access path REF2 and try to find type matching TYPE1
   (which is a start of possibly aliasing access path REF1).
   If match is found, try to disambiguate.

   Return 0 for successful disambiguation.
   Return 1 if match was found but disambiguation failed.
   Return -1 if there is no match.
   In this case MAYBE_MATCH is set to false if there is no type matching
   TYPE1 in the access path of REF2 and to true if we are not sure.  */

static int
aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
			      poly_int64 offset1, poly_int64 max_size1,
			      tree end_struct_ref1,
			      tree ref2, tree base2,
			      poly_int64 offset2, poly_int64 max_size2,
			      bool *maybe_match)
{
  tree ref = ref2;
  int same_p = 0;

  while (true)
    {
      /* We walk from inner type to the outer types.  If type we see is
	 already too large to be part of type1, terminate the search.  */
      int cmp = compare_type_sizes (type1, TREE_TYPE (ref));

      if (cmp < 0
	  && (!end_struct_ref1
	      || compare_type_sizes (TREE_TYPE (end_struct_ref1),
				     TREE_TYPE (ref)) < 0))
	break;
      /* If types may be of same size, see if we can decide about their
	 equality.  */
      if (cmp == 0)
	{
	  same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
	  if (same_p == 1)
	    break;
	  /* In case we can't decide whether types are same try to
	     continue looking for the exact match.
	     Remember however that we possibly saw a match
	     to bypass the access path continuations tests we do later.  */
	  if (same_p == -1)
	    *maybe_match = true;
	}
      if (!handled_component_p (ref))
	break;
      ref = TREE_OPERAND (ref, 0);
    }
  if (same_p == 1)
    {
      bool partial_overlap = false;

      /* We assume that arrays can overlap by multiple of their elements
	 size as tested in gcc.dg/torture/alias-2.c.
	 This partial overlap happen only when both arrays are bases of
	 the access and not contained within another component ref.
	 To be safe we also assume partial overlap for VLAs.  */
      if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (base1))
	      || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
	      || ref == base2))
	{
	  /* Setting maybe_match to true triggers
	     nonoverlapping_component_refs_p test later that still may do
	     useful disambiguation.  */
	  *maybe_match = true;
	  partial_overlap = true;
	}
      return aliasing_matching_component_refs_p (base1, ref1,
						 offset1, max_size1,
						 ref, ref2,
						 offset2, max_size2,
						 partial_overlap);
    }
  return -1;
}
/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   poly_int64 offset2, poly_int64 max_size2)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for instance

       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;

     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  bool maybe_match = false;
  tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    {
      /* Generally access paths are monotone in the size of object.  The
	 exception are trailing arrays of structures.  I.e.

	   struct a {int array[0];};

	 or

	   struct a {int array1[0]; int array[];};

	 Such struct has size 0 but accesses to a.array may have non-zero size.
	 In this case the size of TREE_TYPE (base1) is smaller than
	 size of TREE_TYPE (TREE_OPERAND (base1, 0)).

	 Because we compare sizes of arrays just by sizes of their elements,
	 we only need to care about zero sized array fields here.  */
      if (component_ref_to_zero_sized_trailing_array_p (base1))
	{
	  gcc_checking_assert (!end_struct_ref1);
	  end_struct_ref1 = base1;
	}
      if (TREE_CODE (base1) == VIEW_CONVERT_EXPR
	  || TREE_CODE (base1) == BIT_FIELD_REF)
	ref1 = TREE_OPERAND (base1, 0);
      base1 = TREE_OPERAND (base1, 0);
    }
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    {
      if (component_ref_to_zero_sized_trailing_array_p (base2))
	{
	  gcc_checking_assert (!end_struct_ref2);
	  end_struct_ref2 = base2;
	}
      if (TREE_CODE (base2) == VIEW_CONVERT_EXPR
	  || TREE_CODE (base2) == BIT_FIELD_REF)
	ref2 = TREE_OPERAND (base2, 0);
      base2 = TREE_OPERAND (base2, 0);
    }
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.
     This however only makes sense if type2 is big enough to hold type1.  */
  int cmp_outer = compare_type_sizes (type2, type1);

  /* If type2 is big enough to contain type1 walk its access path.
     We also need to care of arrays at the end of structs that may extend
     beyond the end of structure.  */
  if (cmp_outer >= 0
      || (end_struct_ref2
	  && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
    {
      int res = aliasing_component_refs_walk (ref1, type1, base1,
					      offset1, max_size1,
					      end_struct_ref1,
					      ref2, base2, offset2, max_size2,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* If we didn't find a common base, try the other way around.  */
  if (cmp_outer <= 0
      || (end_struct_ref1
	  && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0))
    {
      int res = aliasing_component_refs_walk (ref2, type2, base2,
					      offset2, max_size2,
					      end_struct_ref2,
					      ref1, base1, offset1, max_size1,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* In the following code we make an assumption that the types in access
     paths do not overlap and thus accesses alias only if one path can be
     continuation of another.  If we were not able to decide about equivalence,
     we need to give up.  */
  if (maybe_match)
    {
      if (!nonoverlapping_component_refs_p (ref1, ref2))
	{
	  ++alias_stats.aliasing_component_refs_p_may_alias;
	  return true;
	}
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (compare_type_sizes (TREE_TYPE (ref2), type1) >= 0
      && (!end_struct_ref1
	  || compare_type_sizes (TREE_TYPE (ref2),
				 TREE_TYPE (end_struct_ref1)) >= 0)
      && type_has_components_p (TREE_TYPE (ref2))
      && (base1_alias_set == ref2_alias_set
	  || alias_set_subset_of (base1_alias_set, ref2_alias_set)))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }

  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (compare_type_sizes (TREE_TYPE (ref1), type2) >= 0
      && (!end_struct_ref2
	  || compare_type_sizes (TREE_TYPE (ref1),
				 TREE_TYPE (end_struct_ref2)) >= 0)
      && type_has_components_p (TREE_TYPE (ref1))
      && (base2_alias_set == ref1_alias_set
	  || alias_set_subset_of (base2_alias_set, ref1_alias_set)))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  ++alias_stats.aliasing_component_refs_p_no_alias;
  return false;
}
/* FIELD1 and FIELD2 are two fields of component refs.  We assume
   that bases of both component refs are either equivalent or nonoverlapping.
   We do not assume that the containers of FIELD1 and FIELD2 are of the
   same type or size.

   Return 0 in case the base addresses of the component refs being the same
   implies that FIELD1 and FIELD2 have the same address.  Note that FIELD1
   and FIELD2 may not be of same type or size.

   Return 1 if FIELD1 and FIELD2 are non-overlapping.

   Return -1 otherwise.

   Main difference between 0 and -1 is to let
   nonoverlapping_refs_since_match_p discover the semantically
   equivalent part of the access path.

   Note that this function is used even with -fno-strict-aliasing
   and makes use of no TBAA assumptions.  */

static int
nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
{
  /* If both fields are of the same type, we could save hard work of
     comparing offsets.  */
  tree type1 = DECL_CONTEXT (field1);
  tree type2 = DECL_CONTEXT (field2);

  if (TREE_CODE (type1) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field1))
    field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
  if (TREE_CODE (type2) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field2))
    field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);

  /* ??? Bitfields can overlap at RTL level so punt on them.
     FIXME: RTL expansion should be fixed by adjusting the access path
     when producing MEM_ATTRs for MEMs which are wider than
     the bitfields similarly as done in set_mem_attrs_minus_bitpos.  */
  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
    return -1;

  /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE.  */
  if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
    return field1 != field2;

  /* In the common case the offsets and bit offsets will be the same.
     However if frontends do not agree on the alignment, they may be
     different even if they actually represent the same address.
     Try the common case first and if that fails calculate the
     actual bit offset.  */
  if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
			  DECL_FIELD_OFFSET (field2))
      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
			     DECL_FIELD_BIT_OFFSET (field2)))
    return 0;

  /* Note that it may be possible to use component_ref_field_offset
     which would provide offsets as trees.  However constructing and folding
     trees is expensive and does not seem to be worth the compile time
     cost.  */

  poly_uint64 offset1, offset2;
  poly_uint64 bit_offset1, bit_offset2;

  if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
      && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
    {
      offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
      offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;

      if (known_eq (offset1, offset2))
	return 0;

      poly_uint64 size1, size2;

      if (poly_int_tree_p (DECL_SIZE (field1), &size1)
	  && poly_int_tree_p (DECL_SIZE (field2), &size2)
	  && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
	return 1;
    }
  /* Resort to slower overlap checking by looking for matching types in
     the middle of access path.  */
  return -1;
}
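
/* Illustrative sketch: for

     struct S { int a; int b; } s;

   the FIELD_DECLs of s.a and s.b share the same RECORD_TYPE, so the
   function returns 1 (disjoint) without comparing offsets, while for
   two accesses to s.a it returns 0 (same address).  */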
/* Return low bound of array.  Do not produce new trees
   and thus do not care about particular type of integer constant
   and placeholder exprs.  */

static tree
cheap_array_ref_low_bound (tree ref)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));

  /* Avoid expensive array_ref_low_bound.
     The low bound is either stored in operand 2, or it is TYPE_MIN_VALUE
     of the domain type or it is zero.  */
  if (TREE_OPERAND (ref, 2))
    return TREE_OPERAND (ref, 2);
  else if (domain_type && TYPE_MIN_VALUE (domain_type))
    return TYPE_MIN_VALUE (domain_type);
  else
    return integer_zero_node;
}
/* REF1 and REF2 are ARRAY_REFs with either same base address or which are
   completely disjoint.

   Return 1 if the refs are non-overlapping.
   Return 0 if they are possibly overlapping but if so the overlap again
   starts on the same address.
   Return -1 otherwise.  */

static int
nonoverlapping_array_refs_p (tree ref1, tree ref2)
{
  tree index1 = TREE_OPERAND (ref1, 1);
  tree index2 = TREE_OPERAND (ref2, 1);
  tree low_bound1 = cheap_array_ref_low_bound (ref1);
  tree low_bound2 = cheap_array_ref_low_bound (ref2);

  /* Handle zero offsets first: we do not need to match type size in this
     case.  */
  if (operand_equal_p (index1, low_bound1, 0)
      && operand_equal_p (index2, low_bound2, 0))
    return 0;

  /* If type sizes are different, give up.

     Avoid expensive array_ref_element_size.
     If operand 3 is present it denotes size in the alignment units.
     Otherwise size is TYPE_SIZE of the element type.
     Handle only common cases where types are of the same "kind".  */
  if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
    return -1;

  tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
  tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));

  if (TREE_OPERAND (ref1, 3))
    {
      if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
	  || !operand_equal_p (TREE_OPERAND (ref1, 3),
			       TREE_OPERAND (ref2, 3), 0))
	return -1;
    }
  else
    {
      if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
			    TYPE_SIZE_UNIT (elmt_type2), 0))
	return -1;
    }

  /* Since we know that type sizes are the same, there is no need to return
     -1 after this point.  Partial overlap cannot be introduced.  */

  /* We may need to fold trees in this case.
     TODO: Handle integer constant case at least.  */
  if (!operand_equal_p (low_bound1, low_bound2, 0))
    return 0;

  if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
    {
      if (tree_int_cst_equal (index1, index2))
	return 0;
      return 1;
    }
  /* TODO: We can use VRP to further disambiguate here.  */
  return 0;
}
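
/* Illustrative sketch: for

     int a[10];

   the refs a[1] and a[2] have equal element sizes and differing
   constant indices, so the function returns 1 (disjoint), while
   a[1] vs a[1] returns 0 since any overlap starts at the same
   address.  */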
/* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
   MATCH2 either point to the same address or are disjoint.
   MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
   respectively or NULL in the case we established equivalence of bases.
   If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
   overlap by an exact multiple of their element size.

   This test works by matching the initial segment of the access path
   and does not rely on TBAA thus is safe for !flag_strict_aliasing if
   match was determined without use of TBAA oracle.

   Return 1 if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.

   Return 0 if paths are same and thus there is nothing to disambiguate more
   (i.e. there is must alias assuming there is must alias between MATCH1 and
   MATCH2).

   Return -1 if we cannot determine 0 or 1 - this happens when non-matching
   types were met in the path.
   In this case it may make sense to continue by other disambiguation
   oracles.  */

static int
nonoverlapping_refs_since_match_p (tree match1, tree ref1,
				   tree match2, tree ref2,
				   bool partial_overlap)
{
  /* Early return if there are no references to match, we do not need
     to walk the access paths.

     Do not consider this as may-alias for stats - it is more useful
     to have information how many disambiguations happened provided that
     the query was meaningful.  */

  if (match1 == ref1 || !handled_component_p (ref1)
      || match2 == ref2 || !handled_component_p (ref2))
    return -1;

  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1) && ref1 != match1)
    {
      if (TREE_CODE (ref1) == VIEW_CONVERT_EXPR
	  || TREE_CODE (ref1) == BIT_FIELD_REF)
	component_refs1.truncate (0);
      else
	component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2) && ref2 != match2)
    {
      if (TREE_CODE (ref2) == VIEW_CONVERT_EXPR
	  || TREE_CODE (ref2) == BIT_FIELD_REF)
	component_refs2.truncate (0);
      else
	component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }

  bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
  bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;

  /* If only one of the access paths starts with MEM_REF check that the
     offset is 0 so the addresses stay the same after stripping it.
     TODO: In this case we may walk the other access path until we get same
     offset.

     If both start with MEM_REF, offset has to be same.  */
  if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
      || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
      || (mem_ref1 && mem_ref2
	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
				  TREE_OPERAND (ref2, 1))))
    {
      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
      return -1;
    }

  /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
     to handle them here at all.  */
  gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
		       && TREE_CODE (ref2) != TARGET_MEM_REF);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  do
    {
      /* Track if we have seen an unmatched ref with non-zero offset.  In
	 this case we must look for partial overlaps.  */
      bool seen_unmatched_ref_p = false;

      /* First match ARRAY_REFs and try to disambiguate.  */
      if (!component_refs1.is_empty ()
	  && !component_refs2.is_empty ())
	{
	  unsigned int narray_refs1 = 0, narray_refs2 = 0;

	  /* We generally assume that both access paths start with the same
	     sequence of refs.  However if the number of array refs is not
	     in sync, try to recover and pop elts until the numbers match.
	     This helps the case where one access path starts with an array
	     and the other with an element.  */
	  for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
	       narray_refs1++)
	    if (TREE_CODE (component_refs1 [component_refs1.length()
					    - 1 - narray_refs1]) != ARRAY_REF)
	      break;

	  for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
	       narray_refs2++)
	    if (TREE_CODE (component_refs2 [component_refs2.length()
					    - 1 - narray_refs2]) != ARRAY_REF)
	      break;
	  for (; narray_refs1 > narray_refs2; narray_refs1--)
	    {
	      ref1 = component_refs1.pop ();
	      /* Track whether we possibly introduced partial overlap assuming
		 that innermost type sizes do not match.  This only can
		 happen if the offset introduced by the ARRAY_REF
		 is non-zero.  */
	      if (!operand_equal_p (TREE_OPERAND (ref1, 1),
				    cheap_array_ref_low_bound (ref1), 0))
		seen_unmatched_ref_p = true;
	    }
	  for (; narray_refs2 > narray_refs1; narray_refs2--)
	    {
	      ref2 = component_refs2.pop ();
	      if (!operand_equal_p (TREE_OPERAND (ref2, 1),
				    cheap_array_ref_low_bound (ref2), 0))
		seen_unmatched_ref_p = true;
	    }
	  /* Try to disambiguate matched arrays.  */
	  for (unsigned int i = 0; i < narray_refs1; i++)
	    {
	      int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
						     component_refs2.pop ());
	      if (cmp == 1 && !partial_overlap)
		{
		  ++alias_stats
		    .nonoverlapping_refs_since_match_p_no_alias;
		  return 1;
		}
	      partial_overlap = false;
	      if (cmp == -1)
		seen_unmatched_ref_p = true;
	    }
	}

      /* Next look for component_refs.  */
      do
	{
	  if (component_refs1.is_empty ())
	    {
	      ++alias_stats
		.nonoverlapping_refs_since_match_p_must_overlap;
	      return 0;
	    }
	  ref1 = component_refs1.pop ();
	  if (TREE_CODE (ref1) != COMPONENT_REF)
	    seen_unmatched_ref_p = true;
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    {
	      ++alias_stats
		.nonoverlapping_refs_since_match_p_must_overlap;
	      return 0;
	    }
	  ref2 = component_refs2.pop ();
	  if (TREE_CODE (ref2) != COMPONENT_REF)
	    seen_unmatched_ref_p = true;
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
	 earlier.  */
      gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
			   && TREE_CODE (ref2) == COMPONENT_REF);

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      partial_overlap = false;

      /* If we skipped array refs on type of different sizes, we can
	 no longer be sure that there are not partial overlaps.  */
      if (seen_unmatched_ref_p
	  && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_may_alias;
	  return -1;
	}

      int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
      if (cmp == -1)
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_may_alias;
	  return -1;
	}
      else if (cmp == 1)
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_no_alias;
	  return 1;
	}
    }
  while (!component_refs1.is_empty () && !component_refs2.is_empty ());

  /* Both access paths are fully matched.  */
  ++alias_stats.nonoverlapping_refs_since_match_p_must_overlap;
  return 0;
}
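
/* Illustrative sketch: with

     struct S { int a[10]; int b[10]; } *p;

   and MATCH1 == MATCH2 == the common base *p, the access paths of
   p->a[3] and p->b[4] are matched component-wise; the COMPONENT_REFs
   'a' and 'b' are disjoint fields of the same record, so the function
   returns 1 regardless of the array indices.  */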
/* Return TYPE_UID which can be used to match record types we consider
   same for TBAA purposes.  */

static unsigned int
ncr_type_uid (const_tree field)
{
  /* ??? We cannot simply use the type of operand #0 of the refs here
     as the Fortran compiler smuggles type punning into COMPONENT_REFs
     for common blocks instead of using unions like everyone else.  */
  tree type = DECL_FIELD_CONTEXT (field);
  /* With LTO types considered same_type_for_tbaa_p
     from different translation units may not have the same
     main variant.  They however have the same TYPE_CANONICAL.  */
  if (TYPE_CANONICAL (type))
    return TYPE_UID (TYPE_CANONICAL (type));
  return TYPE_UID (type);
}
/* qsort compare function to sort FIELD_DECLs after their
   DECL_FIELD_CONTEXT TYPE_UID.  */

static int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = ncr_type_uid (field1);
  unsigned int uid2 = ncr_type_uid (field2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}
/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  This relies on TBAA.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  /* Early return if we have nothing to do.

     Do not consider this as may-alias for stats - it is more useful
     to have information how many disambiguations happened provided that
     the query was meaningful.  */
  if (!flag_strict_aliasing
      || !x || !y
      || !handled_component_p (x)
      || !handled_component_p (y))
    return false;

  auto_vec<const_tree, 16> fieldsx;
  while (handled_component_p (x))
    {
      if (TREE_CODE (x) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (x, 1);
	  tree type = DECL_FIELD_CONTEXT (field);
	  if (TREE_CODE (type) == RECORD_TYPE)
	    fieldsx.safe_push (field);
	}
      else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
	       || TREE_CODE (x) == BIT_FIELD_REF)
	fieldsx.truncate (0);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    return false;
  auto_vec<const_tree, 16> fieldsy;
  while (handled_component_p (y))
    {
      if (TREE_CODE (y) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (y, 1);
	  tree type = DECL_FIELD_CONTEXT (field);
	  if (TREE_CODE (type) == RECORD_TYPE)
	    fieldsy.safe_push (TREE_OPERAND (y, 1));
	}
      else if (TREE_CODE (y) == VIEW_CONVERT_EXPR
	       || TREE_CODE (y) == BIT_FIELD_REF)
	fieldsy.truncate (0);
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    {
      ++alias_stats.nonoverlapping_component_refs_p_may_alias;
      return false;
    }

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    {
      if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
			      DECL_FIELD_CONTEXT (fieldsy[0])) == 1
	  && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1)
	{
	  ++alias_stats.nonoverlapping_component_refs_p_no_alias;
	  return true;
	}
      else
	{
	  ++alias_stats.nonoverlapping_component_refs_p_may_alias;
	  return false;
	}
    }

  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	std::swap (fieldsx[0], fieldsx[1]);
    }
  else
    fieldsx.qsort (ncr_compar);

  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	std::swap (fieldsy[0], fieldsy[1]);
    }
  else
    fieldsy.qsort (ncr_compar);

  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];

      /* We're left with accessing different fields of a structure,
	 no possible overlap.  */
      if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
			      DECL_FIELD_CONTEXT (fieldy)) == 1
	  && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1)
	{
	  ++alias_stats.nonoverlapping_component_refs_p_no_alias;
	  return true;
	}

      if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  ++alias_stats.nonoverlapping_component_refs_p_may_alias;
  return false;
}
/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       poly_int64 offset1, poly_int64 max_size1,
		       poly_int64 size1,
		       tree ref2, tree base2,
		       poly_int64 offset2, poly_int64 max_size2,
		       poly_int64 size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot alias.  */
  if (compare_base_decls (base1, base2) == 0)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* If there is must alias, there is no use disambiguating further.  */
  if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
    return true;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
    return false;

  return true;
}
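
/* Illustrative sketch: with

     struct S { int x; int y; } s, t;

   references based on 's' and 't' are disambiguated by
   compare_base_decls alone, while s.x vs s.y share a base and are
   disambiguated by the disjointness of their
   [offset, offset + max_size) ranges.  */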
/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       poly_int64 offset1, poly_int64 max_size1,
			       poly_int64 size1,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       poly_int64 offset2, poly_int64 max_size2,
			       poly_int64 size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);
  poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of a variable)
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0 || base2_alias_set == 0)
    return true;

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (/* ??? This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && compare_sizes (DECL_SIZE (base2),
			TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
    return false;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  poly_int64 doffset1 = offset1;
  poly_offset_int doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
      tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
      /* If second reference is view-converted, give up now.  */
      if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
	return true;
    }

  /* If first reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.

     But avoid treating variable length arrays as "objects", instead assume they
     can overlap by an exact multiple of their element size.
     See gcc.dg/torture/alias-2.c.  */
  if (((TREE_CODE (base1) != TARGET_MEM_REF
	|| (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
       && (TREE_CODE (dbase2) != TARGET_MEM_REF
	   || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    {
      bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
			      && (TYPE_SIZE (TREE_TYPE (base1))
				  && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
				     != INTEGER_CST));
      if (!partial_overlap
	  && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
	return false;
      if (!ref1 || !ref2
	  /* If there is must alias, there is no use disambiguating further.  */
	  || (!partial_overlap
	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
	return true;
      int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
						   partial_overlap);
      if (res == -1)
	return !nonoverlapping_component_refs_p (ref1, ref2);
      return !res;
    }

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2);

  return true;
}
/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   poly_int64 offset1, poly_int64 max_size1,
			   poly_int64 size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   poly_int64 offset2, poly_int64 max_size2,
			   poly_int64 size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
      poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
      if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
				   offset2 + moff2, max_size2))
	return false;
      /* If there is must alias, there is no use disambiguating further.  */
      if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
	return true;
      if (ref1 && ref2)
	{
	  int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
						       false);
	  if (res != -1)
	    return !res;
	}
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0
      || base2_alias_set == 0)
    return true;

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1)
    {
      /* But avoid treating arrays as "objects", instead assume they
	 can overlap by an exact multiple of their element size.
	 See gcc.dg/torture/alias-2.c.  */
      bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;

      if (!partial_overlap
	  && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	return false;
      if (!ref1 || !ref2
	  || (!partial_overlap
	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
	return true;
      int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
						   partial_overlap);
      if (res == -1)
	return !nonoverlapping_component_refs_p (ref1, ref2);
      return !res;
    }

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2, ref2_alias_set, base2_alias_set,
				      offset2, max_size2);

  return true;
}
/* Return true if the two memory references REF1 and REF2 may alias.  */

static bool
refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  poly_int64 offset1 = 0, offset2 = 0;
  poly_int64 max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
	*D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
				  ref1->size,
				  ref2->ref, base2, offset2, max_size2,
				  ref2->size);

  /* Handle restrict based accesses.
     ???  ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
  tree rbase1 = base1;
  tree rbase2 = base2;
  if (var1_p)
    {
      rbase1 = ref1->ref;
      if (rbase1)
	while (handled_component_p (rbase1))
	  rbase1 = TREE_OPERAND (rbase1, 0);
    }
  if (var2_p)
    {
      rbase2 = ref2->ref;
      if (rbase2)
	while (handled_component_p (rbase2))
	  rbase2 = TREE_OPERAND (rbase2, 0);
    }
  if (rbase1 && rbase2
      && (TREE_CODE (rbase1) == MEM_REF
	  || TREE_CODE (rbase1) == TARGET_MEM_REF)
      && (TREE_CODE (rbase2) == MEM_REF
	  || TREE_CODE (rbase2) == TARGET_MEM_REF)
      /* If the accesses are in the same restrict clique... */
      && MR_DEPENDENCE_CLIQUE (rbase1) == MR_DEPENDENCE_CLIQUE (rbase2)
      /* But based on different pointers they do not alias.  */
      && MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))
    return false;

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      std::swap (offset1, offset2);
      std::swap (max_size1, max_size2);
      std::swap (base1, base2);
      std::swap (ref1, ref2);
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* If the reference is based on a pointer that points to memory
     that may not be written to then the other reference cannot possibly
     clobber it.  */
  if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
       && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
      || (ind1_p
	  && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
	  && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2, ref2->size,
					  ao_ref_alias_set (ref2),
					  ao_ref_base_alias_set (ref2),
					  ref1->ref, base1,
					  offset1, max_size1, ref1->size,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1, ref1->size,
				      ao_ref_alias_set (ref1),
				      ao_ref_base_alias_set (ref1),
				      ref2->ref, base2,
				      offset2, max_size2, ref2->size,
				      ao_ref_alias_set (ref2),
				      ao_ref_base_alias_set (ref2),
				      tbaa_p);

  gcc_unreachable ();
}
/* Return true if the two memory references REF1 and REF2 may alias
   and update statistics.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}
bool
refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
{
  ao_ref r1;
  ao_ref_init (&r1, ref1);
  return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
}

bool
refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
{
  ao_ref r1, r2;

  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
}
/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;

  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;

  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}
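
/* Note (illustrative rationale): both dependence helpers pass
   tbaa_p == false.  Type-based disambiguation is only safe for the
   use-def (true dependence) direction; a store may legally change the
   dynamic type of the memory it writes, so write-after-read and
   write-after-write conflicts have to be detected even when the two
   access types would not conflict under TBAA.  */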
/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  */

static bool
ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const functions without a static chain do not implicitly use memory.  */
  if (!gimple_call_chain (call)
      && (flags & (ECF_CONST|ECF_NOVOPS)))
    goto process_args;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions read memory pointed to by
	   their second argument.  strcat/strncat additionally
	   reads memory pointed to by the first argument.  */
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 3)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 4)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following functions read memory pointed to by their
	   first argument.  */
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);

	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	case BUILT_IN_REALLOC:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 2)
	      size = gimple_call_arg (call, 1);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_INDEX:
	case BUILT_IN_STRCHR:
	case BUILT_IN_STRRCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument with size
	   in the third argument.  */
	case BUILT_IN_MEMCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   gimple_call_arg (call, 2));
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first and second arguments.  */
	case BUILT_IN_STRSTR:
	case BUILT_IN_STRPBRK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following builtins do not read from memory.  */
	case BUILT_IN_FREE:
	case BUILT_IN_MALLOC:
	case BUILT_IN_POSIX_MEMALIGN:
	case BUILT_IN_ALIGNED_ALLOC:
	case BUILT_IN_CALLOC:
	CASE_BUILT_IN_ALLOCA:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	case BUILT_IN_MEMSET_CHK:
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	case BUILT_IN_ASSUME_ALIGNED:
	case BUILT_IN_VA_END:
	  return false;
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;

	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not read
     by the function.  */
  if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap not_read;

      /* FIXME: Callee can be an OMP builtin that does not have a call graph
	 node yet.  We should enforce that there are nodes for all decls in the
	 IL and remove this check instead.  */
      if (node
	  && (not_read = ipa_reference_get_not_read_global (node))
	  && bitmap_bit_p (not_read, ipa_reference_var_uid (base)))
	goto process_args;
    }

  /* Check if the base variable is call-used.  */
  if (DECL_P (base))
    {
      if (pt_solution_includes (gimple_call_use_set (call), base))
	return true;
    }
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
	return true;
    }
  else
    return true;

  /* Inspect call arguments for passed-by-value aliases.  */
process_args:
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree op = gimple_call_arg (call, i);
      int flags = gimple_call_arg_flags (call, i);

      if (flags & EAF_UNUSED)
	continue;

      if (TREE_CODE (op) == WITH_SIZE_EXPR)
	op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) != SSA_NAME
	  && !is_gimple_min_invariant (op))
	{
	  ao_ref r;
	  ao_ref_init (&r, op);
	  if (refs_may_alias_p_1 (&r, ref, tbaa_p))
	    return true;
	}
    }

  return false;
}

static bool
ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
{
  bool res;
  res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
  if (res)
    ++alias_stats.ref_maybe_used_by_call_p_may_alias;
  else
    ++alias_stats.ref_maybe_used_by_call_p_no_alias;
  return res;
}
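
/* Usage sketch (hypothetical): for a statement like

     memcpy (dst_2, src_3, 16);

   the builtin handling above models only the 16 bytes read at *src_3
   (plus the pass-by-value argument scan), so a query such as

     ao_ref r;
     ao_ref_init (&r, some_ref_tree);
     bool used = ref_maybe_used_by_call_p (call_stmt, &r, true);

   can return false although the callee is neither const nor pure,
   provided *src_3 provably does not overlap the reference.  The names
   some_ref_tree and call_stmt are assumed caller state.  */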
/* If the statement STMT may use the memory reference REF return
   true, otherwise return false.  */

bool
ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
{
  if (is_gimple_assign (stmt))
    {
      tree rhs;

      /* All memory assign statements are single.  */
      if (!gimple_assign_single_p (stmt))
	return false;

      rhs = gimple_assign_rhs1 (stmt);
      if (is_gimple_reg (rhs)
	  || is_gimple_min_invariant (rhs)
	  || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
	return false;

      return refs_may_alias_p (rhs, ref, tbaa_p);
    }
  else if (is_gimple_call (stmt))
    return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
  else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    {
      tree retval = gimple_return_retval (return_stmt);
      if (retval
	  && TREE_CODE (retval) != SSA_NAME
	  && !is_gimple_min_invariant (retval)
	  && refs_may_alias_p (retval, ref, tbaa_p))
	return true;
      /* If ref escapes the function then the return acts as a use.  */
      tree base = ao_ref_base (ref);
      if (!base)
	;
      else if (DECL_P (base))
	return is_global_var (base);
      else if (TREE_CODE (base) == MEM_REF
	       || TREE_CODE (base) == TARGET_MEM_REF)
	return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
      return false;
    }

  return true;
}

bool
ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
}
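
/* Example (illustrative): given the GIMPLE sequence

     x_1 = *p_2;
     return g;

   where g is a global, ref_maybe_used_by_stmt_p returns true for the
   load whenever REF may overlap *p_2, and true for the return when REF
   is (or may point to) global memory, because the caller can observe
   such memory after the return.  */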
/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
{
  tree base;
  tree callee;

  /* If the call is pure or const it cannot clobber anything.  */
  if (gimple_call_flags (call)
      & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
    return false;
  if (gimple_call_internal_p (call))
    switch (gimple_call_internal_fn (call))
      {
	/* Treat these internal calls like ECF_PURE for aliasing,
	   they don't write to any memory the program should care about.
	   They have important other side-effects, and read memory,
	   so can't be ECF_NOVOPS.  */
      case IFN_UBSAN_NULL:
      case IFN_UBSAN_BOUNDS:
      case IFN_UBSAN_VPTR:
      case IFN_UBSAN_OBJECT_SIZE:
      case IFN_UBSAN_PTR:
      case IFN_ASAN_CHECK:
	return false;
      default:
	break;
      }

  base = ao_ref_base (ref);
  if (!base)
    return true;

  if (TREE_CODE (base) == SSA_NAME
      || CONSTANT_CLASS_P (base))
    return false;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly clobber it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local non-readonly statics can be modified through recursion
	 or the call may implement a threading barrier which we must
	 treat as may-def.  */
      && (TREE_READONLY (base)
	  || !is_global_var (base)))
    return false;

  /* If the reference is based on a pointer that points to memory
     that may not be written to then the call cannot possibly clobber it.  */
  if ((TREE_CODE (base) == MEM_REF
       || TREE_CODE (base) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
      && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
    return false;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions clobber memory pointed to by
	   their first argument.  */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* Don't pass in size for strncat, as the maximum size
	       is strlen (dest) + n + 1 instead of n, resp.
	       n + 1 at dest + strlen (dest), but strlen (dest) isn't
	       known.  */
	    if (gimple_call_num_args (call) == 3
		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	case BUILT_IN_MEMSET_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* Don't pass in size for __strncat_chk, as the maximum size
	       is strlen (dest) + n + 1 instead of n, resp.
	       n + 1 at dest + strlen (dest), but strlen (dest) isn't
	       known.  */
	    if (gimple_call_num_args (call) == 4
		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* Allocating memory does not have any side-effects apart from
	   being the definition point for the pointer.  */
	case BUILT_IN_MALLOC:
	case BUILT_IN_ALIGNED_ALLOC:
	case BUILT_IN_CALLOC:
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	  /* Unix98 specifies that errno is set on allocation failure.  */
	  if (flag_errno_math
	      && targetm.ref_may_alias_errno (ref))
	    return true;
	  return false;
	case BUILT_IN_STACK_SAVE:
	CASE_BUILT_IN_ALLOCA:
	case BUILT_IN_ASSUME_ALIGNED:
	  return false;
	/* But posix_memalign stores a pointer into the memory pointed to
	   by its first argument.  */
	case BUILT_IN_POSIX_MEMALIGN:
	  {
	    tree ptrptr = gimple_call_arg (call, 0);
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref, ptrptr,
					   TYPE_SIZE_UNIT (ptr_type_node));
	    return (refs_may_alias_p_1 (&dref, ref, false)
		    || (flag_errno_math
			&& targetm.ref_may_alias_errno (ref)));
	  }
	/* Freeing memory kills the pointed-to memory.  More importantly
	   the call has to serve as a barrier for moving loads and stores
	   across it.  */
	case BUILT_IN_FREE:
	case BUILT_IN_VA_END:
	  {
	    tree ptr = gimple_call_arg (call, 0);
	    return ptr_deref_may_alias_ref_p_1 (ptr, ref);
	  }
	/* Realloc serves both as allocation point and deallocation point.  */
	case BUILT_IN_REALLOC:
	  {
	    tree ptr = gimple_call_arg (call, 0);
	    /* Unix98 specifies that errno is set on allocation failure.  */
	    return ((flag_errno_math
		     && targetm.ref_may_alias_errno (ref))
		    || ptr_deref_may_alias_ref_p_1 (ptr, ref));
	  }
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	  {
	    tree out = gimple_call_arg (call, 1);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      break;
	    return false;
	  }
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	  {
	    tree out = gimple_call_arg (call, 1);
	    return ptr_deref_may_alias_ref_p_1 (out, ref);
	  }
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	  {
	    tree out = gimple_call_arg (call, 2);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      break;
	    return false;
	  }
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	  {
	    tree sin = gimple_call_arg (call, 1);
	    tree cos = gimple_call_arg (call, 2);
	    return (ptr_deref_may_alias_ref_p_1 (sin, ref)
		    || ptr_deref_may_alias_ref_p_1 (cos, ref));
	  }
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;
	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not written
     by the function.  */
  if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap not_written;

      if (node
	  && (not_written = ipa_reference_get_not_written_global (node))
	  && bitmap_bit_p (not_written, ipa_reference_var_uid (base)))
	return false;
    }

  /* Check if the base variable is call-clobbered.  */
  if (DECL_P (base))
    return pt_solution_includes (gimple_call_clobber_set (call), base);
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
    }

  return true;
}

/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p (gcall *call, tree ref)
{
  bool res;
  ao_ref r;
  ao_ref_init (&r, ref);
  res = call_may_clobber_ref_p_1 (call, &r);
  if (res)
    ++alias_stats.call_may_clobber_ref_p_may_alias;
  else
    ++alias_stats.call_may_clobber_ref_p_no_alias;
  return res;
}
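
/* Usage sketch (hypothetical caller): a pass that wants to forward a
   value across a call can test

     if (!call_may_clobber_ref_p (call_stmt, my_ref_tree))
       forward_value_across (call_stmt);

   where call_stmt, my_ref_tree and forward_value_across are assumed
   names.  Like the other wrappers above, this variant also updates the
   alias_stats counters.  */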
/* If the statement STMT may clobber the memory reference REF return true,
   otherwise return false.  */

bool
stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
{
  if (is_gimple_call (stmt))
    {
      tree lhs = gimple_call_lhs (stmt);
      if (lhs
	  && TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  if (refs_may_alias_p_1 (ref, &r, tbaa_p))
	    return true;
	}

      return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
    }
  else if (gimple_assign_single_p (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  return refs_may_alias_p_1 (ref, &r, tbaa_p);
	}
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    return true;

  return false;
}

bool
stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
}
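
/* Usage sketch (hypothetical): stmt_may_clobber_ref_p is the workhorse
   behind the virtual use-def walkers below.  A naive load-hoisting
   check could iterate statements directly,

     for (gimple *s : stmts_between_def_and_use)
       if (stmt_may_clobber_ref_p (s, loaded_ref, true))
	 return false;

   where stmts_between_def_and_use and loaded_ref are assumed caller
   state; real passes instead walk the virtual operand chain (see
   walk_non_aliased_vuses below), which visits only statements that
   define virtual operands.  */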
/* Return true if store1 and store2 described by corresponding tuples
   <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
   address.  */

static bool
same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
			 poly_int64 max_size1,
			 tree base2, poly_int64 offset2, poly_int64 size2,
			 poly_int64 max_size2)
{
  /* Offsets need to be 0.  */
  if (maybe_ne (offset1, 0)
      || maybe_ne (offset2, 0))
    return false;

  bool base1_obj_p = SSA_VAR_P (base1);
  bool base2_obj_p = SSA_VAR_P (base2);

  /* We need one object.  */
  if (base1_obj_p == base2_obj_p)
    return false;
  tree obj = base1_obj_p ? base1 : base2;

  /* And we need one MEM_REF.  */
  bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
  bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
  if (base1_memref_p == base2_memref_p)
    return false;
  tree memref = base1_memref_p ? base1 : base2;

  /* Sizes need to be valid.  */
  if (!known_size_p (max_size1)
      || !known_size_p (max_size2)
      || !known_size_p (size1)
      || !known_size_p (size2))
    return false;

  /* Max_size needs to match size.  */
  if (maybe_ne (max_size1, size1)
      || maybe_ne (max_size2, size2))
    return false;

  /* Sizes need to match.  */
  if (maybe_ne (size1, size2))
    return false;

  /* Check that memref is a store to pointer with singleton points-to info.  */
  if (!integer_zerop (TREE_OPERAND (memref, 1)))
    return false;
  tree ptr = TREE_OPERAND (memref, 0);
  if (TREE_CODE (ptr) != SSA_NAME)
    return false;
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
  unsigned int pt_uid;
  if (!pi
      || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
    return false;

  /* Be conservative with non-call exceptions when the address might
     be NULL.  */
  if (cfun->can_throw_non_call_exceptions && pi->pt.null)
    return false;

  /* Check that ptr points relative to obj.  */
  unsigned int obj_uid = DECL_PT_UID (obj);
  if (obj_uid != pt_uid)
    return false;

  /* Check that the object size is the same as the store size.  That ensures
     that ptr points to the start of obj.  */
  return (DECL_SIZE (obj)
	  && poly_int_tree_p (DECL_SIZE (obj))
	  && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
}
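
/* Illustration (hypothetical example): the points-to reasoning above
   covers a pattern like

     struct S { int i; } s;

     void
     f (struct S *p, struct S v)
     {
       s = v;
       *p = v;
     }

   when points-to analysis proves p points only to s: both stores have
   offset zero, equal size, and the store size equals DECL_SIZE (s), so
   they are known to write exactly the same bytes even though their
   bases differ syntactically.  */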
/* If STMT kills the memory reference REF return true, otherwise
   return false.  */

bool
stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
{
  if (!ao_ref_base (ref))
    return false;

  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ???  We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (cfun, stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      /* If LHS is literally a base of the access we are done.  */
      if (ref->ref)
	{
	  tree base = ref->ref;
	  tree innermost_dropped_array_ref = NULL_TREE;
	  if (handled_component_p (base))
	    {
	      tree saved_lhs0 = NULL_TREE;
	      if (handled_component_p (lhs))
		{
		  saved_lhs0 = TREE_OPERAND (lhs, 0);
		  TREE_OPERAND (lhs, 0) = integer_zero_node;
		}
	      do
		{
		  /* Just compare the outermost handled component, if
		     they are equal we have found a possible common
		     base.  */
		  tree saved_base0 = TREE_OPERAND (base, 0);
		  TREE_OPERAND (base, 0) = integer_zero_node;
		  bool res = operand_equal_p (lhs, base, 0);
		  TREE_OPERAND (base, 0) = saved_base0;
		  if (res)
		    break;
		  /* Remember if we drop an array-ref that we need to
		     double-check not being at struct end.  */
		  if (TREE_CODE (base) == ARRAY_REF
		      || TREE_CODE (base) == ARRAY_RANGE_REF)
		    innermost_dropped_array_ref = base;
		  /* Otherwise drop handled components of the access.  */
		  base = saved_base0;
		}
	      while (handled_component_p (base));
	      if (saved_lhs0)
		TREE_OPERAND (lhs, 0) = saved_lhs0;
	    }
	  /* Finally check if the lhs has the same address and size as the
	     base candidate of the access.  Watch out if we have dropped
	     an array-ref that was at struct end, this means ref->ref may
	     be outside of the TYPE_SIZE of its base.  */
	  if ((! innermost_dropped_array_ref
	       || ! array_at_struct_end_p (innermost_dropped_array_ref))
	      && (lhs == base
		  || (((TYPE_SIZE (TREE_TYPE (lhs))
			== TYPE_SIZE (TREE_TYPE (base)))
		       || (TYPE_SIZE (TREE_TYPE (lhs))
			   && TYPE_SIZE (TREE_TYPE (base))
			   && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
					       TYPE_SIZE (TREE_TYPE (base)),
					       0)))
		      && operand_equal_p (lhs, base,
					  OEP_ADDRESS_OF
					  | OEP_MATCH_SIDE_EFFECTS))))
	    return true;
	}

      /* Now look for non-literal equal bases with the restriction of
	 handling constant offset and size.  */
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (!ref->max_size_known_p ())
	return false;
      poly_int64 size, offset, max_size, ref_offset = ref->offset;
      bool reverse;
      tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
					   &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base != ref->base)
	{
	  /* Try using points-to info.  */
	  if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
				       ref->offset, ref->size, ref->max_size))
	    return true;

	  /* If both base and ref->base are MEM_REFs, only compare the
	     first operand, and if the second operand isn't equal constant,
	     try to add the offsets into offset and ref_offset.  */
	  if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
	      && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
	    {
	      if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
				       TREE_OPERAND (ref->base, 1)))
		{
		  poly_offset_int off1 = mem_ref_offset (base);
		  off1 <<= LOG2_BITS_PER_UNIT;
		  off1 += offset;
		  poly_offset_int off2 = mem_ref_offset (ref->base);
		  off2 <<= LOG2_BITS_PER_UNIT;
		  off2 += ref_offset;
		  if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
		    size = -1;
		}
	    }
	  else
	    size = -1;
	}
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (known_eq (size, max_size)
	  && known_subrange_p (ref_offset, ref->max_size, offset, size))
	return true;
    }

  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      if (callee != NULL_TREE
	  && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_FREE:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      tree base = ao_ref_base (ref);
	      if (base && TREE_CODE (base) == MEM_REF
		  && TREE_OPERAND (base, 0) == ptr)
		return true;
	      break;
	    }

	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMSET_CHK:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_STPNCPY:
	  case BUILT_IN_CALLOC:
	    {
	      /* For a must-alias check we need to be able to constrain
		 the access properly.  */
	      if (!ref->max_size_known_p ())
		return false;
	      tree dest;
	      tree len;

	      /* In execution order a calloc call will never kill
		 anything.  However, DSE will (ab)use this interface
		 to ask if a calloc call writes the same memory locations
		 as a later assignment, memset, etc.  So handle calloc
		 in the expected way.  */
	      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
		{
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree arg1 = gimple_call_arg (stmt, 1);
		  if (TREE_CODE (arg0) != INTEGER_CST
		      || TREE_CODE (arg1) != INTEGER_CST)
		    return false;

		  dest = gimple_call_lhs (stmt);
		  len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
		}
	      else
		{
		  dest = gimple_call_arg (stmt, 0);
		  len = gimple_call_arg (stmt, 2);
		}
	      if (!poly_int_tree_p (len))
		return false;
	      tree rbase = ref->base;
	      poly_offset_int roffset = ref->offset;
	      ao_ref dref;
	      ao_ref_init_from_ptr_and_size (&dref, dest, len);
	      tree base = ao_ref_base (&dref);
	      poly_offset_int offset = dref.offset;
	      if (!base || !known_size_p (dref.size))
		return false;
	      if (TREE_CODE (base) == MEM_REF)
		{
		  if (TREE_CODE (rbase) != MEM_REF)
		    return false;
		  // Compare pointers.
		  offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
		  roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
		  base = TREE_OPERAND (base, 0);
		  rbase = TREE_OPERAND (rbase, 0);
		}
	      if (base == rbase
		  && known_subrange_p (roffset, ref->max_size, offset,
				       wi::to_poly_offset (len)
				       << LOG2_BITS_PER_UNIT))
		return true;
	      break;
	    }

	  case BUILT_IN_VA_END:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == ADDR_EXPR)
		{
		  tree base = ao_ref_base (ref);
		  if (TREE_OPERAND (ptr, 0) == base)
		    return true;
		}
	      break;
	    }

	  default:;
	  }
    }
  return false;
}

bool
stmt_kills_ref_p (gimple *stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_kills_ref_p (stmt, &r);
}
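
/* Usage sketch (hypothetical DSE-style check): a store is dead if a
   later statement overwrites every byte it wrote and nothing reads the
   location in between:

     ao_ref lhs_ref;
     ao_ref_init (&lhs_ref, gimple_assign_lhs (earlier_store));
     if (stmt_kills_ref_p (later_stmt, &lhs_ref))
       ...

   where earlier_store and later_stmt are the assumed candidate
   statement pair.  */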
/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET or a statement clobbering the memory reference REF, in which
   case false is returned.  The walk starts with VUSE, one argument of PHI.  */

static bool
maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
		  ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
		  bitmap *visited, bool abort_on_visited,
		  void *(*translate)(ao_ref *, tree, void *,
				     translate_flags *),
		  translate_flags disambiguate_only,
		  void *data)
{
  basic_block bb = gimple_bb (phi);

  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);

  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* If we are searching for the target VUSE by walking up to
	 TARGET_BB dominating the original PHI we are finished once
	 we reach a default def or a definition in a block dominating
	 that block.  Update TARGET and return.  */
      if (!target
	  && (gimple_nop_p (def_stmt)
	      || dominated_by_p (CDI_DOMINATORS,
				 target_bb, gimple_bb (def_stmt))))
	{
	  target = vuse;
	  return true;
	}

      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  /* An already visited PHI node ends the walk successfully.  */
	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
	    return !abort_on_visited;
	  vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
					   visited, abort_on_visited,
					   translate, data, disambiguate_only);
	  if (!vuse)
	    return false;
	  continue;
	}
      else if (gimple_nop_p (def_stmt))
	return false;
      else
	{
	  /* A clobbering statement or the end of the IL ends it failing.  */
	  if ((int)limit <= 0)
	    return false;
	  --limit;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
	    {
	      translate_flags tf = disambiguate_only;
	      if (translate
		  && (*translate) (ref, vuse, data, &tf) == NULL)
		;
	      else
		return false;
	    }
	}
      /* If we reach a new basic-block see if we already skipped it
	 in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
	{
	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
	    return !abort_on_visited;
	  bb = gimple_bb (def_stmt);
	}
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}
/* Starting from a PHI node for the virtual operand of the memory reference
   REF find a continuation virtual operand that allows us to continue walking
   statements dominating PHI, skipping only statements that cannot possibly
   clobber REF.  Decrements LIMIT for each alias disambiguation done
   and aborts the walk, returning NULL_TREE, if it reaches zero.
   Returns NULL_TREE if no suitable virtual operand can be found.  */

tree
get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
			  unsigned int &limit, bitmap *visited,
			  bool abort_on_visited,
			  void *(*translate)(ao_ref *, tree, void *,
					     translate_flags *),
			  void *data,
			  translate_flags disambiguate_only)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* Through a single-argument PHI we can simply look through.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  basic_block phi_bb = gimple_bb (phi);
  tree arg0, arg1;
  unsigned i;

  /* Find a candidate for the virtual operand which definition
     dominates those of all others.  */
  /* First look if any of the args themselves satisfy this.  */
  for (i = 0; i < nargs; ++i)
    {
      arg0 = PHI_ARG_DEF (phi, i);
      if (SSA_NAME_IS_DEFAULT_DEF (arg0))
	break;
      basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
      if (def_bb != phi_bb
	  && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
	break;
      arg0 = NULL_TREE;
    }
  /* If not, look if we can reach such candidate by walking defs
     until we hit the immediate dominator.  maybe_skip_until will
     do that for us.  */
  basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);

  /* Then check against the (to be) found candidate.  */
  for (i = 0; i < nargs; ++i)
    {
      arg1 = PHI_ARG_DEF (phi, i);
      if (arg1 == arg0)
	;
      else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
				   limit, visited,
				   abort_on_visited,
				   translate,
				   /* Do not valueize when walking over
				      backedges.  */
				   dominated_by_p
				     (CDI_DOMINATORS,
				      gimple_bb (SSA_NAME_DEF_STMT (arg1)),
				      phi_bb)
				   ? TR_DISAMBIGUATE
				   : disambiguate_only, data))
	return NULL_TREE;
    }

  return arg0;
}
/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use for which its defining statement
   does not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE if non-NULL is called with a pointer to REF, the virtual
   use whose defining statement may clobber REF, and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
   to adjust REF and *DATA to make that valid.

   VALUEIZE if non-NULL is called with the next VUSE that is considered
   and its return value is substituted for that.  This can be used to
   implement optimistic value-numbering for example.  Note that the
   VUSE argument is assumed to be valueized already.

   LIMIT specifies the number of alias queries we are allowed to do,
   the walk stops when it reaches zero and NULL is returned.  LIMIT
   is decremented by the number of alias queries (plus adjustments
   done by the callbacks) upon return.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */

void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
			void *(*walker)(ao_ref *, tree, void *),
			void *(*translate)(ao_ref *, tree, void *,
					   translate_flags *),
			tree (*valueize)(tree),
			unsigned &limit, void *data)
{
  bitmap visited = NULL;
  void *res;
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple *def_stmt;

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, data);
      /* Abort walk.  */
      if (res == (void *)-1)
	{
	  res = NULL;
	  break;
	}
      /* Lookup succeeded.  */
      else if (res != NULL)
	break;

      if (valueize)
	{
	  vuse = valueize (vuse);
	  if (!vuse)
	    {
	      res = NULL;
	      break;
	    }
	}
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
	break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
					 &visited, translated, translate, data);
      else
	{
	  if ((int)limit <= 0)
	    {
	      res = NULL;
	      break;
	    }
	  --limit;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
	    {
	      if (!translate)
		break;
	      translate_flags disambiguate_only = TR_TRANSLATE;
	      res = (*translate) (ref, vuse, data, &disambiguate_only);
	      /* Failed lookup and translation.  */
	      if (res == (void *)-1)
		{
		  res = NULL;
		  break;
		}
	      /* Lookup succeeded.  */
	      else if (res != NULL)
		break;
	      /* Translation succeeded, continue walking.  */
	      translated = translated || disambiguate_only == TR_TRANSLATE;
	    }
	  vuse = gimple_vuse (def_stmt);
	}
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}
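
/* Usage sketch (hypothetical walker): a minimal lookup that stops at
   the first "interesting" defining statement could look like

     static void *
     my_walker (ao_ref *ref, tree vuse, void *data)
     {
       gimple *def = SSA_NAME_DEF_STMT (vuse);
       if (my_interesting_p (def))	// assumed predicate
	 return def;
       return NULL;			// keep walking
     }

     unsigned limit = 1000;		// arbitrary query budget
     void *res = walk_non_aliased_vuses (&r, vuse, true, my_walker,
					 NULL, NULL, limit, NULL);

   Returning (void *)-1 from the walker aborts the walk with a NULL
   result.  */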
/* Based on the memory reference REF call WALKER for each vdef whose
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
   The pointer may be NULL and then we do not track this information.

   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but only one walk continues at merge points), the
   return value is true if any of the walks was successful.

   The function returns the number of statements walked or -1 if
   LIMIT stmts were walked and the walk was aborted at this point.
   If LIMIT is zero the walk is not aborted.  */

static int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
		      bool (*walker)(ao_ref *, tree, void *), void *data,
		      bitmap *visited, unsigned int cnt,
		      bool *function_entry_reached, unsigned limit)
{
  do
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

      if (*visited
	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
	return cnt;

      if (gimple_nop_p (def_stmt))
	{
	  if (function_entry_reached)
	    *function_entry_reached = true;
	  return cnt;
	}
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  unsigned i;
	  if (!*visited)
	    *visited = BITMAP_ALLOC (NULL);
	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
	    {
	      int res = walk_aliased_vdefs_1 (ref,
					      gimple_phi_arg_def (def_stmt, i),
					      walker, data, visited, cnt,
					      function_entry_reached, limit);
	      if (res == -1)
		return -1;
	      cnt = res;
	    }
	  return cnt;
	}

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      if (cnt == limit)
	return -1;
      if ((!ref
	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
	  && (*walker) (ref, vdef, data))
	return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}

int
walk_aliased_vdefs (ao_ref *ref, tree vdef,
		    bool (*walker)(ao_ref *, tree, void *), void *data,
		    bitmap *visited,
		    bool *function_entry_reached, unsigned int limit)
{
  bitmap local_visited = NULL;
  int ret;

  timevar_push (TV_ALIAS_STMT_WALK);

  if (function_entry_reached)
    *function_entry_reached = false;

  ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
			      visited ? visited : &local_visited, 0,
			      function_entry_reached, limit);
  if (local_visited)
    BITMAP_FREE (local_visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return ret;
}
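
/* Usage sketch (hypothetical walker): collecting all statements that
   may clobber a reference,

     static bool
     collect_clobbers (ao_ref *ref, tree vdef, void *data)
     {
       vec<gimple *> *stmts = (vec<gimple *> *) data;  // assumed payload
       stmts->safe_push (SSA_NAME_DEF_STMT (vdef));
       return false;	// false continues the walk
     }

     walk_aliased_vdefs (&r, gimple_vuse (use_stmt), collect_clobbers,
			 &stmts, NULL, NULL, 0);

   A LIMIT of zero means the walk is never aborted; a true return from
   the walker terminates the current walk.  */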