1 /* Alias analysis for trees.
2 Copyright (C) 2004-2020 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
29 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
32 #include "tree-pretty-print.h"
34 #include "fold-const.h"
35 #include "langhooks.h"
39 #include "ipa-reference.h"
42 /* Broad overview of how alias analysis on gimple works:
44 Statements clobbering or using memory are linked through the
45 virtual operand factored use-def chain. The virtual operand
46 is unique per function, its symbol is accessible via gimple_vop (cfun).
47 Virtual operands are used for efficiently walking memory statements
48 in the gimple IL and are useful for things like value-numbering as
49 a generation count for memory references.
51 SSA_NAME pointers may have associated points-to information
52 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
53 points-to information is (re-)computed by the TODO_rebuild_alias
54 pass manager todo. Points-to information is also used for more
55 precise tracking of call-clobbered and call-used variables and
56 related disambiguations.
58 This file contains functions for disambiguating memory references,
59 the so called alias-oracle and tools for walking of the gimple IL.
61 The main alias-oracle entry-points are
63 bool stmt_may_clobber_ref_p (gimple *, tree)
65 This function queries if a statement may invalidate (parts of)
66 the memory designated by the reference tree argument.
68 bool ref_maybe_used_by_stmt_p (gimple *, tree)
70 This function queries if a statement may need (parts of) the
71 memory designated by the reference tree argument.
73 There are variants of these functions that only handle the call
74 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
75 Note that these do not disambiguate against a possible call lhs.
77 bool refs_may_alias_p (tree, tree)
79 This function tries to disambiguate two reference trees.
81 bool ptr_deref_may_alias_global_p (tree)
83 This function queries if dereferencing a pointer variable may
86 More low-level disambiguators are available and documented in
87 this file. Low-level disambiguators dealing with points-to
88 information are in tree-ssa-structalias.c. */
90 static int nonoverlapping_refs_since_match_p (tree
, tree
, tree
, tree
, bool);
91 static bool nonoverlapping_component_refs_p (const_tree
, const_tree
);
93 /* Query statistics for the different low-level disambiguators.
94 A high-level query may trigger multiple of them. */
97 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias
;
98 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias
;
99 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias
;
100 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias
;
101 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias
;
102 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias
;
103 unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias
;
104 unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias
;
105 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias
;
106 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias
;
107 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias
;
108 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap
;
109 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias
;
113 dump_alias_stats (FILE *s
)
115 fprintf (s
, "\nAlias oracle query stats:\n");
116 fprintf (s
, " refs_may_alias_p: "
117 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
118 HOST_WIDE_INT_PRINT_DEC
" queries\n",
119 alias_stats
.refs_may_alias_p_no_alias
,
120 alias_stats
.refs_may_alias_p_no_alias
121 + alias_stats
.refs_may_alias_p_may_alias
);
122 fprintf (s
, " ref_maybe_used_by_call_p: "
123 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
124 HOST_WIDE_INT_PRINT_DEC
" queries\n",
125 alias_stats
.ref_maybe_used_by_call_p_no_alias
,
126 alias_stats
.refs_may_alias_p_no_alias
127 + alias_stats
.ref_maybe_used_by_call_p_may_alias
);
128 fprintf (s
, " call_may_clobber_ref_p: "
129 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
130 HOST_WIDE_INT_PRINT_DEC
" queries\n",
131 alias_stats
.call_may_clobber_ref_p_no_alias
,
132 alias_stats
.call_may_clobber_ref_p_no_alias
133 + alias_stats
.call_may_clobber_ref_p_may_alias
);
134 fprintf (s
, " nonoverlapping_component_refs_p: "
135 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
136 HOST_WIDE_INT_PRINT_DEC
" queries\n",
137 alias_stats
.nonoverlapping_component_refs_p_no_alias
,
138 alias_stats
.nonoverlapping_component_refs_p_no_alias
139 + alias_stats
.nonoverlapping_component_refs_p_may_alias
);
140 fprintf (s
, " nonoverlapping_refs_since_match_p: "
141 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
142 HOST_WIDE_INT_PRINT_DEC
" must overlaps, "
143 HOST_WIDE_INT_PRINT_DEC
" queries\n",
144 alias_stats
.nonoverlapping_refs_since_match_p_no_alias
,
145 alias_stats
.nonoverlapping_refs_since_match_p_must_overlap
,
146 alias_stats
.nonoverlapping_refs_since_match_p_no_alias
147 + alias_stats
.nonoverlapping_refs_since_match_p_may_alias
148 + alias_stats
.nonoverlapping_refs_since_match_p_must_overlap
);
149 fprintf (s
, " aliasing_component_refs_p: "
150 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
151 HOST_WIDE_INT_PRINT_DEC
" queries\n",
152 alias_stats
.aliasing_component_refs_p_no_alias
,
153 alias_stats
.aliasing_component_refs_p_no_alias
154 + alias_stats
.aliasing_component_refs_p_may_alias
);
155 dump_alias_stats_in_alias_c (s
);
159 /* Return true, if dereferencing PTR may alias with a global variable. */
162 ptr_deref_may_alias_global_p (tree ptr
)
164 struct ptr_info_def
*pi
;
166 /* If we end up with a pointer constant here that may point
168 if (TREE_CODE (ptr
) != SSA_NAME
)
171 pi
= SSA_NAME_PTR_INFO (ptr
);
173 /* If we do not have points-to information for this variable,
178 /* ??? This does not use TBAA to prune globals ptr may not access. */
179 return pt_solution_includes_global (&pi
->pt
);
182 /* Return true if dereferencing PTR may alias DECL.
183 The caller is responsible for applying TBAA to see if PTR
184 may access DECL at all. */
187 ptr_deref_may_alias_decl_p (tree ptr
, tree decl
)
189 struct ptr_info_def
*pi
;
191 /* Conversions are irrelevant for points-to information and
192 data-dependence analysis can feed us those. */
195 /* Anything we do not explicitly handle aliases. */
196 if ((TREE_CODE (ptr
) != SSA_NAME
197 && TREE_CODE (ptr
) != ADDR_EXPR
198 && TREE_CODE (ptr
) != POINTER_PLUS_EXPR
)
199 || !POINTER_TYPE_P (TREE_TYPE (ptr
))
201 && TREE_CODE (decl
) != PARM_DECL
202 && TREE_CODE (decl
) != RESULT_DECL
))
205 /* Disregard pointer offsetting. */
206 if (TREE_CODE (ptr
) == POINTER_PLUS_EXPR
)
210 ptr
= TREE_OPERAND (ptr
, 0);
212 while (TREE_CODE (ptr
) == POINTER_PLUS_EXPR
);
213 return ptr_deref_may_alias_decl_p (ptr
, decl
);
216 /* ADDR_EXPR pointers either just offset another pointer or directly
217 specify the pointed-to set. */
218 if (TREE_CODE (ptr
) == ADDR_EXPR
)
220 tree base
= get_base_address (TREE_OPERAND (ptr
, 0));
222 && (TREE_CODE (base
) == MEM_REF
223 || TREE_CODE (base
) == TARGET_MEM_REF
))
224 ptr
= TREE_OPERAND (base
, 0);
227 return compare_base_decls (base
, decl
) != 0;
229 && CONSTANT_CLASS_P (base
))
235 /* Non-aliased variables cannot be pointed to. */
236 if (!may_be_aliased (decl
))
239 /* If we do not have useful points-to information for this pointer
240 we cannot disambiguate anything else. */
241 pi
= SSA_NAME_PTR_INFO (ptr
);
245 return pt_solution_includes (&pi
->pt
, decl
);
248 /* Return true if dereferenced PTR1 and PTR2 may alias.
249 The caller is responsible for applying TBAA to see if accesses
250 through PTR1 and PTR2 may conflict at all. */
253 ptr_derefs_may_alias_p (tree ptr1
, tree ptr2
)
255 struct ptr_info_def
*pi1
, *pi2
;
257 /* Conversions are irrelevant for points-to information and
258 data-dependence analysis can feed us those. */
262 /* Disregard pointer offsetting. */
263 if (TREE_CODE (ptr1
) == POINTER_PLUS_EXPR
)
267 ptr1
= TREE_OPERAND (ptr1
, 0);
269 while (TREE_CODE (ptr1
) == POINTER_PLUS_EXPR
);
270 return ptr_derefs_may_alias_p (ptr1
, ptr2
);
272 if (TREE_CODE (ptr2
) == POINTER_PLUS_EXPR
)
276 ptr2
= TREE_OPERAND (ptr2
, 0);
278 while (TREE_CODE (ptr2
) == POINTER_PLUS_EXPR
);
279 return ptr_derefs_may_alias_p (ptr1
, ptr2
);
282 /* ADDR_EXPR pointers either just offset another pointer or directly
283 specify the pointed-to set. */
284 if (TREE_CODE (ptr1
) == ADDR_EXPR
)
286 tree base
= get_base_address (TREE_OPERAND (ptr1
, 0));
288 && (TREE_CODE (base
) == MEM_REF
289 || TREE_CODE (base
) == TARGET_MEM_REF
))
290 return ptr_derefs_may_alias_p (TREE_OPERAND (base
, 0), ptr2
);
293 return ptr_deref_may_alias_decl_p (ptr2
, base
);
297 if (TREE_CODE (ptr2
) == ADDR_EXPR
)
299 tree base
= get_base_address (TREE_OPERAND (ptr2
, 0));
301 && (TREE_CODE (base
) == MEM_REF
302 || TREE_CODE (base
) == TARGET_MEM_REF
))
303 return ptr_derefs_may_alias_p (ptr1
, TREE_OPERAND (base
, 0));
306 return ptr_deref_may_alias_decl_p (ptr1
, base
);
311 /* From here we require SSA name pointers. Anything else aliases. */
312 if (TREE_CODE (ptr1
) != SSA_NAME
313 || TREE_CODE (ptr2
) != SSA_NAME
314 || !POINTER_TYPE_P (TREE_TYPE (ptr1
))
315 || !POINTER_TYPE_P (TREE_TYPE (ptr2
)))
318 /* We may end up with two empty points-to solutions for two same pointers.
319 In this case we still want to say both pointers alias, so shortcut
324 /* If we do not have useful points-to information for either pointer
325 we cannot disambiguate anything else. */
326 pi1
= SSA_NAME_PTR_INFO (ptr1
);
327 pi2
= SSA_NAME_PTR_INFO (ptr2
);
331 /* ??? This does not use TBAA to prune decls from the intersection
332 that not both pointers may access. */
333 return pt_solutions_intersect (&pi1
->pt
, &pi2
->pt
);
336 /* Return true if dereferencing PTR may alias *REF.
337 The caller is responsible for applying TBAA to see if PTR
338 may access *REF at all. */
341 ptr_deref_may_alias_ref_p_1 (tree ptr
, ao_ref
*ref
)
343 tree base
= ao_ref_base (ref
);
345 if (TREE_CODE (base
) == MEM_REF
346 || TREE_CODE (base
) == TARGET_MEM_REF
)
347 return ptr_derefs_may_alias_p (ptr
, TREE_OPERAND (base
, 0));
348 else if (DECL_P (base
))
349 return ptr_deref_may_alias_decl_p (ptr
, base
);
354 /* Returns true if PTR1 and PTR2 compare unequal because of points-to. */
357 ptrs_compare_unequal (tree ptr1
, tree ptr2
)
359 /* First resolve the pointers down to a SSA name pointer base or
360 a VAR_DECL, PARM_DECL or RESULT_DECL. This explicitely does
361 not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
362 or STRING_CSTs which needs points-to adjustments to track them
363 in the points-to sets. */
364 tree obj1
= NULL_TREE
;
365 tree obj2
= NULL_TREE
;
366 if (TREE_CODE (ptr1
) == ADDR_EXPR
)
368 tree tem
= get_base_address (TREE_OPERAND (ptr1
, 0));
372 || TREE_CODE (tem
) == PARM_DECL
373 || TREE_CODE (tem
) == RESULT_DECL
)
375 else if (TREE_CODE (tem
) == MEM_REF
)
376 ptr1
= TREE_OPERAND (tem
, 0);
378 if (TREE_CODE (ptr2
) == ADDR_EXPR
)
380 tree tem
= get_base_address (TREE_OPERAND (ptr2
, 0));
384 || TREE_CODE (tem
) == PARM_DECL
385 || TREE_CODE (tem
) == RESULT_DECL
)
387 else if (TREE_CODE (tem
) == MEM_REF
)
388 ptr2
= TREE_OPERAND (tem
, 0);
391 /* Canonicalize ptr vs. object. */
392 if (TREE_CODE (ptr1
) == SSA_NAME
&& obj2
)
394 std::swap (ptr1
, ptr2
);
395 std::swap (obj1
, obj2
);
399 /* Other code handles this correctly, no need to duplicate it here. */;
400 else if (obj1
&& TREE_CODE (ptr2
) == SSA_NAME
)
402 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (ptr2
);
403 /* We may not use restrict to optimize pointer comparisons.
404 See PR71062. So we have to assume that restrict-pointed-to
405 may be in fact obj1. */
407 || pi
->pt
.vars_contains_restrict
408 || pi
->pt
.vars_contains_interposable
)
411 && (TREE_STATIC (obj1
) || DECL_EXTERNAL (obj1
)))
413 varpool_node
*node
= varpool_node::get (obj1
);
414 /* If obj1 may bind to NULL give up (see below). */
416 || ! node
->nonzero_address ()
417 || ! decl_binds_to_current_def_p (obj1
))
420 return !pt_solution_includes (&pi
->pt
, obj1
);
423 /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
424 but those require pt.null to be conservatively correct. */
429 /* Returns whether reference REF to BASE may refer to global memory. */
432 ref_may_alias_global_p_1 (tree base
)
435 return is_global_var (base
);
436 else if (TREE_CODE (base
) == MEM_REF
437 || TREE_CODE (base
) == TARGET_MEM_REF
)
438 return ptr_deref_may_alias_global_p (TREE_OPERAND (base
, 0));
443 ref_may_alias_global_p (ao_ref
*ref
)
445 tree base
= ao_ref_base (ref
);
446 return ref_may_alias_global_p_1 (base
);
450 ref_may_alias_global_p (tree ref
)
452 tree base
= get_base_address (ref
);
453 return ref_may_alias_global_p_1 (base
);
456 /* Return true whether STMT may clobber global memory. */
459 stmt_may_clobber_global_p (gimple
*stmt
)
463 if (!gimple_vdef (stmt
))
466 /* ??? We can ask the oracle whether an artificial pointer
467 dereference with a pointer with points-to information covering
468 all global memory (what about non-address taken memory?) maybe
469 clobbered by this call. As there is at the moment no convenient
470 way of doing that without generating garbage do some manual
472 ??? We could make a NULL ao_ref argument to the various
473 predicates special, meaning any global memory. */
475 switch (gimple_code (stmt
))
478 lhs
= gimple_assign_lhs (stmt
);
479 return (TREE_CODE (lhs
) != SSA_NAME
480 && ref_may_alias_global_p (lhs
));
489 /* Dump alias information on FILE. */
492 dump_alias_info (FILE *file
)
497 = lang_hooks
.decl_printable_name (current_function_decl
, 2);
500 fprintf (file
, "\n\nAlias information for %s\n\n", funcname
);
502 fprintf (file
, "Aliased symbols\n\n");
504 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
506 if (may_be_aliased (var
))
507 dump_variable (file
, var
);
510 fprintf (file
, "\nCall clobber information\n");
512 fprintf (file
, "\nESCAPED");
513 dump_points_to_solution (file
, &cfun
->gimple_df
->escaped
);
515 fprintf (file
, "\n\nFlow-insensitive points-to information\n\n");
517 FOR_EACH_SSA_NAME (i
, ptr
, cfun
)
519 struct ptr_info_def
*pi
;
521 if (!POINTER_TYPE_P (TREE_TYPE (ptr
))
522 || SSA_NAME_IN_FREE_LIST (ptr
))
525 pi
= SSA_NAME_PTR_INFO (ptr
);
527 dump_points_to_info_for (file
, ptr
);
530 fprintf (file
, "\n");
534 /* Dump alias information on stderr. */
537 debug_alias_info (void)
539 dump_alias_info (stderr
);
543 /* Dump the points-to set *PT into FILE. */
546 dump_points_to_solution (FILE *file
, struct pt_solution
*pt
)
549 fprintf (file
, ", points-to anything");
552 fprintf (file
, ", points-to non-local");
555 fprintf (file
, ", points-to escaped");
558 fprintf (file
, ", points-to unit escaped");
561 fprintf (file
, ", points-to NULL");
565 fprintf (file
, ", points-to vars: ");
566 dump_decl_set (file
, pt
->vars
);
567 if (pt
->vars_contains_nonlocal
568 || pt
->vars_contains_escaped
569 || pt
->vars_contains_escaped_heap
570 || pt
->vars_contains_restrict
)
572 const char *comma
= "";
573 fprintf (file
, " (");
574 if (pt
->vars_contains_nonlocal
)
576 fprintf (file
, "nonlocal");
579 if (pt
->vars_contains_escaped
)
581 fprintf (file
, "%sescaped", comma
);
584 if (pt
->vars_contains_escaped_heap
)
586 fprintf (file
, "%sescaped heap", comma
);
589 if (pt
->vars_contains_restrict
)
591 fprintf (file
, "%srestrict", comma
);
594 if (pt
->vars_contains_interposable
)
595 fprintf (file
, "%sinterposable", comma
);
602 /* Unified dump function for pt_solution. */
605 debug (pt_solution
&ref
)
607 dump_points_to_solution (stderr
, &ref
);
611 debug (pt_solution
*ptr
)
616 fprintf (stderr
, "<nil>\n");
620 /* Dump points-to information for SSA_NAME PTR into FILE. */
623 dump_points_to_info_for (FILE *file
, tree ptr
)
625 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (ptr
);
627 print_generic_expr (file
, ptr
, dump_flags
);
630 dump_points_to_solution (file
, &pi
->pt
);
632 fprintf (file
, ", points-to anything");
634 fprintf (file
, "\n");
638 /* Dump points-to information for VAR into stderr. */
641 debug_points_to_info_for (tree var
)
643 dump_points_to_info_for (stderr
, var
);
647 /* Initializes the alias-oracle reference representation *R from REF. */
650 ao_ref_init (ao_ref
*r
, tree ref
)
657 r
->ref_alias_set
= -1;
658 r
->base_alias_set
= -1;
659 r
->volatile_p
= ref
? TREE_THIS_VOLATILE (ref
) : false;
662 /* Returns the base object of the memory reference *REF. */
665 ao_ref_base (ao_ref
*ref
)
671 ref
->base
= get_ref_base_and_extent (ref
->ref
, &ref
->offset
, &ref
->size
,
672 &ref
->max_size
, &reverse
);
676 /* Returns the base object alias set of the memory reference *REF. */
679 ao_ref_base_alias_set (ao_ref
*ref
)
682 if (ref
->base_alias_set
!= -1)
683 return ref
->base_alias_set
;
687 while (handled_component_p (base_ref
))
688 base_ref
= TREE_OPERAND (base_ref
, 0);
689 ref
->base_alias_set
= get_alias_set (base_ref
);
690 return ref
->base_alias_set
;
693 /* Returns the reference alias set of the memory reference *REF. */
696 ao_ref_alias_set (ao_ref
*ref
)
698 if (ref
->ref_alias_set
!= -1)
699 return ref
->ref_alias_set
;
700 ref
->ref_alias_set
= get_alias_set (ref
->ref
);
701 return ref
->ref_alias_set
;
704 /* Init an alias-oracle reference representation from a gimple pointer
705 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
706 size is assumed to be unknown. The access is assumed to be only
707 to or after of the pointer target, not before it. */
710 ao_ref_init_from_ptr_and_size (ao_ref
*ref
, tree ptr
, tree size
)
712 poly_int64 t
, size_hwi
, extra_offset
= 0;
713 ref
->ref
= NULL_TREE
;
714 if (TREE_CODE (ptr
) == SSA_NAME
)
716 gimple
*stmt
= SSA_NAME_DEF_STMT (ptr
);
717 if (gimple_assign_single_p (stmt
)
718 && gimple_assign_rhs_code (stmt
) == ADDR_EXPR
)
719 ptr
= gimple_assign_rhs1 (stmt
);
720 else if (is_gimple_assign (stmt
)
721 && gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
722 && ptrdiff_tree_p (gimple_assign_rhs2 (stmt
), &extra_offset
))
724 ptr
= gimple_assign_rhs1 (stmt
);
725 extra_offset
*= BITS_PER_UNIT
;
729 if (TREE_CODE (ptr
) == ADDR_EXPR
)
731 ref
->base
= get_addr_base_and_unit_offset (TREE_OPERAND (ptr
, 0), &t
);
733 ref
->offset
= BITS_PER_UNIT
* t
;
738 ref
->base
= get_base_address (TREE_OPERAND (ptr
, 0));
743 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr
)));
744 ref
->base
= build2 (MEM_REF
, char_type_node
,
745 ptr
, null_pointer_node
);
748 ref
->offset
+= extra_offset
;
750 && poly_int_tree_p (size
, &size_hwi
)
751 && coeffs_in_range_p (size_hwi
, 0, HOST_WIDE_INT_MAX
/ BITS_PER_UNIT
))
752 ref
->max_size
= ref
->size
= size_hwi
* BITS_PER_UNIT
;
754 ref
->max_size
= ref
->size
= -1;
755 ref
->ref_alias_set
= 0;
756 ref
->base_alias_set
= 0;
757 ref
->volatile_p
= false;
760 /* S1 and S2 are TYPE_SIZE or DECL_SIZE. Compare them:
763 Return 0 if equal or incomparable. */
766 compare_sizes (tree s1
, tree s2
)
774 if (!poly_int_tree_p (s1
, &size1
) || !poly_int_tree_p (s2
, &size2
))
776 if (known_lt (size1
, size2
))
778 if (known_lt (size2
, size1
))
783 /* Compare TYPE1 and TYPE2 by its size.
784 Return -1 if size of TYPE1 < size of TYPE2
785 Return 1 if size of TYPE1 > size of TYPE2
786 Return 0 if types are of equal sizes or we can not compare them. */
789 compare_type_sizes (tree type1
, tree type2
)
791 /* Be conservative for arrays and vectors. We want to support partial
792 overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */
793 while (TREE_CODE (type1
) == ARRAY_TYPE
794 || TREE_CODE (type1
) == VECTOR_TYPE
)
795 type1
= TREE_TYPE (type1
);
796 while (TREE_CODE (type2
) == ARRAY_TYPE
797 || TREE_CODE (type2
) == VECTOR_TYPE
)
798 type2
= TREE_TYPE (type2
);
799 return compare_sizes (TYPE_SIZE (type1
), TYPE_SIZE (type2
));
802 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
803 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
807 same_type_for_tbaa (tree type1
, tree type2
)
809 type1
= TYPE_MAIN_VARIANT (type1
);
810 type2
= TYPE_MAIN_VARIANT (type2
);
812 /* Handle the most common case first. */
816 /* If we would have to do structural comparison bail out. */
817 if (TYPE_STRUCTURAL_EQUALITY_P (type1
)
818 || TYPE_STRUCTURAL_EQUALITY_P (type2
))
821 /* Compare the canonical types. */
822 if (TYPE_CANONICAL (type1
) == TYPE_CANONICAL (type2
))
825 /* ??? Array types are not properly unified in all cases as we have
826 spurious changes in the index types for example. Removing this
827 causes all sorts of problems with the Fortran frontend. */
828 if (TREE_CODE (type1
) == ARRAY_TYPE
829 && TREE_CODE (type2
) == ARRAY_TYPE
)
832 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
833 object of one of its constrained subtypes, e.g. when a function with an
834 unconstrained parameter passed by reference is called on an object and
835 inlined. But, even in the case of a fixed size, type and subtypes are
836 not equivalent enough as to share the same TYPE_CANONICAL, since this
837 would mean that conversions between them are useless, whereas they are
838 not (e.g. type and subtypes can have different modes). So, in the end,
839 they are only guaranteed to have the same alias set. */
840 if (get_alias_set (type1
) == get_alias_set (type2
))
843 /* The types are known to be not equal. */
847 /* Return true if TYPE is a composite type (i.e. we may apply one of handled
848 components on it). */
851 type_has_components_p (tree type
)
853 return AGGREGATE_TYPE_P (type
) || VECTOR_TYPE_P (type
)
854 || TREE_CODE (type
) == COMPLEX_TYPE
;
857 /* MATCH1 and MATCH2 which are part of access path of REF1 and REF2
858 respectively are either pointing to same address or are completely
859 disjoint. If PARTIAL_OVERLAP is true, assume that outermost arrays may
862 Try to disambiguate using the access path starting from the match
863 and return false if there is no conflict.
865 Helper for aliasing_component_refs_p. */
868 aliasing_matching_component_refs_p (tree match1
, tree ref1
,
869 poly_int64 offset1
, poly_int64 max_size1
,
870 tree match2
, tree ref2
,
871 poly_int64 offset2
, poly_int64 max_size2
,
872 bool partial_overlap
)
874 poly_int64 offadj
, sztmp
, msztmp
;
877 if (!partial_overlap
)
879 get_ref_base_and_extent (match2
, &offadj
, &sztmp
, &msztmp
, &reverse
);
881 get_ref_base_and_extent (match1
, &offadj
, &sztmp
, &msztmp
, &reverse
);
883 if (!ranges_maybe_overlap_p (offset1
, max_size1
, offset2
, max_size2
))
885 ++alias_stats
.aliasing_component_refs_p_no_alias
;
890 int cmp
= nonoverlapping_refs_since_match_p (match1
, ref1
, match2
, ref2
,
893 || (cmp
== -1 && nonoverlapping_component_refs_p (ref1
, ref2
)))
895 ++alias_stats
.aliasing_component_refs_p_no_alias
;
898 ++alias_stats
.aliasing_component_refs_p_may_alias
;
902 /* Return true if REF is reference to zero sized trailing array. I.e.
903 struct foo {int bar; int array[0];} *fooptr;
907 component_ref_to_zero_sized_trailing_array_p (tree ref
)
909 return (TREE_CODE (ref
) == COMPONENT_REF
910 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref
, 1))) == ARRAY_TYPE
911 && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref
, 1)))
912 || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref
, 1)))))
913 && array_at_struct_end_p (ref
));
916 /* Worker for aliasing_component_refs_p. Most parameters match parameters of
917 aliasing_component_refs_p.
919 Walk access path REF2 and try to find type matching TYPE1
920 (which is a start of possibly aliasing access path REF1).
921 If match is found, try to disambiguate.
923 Return 0 for successful disambiguation.
924 Return 1 if match was found but disambiguation failed
925 Return -1 if there is no match.
926 In this case MAYBE_MATCH is set to 0 if there is no type matching TYPE1
927 in access path REF2 and -1 if we are not sure. */
930 aliasing_component_refs_walk (tree ref1
, tree type1
, tree base1
,
931 poly_int64 offset1
, poly_int64 max_size1
,
932 tree end_struct_ref1
,
933 tree ref2
, tree base2
,
934 poly_int64 offset2
, poly_int64 max_size2
,
942 /* We walk from inner type to the outer types. If type we see is
943 already too large to be part of type1, terminate the search. */
944 int cmp
= compare_type_sizes (type1
, TREE_TYPE (ref
));
948 || compare_type_sizes (TREE_TYPE (end_struct_ref1
),
949 TREE_TYPE (ref
)) < 0))
951 /* If types may be of same size, see if we can decide about their
955 same_p
= same_type_for_tbaa (TREE_TYPE (ref
), type1
);
958 /* In case we can't decide whether types are same try to
959 continue looking for the exact match.
960 Remember however that we possibly saw a match
961 to bypass the access path continuations tests we do later. */
965 if (!handled_component_p (ref
))
967 ref
= TREE_OPERAND (ref
, 0);
971 bool partial_overlap
= false;
973 /* We assume that arrays can overlap by multiple of their elements
974 size as tested in gcc.dg/torture/alias-2.c.
975 This partial overlap happen only when both arrays are bases of
976 the access and not contained within another component ref.
977 To be safe we also assume partial overlap for VLAs. */
978 if (TREE_CODE (TREE_TYPE (base1
)) == ARRAY_TYPE
979 && (!TYPE_SIZE (TREE_TYPE (base1
))
980 || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1
))) != INTEGER_CST
983 /* Setting maybe_match to true triggers
984 nonoverlapping_component_refs_p test later that still may do
985 useful disambiguation. */
987 partial_overlap
= true;
989 return aliasing_matching_component_refs_p (base1
, ref1
,
998 /* Consider access path1 base1....ref1 and access path2 base2...ref2.
999 Return true if they can be composed to single access path
1000 base1...ref1...base2...ref2.
1002 REF_TYPE1 if type of REF1. END_STRUCT_PAST_END1 is true if there is
1003 a trailing array access after REF1 in the non-TBAA part of the access.
1004 REF1_ALIAS_SET is the alias set of REF1.
1006 BASE_TYPE2 is type of base2. END_STRUCT_REF2 is non-NULL if there is
1007 a traling array access in the TBAA part of access path2.
1008 BASE2_ALIAS_SET is the alias set of base2. */
1011 access_path_may_continue_p (tree ref_type1
, bool end_struct_past_end1
,
1012 alias_set_type ref1_alias_set
,
1013 tree base_type2
, tree end_struct_ref2
,
1014 alias_set_type base2_alias_set
)
1016 /* Access path can not continue past types with no components. */
1017 if (!type_has_components_p (ref_type1
))
1020 /* If first access path ends by too small type to hold base of
1021 the second access path, typically paths can not continue.
1023 Punt if end_struct_past_end1 is true. We want to support arbitrary
1024 type puning past first COMPONENT_REF to union because redundant store
1025 elimination depends on this, see PR92152. For this reason we can not
1026 check size of the reference because types may partially overlap. */
1027 if (!end_struct_past_end1
)
1029 if (compare_type_sizes (ref_type1
, base_type2
) < 0)
1031 /* If the path2 contains trailing array access we can strenghten the check
1032 to verify that also the size of element of the trailing array fits.
1033 In fact we could check for offset + type_size, but we do not track
1034 offsets and this is quite side case. */
1036 && compare_type_sizes (ref_type1
, TREE_TYPE (end_struct_ref2
)) < 0)
1039 return (base2_alias_set
== ref1_alias_set
1040 || alias_set_subset_of (base2_alias_set
, ref1_alias_set
));
1043 /* Determine if the two component references REF1 and REF2 which are
1044 based on access types TYPE1 and TYPE2 and of which at least one is based
1045 on an indirect reference may alias.
1046 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
1047 are the respective alias sets. */
1050 aliasing_component_refs_p (tree ref1
,
1051 alias_set_type ref1_alias_set
,
1052 alias_set_type base1_alias_set
,
1053 poly_int64 offset1
, poly_int64 max_size1
,
1055 alias_set_type ref2_alias_set
,
1056 alias_set_type base2_alias_set
,
1057 poly_int64 offset2
, poly_int64 max_size2
)
1059 /* If one reference is a component references through pointers try to find a
1060 common base and apply offset based disambiguation. This handles
1062 struct A { int i; int j; } *q;
1063 struct B { struct A a; int k; } *p;
1064 disambiguating q->i and p->a.j. */
1067 bool maybe_match
= false;
1068 tree end_struct_ref1
= NULL
, end_struct_ref2
= NULL
;
1069 bool end_struct_past_end1
= false;
1070 bool end_struct_past_end2
= false;
1072 /* Choose bases and base types to search for.
1073 The access path is as follows:
1074 base....end_of_tbaa_ref...actual_ref
1075 At one place in the access path may be a reference to zero sized or
1078 We generally discard the segment after end_of_tbaa_ref however
1079 we need to be careful in case it contains zero sized or trailing array.
1080 These may happen after reference to union and in this case we need to
1081 not disambiguate type punning scenarios.
1084 base1 to point to base
1086 ref1 to point to end_of_tbaa_ref
1088 end_struct_ref1 to point the trailing reference (if it exists
1089 in range base....end_of_tbaa_ref
1091 end_struct_past_end1 is true if this trailing reference occurs in
1092 end_of_tbaa_ref...actual_ref. */
1094 while (handled_component_p (base1
))
1096 /* Generally access paths are monotonous in the size of object. The
1097 exception are trailing arrays of structures. I.e.
1098 struct a {int array[0];};
1100 struct a {int array1[0]; int array[];};
1101 Such struct has size 0 but accesses to a.array may have non-zero size.
1102 In this case the size of TREE_TYPE (base1) is smaller than
1103 size of TREE_TYPE (TREE_OPERAND (base1, 0)).
1105 Because we compare sizes of arrays just by sizes of their elements,
1106 we only need to care about zero sized array fields here. */
1107 if (component_ref_to_zero_sized_trailing_array_p (base1
))
1109 gcc_checking_assert (!end_struct_ref1
);
1110 end_struct_ref1
= base1
;
1112 if (ends_tbaa_access_path_p (base1
))
1114 ref1
= TREE_OPERAND (base1
, 0);
1115 if (end_struct_ref1
)
1117 end_struct_past_end1
= true;
1118 end_struct_ref1
= NULL
;
1121 base1
= TREE_OPERAND (base1
, 0);
1123 type1
= TREE_TYPE (base1
);
1125 while (handled_component_p (base2
))
1127 if (component_ref_to_zero_sized_trailing_array_p (base2
))
1129 gcc_checking_assert (!end_struct_ref2
);
1130 end_struct_ref2
= base2
;
1132 if (ends_tbaa_access_path_p (base2
))
1134 ref2
= TREE_OPERAND (base2
, 0);
1135 if (end_struct_ref2
)
1137 end_struct_past_end2
= true;
1138 end_struct_ref2
= NULL
;
1141 base2
= TREE_OPERAND (base2
, 0);
1143 type2
= TREE_TYPE (base2
);
1145 /* Now search for the type1 in the access path of ref2. This
1146 would be a common base for doing offset based disambiguation on.
1147 This however only makes sense if type2 is big enough to hold type1. */
1148 int cmp_outer
= compare_type_sizes (type2
, type1
);
1150 /* If type2 is big enough to contain type1 walk its access path.
1151 We also need to care of arrays at the end of structs that may extend
1152 beyond the end of structure. If this occurs in the TBAA part of the
1153 access path, we need to consider the increased type as well. */
1156 && compare_type_sizes (TREE_TYPE (end_struct_ref2
), type1
) >= 0))
1158 int res
= aliasing_component_refs_walk (ref1
, type1
, base1
,
1161 ref2
, base2
, offset2
, max_size2
,
1167 /* If we didn't find a common base, try the other way around. */
1170 && compare_type_sizes (TREE_TYPE (end_struct_ref1
), type1
) <= 0))
1172 int res
= aliasing_component_refs_walk (ref2
, type2
, base2
,
1175 ref1
, base1
, offset1
, max_size1
,
1181 /* In the following code we make an assumption that the types in access
1182 paths do not overlap and thus accesses alias only if one path can be
1183 continuation of another. If we was not able to decide about equivalence,
1184 we need to give up. */
1187 if (!nonoverlapping_component_refs_p (ref1
, ref2
))
1189 ++alias_stats
.aliasing_component_refs_p_may_alias
;
1192 ++alias_stats
.aliasing_component_refs_p_no_alias
;
1196 if (access_path_may_continue_p (TREE_TYPE (ref1
), end_struct_past_end1
,
1198 type2
, end_struct_ref2
,
1200 || access_path_may_continue_p (TREE_TYPE (ref2
), end_struct_past_end2
,
1202 type1
, end_struct_ref1
,
1205 ++alias_stats
.aliasing_component_refs_p_may_alias
;
1208 ++alias_stats
.aliasing_component_refs_p_no_alias
;
1212 /* FIELD1 and FIELD2 are two fields of component refs. We assume
1213 that bases of both component refs are either equivalent or nonoverlapping.
1214 We do not assume that the containers of FIELD1 and FIELD2 are of the
1217 Return 0 in case the base address of component_refs are same then
1218 FIELD1 and FIELD2 have same address. Note that FIELD1 and FIELD2
1219 may not be of same type or size.
1221 Return 1 if FIELD1 and FIELD2 are non-overlapping.
1223 Return -1 otherwise.
1225 Main difference between 0 and -1 is to let
1226 nonoverlapping_component_refs_since_match_p discover the semantically
1227 equivalent part of the access path.
1229 Note that this function is used even with -fno-strict-aliasing
1230 and makes use of no TBAA assumptions. */
1233 nonoverlapping_component_refs_p_1 (const_tree field1
, const_tree field2
)
1235 /* If both fields are of the same type, we could save hard work of
1236 comparing offsets. */
1237 tree type1
= DECL_CONTEXT (field1
);
1238 tree type2
= DECL_CONTEXT (field2
);
1240 if (TREE_CODE (type1
) == RECORD_TYPE
1241 && DECL_BIT_FIELD_REPRESENTATIVE (field1
))
1242 field1
= DECL_BIT_FIELD_REPRESENTATIVE (field1
);
1243 if (TREE_CODE (type2
) == RECORD_TYPE
1244 && DECL_BIT_FIELD_REPRESENTATIVE (field2
))
1245 field2
= DECL_BIT_FIELD_REPRESENTATIVE (field2
);
1247 /* ??? Bitfields can overlap at RTL level so punt on them.
1248 FIXME: RTL expansion should be fixed by adjusting the access path
1249 when producing MEM_ATTRs for MEMs which are wider than
1250 the bitfields similarly as done in set_mem_attrs_minus_bitpos. */
1251 if (DECL_BIT_FIELD (field1
) && DECL_BIT_FIELD (field2
))
1254 /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE. */
1255 if (type1
== type2
&& TREE_CODE (type1
) == RECORD_TYPE
)
1256 return field1
!= field2
;
1258 /* In common case the offsets and bit offsets will be the same.
1259 However if frontends do not agree on the alignment, they may be
1260 different even if they actually represent same address.
1261 Try the common case first and if that fails calcualte the
1262 actual bit offset. */
1263 if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1
),
1264 DECL_FIELD_OFFSET (field2
))
1265 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1
),
1266 DECL_FIELD_BIT_OFFSET (field2
)))
1269 /* Note that it may be possible to use component_ref_field_offset
1270 which would provide offsets as trees. However constructing and folding
1271 trees is expensive and does not seem to be worth the compile time
1274 poly_uint64 offset1
, offset2
;
1275 poly_uint64 bit_offset1
, bit_offset2
;
1277 if (poly_int_tree_p (DECL_FIELD_OFFSET (field1
), &offset1
)
1278 && poly_int_tree_p (DECL_FIELD_OFFSET (field2
), &offset2
)
1279 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1
), &bit_offset1
)
1280 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2
), &bit_offset2
))
1282 offset1
= (offset1
<< LOG2_BITS_PER_UNIT
) + bit_offset1
;
1283 offset2
= (offset2
<< LOG2_BITS_PER_UNIT
) + bit_offset2
;
1285 if (known_eq (offset1
, offset2
))
1288 poly_uint64 size1
, size2
;
1290 if (poly_int_tree_p (DECL_SIZE (field1
), &size1
)
1291 && poly_int_tree_p (DECL_SIZE (field2
), &size2
)
1292 && !ranges_maybe_overlap_p (offset1
, size1
, offset2
, size2
))
1295 /* Resort to slower overlap checking by looking for matching types in
1296 the middle of access path. */
1300 /* Return low bound of array. Do not produce new trees
1301 and thus do not care about particular type of integer constant
1302 and placeholder exprs. */
1305 cheap_array_ref_low_bound (tree ref
)
1307 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref
, 0)));
1309 /* Avoid expensive array_ref_low_bound.
1310 low bound is either stored in operand2, or it is TYPE_MIN_VALUE of domain
1311 type or it is zero. */
1312 if (TREE_OPERAND (ref
, 2))
1313 return TREE_OPERAND (ref
, 2);
1314 else if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
1315 return TYPE_MIN_VALUE (domain_type
);
1317 return integer_zero_node
;
1320 /* REF1 and REF2 are ARRAY_REFs with either same base address or which are
1321 completely disjoint.
1323 Return 1 if the refs are non-overlapping.
1324 Return 0 if they are possibly overlapping but if so the overlap again
1325 starts on the same address.
1326 Return -1 otherwise. */
1329 nonoverlapping_array_refs_p (tree ref1
, tree ref2
)
1331 tree index1
= TREE_OPERAND (ref1
, 1);
1332 tree index2
= TREE_OPERAND (ref2
, 1);
1333 tree low_bound1
= cheap_array_ref_low_bound(ref1
);
1334 tree low_bound2
= cheap_array_ref_low_bound(ref2
);
1336 /* Handle zero offsets first: we do not need to match type size in this
1338 if (operand_equal_p (index1
, low_bound1
, 0)
1339 && operand_equal_p (index2
, low_bound2
, 0))
1342 /* If type sizes are different, give up.
1344 Avoid expensive array_ref_element_size.
1345 If operand 3 is present it denotes size in the alignmnet units.
1346 Otherwise size is TYPE_SIZE of the element type.
1347 Handle only common cases where types are of the same "kind". */
1348 if ((TREE_OPERAND (ref1
, 3) == NULL
) != (TREE_OPERAND (ref2
, 3) == NULL
))
1351 tree elmt_type1
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1
, 0)));
1352 tree elmt_type2
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2
, 0)));
1354 if (TREE_OPERAND (ref1
, 3))
1356 if (TYPE_ALIGN (elmt_type1
) != TYPE_ALIGN (elmt_type2
)
1357 || !operand_equal_p (TREE_OPERAND (ref1
, 3),
1358 TREE_OPERAND (ref2
, 3), 0))
1363 if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1
),
1364 TYPE_SIZE_UNIT (elmt_type2
), 0))
1368 /* Since we know that type sizes are the same, there is no need to return
1369 -1 after this point. Partial overlap can not be introduced. */
1371 /* We may need to fold trees in this case.
1372 TODO: Handle integer constant case at least. */
1373 if (!operand_equal_p (low_bound1
, low_bound2
, 0))
1376 if (TREE_CODE (index1
) == INTEGER_CST
&& TREE_CODE (index2
) == INTEGER_CST
)
1378 if (tree_int_cst_equal (index1
, index2
))
1382 /* TODO: We can use VRP to further disambiguate here. */
/* NOTE(review): this region was damaged in extraction (original line numbers
   fused into the text, some source lines dropped).  Only comments were added
   below; every other token is preserved verbatim.  */
1386 /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
1387 MATCH2 either point to the same address or are disjoint.
1388 MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
1389 respectively or NULL in the case we established equivalence of bases.
1390 If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
1391 overlap by exact multiply of their element size.
1393 This test works by matching the initial segment of the access path
1394 and does not rely on TBAA thus is safe for !flag_strict_aliasing if
1395 match was determined without use of TBAA oracle.
1397 Return 1 if we can determine that component references REF1 and REF2,
1398 that are within a common DECL, cannot overlap.
1400 Return 0 if paths are same and thus there is nothing to disambiguate more
1401 (i.e. there is must alias assuming there is must alias between MATCH1 and
1404 Return -1 if we can not determine 0 or 1 - this happens when
1405 non-matching types were met in the path.
1406 In this case it may make sense to continue by other disambiguation
1410 nonoverlapping_refs_since_match_p (tree match1
, tree ref1
,
1411 tree match2
, tree ref2
,
1412 bool partial_overlap
)
1414 int ntbaa1
= 0, ntbaa2
= 0;
1415 /* Early return if there are no references to match, we do not need
1416 to walk the access paths.
1418 Do not consider this as may-alias for stats - it is more useful
1419 to have information how many disambiguations happened provided that
1420 the query was meaningful. */
/* Collect the handled components of both access paths on stacks; ntbaa1 and
   ntbaa2 appear to track the depth of the TBAA-valid part of each path (they
   are tested for < 0 further below) -- TODO confirm against pristine source.  */
1422 if (match1
== ref1
|| !handled_component_p (ref1
)
1423 || match2
== ref2
|| !handled_component_p (ref2
))
1426 auto_vec
<tree
, 16> component_refs1
;
1427 auto_vec
<tree
, 16> component_refs2
;
1429 /* Create the stack of handled components for REF1. */
1430 while (handled_component_p (ref1
) && ref1
!= match1
)
1432 /* We use TBAA only to re-synchronize after mismatched refs. So we
1433 do not need to truncate access path after TBAA part ends. */
1434 if (ends_tbaa_access_path_p (ref1
))
1438 component_refs1
.safe_push (ref1
);
1439 ref1
= TREE_OPERAND (ref1
, 0);
1442 /* Create the stack of handled components for REF2. */
1443 while (handled_component_p (ref2
) && ref2
!= match2
)
1445 if (ends_tbaa_access_path_p (ref2
))
1449 component_refs2
.safe_push (ref2
);
1450 ref2
= TREE_OPERAND (ref2
, 0);
1453 if (!flag_strict_aliasing
)
1459 bool mem_ref1
= TREE_CODE (ref1
) == MEM_REF
&& ref1
!= match1
;
1460 bool mem_ref2
= TREE_CODE (ref2
) == MEM_REF
&& ref2
!= match2
;
1462 /* If only one of access path starts with MEM_REF check that offset is 0
1463 so the addresses stays the same after stripping it.
1464 TODO: In this case we may walk the other access path until we get same
1467 If both starts with MEM_REF, offset has to be same. */
/* Mismatched MEM_REF offsets would break the equal-or-disjoint base
   invariant; record a may-alias and give up.  */
1468 if ((mem_ref1
&& !mem_ref2
&& !integer_zerop (TREE_OPERAND (ref1
, 1)))
1469 || (mem_ref2
&& !mem_ref1
&& !integer_zerop (TREE_OPERAND (ref2
, 1)))
1470 || (mem_ref1
&& mem_ref2
1471 && !tree_int_cst_equal (TREE_OPERAND (ref1
, 1),
1472 TREE_OPERAND (ref2
, 1))))
1474 ++alias_stats
.nonoverlapping_refs_since_match_p_may_alias
;
1478 /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
1479 to handle them here at all. */
1480 gcc_checking_assert (TREE_CODE (ref1
) != TARGET_MEM_REF
1481 && TREE_CODE (ref2
) != TARGET_MEM_REF
);
1483 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1484 rank. This is sufficient because we start from the same DECL and you
1485 cannot reference several fields at a time with COMPONENT_REFs (unlike
1486 with ARRAY_RANGE_REFs for arrays) so you always need the same number
1487 of them to access a sub-component, unless you're in a union, in which
1488 case the return value will precisely be false. */
/* Main walk: match ARRAY_REFs rank by rank first, then COMPONENT_REFs.  */
1491 /* Track if we seen unmatched ref with non-zero offset. In this case
1492 we must look for partial overlaps. */
1493 bool seen_unmatched_ref_p
= false;
1495 /* First match ARRAY_REFs and try to disambiguate. */
1496 if (!component_refs1
.is_empty ()
1497 && !component_refs2
.is_empty ())
1499 unsigned int narray_refs1
=0, narray_refs2
=0;
1501 /* We generally assume that both access paths start by the same sequence
1502 of refs. However if the number of array refs is not in sync, try
1503 to recover and pop elts until the numbers match. This helps the case
1504 where one access path starts by array and other by element. */
1505 for (narray_refs1
= 0; narray_refs1
< component_refs1
.length ();
1507 if (TREE_CODE (component_refs1
[component_refs1
.length()
1508 - 1 - narray_refs1
]) != ARRAY_REF
)
1511 for (narray_refs2
= 0; narray_refs2
< component_refs2
.length ();
1513 if (TREE_CODE (component_refs2
[component_refs2
.length()
1514 - 1 - narray_refs2
]) != ARRAY_REF
)
1516 for (; narray_refs1
> narray_refs2
; narray_refs1
--)
1518 ref1
= component_refs1
.pop ();
1521 /* If index is non-zero we need to check whether the reference
1522 does not break the main invariant that bases are either
1523 disjoint or equal. Consider the example:
1525 unsigned char out[][1];
1529 Here bases out and out are same, but after removing the
1530 [i] index, this invariant no longer holds, because
1531 out[i] points to the middle of array out.
1533 TODO: If size of type of the skipped reference is an integer
1534 multiple of the size of type of the other reference this
1535 invariant can be verified, but even then it is not completely
1536 safe with !flag_strict_aliasing if the other reference contains
1537 unbounded array accesses.
1540 if (!operand_equal_p (TREE_OPERAND (ref1
, 1),
1541 cheap_array_ref_low_bound (ref1
), 0))
1544 for (; narray_refs2
> narray_refs1
; narray_refs2
--)
1546 ref2
= component_refs2
.pop ();
1548 if (!operand_equal_p (TREE_OPERAND (ref2
, 1),
1549 cheap_array_ref_low_bound (ref2
), 0))
1552 /* Try to disambiguate matched arrays. */
1553 for (unsigned int i
= 0; i
< narray_refs1
; i
++)
1555 int cmp
= nonoverlapping_array_refs_p (component_refs1
.pop (),
1556 component_refs2
.pop ());
1559 if (cmp
== 1 && !partial_overlap
)
1562 .nonoverlapping_refs_since_match_p_no_alias
;
1567 seen_unmatched_ref_p
= true;
1568 /* We can not maintain the invariant that bases are either
1569 same or completely disjoint. However we can still recover
1570 from type based alias analysis if we reach references to
1571 same sizes. We do not attempt to match array sizes, so
1572 just finish array walking and look for component refs. */
1573 if (ntbaa1
< 0 || ntbaa2
< 0)
1575 ++alias_stats
.nonoverlapping_refs_since_match_p_may_alias
;
1578 for (i
++; i
< narray_refs1
; i
++)
1580 component_refs1
.pop ();
1581 component_refs2
.pop ();
1587 partial_overlap
= false;
1591 /* Next look for component_refs. */
1594 if (component_refs1
.is_empty ())
1597 .nonoverlapping_refs_since_match_p_must_overlap
;
1600 ref1
= component_refs1
.pop ();
1602 if (TREE_CODE (ref1
) != COMPONENT_REF
)
1604 seen_unmatched_ref_p
= true;
1605 if (ntbaa1
< 0 || ntbaa2
< 0)
1607 ++alias_stats
.nonoverlapping_refs_since_match_p_may_alias
;
1612 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1
, 0))));
1616 if (component_refs2
.is_empty ())
1619 .nonoverlapping_refs_since_match_p_must_overlap
;
1622 ref2
= component_refs2
.pop ();
1624 if (TREE_CODE (ref2
) != COMPONENT_REF
)
1626 if (ntbaa1
< 0 || ntbaa2
< 0)
1628 ++alias_stats
.nonoverlapping_refs_since_match_p_may_alias
;
1631 seen_unmatched_ref_p
= true;
1634 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2
, 0))));
1636 /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
1638 gcc_checking_assert (TREE_CODE (ref1
) == COMPONENT_REF
1639 && TREE_CODE (ref2
) == COMPONENT_REF
);
1641 tree field1
= TREE_OPERAND (ref1
, 1);
1642 tree field2
= TREE_OPERAND (ref2
, 1);
1644 /* ??? We cannot simply use the type of operand #0 of the refs here
1645 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1646 for common blocks instead of using unions like everyone else. */
/* Compare the two FIELD_DECLs found at the same rank of the paths.  */
1647 tree type1
= DECL_CONTEXT (field1
);
1648 tree type2
= DECL_CONTEXT (field2
);
1650 partial_overlap
= false;
1652 /* If we skipped array refs on type of different sizes, we can
1653 no longer be sure that there are not partial overlaps. */
1654 if (seen_unmatched_ref_p
&& ntbaa1
>= 0 && ntbaa2
>= 0
1655 && !operand_equal_p (TYPE_SIZE (type1
), TYPE_SIZE (type2
), 0))
1658 .nonoverlapping_refs_since_match_p_may_alias
;
1662 int cmp
= nonoverlapping_component_refs_p_1 (field1
, field2
);
1666 .nonoverlapping_refs_since_match_p_may_alias
;
1672 .nonoverlapping_refs_since_match_p_no_alias
;
1677 ++alias_stats
.nonoverlapping_refs_since_match_p_must_overlap
;
1681 /* Return TYPE_UID which can be used to match record types we consider
1682 same for TBAA purposes. */
1685 ncr_type_uid (const_tree field
)
1687 /* ??? We cannot simply use the type of operand #0 of the refs here
1688 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1689 for common blocks instead of using unions like everyone else. */
1690 tree type
= DECL_FIELD_CONTEXT (field
);
1691 /* With LTO types considered same_type_for_tbaa_p
1692 from different translation unit may not have same
1693 main variant. They however have same TYPE_CANONICAL. */
1694 if (TYPE_CANONICAL (type
))
1695 return TYPE_UID (TYPE_CANONICAL (type
));
1696 return TYPE_UID (type
);
1699 /* qsort compare function to sort FIELD_DECLs after their
1700 DECL_FIELD_CONTEXT TYPE_UID. */
1703 ncr_compar (const void *field1_
, const void *field2_
)
1705 const_tree field1
= *(const_tree
*) const_cast <void *>(field1_
);
1706 const_tree field2
= *(const_tree
*) const_cast <void *>(field2_
);
1707 unsigned int uid1
= ncr_type_uid (field1
);
1708 unsigned int uid2
= ncr_type_uid (field2
);
1712 else if (uid1
> uid2
)
1717 /* Return true if we can determine that the fields referenced cannot
1718 overlap for any pair of objects. This relies on TBAA. */
1721 nonoverlapping_component_refs_p (const_tree x
, const_tree y
)
1723 /* Early return if we have nothing to do.
1725 Do not consider this as may-alias for stats - it is more useful
1726 to have information how many disambiguations happened provided that
1727 the query was meaningful. */
1728 if (!flag_strict_aliasing
1730 || !handled_component_p (x
)
1731 || !handled_component_p (y
))
1734 auto_vec
<const_tree
, 16> fieldsx
;
1735 while (handled_component_p (x
))
1737 if (TREE_CODE (x
) == COMPONENT_REF
)
1739 tree field
= TREE_OPERAND (x
, 1);
1740 tree type
= DECL_FIELD_CONTEXT (field
);
1741 if (TREE_CODE (type
) == RECORD_TYPE
)
1742 fieldsx
.safe_push (field
);
1744 else if (ends_tbaa_access_path_p (x
))
1745 fieldsx
.truncate (0);
1746 x
= TREE_OPERAND (x
, 0);
1748 if (fieldsx
.length () == 0)
1750 auto_vec
<const_tree
, 16> fieldsy
;
1751 while (handled_component_p (y
))
1753 if (TREE_CODE (y
) == COMPONENT_REF
)
1755 tree field
= TREE_OPERAND (y
, 1);
1756 tree type
= DECL_FIELD_CONTEXT (field
);
1757 if (TREE_CODE (type
) == RECORD_TYPE
)
1758 fieldsy
.safe_push (TREE_OPERAND (y
, 1));
1760 else if (ends_tbaa_access_path_p (y
))
1761 fieldsy
.truncate (0);
1762 y
= TREE_OPERAND (y
, 0);
1764 if (fieldsy
.length () == 0)
1766 ++alias_stats
.nonoverlapping_component_refs_p_may_alias
;
1770 /* Most common case first. */
1771 if (fieldsx
.length () == 1
1772 && fieldsy
.length () == 1)
1774 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx
[0]),
1775 DECL_FIELD_CONTEXT (fieldsy
[0])) == 1
1776 && nonoverlapping_component_refs_p_1 (fieldsx
[0], fieldsy
[0]) == 1)
1778 ++alias_stats
.nonoverlapping_component_refs_p_no_alias
;
1783 ++alias_stats
.nonoverlapping_component_refs_p_may_alias
;
1788 if (fieldsx
.length () == 2)
1790 if (ncr_compar (&fieldsx
[0], &fieldsx
[1]) == 1)
1791 std::swap (fieldsx
[0], fieldsx
[1]);
1794 fieldsx
.qsort (ncr_compar
);
1796 if (fieldsy
.length () == 2)
1798 if (ncr_compar (&fieldsy
[0], &fieldsy
[1]) == 1)
1799 std::swap (fieldsy
[0], fieldsy
[1]);
1802 fieldsy
.qsort (ncr_compar
);
1804 unsigned i
= 0, j
= 0;
1807 const_tree fieldx
= fieldsx
[i
];
1808 const_tree fieldy
= fieldsy
[j
];
1810 /* We're left with accessing different fields of a structure,
1811 no possible overlap. */
1812 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx
),
1813 DECL_FIELD_CONTEXT (fieldy
)) == 1
1814 && nonoverlapping_component_refs_p_1 (fieldx
, fieldy
) == 1)
1816 ++alias_stats
.nonoverlapping_component_refs_p_no_alias
;
1820 if (ncr_type_uid (fieldx
) < ncr_type_uid (fieldy
))
1823 if (i
== fieldsx
.length ())
1829 if (j
== fieldsy
.length ())
1835 ++alias_stats
.nonoverlapping_component_refs_p_may_alias
;
1840 /* Return true if two memory references based on the variables BASE1
1841 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1842 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1843 if non-NULL are the complete memory reference trees. */
1846 decl_refs_may_alias_p (tree ref1
, tree base1
,
1847 poly_int64 offset1
, poly_int64 max_size1
,
1849 tree ref2
, tree base2
,
1850 poly_int64 offset2
, poly_int64 max_size2
,
1853 gcc_checking_assert (DECL_P (base1
) && DECL_P (base2
));
1855 /* If both references are based on different variables, they cannot alias. */
1856 if (compare_base_decls (base1
, base2
) == 0)
1859 /* If both references are based on the same variable, they cannot alias if
1860 the accesses do not overlap. */
1861 if (!ranges_maybe_overlap_p (offset1
, max_size1
, offset2
, max_size2
))
1864 /* If there is must alias, there is no use disambiguating further. */
1865 if (known_eq (size1
, max_size1
) && known_eq (size2
, max_size2
))
1868 /* For components with variable position, the above test isn't sufficient,
1869 so we disambiguate component references manually. */
1871 && handled_component_p (ref1
) && handled_component_p (ref2
)
1872 && nonoverlapping_refs_since_match_p (NULL
, ref1
, NULL
, ref2
, false) == 1)
1878 /* Return true if an indirect reference based on *PTR1 constrained
1879 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
1880 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
1881 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1882 in which case they are computed on-demand. REF1 and REF2
1883 if non-NULL are the complete memory reference trees. */
1886 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED
, tree base1
,
1887 poly_int64 offset1
, poly_int64 max_size1
,
1889 alias_set_type ref1_alias_set
,
1890 alias_set_type base1_alias_set
,
1891 tree ref2 ATTRIBUTE_UNUSED
, tree base2
,
1892 poly_int64 offset2
, poly_int64 max_size2
,
1894 alias_set_type ref2_alias_set
,
1895 alias_set_type base2_alias_set
, bool tbaa_p
)
1898 tree ptrtype1
, dbase2
;
1900 gcc_checking_assert ((TREE_CODE (base1
) == MEM_REF
1901 || TREE_CODE (base1
) == TARGET_MEM_REF
)
1904 ptr1
= TREE_OPERAND (base1
, 0);
1905 poly_offset_int moff
= mem_ref_offset (base1
) << LOG2_BITS_PER_UNIT
;
1907 /* If only one reference is based on a variable, they cannot alias if
1908 the pointer access is beyond the extent of the variable access.
1909 (the pointer base cannot validly point to an offset less than zero
1911 ??? IVOPTs creates bases that do not honor this restriction,
1912 so do not apply this optimization for TARGET_MEM_REFs. */
1913 if (TREE_CODE (base1
) != TARGET_MEM_REF
1914 && !ranges_maybe_overlap_p (offset1
+ moff
, -1, offset2
, max_size2
))
1916 /* They also cannot alias if the pointer may not point to the decl. */
1917 if (!ptr_deref_may_alias_decl_p (ptr1
, base2
))
1920 /* Disambiguations that rely on strict aliasing rules follow. */
1921 if (!flag_strict_aliasing
|| !tbaa_p
)
1924 /* If the alias set for a pointer access is zero all bets are off. */
1925 if (base1_alias_set
== 0 || base2_alias_set
== 0)
1928 /* When we are trying to disambiguate an access with a pointer dereference
1929 as base versus one with a decl as base we can use both the size
1930 of the decl and its dynamic type for extra disambiguation.
1931 ??? We do not know anything about the dynamic type of the decl
1932 other than that its alias-set contains base2_alias_set as a subset
1933 which does not help us here. */
1934 /* As we know nothing useful about the dynamic type of the decl just
1935 use the usual conflict check rather than a subset test.
1936 ??? We could introduce -fvery-strict-aliasing when the language
1937 does not allow decls to have a dynamic type that differs from their
1938 static type. Then we can check
1939 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
1940 if (base1_alias_set
!= base2_alias_set
1941 && !alias_sets_conflict_p (base1_alias_set
, base2_alias_set
))
1944 ptrtype1
= TREE_TYPE (TREE_OPERAND (base1
, 1));
1946 /* If the size of the access relevant for TBAA through the pointer
1947 is bigger than the size of the decl we can't possibly access the
1948 decl via that pointer. */
1949 if (/* ??? This in turn may run afoul when a decl of type T which is
1950 a member of union type U is accessed through a pointer to
1951 type U and sizeof T is smaller than sizeof U. */
1952 TREE_CODE (TREE_TYPE (ptrtype1
)) != UNION_TYPE
1953 && TREE_CODE (TREE_TYPE (ptrtype1
)) != QUAL_UNION_TYPE
1954 && compare_sizes (DECL_SIZE (base2
),
1955 TYPE_SIZE (TREE_TYPE (ptrtype1
))) < 0)
1961 /* If the decl is accessed via a MEM_REF, reconstruct the base
1962 we can use for TBAA and an appropriately adjusted offset. */
1964 while (handled_component_p (dbase2
))
1965 dbase2
= TREE_OPERAND (dbase2
, 0);
1966 poly_int64 doffset1
= offset1
;
1967 poly_offset_int doffset2
= offset2
;
1968 if (TREE_CODE (dbase2
) == MEM_REF
1969 || TREE_CODE (dbase2
) == TARGET_MEM_REF
)
1971 doffset2
-= mem_ref_offset (dbase2
) << LOG2_BITS_PER_UNIT
;
1972 tree ptrtype2
= TREE_TYPE (TREE_OPERAND (dbase2
, 1));
1973 /* If second reference is view-converted, give up now. */
1974 if (same_type_for_tbaa (TREE_TYPE (dbase2
), TREE_TYPE (ptrtype2
)) != 1)
1978 /* If first reference is view-converted, give up now. */
1979 if (same_type_for_tbaa (TREE_TYPE (base1
), TREE_TYPE (ptrtype1
)) != 1)
1982 /* If both references are through the same type, they do not alias
1983 if the accesses do not overlap. This does extra disambiguation
1984 for mixed/pointer accesses but requires strict aliasing.
1985 For MEM_REFs we require that the component-ref offset we computed
1986 is relative to the start of the type which we ensure by
1987 comparing rvalue and access type and disregarding the constant
1990 But avoid treating variable length arrays as "objects", instead assume they
1991 can overlap by an exact multiple of their element size.
1992 See gcc.dg/torture/alias-2.c. */
1993 if (((TREE_CODE (base1
) != TARGET_MEM_REF
1994 || (!TMR_INDEX (base1
) && !TMR_INDEX2 (base1
)))
1995 && (TREE_CODE (dbase2
) != TARGET_MEM_REF
1996 || (!TMR_INDEX (dbase2
) && !TMR_INDEX2 (dbase2
))))
1997 && same_type_for_tbaa (TREE_TYPE (base1
), TREE_TYPE (dbase2
)) == 1)
1999 bool partial_overlap
= (TREE_CODE (TREE_TYPE (base1
)) == ARRAY_TYPE
2000 && (TYPE_SIZE (TREE_TYPE (base1
))
2001 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1
)))
2003 if (!partial_overlap
2004 && !ranges_maybe_overlap_p (doffset1
, max_size1
, doffset2
, max_size2
))
2007 /* If there is must alias, there is no use disambiguating further. */
2008 || (!partial_overlap
2009 && known_eq (size1
, max_size1
) && known_eq (size2
, max_size2
)))
2011 int res
= nonoverlapping_refs_since_match_p (base1
, ref1
, base2
, ref2
,
2014 return !nonoverlapping_component_refs_p (ref1
, ref2
);
2018 /* Do access-path based disambiguation. */
2020 && (handled_component_p (ref1
) || handled_component_p (ref2
)))
2021 return aliasing_component_refs_p (ref1
,
2022 ref1_alias_set
, base1_alias_set
,
2025 ref2_alias_set
, base2_alias_set
,
2026 offset2
, max_size2
);
2031 /* Return true if two indirect references based on *PTR1
2032 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2033 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
2034 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2035 in which case they are computed on-demand. REF1 and REF2
2036 if non-NULL are the complete memory reference trees. */
2039 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED
, tree base1
,
2040 poly_int64 offset1
, poly_int64 max_size1
,
2042 alias_set_type ref1_alias_set
,
2043 alias_set_type base1_alias_set
,
2044 tree ref2 ATTRIBUTE_UNUSED
, tree base2
,
2045 poly_int64 offset2
, poly_int64 max_size2
,
2047 alias_set_type ref2_alias_set
,
2048 alias_set_type base2_alias_set
, bool tbaa_p
)
2052 tree ptrtype1
, ptrtype2
;
2054 gcc_checking_assert ((TREE_CODE (base1
) == MEM_REF
2055 || TREE_CODE (base1
) == TARGET_MEM_REF
)
2056 && (TREE_CODE (base2
) == MEM_REF
2057 || TREE_CODE (base2
) == TARGET_MEM_REF
));
2059 ptr1
= TREE_OPERAND (base1
, 0);
2060 ptr2
= TREE_OPERAND (base2
, 0);
2062 /* If both bases are based on pointers they cannot alias if they may not
2063 point to the same memory object or if they point to the same object
2064 and the accesses do not overlap. */
2065 if ((!cfun
|| gimple_in_ssa_p (cfun
))
2066 && operand_equal_p (ptr1
, ptr2
, 0)
2067 && (((TREE_CODE (base1
) != TARGET_MEM_REF
2068 || (!TMR_INDEX (base1
) && !TMR_INDEX2 (base1
)))
2069 && (TREE_CODE (base2
) != TARGET_MEM_REF
2070 || (!TMR_INDEX (base2
) && !TMR_INDEX2 (base2
))))
2071 || (TREE_CODE (base1
) == TARGET_MEM_REF
2072 && TREE_CODE (base2
) == TARGET_MEM_REF
2073 && (TMR_STEP (base1
) == TMR_STEP (base2
)
2074 || (TMR_STEP (base1
) && TMR_STEP (base2
)
2075 && operand_equal_p (TMR_STEP (base1
),
2076 TMR_STEP (base2
), 0)))
2077 && (TMR_INDEX (base1
) == TMR_INDEX (base2
)
2078 || (TMR_INDEX (base1
) && TMR_INDEX (base2
)
2079 && operand_equal_p (TMR_INDEX (base1
),
2080 TMR_INDEX (base2
), 0)))
2081 && (TMR_INDEX2 (base1
) == TMR_INDEX2 (base2
)
2082 || (TMR_INDEX2 (base1
) && TMR_INDEX2 (base2
)
2083 && operand_equal_p (TMR_INDEX2 (base1
),
2084 TMR_INDEX2 (base2
), 0))))))
2086 poly_offset_int moff1
= mem_ref_offset (base1
) << LOG2_BITS_PER_UNIT
;
2087 poly_offset_int moff2
= mem_ref_offset (base2
) << LOG2_BITS_PER_UNIT
;
2088 if (!ranges_maybe_overlap_p (offset1
+ moff1
, max_size1
,
2089 offset2
+ moff2
, max_size2
))
2091 /* If there is must alias, there is no use disambiguating further. */
2092 if (known_eq (size1
, max_size1
) && known_eq (size2
, max_size2
))
2096 int res
= nonoverlapping_refs_since_match_p (NULL
, ref1
, NULL
, ref2
,
2102 if (!ptr_derefs_may_alias_p (ptr1
, ptr2
))
2105 /* Disambiguations that rely on strict aliasing rules follow. */
2106 if (!flag_strict_aliasing
|| !tbaa_p
)
2109 ptrtype1
= TREE_TYPE (TREE_OPERAND (base1
, 1));
2110 ptrtype2
= TREE_TYPE (TREE_OPERAND (base2
, 1));
2112 /* If the alias set for a pointer access is zero all bets are off. */
2113 if (base1_alias_set
== 0
2114 || base2_alias_set
== 0)
2117 /* Do type-based disambiguation. */
2118 if (base1_alias_set
!= base2_alias_set
2119 && !alias_sets_conflict_p (base1_alias_set
, base2_alias_set
))
2122 /* If either reference is view-converted, give up now. */
2123 if (same_type_for_tbaa (TREE_TYPE (base1
), TREE_TYPE (ptrtype1
)) != 1
2124 || same_type_for_tbaa (TREE_TYPE (base2
), TREE_TYPE (ptrtype2
)) != 1)
2127 /* If both references are through the same type, they do not alias
2128 if the accesses do not overlap. This does extra disambiguation
2129 for mixed/pointer accesses but requires strict aliasing. */
2130 if ((TREE_CODE (base1
) != TARGET_MEM_REF
2131 || (!TMR_INDEX (base1
) && !TMR_INDEX2 (base1
)))
2132 && (TREE_CODE (base2
) != TARGET_MEM_REF
2133 || (!TMR_INDEX (base2
) && !TMR_INDEX2 (base2
)))
2134 && same_type_for_tbaa (TREE_TYPE (ptrtype1
),
2135 TREE_TYPE (ptrtype2
)) == 1)
2137 /* But avoid treating arrays as "objects", instead assume they
2138 can overlap by an exact multiple of their element size.
2139 See gcc.dg/torture/alias-2.c. */
2140 bool partial_overlap
= TREE_CODE (TREE_TYPE (ptrtype1
)) == ARRAY_TYPE
;
2142 if (!partial_overlap
2143 && !ranges_maybe_overlap_p (offset1
, max_size1
, offset2
, max_size2
))
2146 || (!partial_overlap
2147 && known_eq (size1
, max_size1
) && known_eq (size2
, max_size2
)))
2149 int res
= nonoverlapping_refs_since_match_p (base1
, ref1
, base2
, ref2
,
2152 return !nonoverlapping_component_refs_p (ref1
, ref2
);
2156 /* Do access-path based disambiguation. */
2158 && (handled_component_p (ref1
) || handled_component_p (ref2
)))
2159 return aliasing_component_refs_p (ref1
,
2160 ref1_alias_set
, base1_alias_set
,
2163 ref2_alias_set
, base2_alias_set
,
2164 offset2
, max_size2
);
2169 /* Return true, if the two memory references REF1 and REF2 may alias. */
2172 refs_may_alias_p_2 (ao_ref
*ref1
, ao_ref
*ref2
, bool tbaa_p
)
2175 poly_int64 offset1
= 0, offset2
= 0;
2176 poly_int64 max_size1
= -1, max_size2
= -1;
2177 bool var1_p
, var2_p
, ind1_p
, ind2_p
;
2179 gcc_checking_assert ((!ref1
->ref
2180 || TREE_CODE (ref1
->ref
) == SSA_NAME
2181 || DECL_P (ref1
->ref
)
2182 || TREE_CODE (ref1
->ref
) == STRING_CST
2183 || handled_component_p (ref1
->ref
)
2184 || TREE_CODE (ref1
->ref
) == MEM_REF
2185 || TREE_CODE (ref1
->ref
) == TARGET_MEM_REF
)
2187 || TREE_CODE (ref2
->ref
) == SSA_NAME
2188 || DECL_P (ref2
->ref
)
2189 || TREE_CODE (ref2
->ref
) == STRING_CST
2190 || handled_component_p (ref2
->ref
)
2191 || TREE_CODE (ref2
->ref
) == MEM_REF
2192 || TREE_CODE (ref2
->ref
) == TARGET_MEM_REF
));
2194 /* Decompose the references into their base objects and the access. */
2195 base1
= ao_ref_base (ref1
);
2196 offset1
= ref1
->offset
;
2197 max_size1
= ref1
->max_size
;
2198 base2
= ao_ref_base (ref2
);
2199 offset2
= ref2
->offset
;
2200 max_size2
= ref2
->max_size
;
2202 /* We can end up with registers or constants as bases for example from
2203 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
2204 which is seen as a struct copy. */
2205 if (TREE_CODE (base1
) == SSA_NAME
2206 || TREE_CODE (base1
) == CONST_DECL
2207 || TREE_CODE (base1
) == CONSTRUCTOR
2208 || TREE_CODE (base1
) == ADDR_EXPR
2209 || CONSTANT_CLASS_P (base1
)
2210 || TREE_CODE (base2
) == SSA_NAME
2211 || TREE_CODE (base2
) == CONST_DECL
2212 || TREE_CODE (base2
) == CONSTRUCTOR
2213 || TREE_CODE (base2
) == ADDR_EXPR
2214 || CONSTANT_CLASS_P (base2
))
2217 /* We can end up referring to code via function and label decls.
2218 As we likely do not properly track code aliases conservatively
2220 if (TREE_CODE (base1
) == FUNCTION_DECL
2221 || TREE_CODE (base1
) == LABEL_DECL
2222 || TREE_CODE (base2
) == FUNCTION_DECL
2223 || TREE_CODE (base2
) == LABEL_DECL
)
2226 /* Two volatile accesses always conflict. */
2227 if (ref1
->volatile_p
2228 && ref2
->volatile_p
)
2231 /* Defer to simple offset based disambiguation if we have
2232 references based on two decls. Do this before defering to
2233 TBAA to handle must-alias cases in conformance with the
2234 GCC extension of allowing type-punning through unions. */
2235 var1_p
= DECL_P (base1
);
2236 var2_p
= DECL_P (base2
);
2237 if (var1_p
&& var2_p
)
2238 return decl_refs_may_alias_p (ref1
->ref
, base1
, offset1
, max_size1
,
2240 ref2
->ref
, base2
, offset2
, max_size2
,
2243 /* Handle restrict based accesses.
2244 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
2246 tree rbase1
= base1
;
2247 tree rbase2
= base2
;
2252 while (handled_component_p (rbase1
))
2253 rbase1
= TREE_OPERAND (rbase1
, 0);
2259 while (handled_component_p (rbase2
))
2260 rbase2
= TREE_OPERAND (rbase2
, 0);
2262 if (rbase1
&& rbase2
2263 && (TREE_CODE (base1
) == MEM_REF
|| TREE_CODE (base1
) == TARGET_MEM_REF
)
2264 && (TREE_CODE (base2
) == MEM_REF
|| TREE_CODE (base2
) == TARGET_MEM_REF
)
2265 /* If the accesses are in the same restrict clique... */
2266 && MR_DEPENDENCE_CLIQUE (base1
) == MR_DEPENDENCE_CLIQUE (base2
)
2267 /* But based on different pointers they do not alias. */
2268 && MR_DEPENDENCE_BASE (base1
) != MR_DEPENDENCE_BASE (base2
))
2271 ind1_p
= (TREE_CODE (base1
) == MEM_REF
2272 || TREE_CODE (base1
) == TARGET_MEM_REF
);
2273 ind2_p
= (TREE_CODE (base2
) == MEM_REF
2274 || TREE_CODE (base2
) == TARGET_MEM_REF
);
2276 /* Canonicalize the pointer-vs-decl case. */
2277 if (ind1_p
&& var2_p
)
2279 std::swap (offset1
, offset2
);
2280 std::swap (max_size1
, max_size2
);
2281 std::swap (base1
, base2
);
2282 std::swap (ref1
, ref2
);
2289 /* First defer to TBAA if possible. */
2291 && flag_strict_aliasing
2292 && !alias_sets_conflict_p (ao_ref_alias_set (ref1
),
2293 ao_ref_alias_set (ref2
)))
2296 /* If the reference is based on a pointer that points to memory
2297 that may not be written to then the other reference cannot possibly
2299 if ((TREE_CODE (TREE_OPERAND (base2
, 0)) == SSA_NAME
2300 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2
, 0)))
2302 && TREE_CODE (TREE_OPERAND (base1
, 0)) == SSA_NAME
2303 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1
, 0))))
2306 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
2307 if (var1_p
&& ind2_p
)
2308 return indirect_ref_may_alias_decl_p (ref2
->ref
, base2
,
2309 offset2
, max_size2
, ref2
->size
,
2310 ao_ref_alias_set (ref2
),
2311 ao_ref_base_alias_set (ref2
),
2313 offset1
, max_size1
, ref1
->size
,
2314 ao_ref_alias_set (ref1
),
2315 ao_ref_base_alias_set (ref1
),
2317 else if (ind1_p
&& ind2_p
)
2318 return indirect_refs_may_alias_p (ref1
->ref
, base1
,
2319 offset1
, max_size1
, ref1
->size
,
2320 ao_ref_alias_set (ref1
),
2321 ao_ref_base_alias_set (ref1
),
2323 offset2
, max_size2
, ref2
->size
,
2324 ao_ref_alias_set (ref2
),
2325 ao_ref_base_alias_set (ref2
),
2331 /* Return true, if the two memory references REF1 and REF2 may alias
2332 and update statistics. */
2335 refs_may_alias_p_1 (ao_ref
*ref1
, ao_ref
*ref2
, bool tbaa_p
)
2337 bool res
= refs_may_alias_p_2 (ref1
, ref2
, tbaa_p
);
2339 ++alias_stats
.refs_may_alias_p_may_alias
;
2341 ++alias_stats
.refs_may_alias_p_no_alias
;
2346 refs_may_alias_p (tree ref1
, ao_ref
*ref2
, bool tbaa_p
)
2349 ao_ref_init (&r1
, ref1
);
2350 return refs_may_alias_p_1 (&r1
, ref2
, tbaa_p
);
2354 refs_may_alias_p (tree ref1
, tree ref2
, bool tbaa_p
)
2357 ao_ref_init (&r1
, ref1
);
2358 ao_ref_init (&r2
, ref2
);
2359 return refs_may_alias_p_1 (&r1
, &r2
, tbaa_p
);
2362 /* Returns true if there is a anti-dependence for the STORE that
2363 executes after the LOAD. */
2366 refs_anti_dependent_p (tree load
, tree store
)
2369 ao_ref_init (&r1
, load
);
2370 ao_ref_init (&r2
, store
);
2371 return refs_may_alias_p_1 (&r1
, &r2
, false);
2374 /* Returns true if there is a output dependence for the stores
2375 STORE1 and STORE2. */
2378 refs_output_dependent_p (tree store1
, tree store2
)
2381 ao_ref_init (&r1
, store1
);
2382 ao_ref_init (&r2
, store2
);
2383 return refs_may_alias_p_1 (&r1
, &r2
, false);
2386 /* If the call CALL may use the memory reference REF return true,
2387 otherwise return false. */
2390 ref_maybe_used_by_call_p_1 (gcall
*call
, ao_ref
*ref
, bool tbaa_p
)
2394 int flags
= gimple_call_flags (call
);
2396 /* Const functions without a static chain do not implicitly use memory. */
2397 if (!gimple_call_chain (call
)
2398 && (flags
& (ECF_CONST
|ECF_NOVOPS
)))
2401 base
= ao_ref_base (ref
);
2405 /* A call that is not without side-effects might involve volatile
2406 accesses and thus conflicts with all other volatile accesses. */
2407 if (ref
->volatile_p
)
2410 /* If the reference is based on a decl that is not aliased the call
2411 cannot possibly use it. */
2413 && !may_be_aliased (base
)
2414 /* But local statics can be used through recursion. */
2415 && !is_global_var (base
))
2418 callee
= gimple_call_fndecl (call
);
2420 /* Handle those builtin functions explicitly that do not act as
2421 escape points. See tree-ssa-structalias.c:find_func_aliases
2422 for the list of builtins we might need to handle here. */
2423 if (callee
!= NULL_TREE
2424 && gimple_call_builtin_p (call
, BUILT_IN_NORMAL
))
2425 switch (DECL_FUNCTION_CODE (callee
))
2427 /* All the following functions read memory pointed to by
2428 their second argument. strcat/strncat additionally
2429 reads memory pointed to by the first argument. */
2430 case BUILT_IN_STRCAT
:
2431 case BUILT_IN_STRNCAT
:
2434 ao_ref_init_from_ptr_and_size (&dref
,
2435 gimple_call_arg (call
, 0),
2437 if (refs_may_alias_p_1 (&dref
, ref
, false))
2441 case BUILT_IN_STRCPY
:
2442 case BUILT_IN_STRNCPY
:
2443 case BUILT_IN_MEMCPY
:
2444 case BUILT_IN_MEMMOVE
:
2445 case BUILT_IN_MEMPCPY
:
2446 case BUILT_IN_STPCPY
:
2447 case BUILT_IN_STPNCPY
:
2448 case BUILT_IN_TM_MEMCPY
:
2449 case BUILT_IN_TM_MEMMOVE
:
2452 tree size
= NULL_TREE
;
2453 if (gimple_call_num_args (call
) == 3)
2454 size
= gimple_call_arg (call
, 2);
2455 ao_ref_init_from_ptr_and_size (&dref
,
2456 gimple_call_arg (call
, 1),
2458 return refs_may_alias_p_1 (&dref
, ref
, false);
2460 case BUILT_IN_STRCAT_CHK
:
2461 case BUILT_IN_STRNCAT_CHK
:
2464 ao_ref_init_from_ptr_and_size (&dref
,
2465 gimple_call_arg (call
, 0),
2467 if (refs_may_alias_p_1 (&dref
, ref
, false))
2471 case BUILT_IN_STRCPY_CHK
:
2472 case BUILT_IN_STRNCPY_CHK
:
2473 case BUILT_IN_MEMCPY_CHK
:
2474 case BUILT_IN_MEMMOVE_CHK
:
2475 case BUILT_IN_MEMPCPY_CHK
:
2476 case BUILT_IN_STPCPY_CHK
:
2477 case BUILT_IN_STPNCPY_CHK
:
2480 tree size
= NULL_TREE
;
2481 if (gimple_call_num_args (call
) == 4)
2482 size
= gimple_call_arg (call
, 2);
2483 ao_ref_init_from_ptr_and_size (&dref
,
2484 gimple_call_arg (call
, 1),
2486 return refs_may_alias_p_1 (&dref
, ref
, false);
2488 case BUILT_IN_BCOPY
:
2491 tree size
= gimple_call_arg (call
, 2);
2492 ao_ref_init_from_ptr_and_size (&dref
,
2493 gimple_call_arg (call
, 0),
2495 return refs_may_alias_p_1 (&dref
, ref
, false);
2498 /* The following functions read memory pointed to by their
2500 CASE_BUILT_IN_TM_LOAD (1):
2501 CASE_BUILT_IN_TM_LOAD (2):
2502 CASE_BUILT_IN_TM_LOAD (4):
2503 CASE_BUILT_IN_TM_LOAD (8):
2504 CASE_BUILT_IN_TM_LOAD (FLOAT
):
2505 CASE_BUILT_IN_TM_LOAD (DOUBLE
):
2506 CASE_BUILT_IN_TM_LOAD (LDOUBLE
):
2507 CASE_BUILT_IN_TM_LOAD (M64
):
2508 CASE_BUILT_IN_TM_LOAD (M128
):
2509 CASE_BUILT_IN_TM_LOAD (M256
):
2510 case BUILT_IN_TM_LOG
:
2511 case BUILT_IN_TM_LOG_1
:
2512 case BUILT_IN_TM_LOG_2
:
2513 case BUILT_IN_TM_LOG_4
:
2514 case BUILT_IN_TM_LOG_8
:
2515 case BUILT_IN_TM_LOG_FLOAT
:
2516 case BUILT_IN_TM_LOG_DOUBLE
:
2517 case BUILT_IN_TM_LOG_LDOUBLE
:
2518 case BUILT_IN_TM_LOG_M64
:
2519 case BUILT_IN_TM_LOG_M128
:
2520 case BUILT_IN_TM_LOG_M256
:
2521 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call
, 0), ref
);
2523 /* These read memory pointed to by the first argument. */
2524 case BUILT_IN_STRDUP
:
2525 case BUILT_IN_STRNDUP
:
2526 case BUILT_IN_REALLOC
:
2529 tree size
= NULL_TREE
;
2530 if (gimple_call_num_args (call
) == 2)
2531 size
= gimple_call_arg (call
, 1);
2532 ao_ref_init_from_ptr_and_size (&dref
,
2533 gimple_call_arg (call
, 0),
2535 return refs_may_alias_p_1 (&dref
, ref
, false);
2537 /* These read memory pointed to by the first argument. */
2538 case BUILT_IN_INDEX
:
2539 case BUILT_IN_STRCHR
:
2540 case BUILT_IN_STRRCHR
:
2543 ao_ref_init_from_ptr_and_size (&dref
,
2544 gimple_call_arg (call
, 0),
2546 return refs_may_alias_p_1 (&dref
, ref
, false);
2548 /* These read memory pointed to by the first argument with size
2549 in the third argument. */
2550 case BUILT_IN_MEMCHR
:
2553 ao_ref_init_from_ptr_and_size (&dref
,
2554 gimple_call_arg (call
, 0),
2555 gimple_call_arg (call
, 2));
2556 return refs_may_alias_p_1 (&dref
, ref
, false);
2558 /* These read memory pointed to by the first and second arguments. */
2559 case BUILT_IN_STRSTR
:
2560 case BUILT_IN_STRPBRK
:
2563 ao_ref_init_from_ptr_and_size (&dref
,
2564 gimple_call_arg (call
, 0),
2566 if (refs_may_alias_p_1 (&dref
, ref
, false))
2568 ao_ref_init_from_ptr_and_size (&dref
,
2569 gimple_call_arg (call
, 1),
2571 return refs_may_alias_p_1 (&dref
, ref
, false);
2574 /* The following builtins do not read from memory. */
2576 case BUILT_IN_MALLOC
:
2577 case BUILT_IN_POSIX_MEMALIGN
:
2578 case BUILT_IN_ALIGNED_ALLOC
:
2579 case BUILT_IN_CALLOC
:
2580 CASE_BUILT_IN_ALLOCA
:
2581 case BUILT_IN_STACK_SAVE
:
2582 case BUILT_IN_STACK_RESTORE
:
2583 case BUILT_IN_MEMSET
:
2584 case BUILT_IN_TM_MEMSET
:
2585 case BUILT_IN_MEMSET_CHK
:
2586 case BUILT_IN_FREXP
:
2587 case BUILT_IN_FREXPF
:
2588 case BUILT_IN_FREXPL
:
2589 case BUILT_IN_GAMMA_R
:
2590 case BUILT_IN_GAMMAF_R
:
2591 case BUILT_IN_GAMMAL_R
:
2592 case BUILT_IN_LGAMMA_R
:
2593 case BUILT_IN_LGAMMAF_R
:
2594 case BUILT_IN_LGAMMAL_R
:
2596 case BUILT_IN_MODFF
:
2597 case BUILT_IN_MODFL
:
2598 case BUILT_IN_REMQUO
:
2599 case BUILT_IN_REMQUOF
:
2600 case BUILT_IN_REMQUOL
:
2601 case BUILT_IN_SINCOS
:
2602 case BUILT_IN_SINCOSF
:
2603 case BUILT_IN_SINCOSL
:
2604 case BUILT_IN_ASSUME_ALIGNED
:
2605 case BUILT_IN_VA_END
:
2607 /* __sync_* builtins and some OpenMP builtins act as threading
2609 #undef DEF_SYNC_BUILTIN
2610 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2611 #include "sync-builtins.def"
2612 #undef DEF_SYNC_BUILTIN
2613 case BUILT_IN_GOMP_ATOMIC_START
:
2614 case BUILT_IN_GOMP_ATOMIC_END
:
2615 case BUILT_IN_GOMP_BARRIER
:
2616 case BUILT_IN_GOMP_BARRIER_CANCEL
:
2617 case BUILT_IN_GOMP_TASKWAIT
:
2618 case BUILT_IN_GOMP_TASKGROUP_END
:
2619 case BUILT_IN_GOMP_CRITICAL_START
:
2620 case BUILT_IN_GOMP_CRITICAL_END
:
2621 case BUILT_IN_GOMP_CRITICAL_NAME_START
:
2622 case BUILT_IN_GOMP_CRITICAL_NAME_END
:
2623 case BUILT_IN_GOMP_LOOP_END
:
2624 case BUILT_IN_GOMP_LOOP_END_CANCEL
:
2625 case BUILT_IN_GOMP_ORDERED_START
:
2626 case BUILT_IN_GOMP_ORDERED_END
:
2627 case BUILT_IN_GOMP_SECTIONS_END
:
2628 case BUILT_IN_GOMP_SECTIONS_END_CANCEL
:
2629 case BUILT_IN_GOMP_SINGLE_COPY_START
:
2630 case BUILT_IN_GOMP_SINGLE_COPY_END
:
2634 /* Fallthru to general call handling. */;
2637 /* Check if base is a global static variable that is not read
2639 if (callee
!= NULL_TREE
&& VAR_P (base
) && TREE_STATIC (base
))
2641 struct cgraph_node
*node
= cgraph_node::get (callee
);
2645 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2646 node yet. We should enforce that there are nodes for all decls in the
2647 IL and remove this check instead. */
2649 && (id
= ipa_reference_var_uid (base
)) != -1
2650 && (read
= ipa_reference_get_read_global (node
))
2651 && !bitmap_bit_p (read
, id
))
2655 /* Check if the base variable is call-used. */
2658 if (pt_solution_includes (gimple_call_use_set (call
), base
))
2661 else if ((TREE_CODE (base
) == MEM_REF
2662 || TREE_CODE (base
) == TARGET_MEM_REF
)
2663 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
2665 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0));
2669 if (pt_solutions_intersect (gimple_call_use_set (call
), &pi
->pt
))
2675 /* Inspect call arguments for passed-by-value aliases. */
2677 for (i
= 0; i
< gimple_call_num_args (call
); ++i
)
2679 tree op
= gimple_call_arg (call
, i
);
2680 int flags
= gimple_call_arg_flags (call
, i
);
2682 if (flags
& EAF_UNUSED
)
2685 if (TREE_CODE (op
) == WITH_SIZE_EXPR
)
2686 op
= TREE_OPERAND (op
, 0);
2688 if (TREE_CODE (op
) != SSA_NAME
2689 && !is_gimple_min_invariant (op
))
2692 ao_ref_init (&r
, op
);
2693 if (refs_may_alias_p_1 (&r
, ref
, tbaa_p
))
2702 ref_maybe_used_by_call_p (gcall
*call
, ao_ref
*ref
, bool tbaa_p
)
2705 res
= ref_maybe_used_by_call_p_1 (call
, ref
, tbaa_p
);
2707 ++alias_stats
.ref_maybe_used_by_call_p_may_alias
;
2709 ++alias_stats
.ref_maybe_used_by_call_p_no_alias
;
2714 /* If the statement STMT may use the memory reference REF return
2715 true, otherwise return false. */
2718 ref_maybe_used_by_stmt_p (gimple
*stmt
, ao_ref
*ref
, bool tbaa_p
)
2720 if (is_gimple_assign (stmt
))
2724 /* All memory assign statements are single. */
2725 if (!gimple_assign_single_p (stmt
))
2728 rhs
= gimple_assign_rhs1 (stmt
);
2729 if (is_gimple_reg (rhs
)
2730 || is_gimple_min_invariant (rhs
)
2731 || gimple_assign_rhs_code (stmt
) == CONSTRUCTOR
)
2734 return refs_may_alias_p (rhs
, ref
, tbaa_p
);
2736 else if (is_gimple_call (stmt
))
2737 return ref_maybe_used_by_call_p (as_a
<gcall
*> (stmt
), ref
, tbaa_p
);
2738 else if (greturn
*return_stmt
= dyn_cast
<greturn
*> (stmt
))
2740 tree retval
= gimple_return_retval (return_stmt
);
2742 && TREE_CODE (retval
) != SSA_NAME
2743 && !is_gimple_min_invariant (retval
)
2744 && refs_may_alias_p (retval
, ref
, tbaa_p
))
2746 /* If ref escapes the function then the return acts as a use. */
2747 tree base
= ao_ref_base (ref
);
2750 else if (DECL_P (base
))
2751 return is_global_var (base
);
2752 else if (TREE_CODE (base
) == MEM_REF
2753 || TREE_CODE (base
) == TARGET_MEM_REF
)
2754 return ptr_deref_may_alias_global_p (TREE_OPERAND (base
, 0));
2762 ref_maybe_used_by_stmt_p (gimple
*stmt
, tree ref
, bool tbaa_p
)
2765 ao_ref_init (&r
, ref
);
2766 return ref_maybe_used_by_stmt_p (stmt
, &r
, tbaa_p
);
2769 /* If the call in statement CALL may clobber the memory reference REF
2770 return true, otherwise return false. */
2773 call_may_clobber_ref_p_1 (gcall
*call
, ao_ref
*ref
)
2778 /* If the call is pure or const it cannot clobber anything. */
2779 if (gimple_call_flags (call
)
2780 & (ECF_PURE
|ECF_CONST
|ECF_LOOPING_CONST_OR_PURE
|ECF_NOVOPS
))
2782 if (gimple_call_internal_p (call
))
2783 switch (gimple_call_internal_fn (call
))
2785 /* Treat these internal calls like ECF_PURE for aliasing,
2786 they don't write to any memory the program should care about.
2787 They have important other side-effects, and read memory,
2788 so can't be ECF_NOVOPS. */
2789 case IFN_UBSAN_NULL
:
2790 case IFN_UBSAN_BOUNDS
:
2791 case IFN_UBSAN_VPTR
:
2792 case IFN_UBSAN_OBJECT_SIZE
:
2794 case IFN_ASAN_CHECK
:
2800 base
= ao_ref_base (ref
);
2804 if (TREE_CODE (base
) == SSA_NAME
2805 || CONSTANT_CLASS_P (base
))
2808 /* A call that is not without side-effects might involve volatile
2809 accesses and thus conflicts with all other volatile accesses. */
2810 if (ref
->volatile_p
)
2813 /* If the reference is based on a decl that is not aliased the call
2814 cannot possibly clobber it. */
2816 && !may_be_aliased (base
)
2817 /* But local non-readonly statics can be modified through recursion
2818 or the call may implement a threading barrier which we must
2819 treat as may-def. */
2820 && (TREE_READONLY (base
)
2821 || !is_global_var (base
)))
2824 /* If the reference is based on a pointer that points to memory
2825 that may not be written to then the call cannot possibly clobber it. */
2826 if ((TREE_CODE (base
) == MEM_REF
2827 || TREE_CODE (base
) == TARGET_MEM_REF
)
2828 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
2829 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base
, 0)))
2832 callee
= gimple_call_fndecl (call
);
2834 /* Handle those builtin functions explicitly that do not act as
2835 escape points. See tree-ssa-structalias.c:find_func_aliases
2836 for the list of builtins we might need to handle here. */
2837 if (callee
!= NULL_TREE
2838 && gimple_call_builtin_p (call
, BUILT_IN_NORMAL
))
2839 switch (DECL_FUNCTION_CODE (callee
))
2841 /* All the following functions clobber memory pointed to by
2842 their first argument. */
2843 case BUILT_IN_STRCPY
:
2844 case BUILT_IN_STRNCPY
:
2845 case BUILT_IN_MEMCPY
:
2846 case BUILT_IN_MEMMOVE
:
2847 case BUILT_IN_MEMPCPY
:
2848 case BUILT_IN_STPCPY
:
2849 case BUILT_IN_STPNCPY
:
2850 case BUILT_IN_STRCAT
:
2851 case BUILT_IN_STRNCAT
:
2852 case BUILT_IN_MEMSET
:
2853 case BUILT_IN_TM_MEMSET
:
2854 CASE_BUILT_IN_TM_STORE (1):
2855 CASE_BUILT_IN_TM_STORE (2):
2856 CASE_BUILT_IN_TM_STORE (4):
2857 CASE_BUILT_IN_TM_STORE (8):
2858 CASE_BUILT_IN_TM_STORE (FLOAT
):
2859 CASE_BUILT_IN_TM_STORE (DOUBLE
):
2860 CASE_BUILT_IN_TM_STORE (LDOUBLE
):
2861 CASE_BUILT_IN_TM_STORE (M64
):
2862 CASE_BUILT_IN_TM_STORE (M128
):
2863 CASE_BUILT_IN_TM_STORE (M256
):
2864 case BUILT_IN_TM_MEMCPY
:
2865 case BUILT_IN_TM_MEMMOVE
:
2868 tree size
= NULL_TREE
;
2869 /* Don't pass in size for strncat, as the maximum size
2870 is strlen (dest) + n + 1 instead of n, resp.
2871 n + 1 at dest + strlen (dest), but strlen (dest) isn't
2873 if (gimple_call_num_args (call
) == 3
2874 && DECL_FUNCTION_CODE (callee
) != BUILT_IN_STRNCAT
)
2875 size
= gimple_call_arg (call
, 2);
2876 ao_ref_init_from_ptr_and_size (&dref
,
2877 gimple_call_arg (call
, 0),
2879 return refs_may_alias_p_1 (&dref
, ref
, false);
2881 case BUILT_IN_STRCPY_CHK
:
2882 case BUILT_IN_STRNCPY_CHK
:
2883 case BUILT_IN_MEMCPY_CHK
:
2884 case BUILT_IN_MEMMOVE_CHK
:
2885 case BUILT_IN_MEMPCPY_CHK
:
2886 case BUILT_IN_STPCPY_CHK
:
2887 case BUILT_IN_STPNCPY_CHK
:
2888 case BUILT_IN_STRCAT_CHK
:
2889 case BUILT_IN_STRNCAT_CHK
:
2890 case BUILT_IN_MEMSET_CHK
:
2893 tree size
= NULL_TREE
;
2894 /* Don't pass in size for __strncat_chk, as the maximum size
2895 is strlen (dest) + n + 1 instead of n, resp.
2896 n + 1 at dest + strlen (dest), but strlen (dest) isn't
2898 if (gimple_call_num_args (call
) == 4
2899 && DECL_FUNCTION_CODE (callee
) != BUILT_IN_STRNCAT_CHK
)
2900 size
= gimple_call_arg (call
, 2);
2901 ao_ref_init_from_ptr_and_size (&dref
,
2902 gimple_call_arg (call
, 0),
2904 return refs_may_alias_p_1 (&dref
, ref
, false);
2906 case BUILT_IN_BCOPY
:
2909 tree size
= gimple_call_arg (call
, 2);
2910 ao_ref_init_from_ptr_and_size (&dref
,
2911 gimple_call_arg (call
, 1),
2913 return refs_may_alias_p_1 (&dref
, ref
, false);
2915 /* Allocating memory does not have any side-effects apart from
2916 being the definition point for the pointer. */
2917 case BUILT_IN_MALLOC
:
2918 case BUILT_IN_ALIGNED_ALLOC
:
2919 case BUILT_IN_CALLOC
:
2920 case BUILT_IN_STRDUP
:
2921 case BUILT_IN_STRNDUP
:
2922 /* Unix98 specifies that errno is set on allocation failure. */
2924 && targetm
.ref_may_alias_errno (ref
))
2927 case BUILT_IN_STACK_SAVE
:
2928 CASE_BUILT_IN_ALLOCA
:
2929 case BUILT_IN_ASSUME_ALIGNED
:
2931 /* But posix_memalign stores a pointer into the memory pointed to
2932 by its first argument. */
2933 case BUILT_IN_POSIX_MEMALIGN
:
2935 tree ptrptr
= gimple_call_arg (call
, 0);
2937 ao_ref_init_from_ptr_and_size (&dref
, ptrptr
,
2938 TYPE_SIZE_UNIT (ptr_type_node
));
2939 return (refs_may_alias_p_1 (&dref
, ref
, false)
2941 && targetm
.ref_may_alias_errno (ref
)));
2943 /* Freeing memory kills the pointed-to memory. More importantly
2944 the call has to serve as a barrier for moving loads and stores
2947 case BUILT_IN_VA_END
:
2949 tree ptr
= gimple_call_arg (call
, 0);
2950 return ptr_deref_may_alias_ref_p_1 (ptr
, ref
);
2952 /* Realloc serves both as allocation point and deallocation point. */
2953 case BUILT_IN_REALLOC
:
2955 tree ptr
= gimple_call_arg (call
, 0);
2956 /* Unix98 specifies that errno is set on allocation failure. */
2957 return ((flag_errno_math
2958 && targetm
.ref_may_alias_errno (ref
))
2959 || ptr_deref_may_alias_ref_p_1 (ptr
, ref
));
2961 case BUILT_IN_GAMMA_R
:
2962 case BUILT_IN_GAMMAF_R
:
2963 case BUILT_IN_GAMMAL_R
:
2964 case BUILT_IN_LGAMMA_R
:
2965 case BUILT_IN_LGAMMAF_R
:
2966 case BUILT_IN_LGAMMAL_R
:
2968 tree out
= gimple_call_arg (call
, 1);
2969 if (ptr_deref_may_alias_ref_p_1 (out
, ref
))
2971 if (flag_errno_math
)
2975 case BUILT_IN_FREXP
:
2976 case BUILT_IN_FREXPF
:
2977 case BUILT_IN_FREXPL
:
2979 case BUILT_IN_MODFF
:
2980 case BUILT_IN_MODFL
:
2982 tree out
= gimple_call_arg (call
, 1);
2983 return ptr_deref_may_alias_ref_p_1 (out
, ref
);
2985 case BUILT_IN_REMQUO
:
2986 case BUILT_IN_REMQUOF
:
2987 case BUILT_IN_REMQUOL
:
2989 tree out
= gimple_call_arg (call
, 2);
2990 if (ptr_deref_may_alias_ref_p_1 (out
, ref
))
2992 if (flag_errno_math
)
2996 case BUILT_IN_SINCOS
:
2997 case BUILT_IN_SINCOSF
:
2998 case BUILT_IN_SINCOSL
:
3000 tree sin
= gimple_call_arg (call
, 1);
3001 tree cos
= gimple_call_arg (call
, 2);
3002 return (ptr_deref_may_alias_ref_p_1 (sin
, ref
)
3003 || ptr_deref_may_alias_ref_p_1 (cos
, ref
));
3005 /* __sync_* builtins and some OpenMP builtins act as threading
3007 #undef DEF_SYNC_BUILTIN
3008 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
3009 #include "sync-builtins.def"
3010 #undef DEF_SYNC_BUILTIN
3011 case BUILT_IN_GOMP_ATOMIC_START
:
3012 case BUILT_IN_GOMP_ATOMIC_END
:
3013 case BUILT_IN_GOMP_BARRIER
:
3014 case BUILT_IN_GOMP_BARRIER_CANCEL
:
3015 case BUILT_IN_GOMP_TASKWAIT
:
3016 case BUILT_IN_GOMP_TASKGROUP_END
:
3017 case BUILT_IN_GOMP_CRITICAL_START
:
3018 case BUILT_IN_GOMP_CRITICAL_END
:
3019 case BUILT_IN_GOMP_CRITICAL_NAME_START
:
3020 case BUILT_IN_GOMP_CRITICAL_NAME_END
:
3021 case BUILT_IN_GOMP_LOOP_END
:
3022 case BUILT_IN_GOMP_LOOP_END_CANCEL
:
3023 case BUILT_IN_GOMP_ORDERED_START
:
3024 case BUILT_IN_GOMP_ORDERED_END
:
3025 case BUILT_IN_GOMP_SECTIONS_END
:
3026 case BUILT_IN_GOMP_SECTIONS_END_CANCEL
:
3027 case BUILT_IN_GOMP_SINGLE_COPY_START
:
3028 case BUILT_IN_GOMP_SINGLE_COPY_END
:
3031 /* Fallthru to general call handling. */;
3034 /* Check if base is a global static variable that is not written
3036 if (callee
!= NULL_TREE
&& VAR_P (base
) && TREE_STATIC (base
))
3038 struct cgraph_node
*node
= cgraph_node::get (callee
);
3043 && (id
= ipa_reference_var_uid (base
)) != -1
3044 && (written
= ipa_reference_get_written_global (node
))
3045 && !bitmap_bit_p (written
, id
))
3049 /* Check if the base variable is call-clobbered. */
3051 return pt_solution_includes (gimple_call_clobber_set (call
), base
);
3052 else if ((TREE_CODE (base
) == MEM_REF
3053 || TREE_CODE (base
) == TARGET_MEM_REF
)
3054 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
3056 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0));
3060 return pt_solutions_intersect (gimple_call_clobber_set (call
), &pi
->pt
);
3066 /* If the call in statement CALL may clobber the memory reference REF
3067 return true, otherwise return false. */
3070 call_may_clobber_ref_p (gcall
*call
, tree ref
)
3074 ao_ref_init (&r
, ref
);
3075 res
= call_may_clobber_ref_p_1 (call
, &r
);
3077 ++alias_stats
.call_may_clobber_ref_p_may_alias
;
3079 ++alias_stats
.call_may_clobber_ref_p_no_alias
;
3084 /* If the statement STMT may clobber the memory reference REF return true,
3085 otherwise return false. */
3088 stmt_may_clobber_ref_p_1 (gimple
*stmt
, ao_ref
*ref
, bool tbaa_p
)
3090 if (is_gimple_call (stmt
))
3092 tree lhs
= gimple_call_lhs (stmt
);
3094 && TREE_CODE (lhs
) != SSA_NAME
)
3097 ao_ref_init (&r
, lhs
);
3098 if (refs_may_alias_p_1 (ref
, &r
, tbaa_p
))
3102 return call_may_clobber_ref_p_1 (as_a
<gcall
*> (stmt
), ref
);
3104 else if (gimple_assign_single_p (stmt
))
3106 tree lhs
= gimple_assign_lhs (stmt
);
3107 if (TREE_CODE (lhs
) != SSA_NAME
)
3110 ao_ref_init (&r
, lhs
);
3111 return refs_may_alias_p_1 (ref
, &r
, tbaa_p
);
3114 else if (gimple_code (stmt
) == GIMPLE_ASM
)
3121 stmt_may_clobber_ref_p (gimple
*stmt
, tree ref
, bool tbaa_p
)
3124 ao_ref_init (&r
, ref
);
3125 return stmt_may_clobber_ref_p_1 (stmt
, &r
, tbaa_p
);
3128 /* Return true if store1 and store2 described by corresponding tuples
3129 <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
3133 same_addr_size_stores_p (tree base1
, poly_int64 offset1
, poly_int64 size1
,
3134 poly_int64 max_size1
,
3135 tree base2
, poly_int64 offset2
, poly_int64 size2
,
3136 poly_int64 max_size2
)
3138 /* Offsets need to be 0. */
3139 if (maybe_ne (offset1
, 0)
3140 || maybe_ne (offset2
, 0))
3143 bool base1_obj_p
= SSA_VAR_P (base1
);
3144 bool base2_obj_p
= SSA_VAR_P (base2
);
3146 /* We need one object. */
3147 if (base1_obj_p
== base2_obj_p
)
3149 tree obj
= base1_obj_p
? base1
: base2
;
3151 /* And we need one MEM_REF. */
3152 bool base1_memref_p
= TREE_CODE (base1
) == MEM_REF
;
3153 bool base2_memref_p
= TREE_CODE (base2
) == MEM_REF
;
3154 if (base1_memref_p
== base2_memref_p
)
3156 tree memref
= base1_memref_p
? base1
: base2
;
3158 /* Sizes need to be valid. */
3159 if (!known_size_p (max_size1
)
3160 || !known_size_p (max_size2
)
3161 || !known_size_p (size1
)
3162 || !known_size_p (size2
))
3165 /* Max_size needs to match size. */
3166 if (maybe_ne (max_size1
, size1
)
3167 || maybe_ne (max_size2
, size2
))
3170 /* Sizes need to match. */
3171 if (maybe_ne (size1
, size2
))
3175 /* Check that memref is a store to pointer with singleton points-to info. */
3176 if (!integer_zerop (TREE_OPERAND (memref
, 1)))
3178 tree ptr
= TREE_OPERAND (memref
, 0);
3179 if (TREE_CODE (ptr
) != SSA_NAME
)
3181 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (ptr
);
3182 unsigned int pt_uid
;
3184 || !pt_solution_singleton_or_null_p (&pi
->pt
, &pt_uid
))
3187 /* Be conservative with non-call exceptions when the address might
3189 if (cfun
->can_throw_non_call_exceptions
&& pi
->pt
.null
)
3192 /* Check that ptr points relative to obj. */
3193 unsigned int obj_uid
= DECL_PT_UID (obj
);
3194 if (obj_uid
!= pt_uid
)
3197 /* Check that the object size is the same as the store size. That ensures us
3198 that ptr points to the start of obj. */
3199 return (DECL_SIZE (obj
)
3200 && poly_int_tree_p (DECL_SIZE (obj
))
3201 && known_eq (wi::to_poly_offset (DECL_SIZE (obj
)), size1
));
3204 /* If STMT kills the memory reference REF return true, otherwise
3208 stmt_kills_ref_p (gimple
*stmt
, ao_ref
*ref
)
3210 if (!ao_ref_base (ref
))
3213 if (gimple_has_lhs (stmt
)
3214 && TREE_CODE (gimple_get_lhs (stmt
)) != SSA_NAME
3215 /* The assignment is not necessarily carried out if it can throw
3216 and we can catch it in the current function where we could inspect
3218 ??? We only need to care about the RHS throwing. For aggregate
3219 assignments or similar calls and non-call exceptions the LHS
3220 might throw as well. */
3221 && !stmt_can_throw_internal (cfun
, stmt
))
3223 tree lhs
= gimple_get_lhs (stmt
);
3224 /* If LHS is literally a base of the access we are done. */
3227 tree base
= ref
->ref
;
3228 tree innermost_dropped_array_ref
= NULL_TREE
;
3229 if (handled_component_p (base
))
3231 tree saved_lhs0
= NULL_TREE
;
3232 if (handled_component_p (lhs
))
3234 saved_lhs0
= TREE_OPERAND (lhs
, 0);
3235 TREE_OPERAND (lhs
, 0) = integer_zero_node
;
3239 /* Just compare the outermost handled component, if
3240 they are equal we have found a possible common
3242 tree saved_base0
= TREE_OPERAND (base
, 0);
3243 TREE_OPERAND (base
, 0) = integer_zero_node
;
3244 bool res
= operand_equal_p (lhs
, base
, 0);
3245 TREE_OPERAND (base
, 0) = saved_base0
;
3248 /* Remember if we drop an array-ref that we need to
3249 double-check not being at struct end. */
3250 if (TREE_CODE (base
) == ARRAY_REF
3251 || TREE_CODE (base
) == ARRAY_RANGE_REF
)
3252 innermost_dropped_array_ref
= base
;
3253 /* Otherwise drop handled components of the access. */
3256 while (handled_component_p (base
));
3258 TREE_OPERAND (lhs
, 0) = saved_lhs0
;
3260 /* Finally check if the lhs has the same address and size as the
3261 base candidate of the access. Watch out if we have dropped
3262 an array-ref that was at struct end, this means ref->ref may
3263 be outside of the TYPE_SIZE of its base. */
3264 if ((! innermost_dropped_array_ref
3265 || ! array_at_struct_end_p (innermost_dropped_array_ref
))
3267 || (((TYPE_SIZE (TREE_TYPE (lhs
))
3268 == TYPE_SIZE (TREE_TYPE (base
)))
3269 || (TYPE_SIZE (TREE_TYPE (lhs
))
3270 && TYPE_SIZE (TREE_TYPE (base
))
3271 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs
)),
3272 TYPE_SIZE (TREE_TYPE (base
)),
3274 && operand_equal_p (lhs
, base
,
3276 | OEP_MATCH_SIDE_EFFECTS
))))
3280 /* Now look for non-literal equal bases with the restriction of
3281 handling constant offset and size. */
3282 /* For a must-alias check we need to be able to constrain
3283 the access properly. */
3284 if (!ref
->max_size_known_p ())
3286 poly_int64 size
, offset
, max_size
, ref_offset
= ref
->offset
;
3288 tree base
= get_ref_base_and_extent (lhs
, &offset
, &size
, &max_size
,
3290 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
3291 so base == ref->base does not always hold. */
3292 if (base
!= ref
->base
)
3294 /* Try using points-to info. */
3295 if (same_addr_size_stores_p (base
, offset
, size
, max_size
, ref
->base
,
3296 ref
->offset
, ref
->size
, ref
->max_size
))
3299 /* If both base and ref->base are MEM_REFs, only compare the
3300 first operand, and if the second operand isn't equal constant,
3301 try to add the offsets into offset and ref_offset. */
3302 if (TREE_CODE (base
) == MEM_REF
&& TREE_CODE (ref
->base
) == MEM_REF
3303 && TREE_OPERAND (base
, 0) == TREE_OPERAND (ref
->base
, 0))
3305 if (!tree_int_cst_equal (TREE_OPERAND (base
, 1),
3306 TREE_OPERAND (ref
->base
, 1)))
3308 poly_offset_int off1
= mem_ref_offset (base
);
3309 off1
<<= LOG2_BITS_PER_UNIT
;
3311 poly_offset_int off2
= mem_ref_offset (ref
->base
);
3312 off2
<<= LOG2_BITS_PER_UNIT
;
3314 if (!off1
.to_shwi (&offset
) || !off2
.to_shwi (&ref_offset
))
3321 /* For a must-alias check we need to be able to constrain
3322 the access properly. */
3323 if (known_eq (size
, max_size
)
3324 && known_subrange_p (ref_offset
, ref
->max_size
, offset
, size
))
3328 if (is_gimple_call (stmt
))
3330 tree callee
= gimple_call_fndecl (stmt
);
3331 if (callee
!= NULL_TREE
3332 && gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
3333 switch (DECL_FUNCTION_CODE (callee
))
3337 tree ptr
= gimple_call_arg (stmt
, 0);
3338 tree base
= ao_ref_base (ref
);
3339 if (base
&& TREE_CODE (base
) == MEM_REF
3340 && TREE_OPERAND (base
, 0) == ptr
)
3345 case BUILT_IN_MEMCPY
:
3346 case BUILT_IN_MEMPCPY
:
3347 case BUILT_IN_MEMMOVE
:
3348 case BUILT_IN_MEMSET
:
3349 case BUILT_IN_MEMCPY_CHK
:
3350 case BUILT_IN_MEMPCPY_CHK
:
3351 case BUILT_IN_MEMMOVE_CHK
:
3352 case BUILT_IN_MEMSET_CHK
:
3353 case BUILT_IN_STRNCPY
:
3354 case BUILT_IN_STPNCPY
:
3355 case BUILT_IN_CALLOC
:
3357 /* For a must-alias check we need to be able to constrain
3358 the access properly. */
3359 if (!ref
->max_size_known_p ())
3364 /* In execution order a calloc call will never kill
3365 anything. However, DSE will (ab)use this interface
3366 to ask if a calloc call writes the same memory locations
3367 as a later assignment, memset, etc. So handle calloc
3368 in the expected way. */
3369 if (DECL_FUNCTION_CODE (callee
) == BUILT_IN_CALLOC
)
3371 tree arg0
= gimple_call_arg (stmt
, 0);
3372 tree arg1
= gimple_call_arg (stmt
, 1);
3373 if (TREE_CODE (arg0
) != INTEGER_CST
3374 || TREE_CODE (arg1
) != INTEGER_CST
)
3377 dest
= gimple_call_lhs (stmt
);
3380 len
= fold_build2 (MULT_EXPR
, TREE_TYPE (arg0
), arg0
, arg1
);
3384 dest
= gimple_call_arg (stmt
, 0);
3385 len
= gimple_call_arg (stmt
, 2);
3387 if (!poly_int_tree_p (len
))
3389 tree rbase
= ref
->base
;
3390 poly_offset_int roffset
= ref
->offset
;
3392 ao_ref_init_from_ptr_and_size (&dref
, dest
, len
);
3393 tree base
= ao_ref_base (&dref
);
3394 poly_offset_int offset
= dref
.offset
;
3395 if (!base
|| !known_size_p (dref
.size
))
3397 if (TREE_CODE (base
) == MEM_REF
)
3399 if (TREE_CODE (rbase
) != MEM_REF
)
3401 // Compare pointers.
3402 offset
+= mem_ref_offset (base
) << LOG2_BITS_PER_UNIT
;
3403 roffset
+= mem_ref_offset (rbase
) << LOG2_BITS_PER_UNIT
;
3404 base
= TREE_OPERAND (base
, 0);
3405 rbase
= TREE_OPERAND (rbase
, 0);
3408 && known_subrange_p (roffset
, ref
->max_size
, offset
,
3409 wi::to_poly_offset (len
)
3410 << LOG2_BITS_PER_UNIT
))
3415 case BUILT_IN_VA_END
:
3417 tree ptr
= gimple_call_arg (stmt
, 0);
3418 if (TREE_CODE (ptr
) == ADDR_EXPR
)
3420 tree base
= ao_ref_base (ref
);
3421 if (TREE_OPERAND (ptr
, 0) == base
)
3434 stmt_kills_ref_p (gimple
*stmt
, tree ref
)
3437 ao_ref_init (&r
, ref
);
3438 return stmt_kills_ref_p (stmt
, &r
);
3442 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
3443 TARGET or a statement clobbering the memory reference REF in which
3444 case false is returned. The walk starts with VUSE, one argument of PHI. */
3447 maybe_skip_until (gimple
*phi
, tree
&target
, basic_block target_bb
,
3448 ao_ref
*ref
, tree vuse
, bool tbaa_p
, unsigned int &limit
,
3449 bitmap
*visited
, bool abort_on_visited
,
3450 void *(*translate
)(ao_ref
*, tree
, void *, translate_flags
*),
3451 translate_flags disambiguate_only
,
3454 basic_block bb
= gimple_bb (phi
);
3457 *visited
= BITMAP_ALLOC (NULL
);
3459 bitmap_set_bit (*visited
, SSA_NAME_VERSION (PHI_RESULT (phi
)));
3461 /* Walk until we hit the target. */
3462 while (vuse
!= target
)
3464 gimple
*def_stmt
= SSA_NAME_DEF_STMT (vuse
);
3465 /* If we are searching for the target VUSE by walking up to
3466 TARGET_BB dominating the original PHI we are finished once
3467 we reach a default def or a definition in a block dominating
3468 that block. Update TARGET and return. */
3470 && (gimple_nop_p (def_stmt
)
3471 || dominated_by_p (CDI_DOMINATORS
,
3472 target_bb
, gimple_bb (def_stmt
))))
3478 /* Recurse for PHI nodes. */
3479 if (gimple_code (def_stmt
) == GIMPLE_PHI
)
3481 /* An already visited PHI node ends the walk successfully. */
3482 if (bitmap_bit_p (*visited
, SSA_NAME_VERSION (PHI_RESULT (def_stmt
))))
3483 return !abort_on_visited
;
3484 vuse
= get_continuation_for_phi (def_stmt
, ref
, tbaa_p
, limit
,
3485 visited
, abort_on_visited
,
3486 translate
, data
, disambiguate_only
);
3491 else if (gimple_nop_p (def_stmt
))
3495 /* A clobbering statement or the end of the IL ends it failing. */
3496 if ((int)limit
<= 0)
3499 if (stmt_may_clobber_ref_p_1 (def_stmt
, ref
, tbaa_p
))
3501 translate_flags tf
= disambiguate_only
;
3503 && (*translate
) (ref
, vuse
, data
, &tf
) == NULL
)
3509 /* If we reach a new basic-block see if we already skipped it
3510 in a previous walk that ended successfully. */
3511 if (gimple_bb (def_stmt
) != bb
)
3513 if (!bitmap_set_bit (*visited
, SSA_NAME_VERSION (vuse
)))
3514 return !abort_on_visited
;
3515 bb
= gimple_bb (def_stmt
);
3517 vuse
= gimple_vuse (def_stmt
);
3523 /* Starting from a PHI node for the virtual operand of the memory reference
3524 REF find a continuation virtual operand that allows to continue walking
3525 statements dominating PHI skipping only statements that cannot possibly
3526 clobber REF. Decrements LIMIT for each alias disambiguation done
3527 and aborts the walk, returning NULL_TREE if it reaches zero.
3528 Returns NULL_TREE if no suitable virtual operand can be found. */
3531 get_continuation_for_phi (gimple
*phi
, ao_ref
*ref
, bool tbaa_p
,
3532 unsigned int &limit
, bitmap
*visited
,
3533 bool abort_on_visited
,
3534 void *(*translate
)(ao_ref
*, tree
, void *,
3537 translate_flags disambiguate_only
)
3539 unsigned nargs
= gimple_phi_num_args (phi
);
3541 /* Through a single-argument PHI we can simply look through. */
3543 return PHI_ARG_DEF (phi
, 0);
3545 /* For two or more arguments try to pairwise skip non-aliasing code
3546 until we hit the phi argument definition that dominates the other one. */
3547 basic_block phi_bb
= gimple_bb (phi
);
3551 /* Find a candidate for the virtual operand which definition
3552 dominates those of all others. */
3553 /* First look if any of the args themselves satisfy this. */
3554 for (i
= 0; i
< nargs
; ++i
)
3556 arg0
= PHI_ARG_DEF (phi
, i
);
3557 if (SSA_NAME_IS_DEFAULT_DEF (arg0
))
3559 basic_block def_bb
= gimple_bb (SSA_NAME_DEF_STMT (arg0
));
3560 if (def_bb
!= phi_bb
3561 && dominated_by_p (CDI_DOMINATORS
, phi_bb
, def_bb
))
3565 /* If not, look if we can reach such candidate by walking defs
3566 until we hit the immediate dominator. maybe_skip_until will
3568 basic_block dom
= get_immediate_dominator (CDI_DOMINATORS
, phi_bb
);
3570 /* Then check against the (to be) found candidate. */
3571 for (i
= 0; i
< nargs
; ++i
)
3573 arg1
= PHI_ARG_DEF (phi
, i
);
3576 else if (! maybe_skip_until (phi
, arg0
, dom
, ref
, arg1
, tbaa_p
,
3580 /* Do not valueize when walking over
3584 gimple_bb (SSA_NAME_DEF_STMT (arg1
)),
3587 : disambiguate_only
, data
))
3594 /* Based on the memory reference REF and its virtual use VUSE call
3595 WALKER for each virtual use that is equivalent to VUSE, including VUSE
3596 itself. That is, for each virtual use for which its defining statement
3597 does not clobber REF.
3599 WALKER is called with REF, the current virtual use and DATA. If
3600 WALKER returns non-NULL the walk stops and its result is returned.
3601 At the end of a non-successful walk NULL is returned.
3603 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
3604 use which definition is a statement that may clobber REF and DATA.
3605 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3606 If TRANSLATE returns non-NULL the walk stops and its result is returned.
3607 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3608 to adjust REF and *DATA to make that valid.
3610 VALUEIZE if non-NULL is called with the next VUSE that is considered
3611 and return value is substituted for that. This can be used to
3612 implement optimistic value-numbering for example. Note that the
3613 VUSE argument is assumed to be valueized already.
3615 LIMIT specifies the number of alias queries we are allowed to do,
3616 the walk stops when it reaches zero and NULL is returned. LIMIT
3617 is decremented by the number of alias queries (plus adjustments
3618 done by the callbacks) upon return.
3620 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
3623 walk_non_aliased_vuses (ao_ref
*ref
, tree vuse
, bool tbaa_p
,
3624 void *(*walker
)(ao_ref
*, tree
, void *),
3625 void *(*translate
)(ao_ref
*, tree
, void *,
3627 tree (*valueize
)(tree
),
3628 unsigned &limit
, void *data
)
3630 bitmap visited
= NULL
;
3632 bool translated
= false;
3634 timevar_push (TV_ALIAS_STMT_WALK
);
3640 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3641 res
= (*walker
) (ref
, vuse
, data
);
3643 if (res
== (void *)-1)
3648 /* Lookup succeeded. */
3649 else if (res
!= NULL
)
3654 vuse
= valueize (vuse
);
3661 def_stmt
= SSA_NAME_DEF_STMT (vuse
);
3662 if (gimple_nop_p (def_stmt
))
3664 else if (gimple_code (def_stmt
) == GIMPLE_PHI
)
3665 vuse
= get_continuation_for_phi (def_stmt
, ref
, tbaa_p
, limit
,
3666 &visited
, translated
, translate
, data
);
3669 if ((int)limit
<= 0)
3675 if (stmt_may_clobber_ref_p_1 (def_stmt
, ref
, tbaa_p
))
3679 translate_flags disambiguate_only
= TR_TRANSLATE
;
3680 res
= (*translate
) (ref
, vuse
, data
, &disambiguate_only
);
3681 /* Failed lookup and translation. */
3682 if (res
== (void *)-1)
3687 /* Lookup succeeded. */
3688 else if (res
!= NULL
)
3690 /* Translation succeeded, continue walking. */
3691 translated
= translated
|| disambiguate_only
== TR_TRANSLATE
;
3693 vuse
= gimple_vuse (def_stmt
);
3699 BITMAP_FREE (visited
);
3701 timevar_pop (TV_ALIAS_STMT_WALK
);
3707 /* Based on the memory reference REF call WALKER for each vdef which
3708 defining statement may clobber REF, starting with VDEF. If REF
3709 is NULL_TREE, each defining statement is visited.
3711 WALKER is called with REF, the current vdef and DATA. If WALKER
3712 returns true the walk is stopped, otherwise it continues.
3714 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
3715 The pointer may be NULL and then we do not track this information.
3717 At PHI nodes walk_aliased_vdefs forks into one walk for reach
3718 PHI argument (but only one walk continues on merge points), the
3719 return value is true if any of the walks was successful.
3721 The function returns the number of statements walked or -1 if
3722 LIMIT stmts were walked and the walk was aborted at this point.
3723 If LIMIT is zero the walk is not aborted. */
3726 walk_aliased_vdefs_1 (ao_ref
*ref
, tree vdef
,
3727 bool (*walker
)(ao_ref
*, tree
, void *), void *data
,
3728 bitmap
*visited
, unsigned int cnt
,
3729 bool *function_entry_reached
, unsigned limit
)
3733 gimple
*def_stmt
= SSA_NAME_DEF_STMT (vdef
);
3736 && !bitmap_set_bit (*visited
, SSA_NAME_VERSION (vdef
)))
3739 if (gimple_nop_p (def_stmt
))
3741 if (function_entry_reached
)
3742 *function_entry_reached
= true;
3745 else if (gimple_code (def_stmt
) == GIMPLE_PHI
)
3749 *visited
= BITMAP_ALLOC (NULL
);
3750 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); ++i
)
3752 int res
= walk_aliased_vdefs_1 (ref
,
3753 gimple_phi_arg_def (def_stmt
, i
),
3754 walker
, data
, visited
, cnt
,
3755 function_entry_reached
, limit
);
3763 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3768 || stmt_may_clobber_ref_p_1 (def_stmt
, ref
))
3769 && (*walker
) (ref
, vdef
, data
))
3772 vdef
= gimple_vuse (def_stmt
);
3778 walk_aliased_vdefs (ao_ref
*ref
, tree vdef
,
3779 bool (*walker
)(ao_ref
*, tree
, void *), void *data
,
3781 bool *function_entry_reached
, unsigned int limit
)
3783 bitmap local_visited
= NULL
;
3786 timevar_push (TV_ALIAS_STMT_WALK
);
3788 if (function_entry_reached
)
3789 *function_entry_reached
= false;
3791 ret
= walk_aliased_vdefs_1 (ref
, vdef
, walker
, data
,
3792 visited
? visited
: &local_visited
, 0,
3793 function_entry_reached
, limit
);
3795 BITMAP_FREE (local_visited
);
3797 timevar_pop (TV_ALIAS_STMT_WALK
);