/* Alias analysis for trees.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "langhooks.h"
#include "dumpfile.h"
#include "tree-eh.h"
#include "tree-dfa.h"
#include "ipa-reference.h"
#include "varasm.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
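
/* Illustrative usage sketch; SOME_STMT and LHS_TREE are placeholder
   names, not part of the API.  A typical client asks the oracle
   directly, e.g.

     if (!stmt_may_clobber_ref_p (some_stmt, lhs_tree))
       ...   and knows SOME_STMT cannot invalidate the memory
	     designated by LHS_TREE,

   or builds an ao_ref via ao_ref_init below and uses the lower-level
   predicates taking ao_ref arguments.  */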

static int nonoverlapping_component_refs_since_match_p (tree, tree, tree, tree);
static bool nonoverlapping_component_refs_p (const_tree, const_tree);

/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_since_match_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_since_match_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  fprintf (s, "  nonoverlapping_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_component_refs_p_no_alias,
	   alias_stats.nonoverlapping_component_refs_p_no_alias
	   + alias_stats.nonoverlapping_component_refs_p_may_alias);
  fprintf (s, "  nonoverlapping_component_refs_since_match_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_component_refs_since_match_p_no_alias,
	   alias_stats.nonoverlapping_component_refs_since_match_p_no_alias
	   + alias_stats.nonoverlapping_component_refs_since_match_p_may_alias);
  fprintf (s, "  aliasing_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.aliasing_component_refs_p_no_alias,
	   alias_stats.aliasing_component_refs_p_no_alias
	   + alias_stats.aliasing_component_refs_p_may_alias);
  dump_alias_stats_in_alias_c (s);
}


/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two identical
     pointers.  In this case we still want to say both pointers alias,
     so shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to an SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which need points-to adjustments to track them
     in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that the object pointed to
	 by a restrict-qualified pointer may in fact be obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */

  return false;
}
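
/* Example for the above (illustrative): with

     static int x;  int *p;

   the comparison p != &x folds to true when the points-to set of P
   provably excludes X, X binds to the current definition and its
   address is known to be nonzero; restrict or interposable
   points-to solutions make us give up.  */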

/* Returns whether a reference based on BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage, do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}


/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}


/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  || pt->vars_contains_escaped
	  || pt->vars_contains_escaped_heap
	  || pt->vars_contains_restrict)
	{
	  const char *comma = "";
	  fprintf (file, " (");
	  if (pt->vars_contains_nonlocal)
	    {
	      fprintf (file, "nonlocal");
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped)
	    {
	      fprintf (file, "%sescaped", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped_heap)
	    {
	      fprintf (file, "%sescaped heap", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_restrict)
	    {
	      fprintf (file, "%srestrict", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_interposable)
	    fprintf (file, "%sinterposable", comma);
	  fprintf (file, ")");
	}
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}


/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}
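
/* Minimal usage sketch: the base/offset/size fields of *R are filled
   lazily, so a typical sequence is

     ao_ref r;
     ao_ref_init (&r, ref_tree);	(REF_TREE is any memory reference)
     tree base = ao_ref_base (&r);	(also computes offset and sizes)

   before handing R to the disambiguators below.  */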

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
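
/* Illustrative use: for a call like memset (p, 0, n) a caller can
   build the reference from the call arguments (a sketch, not taken
   verbatim from any particular caller):

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref,
				    gimple_call_arg (call, 0),
				    gimple_call_arg (call, 2));

   If the size argument is not a known constant, the reference
   degrades to an unknown-extent access starting at the pointer
   target.  */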

/* S1 and S2 are TYPE_SIZE or DECL_SIZE.  Compare them:
   Return -1 if S1 < S2
   Return 1 if S1 > S2
   Return 0 if equal or incomparable.  */

static int
compare_sizes (tree s1, tree s2)
{
  if (!s1 || !s2)
    return 0;

  poly_uint64 size1;
  poly_uint64 size2;

  if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
    return 0;
  if (known_lt (size1, size2))
    return -1;
  if (known_lt (size2, size1))
    return 1;
  return 0;
}

/* Compare TYPE1 and TYPE2 by their sizes.
   Return -1 if size of TYPE1 < size of TYPE2
   Return 1 if size of TYPE1 > size of TYPE2
   Return 0 if types are of equal sizes or we cannot compare them.  */

static int
compare_type_sizes (tree type1, tree type2)
{
  /* Be conservative for arrays and vectors.  We want to support partial
     overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c.  */
  while (TREE_CODE (type1) == ARRAY_TYPE
	 || TREE_CODE (type1) == VECTOR_TYPE)
    type1 = TREE_TYPE (type1);
  while (TREE_CODE (type2) == ARRAY_TYPE
	 || TREE_CODE (type2) == VECTOR_TYPE)
    type2 = TREE_TYPE (type2);
  return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
}
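
/* For example, comparing int[3] against int strips the array layer
   and compares int with int, returning 0.  Likewise int[3] against
   int[7] compares as equal-sized, which is what keeps the access-path
   walks below conservative about partial array overlaps.  */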

/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* Handle the most common case first.  */
  if (type1 == type2)
    return 1;

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}

/* Return true if TYPE is a composite type (i.e. one of the handled
   component references may be applied to it).  */

static bool
type_has_components_p (tree type)
{
  return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
	 || TREE_CODE (type) == COMPLEX_TYPE;
}

/* MATCH1 and MATCH2, which are part of the access paths of REF1 and REF2
   respectively, either point to the same address or are completely
   disjoint.

   Try to disambiguate using the access paths starting from the matches
   and return false if there is no conflict.

   Helper for aliasing_component_refs_p.  */

static bool
aliasing_matching_component_refs_p (tree match1, tree ref1,
				    poly_int64 offset1, poly_int64 max_size1,
				    tree match2, tree ref2,
				    poly_int64 offset2, poly_int64 max_size2)
{
  poly_int64 offadj, sztmp, msztmp;
  bool reverse;

  get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
  offset2 -= offadj;
  get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
  offset1 -= offadj;
  if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
    {
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }

  int cmp = nonoverlapping_component_refs_since_match_p (match1, ref1,
							 match2, ref2);
  if (cmp == 1
      || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
    {
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }
  ++alias_stats.aliasing_component_refs_p_may_alias;
  return true;
}

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   poly_int64 offset2, poly_int64 max_size2)
{
  /* If one reference is a component reference through pointers, try to
     find a common base and apply offset-based disambiguation.  This
     handles for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  int same_p1 = 0, same_p2 = 0;
  bool maybe_match = false;
  tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    {
      /* Generally, access paths are monotone in the size of the object.
	 The exception are trailing arrays of structures.  I.e.
	   struct a {int array[0];};
	 or
	   struct a {int array1[0]; int array[];};
	 Such a struct has size 0 but accesses to a.array may have non-zero
	 size.  In this case the size of TREE_TYPE (base1) is smaller than
	 the size of TREE_TYPE (TREE_OPERAND (base1, 0)).

	 Because we compare sizes of arrays just by sizes of their elements,
	 we only need to care about zero sized array fields here.  */
      if (TREE_CODE (base1) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (base1, 1))) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base1, 1)))
	      || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base1, 1)))))
	  && array_at_struct_end_p (base1))
	{
	  gcc_checking_assert (!end_struct_ref1);
	  end_struct_ref1 = base1;
	}
      if (TREE_CODE (base1) == VIEW_CONVERT_EXPR
	  || TREE_CODE (base1) == BIT_FIELD_REF)
	ref1 = TREE_OPERAND (base1, 0);
      base1 = TREE_OPERAND (base1, 0);
    }
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    {
      if (TREE_CODE (base2) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (base2, 1))) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base2, 1)))
	      || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base2, 1)))))
	  && array_at_struct_end_p (base2))
	{
	  gcc_checking_assert (!end_struct_ref2);
	  end_struct_ref2 = base2;
	}
      if (TREE_CODE (base2) == VIEW_CONVERT_EXPR
	  || TREE_CODE (base2) == BIT_FIELD_REF)
	ref2 = TREE_OPERAND (base2, 0);
      base2 = TREE_OPERAND (base2, 0);
    }
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.
     This however only makes sense if type2 is big enough to hold type1.  */
  int cmp_outer = compare_type_sizes (type2, type1);

  /* If type2 is big enough to contain type1 walk its access path.
     We also need to care of arrays at the end of structs that may extend
     beyond the end of structure.  */
  if (cmp_outer >= 0
      || (end_struct_ref2
	  && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
    {
      tree ref = ref2;
      while (true)
	{
	  /* We walk from inner type to the outer types.  If the type we
	     see is already too large to be part of type1, terminate the
	     search.  */
	  int cmp = compare_type_sizes (type1, TREE_TYPE (ref));

	  if (cmp < 0
	      && (!end_struct_ref1
		  || compare_type_sizes (TREE_TYPE (end_struct_ref1),
					 TREE_TYPE (ref)) < 0))
	    break;
	  /* If types may be of same size, see if we can decide about their
	     equality.  */
	  if (cmp == 0)
	    {
	      same_p2 = same_type_for_tbaa (TREE_TYPE (ref), type1);
	      if (same_p2 == 1)
		break;
	      /* In case we can't decide whether types are same, try to
		 continue looking for the exact match.
		 Remember however that we possibly saw a match
		 to bypass the access path continuation tests we do later.  */
	      if (same_p2 == -1)
		maybe_match = true;
	    }
	  if (!handled_component_p (ref))
	    break;
	  ref = TREE_OPERAND (ref, 0);
	}
      if (same_p2 == 1)
	{
	  /* We assume that arrays can overlap by multiple of their elements
	     size as tested in gcc.dg/torture/alias-2.c.
	     This partial overlap happens only when both arrays are bases of
	     the access and not contained within another component ref.
	     To be safe we also assume partial overlap for VLAs.  */
	  if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
	      && (!TYPE_SIZE (TREE_TYPE (base1))
		  || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
		  || ref == base2))
	    /* Setting maybe_match to true triggers
	       nonoverlapping_component_refs_p test later that still may do
	       useful disambiguation.  */
	    maybe_match = true;
	  else
	    return aliasing_matching_component_refs_p (base1, ref1,
						       offset1, max_size1,
						       ref, ref2,
						       offset2, max_size2);
	}
    }

  /* If we didn't find a common base, try the other way around.  */
  if (cmp_outer <= 0
      || (end_struct_ref1
	  && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0))
    {
      tree ref = ref1;
      while (true)
	{
	  int cmp = compare_type_sizes (type2, TREE_TYPE (ref));
	  if (cmp < 0
	      && (!end_struct_ref2
		  || compare_type_sizes (TREE_TYPE (end_struct_ref2),
					 TREE_TYPE (ref)) < 0))
	    break;
	  /* If types may be of same size, see if we can decide about their
	     equality.  */
	  if (cmp == 0)
	    {
	      same_p1 = same_type_for_tbaa (TREE_TYPE (ref), type2);
	      if (same_p1 == 1)
		break;
	      if (same_p1 == -1)
		maybe_match = true;
	    }
	  if (!handled_component_p (ref))
	    break;
	  ref = TREE_OPERAND (ref, 0);
	}
      if (same_p1 == 1)
	{
	  if (TREE_CODE (TREE_TYPE (base2)) == ARRAY_TYPE
	      && (!TYPE_SIZE (TREE_TYPE (base2))
		  || TREE_CODE (TYPE_SIZE (TREE_TYPE (base2))) != INTEGER_CST
		  || ref == base1))
	    maybe_match = true;
	  else
	    return aliasing_matching_component_refs_p (ref, ref1,
						       offset1, max_size1,
						       base2, ref2,
						       offset2, max_size2);
	}
    }

  /* In the following code we make an assumption that the types in access
     paths do not overlap and thus accesses alias only if one path can be
     a continuation of another.  If we were not able to decide about
     equivalence, we need to give up.  */
  if (maybe_match)
    {
      if (!nonoverlapping_component_refs_p (ref1, ref2))
	{
	  ++alias_stats.aliasing_component_refs_p_may_alias;
	  return true;
	}
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (compare_type_sizes (TREE_TYPE (ref2), type1) >= 0
      && (!end_struct_ref1
	  || compare_type_sizes (TREE_TYPE (ref2),
				 TREE_TYPE (end_struct_ref1)) >= 0)
      && type_has_components_p (TREE_TYPE (ref2))
      && (base1_alias_set == ref2_alias_set
	  || alias_set_subset_of (base1_alias_set, ref2_alias_set)))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (compare_type_sizes (TREE_TYPE (ref1), type2) >= 0
      && (!end_struct_ref2
	  || compare_type_sizes (TREE_TYPE (ref1),
				 TREE_TYPE (end_struct_ref2)) >= 0)
      && type_has_components_p (TREE_TYPE (ref1))
      && (base2_alias_set == ref1_alias_set
	  || alias_set_subset_of (base2_alias_set, ref1_alias_set)))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  ++alias_stats.aliasing_component_refs_p_no_alias;
  return false;
}

/* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
   MATCH2 either point to the same address or are disjoint.
   MATCH1 and MATCH2 are assumed to be refs in the access paths of REF1 and
   REF2 respectively, or NULL in the case we established equivalence of
   bases.

   This test works by matching the initial segment of the access path
   and does not rely on TBAA.  It is thus safe for !flag_strict_aliasing
   if the match was determined without use of the TBAA oracle.

   Return 1 if we can determine that the component references REF1 and
   REF2, which are within a common DECL, cannot overlap.

   Return 0 if the paths are the same and thus there is nothing more to
   disambiguate (i.e. the references must-alias, assuming MATCH1 and
   MATCH2 must-alias).

   Return -1 if we cannot determine 0 or 1; this happens when non-matching
   types are met on the path.
   In this case it may make sense to continue with other disambiguation
   oracles.  */
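
/* For instance (illustrative), given struct S { int f; int g; } s;
   the accesses s.f and s.g with MATCH1 == MATCH2 == s yield 1,
   s.f against s.f yields 0, and paths meeting in a union yield -1.  */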

static int
nonoverlapping_component_refs_since_match_p (tree match1, tree ref1,
					     tree match2, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      if (TREE_CODE (ref1) == VIEW_CONVERT_EXPR
	  || TREE_CODE (ref1) == BIT_FIELD_REF)
	component_refs1.truncate (0);
      else
	component_refs1.safe_push (ref1);
      if (ref1 == match1)
	break;
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF && ref1 != match1)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	{
	  ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
	  return -1;
	}
    }
  /* TODO: Handle TARGET_MEM_REF later.  */
  if (TREE_CODE (ref1) == TARGET_MEM_REF && ref1 != match1)
    {
      ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
      return -1;
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      if (TREE_CODE (ref2) == VIEW_CONVERT_EXPR
	  || TREE_CODE (ref2) == BIT_FIELD_REF)
	component_refs2.truncate (0);
      else
	component_refs2.safe_push (ref2);
      if (ref2 == match2)
	break;
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF && ref2 != match2)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	{
	  ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
	  return -1;
	}
    }
  if (TREE_CODE (ref2) == TARGET_MEM_REF && ref2 != match2)
    {
      ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
      return -1;
    }

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case -1 is returned below.  */
  while (true)
    {
      do
	{
	  if (component_refs1.is_empty ())
	    {
	      ++alias_stats
		.nonoverlapping_component_refs_since_match_p_may_alias;
	      return 0;
	    }
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    {
	      ++alias_stats
		.nonoverlapping_component_refs_since_match_p_may_alias;
	      return 0;
	    }
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	{
	  ++alias_stats
	    .nonoverlapping_component_refs_since_match_p_may_alias;
	  return -1;
	}

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	{
	  ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
	  return -1;
	}

      if (field1 != field2)
	{
	  /* A field and its representative need to be considered the
	     same.  */
	  if (DECL_BIT_FIELD_REPRESENTATIVE (field1) == field2
	      || DECL_BIT_FIELD_REPRESENTATIVE (field2) == field1)
	    {
	      ++alias_stats
		.nonoverlapping_component_refs_since_match_p_may_alias;
	      return 0;
	    }
	  /* Different fields of the same record type cannot overlap.
	     ??? Bitfields can overlap at RTL level so punt on them.  */
	  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
	    {
	      ++alias_stats
		.nonoverlapping_component_refs_since_match_p_may_alias;
	      return 0;
	    }
	  ++alias_stats.nonoverlapping_component_refs_since_match_p_no_alias;
	  return 1;
	}
    }

  ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
  return 0;
}

/* qsort compare function to sort FIELD_DECLs by their
   DECL_FIELD_CONTEXT TYPE_UID.  */

static inline int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
  unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}

/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  This relies on TBAA.  */
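
/* For example (illustrative), given

     struct S { int f; int g; } *p, *q;

   the accesses p->f and q->g cannot overlap for any p and q under
   strict aliasing because both paths end in distinct fields of the
   same RECORD_TYPE.  With -fno-strict-aliasing this predicate always
   answers "may alias".  */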

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  if (!flag_strict_aliasing
      || !x || !y
      || !handled_component_p (x)
      || !handled_component_p (y))
    {
      ++alias_stats.nonoverlapping_component_refs_p_may_alias;
      return false;
    }

  auto_vec<const_tree, 16> fieldsx;
  while (handled_component_p (x))
    {
      if (TREE_CODE (x) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (x, 1);
	  tree type = DECL_FIELD_CONTEXT (field);
	  if (TREE_CODE (type) == RECORD_TYPE)
	    fieldsx.safe_push (field);
	}
      else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
	       || TREE_CODE (x) == BIT_FIELD_REF)
	fieldsx.truncate (0);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    return false;
  auto_vec<const_tree, 16> fieldsy;
  while (handled_component_p (y))
    {
      if (TREE_CODE (y) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (y, 1);
	  tree type = DECL_FIELD_CONTEXT (field);
	  if (TREE_CODE (type) == RECORD_TYPE)
	    fieldsy.safe_push (TREE_OPERAND (y, 1));
	}
      else if (TREE_CODE (y) == VIEW_CONVERT_EXPR
	       || TREE_CODE (y) == BIT_FIELD_REF)
	fieldsy.truncate (0);
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    {
      ++alias_stats.nonoverlapping_component_refs_p_may_alias;
      return false;
    }

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    {
      if ((DECL_FIELD_CONTEXT (fieldsx[0])
	   == DECL_FIELD_CONTEXT (fieldsy[0]))
	  && fieldsx[0] != fieldsy[0]
	  && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])))
	{
	  ++alias_stats.nonoverlapping_component_refs_p_no_alias;
	  return true;
	}
      else
	{
	  ++alias_stats.nonoverlapping_component_refs_p_may_alias;
	  return false;
	}
    }

  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	std::swap (fieldsx[0], fieldsx[1]);
    }
  else
    fieldsx.qsort (ncr_compar);

  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	std::swap (fieldsy[0], fieldsy[1]);
    }
  else
    fieldsy.qsort (ncr_compar);

  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];
      tree typex = DECL_FIELD_CONTEXT (fieldx);
      tree typey = DECL_FIELD_CONTEXT (fieldy);
      if (typex == typey)
	{
	  /* We're left with accessing different fields of a structure,
	     no possible overlap.  */
	  if (fieldx != fieldy)
	    {
	      /* A field and its representative need to be considered the
		 same.  */
	      if (DECL_BIT_FIELD_REPRESENTATIVE (fieldx) == fieldy
		  || DECL_BIT_FIELD_REPRESENTATIVE (fieldy) == fieldx)
		;
	      /* Different fields of the same record type cannot overlap.
		 ??? Bitfields can overlap at RTL level so punt on them.  */
	      else if (DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy))
		;
	      else
		{
		  ++alias_stats.nonoverlapping_component_refs_p_no_alias;
		  return true;
		}
	    }
	}
      if (TYPE_UID (typex) < TYPE_UID (typey))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  ++alias_stats.nonoverlapping_component_refs_p_may_alias;
  return false;
}


/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */
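
/* Decision sketch (illustrative): for two distinct decls A and B the
   accesses never alias; for two accesses to the same decl the offset
   ranges decide, with the component-path walk below used as a last
   resort for components at variable positions.  */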

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       poly_int64 offset1, poly_int64 max_size1,
		       poly_int64 size1,
		       tree ref2, tree base2,
		       poly_int64 offset2, poly_int64 max_size2,
		       poly_int64 size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot
     alias.  */
  if (compare_base_decls (base1, base2) == 0)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* If there is must alias, there is no use disambiguating further.  */
  if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
    return true;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_component_refs_since_match_p (NULL, ref1,
						      NULL, ref2) == 1)
    return false;

  return true;
}

/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */
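
/* Illustrative example: for an access *p against a plain decl X the
   points-to query ptr_deref_may_alias_decl_p below disambiguates when
   X is never address-taken or P's points-to set excludes X; under
   strict aliasing the TBAA checks additionally reject, say, a float
   dereference against an int decl.  */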

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       poly_int64 offset1, poly_int64 max_size1,
			       poly_int64 size1,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       poly_int64 offset2, poly_int64 max_size2,
			       poly_int64 size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);
  poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0 || base2_alias_set == 0)
    return true;

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl, we can't possibly access the
     decl via that pointer.  */
  if (/* ??? This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && compare_sizes (DECL_SIZE (base2),
			TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  poly_int64 doffset1 = offset1;
  poly_offset_int doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
      tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
      /* If second reference is view-converted, give up now.  */
      if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
	return true;
    }

  /* If first reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.

     But avoid treating variable length arrays as "objects", instead
     assume they can overlap by an exact multiple of their element size.
     See gcc.dg/torture/alias-2.c.  */
  if (((TREE_CODE (base1) != TARGET_MEM_REF
	|| (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
       && (TREE_CODE (dbase2) != TARGET_MEM_REF
	   || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1
      && (TREE_CODE (TREE_TYPE (base1)) != ARRAY_TYPE
	  || (TYPE_SIZE (TREE_TYPE (base1))
	      && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) == INTEGER_CST)))
    {
      if (!ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
	return false;
      if (!ref1 || !ref2
	  /* If there is must alias, there is no use disambiguating
	     further.  */
	  || (known_eq (size1, max_size1) && known_eq (size2, max_size2)))
	return true;
      int res = nonoverlapping_component_refs_since_match_p (base1, ref1,
							     base2, ref2);
      if (res == -1)
	return !nonoverlapping_component_refs_p (ref1, ref2);
      return !res;
    }

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2);

  return true;
}

/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */
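
/* Illustrative example: two dereferences of the same SSA pointer with
   disjoint constant MEM_REF offsets, say a 4-byte access at P + 0B
   and another at P + 4B, are disambiguated by the offset test at the
   top of the function; unrelated pointers fall back to the points-to
   query and then to TBAA.  */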
1642
1643 static bool
1644 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1645 poly_int64 offset1, poly_int64 max_size1,
1646 poly_int64 size1,
1647 alias_set_type ref1_alias_set,
1648 alias_set_type base1_alias_set,
1649 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1650 poly_int64 offset2, poly_int64 max_size2,
1651 poly_int64 size2,
1652 alias_set_type ref2_alias_set,
1653 alias_set_type base2_alias_set, bool tbaa_p)
1654 {
1655 tree ptr1;
1656 tree ptr2;
1657 tree ptrtype1, ptrtype2;
1658
1659 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1660 || TREE_CODE (base1) == TARGET_MEM_REF)
1661 && (TREE_CODE (base2) == MEM_REF
1662 || TREE_CODE (base2) == TARGET_MEM_REF));
1663
1664 ptr1 = TREE_OPERAND (base1, 0);
1665 ptr2 = TREE_OPERAND (base2, 0);
1666
1667 /* If both bases are based on pointers they cannot alias if they may not
1668 point to the same memory object or if they point to the same object
1669 and the accesses do not overlap. */
1670 if ((!cfun || gimple_in_ssa_p (cfun))
1671 && operand_equal_p (ptr1, ptr2, 0)
1672 && (((TREE_CODE (base1) != TARGET_MEM_REF
1673 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1674 && (TREE_CODE (base2) != TARGET_MEM_REF
1675 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
1676 || (TREE_CODE (base1) == TARGET_MEM_REF
1677 && TREE_CODE (base2) == TARGET_MEM_REF
1678 && (TMR_STEP (base1) == TMR_STEP (base2)
1679 || (TMR_STEP (base1) && TMR_STEP (base2)
1680 && operand_equal_p (TMR_STEP (base1),
1681 TMR_STEP (base2), 0)))
1682 && (TMR_INDEX (base1) == TMR_INDEX (base2)
1683 || (TMR_INDEX (base1) && TMR_INDEX (base2)
1684 && operand_equal_p (TMR_INDEX (base1),
1685 TMR_INDEX (base2), 0)))
1686 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
1687 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
1688 && operand_equal_p (TMR_INDEX2 (base1),
1689 TMR_INDEX2 (base2), 0))))))
1690 {
1691 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
1692 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
1693 if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
1694 offset2 + moff2, max_size2))
1695 return false;
1696 /* If there is must alias, there is no use disambiguating further. */
1697 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
1698 return true;
1699 if (ref1 && ref2)
1700 {
1701 int res = nonoverlapping_component_refs_since_match_p (NULL, ref1,
1702 NULL, ref2);
1703 if (res != -1)
1704 return !res;
1705 }
1706 }
1707 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
1708 return false;
1709
1710 /* Disambiguations that rely on strict aliasing rules follow. */
1711 if (!flag_strict_aliasing || !tbaa_p)
1712 return true;
1713
1714 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1715 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
1716
1717 /* If the alias set for a pointer access is zero all bets are off. */
1718 if (base1_alias_set == 0
1719 || base2_alias_set == 0)
1720 return true;
1721
1722 /* Do type-based disambiguation. */
1723 if (base1_alias_set != base2_alias_set
1724 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1725 return false;
1726
1727 /* If either reference is view-converted, give up now. */
1728 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
1729 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
1730 return true;
1731
1732 /* If both references are through the same type, they do not alias
1733 if the accesses do not overlap. This does extra disambiguation
1734 for mixed/pointer accesses but requires strict aliasing. */
1735 if ((TREE_CODE (base1) != TARGET_MEM_REF
1736 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1737 && (TREE_CODE (base2) != TARGET_MEM_REF
1738 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
1739 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
1740 TREE_TYPE (ptrtype2)) == 1
1741 /* But avoid treating arrays as "objects"; instead assume they
1742 can overlap by an exact multiple of their element size.
1743 See gcc.dg/torture/alias-2.c. */
1744 && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
1745 {
1746 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1747 return false;
1748 if (!ref1 || !ref2
1749 || (known_eq (size1, max_size1) && known_eq (size2, max_size2)))
1750 return true;
1751 int res = nonoverlapping_component_refs_since_match_p (base1, ref1,
1752 base2, ref2);
1753 if (res == -1)
1754 return !nonoverlapping_component_refs_p (ref1, ref2);
1755 return !res;
1756 }
1757
1758 /* Do access-path based disambiguation. */
1759 if (ref1 && ref2
1760 && (handled_component_p (ref1) || handled_component_p (ref2)))
1761 return aliasing_component_refs_p (ref1,
1762 ref1_alias_set, base1_alias_set,
1763 offset1, max_size1,
1764 ref2,
1765 ref2_alias_set, base2_alias_set,
1766 offset2, max_size2);
1767
1768 return true;
1769 }
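
/* Editor's note: an illustrative, source-level sketch (not from the GCC
   sources) of the kind of accesses the TBAA logic above disambiguates.
   With -fstrict-aliasing the two stores below have non-conflicting alias
   sets, so indirect_refs_may_alias_p can report no-alias even though
   nothing is known about the pointer values:

     void f (int *ip, float *fp)
     {
       *ip = 1;        // access through int
       *fp = 2.0f;     // access through float, a disjoint alias set
     }

   Arrays are deliberately excepted (see the ARRAY_TYPE check above)
   because two same-typed arrays may overlap at an exact multiple of
   their element size.  */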
1770
1771 /* Return true if the two memory references REF1 and REF2 may alias. */
1772
1773 static bool
1774 refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1775 {
1776 tree base1, base2;
1777 poly_int64 offset1 = 0, offset2 = 0;
1778 poly_int64 max_size1 = -1, max_size2 = -1;
1779 bool var1_p, var2_p, ind1_p, ind2_p;
1780
1781 gcc_checking_assert ((!ref1->ref
1782 || TREE_CODE (ref1->ref) == SSA_NAME
1783 || DECL_P (ref1->ref)
1784 || TREE_CODE (ref1->ref) == STRING_CST
1785 || handled_component_p (ref1->ref)
1786 || TREE_CODE (ref1->ref) == MEM_REF
1787 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
1788 && (!ref2->ref
1789 || TREE_CODE (ref2->ref) == SSA_NAME
1790 || DECL_P (ref2->ref)
1791 || TREE_CODE (ref2->ref) == STRING_CST
1792 || handled_component_p (ref2->ref)
1793 || TREE_CODE (ref2->ref) == MEM_REF
1794 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
1795
1796 /* Decompose the references into their base objects and the access. */
1797 base1 = ao_ref_base (ref1);
1798 offset1 = ref1->offset;
1799 max_size1 = ref1->max_size;
1800 base2 = ao_ref_base (ref2);
1801 offset2 = ref2->offset;
1802 max_size2 = ref2->max_size;
1803
1804 /* We can end up with registers or constants as bases for example from
1805 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
1806 which is seen as a struct copy. */
1807 if (TREE_CODE (base1) == SSA_NAME
1808 || TREE_CODE (base1) == CONST_DECL
1809 || TREE_CODE (base1) == CONSTRUCTOR
1810 || TREE_CODE (base1) == ADDR_EXPR
1811 || CONSTANT_CLASS_P (base1)
1812 || TREE_CODE (base2) == SSA_NAME
1813 || TREE_CODE (base2) == CONST_DECL
1814 || TREE_CODE (base2) == CONSTRUCTOR
1815 || TREE_CODE (base2) == ADDR_EXPR
1816 || CONSTANT_CLASS_P (base2))
1817 return false;
1818
1819 /* We can end up referring to code via function and label decls.
1820 As we likely do not properly track code aliases, conservatively
1821 bail out. */
1822 if (TREE_CODE (base1) == FUNCTION_DECL
1823 || TREE_CODE (base1) == LABEL_DECL
1824 || TREE_CODE (base2) == FUNCTION_DECL
1825 || TREE_CODE (base2) == LABEL_DECL)
1826 return true;
1827
1828 /* Two volatile accesses always conflict. */
1829 if (ref1->volatile_p
1830 && ref2->volatile_p)
1831 return true;
1832
1833 /* Defer to simple offset-based disambiguation if we have
1834 references based on two decls. Do this before deferring to
1835 TBAA to handle must-alias cases in conformance with the
1836 GCC extension of allowing type-punning through unions. */
1837 var1_p = DECL_P (base1);
1838 var2_p = DECL_P (base2);
1839 if (var1_p && var2_p)
1840 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
1841 ref1->size,
1842 ref2->ref, base2, offset2, max_size2,
1843 ref2->size);
1844
1845 /* Handle restrict based accesses.
1846 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
1847 here. */
1848 tree rbase1 = base1;
1849 tree rbase2 = base2;
1850 if (var1_p)
1851 {
1852 rbase1 = ref1->ref;
1853 if (rbase1)
1854 while (handled_component_p (rbase1))
1855 rbase1 = TREE_OPERAND (rbase1, 0);
1856 }
1857 if (var2_p)
1858 {
1859 rbase2 = ref2->ref;
1860 if (rbase2)
1861 while (handled_component_p (rbase2))
1862 rbase2 = TREE_OPERAND (rbase2, 0);
1863 }
1864 if (rbase1 && rbase2
1865 && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
1866 && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
1867 /* If the accesses are in the same restrict clique... */
1868 && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
1869 /* But based on different pointers they do not alias. */
1870 && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
1871 return false;
1872
1873 ind1_p = (TREE_CODE (base1) == MEM_REF
1874 || TREE_CODE (base1) == TARGET_MEM_REF);
1875 ind2_p = (TREE_CODE (base2) == MEM_REF
1876 || TREE_CODE (base2) == TARGET_MEM_REF);
1877
1878 /* Canonicalize the pointer-vs-decl case. */
1879 if (ind1_p && var2_p)
1880 {
1881 std::swap (offset1, offset2);
1882 std::swap (max_size1, max_size2);
1883 std::swap (base1, base2);
1884 std::swap (ref1, ref2);
1885 var1_p = true;
1886 ind1_p = false;
1887 var2_p = false;
1888 ind2_p = true;
1889 }
1890
1891 /* First defer to TBAA if possible. */
1892 if (tbaa_p
1893 && flag_strict_aliasing
1894 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
1895 ao_ref_alias_set (ref2)))
1896 return false;
1897
1898 /* If the reference is based on a pointer that points to memory
1899 that may not be written to then the other reference cannot possibly
1900 clobber it. */
1901 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
1902 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
1903 || (ind1_p
1904 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
1905 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
1906 return false;
1907
1908 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
1909 if (var1_p && ind2_p)
1910 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
1911 offset2, max_size2, ref2->size,
1912 ao_ref_alias_set (ref2),
1913 ao_ref_base_alias_set (ref2),
1914 ref1->ref, base1,
1915 offset1, max_size1, ref1->size,
1916 ao_ref_alias_set (ref1),
1917 ao_ref_base_alias_set (ref1),
1918 tbaa_p);
1919 else if (ind1_p && ind2_p)
1920 return indirect_refs_may_alias_p (ref1->ref, base1,
1921 offset1, max_size1, ref1->size,
1922 ao_ref_alias_set (ref1),
1923 ao_ref_base_alias_set (ref1),
1924 ref2->ref, base2,
1925 offset2, max_size2, ref2->size,
1926 ao_ref_alias_set (ref2),
1927 ao_ref_base_alias_set (ref2),
1928 tbaa_p);
1929
1930 gcc_unreachable ();
1931 }
1932
1933 /* Return true if the two memory references REF1 and REF2 may alias
1934 and update statistics. */
1935
1936 bool
1937 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1938 {
1939 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
1940 if (res)
1941 ++alias_stats.refs_may_alias_p_may_alias;
1942 else
1943 ++alias_stats.refs_may_alias_p_no_alias;
1944 return res;
1945 }
1946
1947 static bool
1948 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
1949 {
1950 ao_ref r1;
1951 ao_ref_init (&r1, ref1);
1952 return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
1953 }
1954
1955 bool
1956 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
1957 {
1958 ao_ref r1, r2;
1959 ao_ref_init (&r1, ref1);
1960 ao_ref_init (&r2, ref2);
1961 return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
1962 }
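
/* Editor's sketch (illustrative, not part of GCC): a minimal use of the
   refs_may_alias_p overload above.  STMT1 and STMT2 are assumed to be
   gimple assignments whose left-hand sides reference memory.  */

static bool
example_stores_may_conflict (gimple *stmt1, gimple *stmt2)
{
  tree lhs1 = gimple_assign_lhs (stmt1);
  tree lhs2 = gimple_assign_lhs (stmt2);
  /* Pass tbaa_p = true to use type-based disambiguation; pass false
     when the answer must hold independent of strict-aliasing rules.  */
  return refs_may_alias_p (lhs1, lhs2, true);
}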
1963
1964 /* Returns true if there is an anti-dependence for the STORE that
1965 executes after the LOAD. */
1966
1967 bool
1968 refs_anti_dependent_p (tree load, tree store)
1969 {
1970 ao_ref r1, r2;
1971 ao_ref_init (&r1, load);
1972 ao_ref_init (&r2, store);
1973 return refs_may_alias_p_1 (&r1, &r2, false);
1974 }
1975
1976 /* Returns true if there is an output dependence for the stores
1977 STORE1 and STORE2. */
1978
1979 bool
1980 refs_output_dependent_p (tree store1, tree store2)
1981 {
1982 ao_ref r1, r2;
1983 ao_ref_init (&r1, store1);
1984 ao_ref_init (&r2, store2);
1985 return refs_may_alias_p_1 (&r1, &r2, false);
1986 }
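
/* Editor's note: both wrappers above pass tbaa_p = false, presumably
   because a store may change the dynamic type of the accessed memory,
   so type-based rules cannot be used to order two stores or a store
   against an earlier load.  An illustrative (hypothetical) code-motion
   check might read:

     // STORE_REF may be hoisted above LOAD_REF only if there is no
     // anti-dependence between the two references.
     if (!refs_anti_dependent_p (load_ref, store_ref))
       hoist_store_above_load ();   // hypothetical helper
*/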
1987
1988 /* If the call CALL may use the memory reference REF return true,
1989 otherwise return false. */
1990
1991 static bool
1992 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
1993 {
1994 tree base, callee;
1995 unsigned i;
1996 int flags = gimple_call_flags (call);
1997
1998 /* Const functions without a static chain do not implicitly use memory. */
1999 if (!gimple_call_chain (call)
2000 && (flags & (ECF_CONST|ECF_NOVOPS)))
2001 goto process_args;
2002
2003 base = ao_ref_base (ref);
2004 if (!base)
2005 return true;
2006
2007 /* A call that is not without side-effects might involve volatile
2008 accesses and thus conflicts with all other volatile accesses. */
2009 if (ref->volatile_p)
2010 return true;
2011
2012 /* If the reference is based on a decl that is not aliased the call
2013 cannot possibly use it. */
2014 if (DECL_P (base)
2015 && !may_be_aliased (base)
2016 /* But local statics can be used through recursion. */
2017 && !is_global_var (base))
2018 goto process_args;
2019
2020 callee = gimple_call_fndecl (call);
2021
2022 /* Handle those builtin functions explicitly that do not act as
2023 escape points. See tree-ssa-structalias.c:find_func_aliases
2024 for the list of builtins we might need to handle here. */
2025 if (callee != NULL_TREE
2026 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2027 switch (DECL_FUNCTION_CODE (callee))
2028 {
2029 /* All the following functions read memory pointed to by
2030 their second argument. strcat/strncat additionally
2031 read memory pointed to by the first argument. */
2032 case BUILT_IN_STRCAT:
2033 case BUILT_IN_STRNCAT:
2034 {
2035 ao_ref dref;
2036 ao_ref_init_from_ptr_and_size (&dref,
2037 gimple_call_arg (call, 0),
2038 NULL_TREE);
2039 if (refs_may_alias_p_1 (&dref, ref, false))
2040 return true;
2041 }
2042 /* FALLTHRU */
2043 case BUILT_IN_STRCPY:
2044 case BUILT_IN_STRNCPY:
2045 case BUILT_IN_MEMCPY:
2046 case BUILT_IN_MEMMOVE:
2047 case BUILT_IN_MEMPCPY:
2048 case BUILT_IN_STPCPY:
2049 case BUILT_IN_STPNCPY:
2050 case BUILT_IN_TM_MEMCPY:
2051 case BUILT_IN_TM_MEMMOVE:
2052 {
2053 ao_ref dref;
2054 tree size = NULL_TREE;
2055 if (gimple_call_num_args (call) == 3)
2056 size = gimple_call_arg (call, 2);
2057 ao_ref_init_from_ptr_and_size (&dref,
2058 gimple_call_arg (call, 1),
2059 size);
2060 return refs_may_alias_p_1 (&dref, ref, false);
2061 }
2062 case BUILT_IN_STRCAT_CHK:
2063 case BUILT_IN_STRNCAT_CHK:
2064 {
2065 ao_ref dref;
2066 ao_ref_init_from_ptr_and_size (&dref,
2067 gimple_call_arg (call, 0),
2068 NULL_TREE);
2069 if (refs_may_alias_p_1 (&dref, ref, false))
2070 return true;
2071 }
2072 /* FALLTHRU */
2073 case BUILT_IN_STRCPY_CHK:
2074 case BUILT_IN_STRNCPY_CHK:
2075 case BUILT_IN_MEMCPY_CHK:
2076 case BUILT_IN_MEMMOVE_CHK:
2077 case BUILT_IN_MEMPCPY_CHK:
2078 case BUILT_IN_STPCPY_CHK:
2079 case BUILT_IN_STPNCPY_CHK:
2080 {
2081 ao_ref dref;
2082 tree size = NULL_TREE;
2083 if (gimple_call_num_args (call) == 4)
2084 size = gimple_call_arg (call, 2);
2085 ao_ref_init_from_ptr_and_size (&dref,
2086 gimple_call_arg (call, 1),
2087 size);
2088 return refs_may_alias_p_1 (&dref, ref, false);
2089 }
2090 case BUILT_IN_BCOPY:
2091 {
2092 ao_ref dref;
2093 tree size = gimple_call_arg (call, 2);
2094 ao_ref_init_from_ptr_and_size (&dref,
2095 gimple_call_arg (call, 0),
2096 size);
2097 return refs_may_alias_p_1 (&dref, ref, false);
2098 }
2099
2100 /* The following functions read memory pointed to by their
2101 first argument. */
2102 CASE_BUILT_IN_TM_LOAD (1):
2103 CASE_BUILT_IN_TM_LOAD (2):
2104 CASE_BUILT_IN_TM_LOAD (4):
2105 CASE_BUILT_IN_TM_LOAD (8):
2106 CASE_BUILT_IN_TM_LOAD (FLOAT):
2107 CASE_BUILT_IN_TM_LOAD (DOUBLE):
2108 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
2109 CASE_BUILT_IN_TM_LOAD (M64):
2110 CASE_BUILT_IN_TM_LOAD (M128):
2111 CASE_BUILT_IN_TM_LOAD (M256):
2112 case BUILT_IN_TM_LOG:
2113 case BUILT_IN_TM_LOG_1:
2114 case BUILT_IN_TM_LOG_2:
2115 case BUILT_IN_TM_LOG_4:
2116 case BUILT_IN_TM_LOG_8:
2117 case BUILT_IN_TM_LOG_FLOAT:
2118 case BUILT_IN_TM_LOG_DOUBLE:
2119 case BUILT_IN_TM_LOG_LDOUBLE:
2120 case BUILT_IN_TM_LOG_M64:
2121 case BUILT_IN_TM_LOG_M128:
2122 case BUILT_IN_TM_LOG_M256:
2123 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
2124
2125 /* These read memory pointed to by the first argument. */
2126 case BUILT_IN_STRDUP:
2127 case BUILT_IN_STRNDUP:
2128 case BUILT_IN_REALLOC:
2129 {
2130 ao_ref dref;
2131 tree size = NULL_TREE;
2132 if (gimple_call_num_args (call) == 2)
2133 size = gimple_call_arg (call, 1);
2134 ao_ref_init_from_ptr_and_size (&dref,
2135 gimple_call_arg (call, 0),
2136 size);
2137 return refs_may_alias_p_1 (&dref, ref, false);
2138 }
2139 /* These read memory pointed to by the first argument. */
2140 case BUILT_IN_INDEX:
2141 case BUILT_IN_STRCHR:
2142 case BUILT_IN_STRRCHR:
2143 {
2144 ao_ref dref;
2145 ao_ref_init_from_ptr_and_size (&dref,
2146 gimple_call_arg (call, 0),
2147 NULL_TREE);
2148 return refs_may_alias_p_1 (&dref, ref, false);
2149 }
2150 /* These read memory pointed to by the first argument with size
2151 in the third argument. */
2152 case BUILT_IN_MEMCHR:
2153 {
2154 ao_ref dref;
2155 ao_ref_init_from_ptr_and_size (&dref,
2156 gimple_call_arg (call, 0),
2157 gimple_call_arg (call, 2));
2158 return refs_may_alias_p_1 (&dref, ref, false);
2159 }
2160 /* These read memory pointed to by the first and second arguments. */
2161 case BUILT_IN_STRSTR:
2162 case BUILT_IN_STRPBRK:
2163 {
2164 ao_ref dref;
2165 ao_ref_init_from_ptr_and_size (&dref,
2166 gimple_call_arg (call, 0),
2167 NULL_TREE);
2168 if (refs_may_alias_p_1 (&dref, ref, false))
2169 return true;
2170 ao_ref_init_from_ptr_and_size (&dref,
2171 gimple_call_arg (call, 1),
2172 NULL_TREE);
2173 return refs_may_alias_p_1 (&dref, ref, false);
2174 }
2175
2176 /* The following builtins do not read from memory. */
2177 case BUILT_IN_FREE:
2178 case BUILT_IN_MALLOC:
2179 case BUILT_IN_POSIX_MEMALIGN:
2180 case BUILT_IN_ALIGNED_ALLOC:
2181 case BUILT_IN_CALLOC:
2182 CASE_BUILT_IN_ALLOCA:
2183 case BUILT_IN_STACK_SAVE:
2184 case BUILT_IN_STACK_RESTORE:
2185 case BUILT_IN_MEMSET:
2186 case BUILT_IN_TM_MEMSET:
2187 case BUILT_IN_MEMSET_CHK:
2188 case BUILT_IN_FREXP:
2189 case BUILT_IN_FREXPF:
2190 case BUILT_IN_FREXPL:
2191 case BUILT_IN_GAMMA_R:
2192 case BUILT_IN_GAMMAF_R:
2193 case BUILT_IN_GAMMAL_R:
2194 case BUILT_IN_LGAMMA_R:
2195 case BUILT_IN_LGAMMAF_R:
2196 case BUILT_IN_LGAMMAL_R:
2197 case BUILT_IN_MODF:
2198 case BUILT_IN_MODFF:
2199 case BUILT_IN_MODFL:
2200 case BUILT_IN_REMQUO:
2201 case BUILT_IN_REMQUOF:
2202 case BUILT_IN_REMQUOL:
2203 case BUILT_IN_SINCOS:
2204 case BUILT_IN_SINCOSF:
2205 case BUILT_IN_SINCOSL:
2206 case BUILT_IN_ASSUME_ALIGNED:
2207 case BUILT_IN_VA_END:
2208 return false;
2209 /* __sync_* builtins and some OpenMP builtins act as threading
2210 barriers. */
2211 #undef DEF_SYNC_BUILTIN
2212 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2213 #include "sync-builtins.def"
2214 #undef DEF_SYNC_BUILTIN
2215 case BUILT_IN_GOMP_ATOMIC_START:
2216 case BUILT_IN_GOMP_ATOMIC_END:
2217 case BUILT_IN_GOMP_BARRIER:
2218 case BUILT_IN_GOMP_BARRIER_CANCEL:
2219 case BUILT_IN_GOMP_TASKWAIT:
2220 case BUILT_IN_GOMP_TASKGROUP_END:
2221 case BUILT_IN_GOMP_CRITICAL_START:
2222 case BUILT_IN_GOMP_CRITICAL_END:
2223 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2224 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2225 case BUILT_IN_GOMP_LOOP_END:
2226 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2227 case BUILT_IN_GOMP_ORDERED_START:
2228 case BUILT_IN_GOMP_ORDERED_END:
2229 case BUILT_IN_GOMP_SECTIONS_END:
2230 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2231 case BUILT_IN_GOMP_SINGLE_COPY_START:
2232 case BUILT_IN_GOMP_SINGLE_COPY_END:
2233 return true;
2234
2235 default:
2236 /* Fallthru to general call handling. */;
2237 }
2238
2239 /* Check if base is a global static variable that is not read
2240 by the function. */
2241 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2242 {
2243 struct cgraph_node *node = cgraph_node::get (callee);
2244 bitmap not_read;
2245
2246 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2247 node yet. We should enforce that there are nodes for all decls in the
2248 IL and remove this check instead. */
2249 if (node
2250 && (not_read = ipa_reference_get_not_read_global (node))
2251 && bitmap_bit_p (not_read, ipa_reference_var_uid (base)))
2252 goto process_args;
2253 }
2254
2255 /* Check if the base variable is call-used. */
2256 if (DECL_P (base))
2257 {
2258 if (pt_solution_includes (gimple_call_use_set (call), base))
2259 return true;
2260 }
2261 else if ((TREE_CODE (base) == MEM_REF
2262 || TREE_CODE (base) == TARGET_MEM_REF)
2263 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2264 {
2265 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2266 if (!pi)
2267 return true;
2268
2269 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
2270 return true;
2271 }
2272 else
2273 return true;
2274
2275 /* Inspect call arguments for passed-by-value aliases. */
2276 process_args:
2277 for (i = 0; i < gimple_call_num_args (call); ++i)
2278 {
2279 tree op = gimple_call_arg (call, i);
2280 int flags = gimple_call_arg_flags (call, i);
2281
2282 if (flags & EAF_UNUSED)
2283 continue;
2284
2285 if (TREE_CODE (op) == WITH_SIZE_EXPR)
2286 op = TREE_OPERAND (op, 0);
2287
2288 if (TREE_CODE (op) != SSA_NAME
2289 && !is_gimple_min_invariant (op))
2290 {
2291 ao_ref r;
2292 ao_ref_init (&r, op);
2293 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
2294 return true;
2295 }
2296 }
2297
2298 return false;
2299 }
2300
2301 static bool
2302 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2303 {
2304 bool res;
2305 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2306 if (res)
2307 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2308 else
2309 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2310 return res;
2311 }
2312
2313
2314 /* If the statement STMT may use the memory reference REF return
2315 true, otherwise return false. */
2316
2317 bool
2318 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2319 {
2320 if (is_gimple_assign (stmt))
2321 {
2322 tree rhs;
2323
2324 /* All assignments that reference memory are single assignments. */
2325 if (!gimple_assign_single_p (stmt))
2326 return false;
2327
2328 rhs = gimple_assign_rhs1 (stmt);
2329 if (is_gimple_reg (rhs)
2330 || is_gimple_min_invariant (rhs)
2331 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2332 return false;
2333
2334 return refs_may_alias_p (rhs, ref, tbaa_p);
2335 }
2336 else if (is_gimple_call (stmt))
2337 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2338 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2339 {
2340 tree retval = gimple_return_retval (return_stmt);
2341 if (retval
2342 && TREE_CODE (retval) != SSA_NAME
2343 && !is_gimple_min_invariant (retval)
2344 && refs_may_alias_p (retval, ref, tbaa_p))
2345 return true;
2346 /* If ref escapes the function then the return acts as a use. */
2347 tree base = ao_ref_base (ref);
2348 if (!base)
2349 ;
2350 else if (DECL_P (base))
2351 return is_global_var (base);
2352 else if (TREE_CODE (base) == MEM_REF
2353 || TREE_CODE (base) == TARGET_MEM_REF)
2354 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
2355 return false;
2356 }
2357
2358 return true;
2359 }
2360
2361 bool
2362 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
2363 {
2364 ao_ref r;
2365 ao_ref_init (&r, ref);
2366 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
2367 }
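
/* Editor's sketch (illustrative, not part of GCC): asking whether a
   statement may read the memory of a variable.  VAR is a plain decl,
   which is itself a valid memory reference tree.  */

static bool
example_stmt_may_read_var (gimple *stmt, tree var)
{
  /* tbaa_p = true: the query models a regular, type-correct access.  */
  return ref_maybe_used_by_stmt_p (stmt, var, true);
}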
2368
2369 /* If the call in statement CALL may clobber the memory reference REF
2370 return true, otherwise return false. */
2371
2372 bool
2373 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
2374 {
2375 tree base;
2376 tree callee;
2377
2378 /* If the call is pure or const it cannot clobber anything. */
2379 if (gimple_call_flags (call)
2380 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
2381 return false;
2382 if (gimple_call_internal_p (call))
2383 switch (gimple_call_internal_fn (call))
2384 {
2385 /* Treat these internal calls like ECF_PURE for aliasing;
2386 they don't write to any memory the program should care about.
2387 They have important other side-effects, and read memory,
2388 so can't be ECF_NOVOPS. */
2389 case IFN_UBSAN_NULL:
2390 case IFN_UBSAN_BOUNDS:
2391 case IFN_UBSAN_VPTR:
2392 case IFN_UBSAN_OBJECT_SIZE:
2393 case IFN_UBSAN_PTR:
2394 case IFN_ASAN_CHECK:
2395 return false;
2396 default:
2397 break;
2398 }
2399
2400 base = ao_ref_base (ref);
2401 if (!base)
2402 return true;
2403
2404 if (TREE_CODE (base) == SSA_NAME
2405 || CONSTANT_CLASS_P (base))
2406 return false;
2407
2408 /* A call that is not without side-effects might involve volatile
2409 accesses and thus conflicts with all other volatile accesses. */
2410 if (ref->volatile_p)
2411 return true;
2412
2413 /* If the reference is based on a decl that is not aliased the call
2414 cannot possibly clobber it. */
2415 if (DECL_P (base)
2416 && !may_be_aliased (base)
2417 /* But local non-readonly statics can be modified through recursion
2418 or the call may implement a threading barrier which we must
2419 treat as may-def. */
2420 && (TREE_READONLY (base)
2421 || !is_global_var (base)))
2422 return false;
2423
2424 /* If the reference is based on a pointer that points to memory
2425 that may not be written to then the call cannot possibly clobber it. */
2426 if ((TREE_CODE (base) == MEM_REF
2427 || TREE_CODE (base) == TARGET_MEM_REF)
2428 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
2429 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
2430 return false;
2431
2432 callee = gimple_call_fndecl (call);
2433
2434 /* Handle those builtin functions explicitly that do not act as
2435 escape points. See tree-ssa-structalias.c:find_func_aliases
2436 for the list of builtins we might need to handle here. */
2437 if (callee != NULL_TREE
2438 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2439 switch (DECL_FUNCTION_CODE (callee))
2440 {
2441 /* All the following functions clobber memory pointed to by
2442 their first argument. */
2443 case BUILT_IN_STRCPY:
2444 case BUILT_IN_STRNCPY:
2445 case BUILT_IN_MEMCPY:
2446 case BUILT_IN_MEMMOVE:
2447 case BUILT_IN_MEMPCPY:
2448 case BUILT_IN_STPCPY:
2449 case BUILT_IN_STPNCPY:
2450 case BUILT_IN_STRCAT:
2451 case BUILT_IN_STRNCAT:
2452 case BUILT_IN_MEMSET:
2453 case BUILT_IN_TM_MEMSET:
2454 CASE_BUILT_IN_TM_STORE (1):
2455 CASE_BUILT_IN_TM_STORE (2):
2456 CASE_BUILT_IN_TM_STORE (4):
2457 CASE_BUILT_IN_TM_STORE (8):
2458 CASE_BUILT_IN_TM_STORE (FLOAT):
2459 CASE_BUILT_IN_TM_STORE (DOUBLE):
2460 CASE_BUILT_IN_TM_STORE (LDOUBLE):
2461 CASE_BUILT_IN_TM_STORE (M64):
2462 CASE_BUILT_IN_TM_STORE (M128):
2463 CASE_BUILT_IN_TM_STORE (M256):
2464 case BUILT_IN_TM_MEMCPY:
2465 case BUILT_IN_TM_MEMMOVE:
2466 {
2467 ao_ref dref;
2468 tree size = NULL_TREE;
2469 /* Don't pass in a size for strncat, as the maximum size written
2470 is strlen (dest) + n + 1 bytes rather than n, resp.
2471 n + 1 bytes at dest + strlen (dest), but strlen (dest) isn't
2472 known. */
2473 if (gimple_call_num_args (call) == 3
2474 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
2475 size = gimple_call_arg (call, 2);
2476 ao_ref_init_from_ptr_and_size (&dref,
2477 gimple_call_arg (call, 0),
2478 size);
2479 return refs_may_alias_p_1 (&dref, ref, false);
2480 }
2481 case BUILT_IN_STRCPY_CHK:
2482 case BUILT_IN_STRNCPY_CHK:
2483 case BUILT_IN_MEMCPY_CHK:
2484 case BUILT_IN_MEMMOVE_CHK:
2485 case BUILT_IN_MEMPCPY_CHK:
2486 case BUILT_IN_STPCPY_CHK:
2487 case BUILT_IN_STPNCPY_CHK:
2488 case BUILT_IN_STRCAT_CHK:
2489 case BUILT_IN_STRNCAT_CHK:
2490 case BUILT_IN_MEMSET_CHK:
2491 {
2492 ao_ref dref;
2493 tree size = NULL_TREE;
2494 /* Don't pass in a size for __strncat_chk, as the maximum size written
2495 is strlen (dest) + n + 1 bytes rather than n, resp.
2496 n + 1 bytes at dest + strlen (dest), but strlen (dest) isn't
2497 known. */
2498 if (gimple_call_num_args (call) == 4
2499 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
2500 size = gimple_call_arg (call, 2);
2501 ao_ref_init_from_ptr_and_size (&dref,
2502 gimple_call_arg (call, 0),
2503 size);
2504 return refs_may_alias_p_1 (&dref, ref, false);
2505 }
2506 case BUILT_IN_BCOPY:
2507 {
2508 ao_ref dref;
2509 tree size = gimple_call_arg (call, 2);
2510 ao_ref_init_from_ptr_and_size (&dref,
2511 gimple_call_arg (call, 1),
2512 size);
2513 return refs_may_alias_p_1 (&dref, ref, false);
2514 }
2515 /* Allocating memory does not have any side-effects apart from
2516 being the definition point for the pointer. */
2517 case BUILT_IN_MALLOC:
2518 case BUILT_IN_ALIGNED_ALLOC:
2519 case BUILT_IN_CALLOC:
2520 case BUILT_IN_STRDUP:
2521 case BUILT_IN_STRNDUP:
2522 /* Unix98 specifies that errno is set on allocation failure. */
2523 if (flag_errno_math
2524 && targetm.ref_may_alias_errno (ref))
2525 return true;
2526 return false;
2527 case BUILT_IN_STACK_SAVE:
2528 CASE_BUILT_IN_ALLOCA:
2529 case BUILT_IN_ASSUME_ALIGNED:
2530 return false;
2531 /* But posix_memalign stores a pointer into the memory pointed to
2532 by its first argument. */
2533 case BUILT_IN_POSIX_MEMALIGN:
2534 {
2535 tree ptrptr = gimple_call_arg (call, 0);
2536 ao_ref dref;
2537 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2538 TYPE_SIZE_UNIT (ptr_type_node));
2539 return (refs_may_alias_p_1 (&dref, ref, false)
2540 || (flag_errno_math
2541 && targetm.ref_may_alias_errno (ref)));
2542 }
2543 /* Freeing memory kills the pointed-to memory. More importantly
2544 the call has to serve as a barrier for moving loads and stores
2545 across it. */
2546 case BUILT_IN_FREE:
2547 case BUILT_IN_VA_END:
2548 {
2549 tree ptr = gimple_call_arg (call, 0);
2550 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2551 }
2552 /* Realloc serves both as allocation point and deallocation point. */
2553 case BUILT_IN_REALLOC:
2554 {
2555 tree ptr = gimple_call_arg (call, 0);
2556 /* Unix98 specifies that errno is set on allocation failure. */
2557 return ((flag_errno_math
2558 && targetm.ref_may_alias_errno (ref))
2559 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2560 }
2561 case BUILT_IN_GAMMA_R:
2562 case BUILT_IN_GAMMAF_R:
2563 case BUILT_IN_GAMMAL_R:
2564 case BUILT_IN_LGAMMA_R:
2565 case BUILT_IN_LGAMMAF_R:
2566 case BUILT_IN_LGAMMAL_R:
2567 {
2568 tree out = gimple_call_arg (call, 1);
2569 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2570 return true;
2571 if (flag_errno_math)
2572 break;
2573 return false;
2574 }
2575 case BUILT_IN_FREXP:
2576 case BUILT_IN_FREXPF:
2577 case BUILT_IN_FREXPL:
2578 case BUILT_IN_MODF:
2579 case BUILT_IN_MODFF:
2580 case BUILT_IN_MODFL:
2581 {
2582 tree out = gimple_call_arg (call, 1);
2583 return ptr_deref_may_alias_ref_p_1 (out, ref);
2584 }
2585 case BUILT_IN_REMQUO:
2586 case BUILT_IN_REMQUOF:
2587 case BUILT_IN_REMQUOL:
2588 {
2589 tree out = gimple_call_arg (call, 2);
2590 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2591 return true;
2592 if (flag_errno_math)
2593 break;
2594 return false;
2595 }
2596 case BUILT_IN_SINCOS:
2597 case BUILT_IN_SINCOSF:
2598 case BUILT_IN_SINCOSL:
2599 {
2600 tree sin = gimple_call_arg (call, 1);
2601 tree cos = gimple_call_arg (call, 2);
2602 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2603 || ptr_deref_may_alias_ref_p_1 (cos, ref));
2604 }
2605 /* __sync_* builtins and some OpenMP builtins act as threading
2606 barriers. */
2607 #undef DEF_SYNC_BUILTIN
2608 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2609 #include "sync-builtins.def"
2610 #undef DEF_SYNC_BUILTIN
2611 case BUILT_IN_GOMP_ATOMIC_START:
2612 case BUILT_IN_GOMP_ATOMIC_END:
2613 case BUILT_IN_GOMP_BARRIER:
2614 case BUILT_IN_GOMP_BARRIER_CANCEL:
2615 case BUILT_IN_GOMP_TASKWAIT:
2616 case BUILT_IN_GOMP_TASKGROUP_END:
2617 case BUILT_IN_GOMP_CRITICAL_START:
2618 case BUILT_IN_GOMP_CRITICAL_END:
2619 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2620 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2621 case BUILT_IN_GOMP_LOOP_END:
2622 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2623 case BUILT_IN_GOMP_ORDERED_START:
2624 case BUILT_IN_GOMP_ORDERED_END:
2625 case BUILT_IN_GOMP_SECTIONS_END:
2626 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2627 case BUILT_IN_GOMP_SINGLE_COPY_START:
2628 case BUILT_IN_GOMP_SINGLE_COPY_END:
2629 return true;
2630 default:
2631 /* Fallthru to general call handling. */;
2632 }
2633
2634 /* Check if base is a global static variable that is not written
2635 by the function. */
2636 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2637 {
2638 struct cgraph_node *node = cgraph_node::get (callee);
2639 bitmap not_written;
2640
2641 if (node
2642 && (not_written = ipa_reference_get_not_written_global (node))
2643 && bitmap_bit_p (not_written, ipa_reference_var_uid (base)))
2644 return false;
2645 }
2646
2647 /* Check if the base variable is call-clobbered. */
2648 if (DECL_P (base))
2649 return pt_solution_includes (gimple_call_clobber_set (call), base);
2650 else if ((TREE_CODE (base) == MEM_REF
2651 || TREE_CODE (base) == TARGET_MEM_REF)
2652 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2653 {
2654 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2655 if (!pi)
2656 return true;
2657
2658 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2659 }
2660
2661 return true;
2662 }
2663
2664 /* If the call in statement CALL may clobber the memory reference REF
2665 return true, otherwise return false. */
2666
2667 bool
2668 call_may_clobber_ref_p (gcall *call, tree ref)
2669 {
2670 bool res;
2671 ao_ref r;
2672 ao_ref_init (&r, ref);
2673 res = call_may_clobber_ref_p_1 (call, &r);
2674 if (res)
2675 ++alias_stats.call_may_clobber_ref_p_may_alias;
2676 else
2677 ++alias_stats.call_may_clobber_ref_p_no_alias;
2678 return res;
2679 }
2680
2681
2682 /* If the statement STMT may clobber the memory reference REF return true,
2683 otherwise return false. */
2684
2685 bool
2686 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
2687 {
2688 if (is_gimple_call (stmt))
2689 {
2690 tree lhs = gimple_call_lhs (stmt);
2691 if (lhs
2692 && TREE_CODE (lhs) != SSA_NAME)
2693 {
2694 ao_ref r;
2695 ao_ref_init (&r, lhs);
2696 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
2697 return true;
2698 }
2699
2700 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
2701 }
2702 else if (gimple_assign_single_p (stmt))
2703 {
2704 tree lhs = gimple_assign_lhs (stmt);
2705 if (TREE_CODE (lhs) != SSA_NAME)
2706 {
2707 ao_ref r;
2708 ao_ref_init (&r, lhs);
2709 return refs_may_alias_p_1 (ref, &r, tbaa_p);
2710 }
2711 }
2712 else if (gimple_code (stmt) == GIMPLE_ASM)
2713 return true;
2714
2715 return false;
2716 }
2717
2718 bool
2719 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
2720 {
2721 ao_ref r;
2722 ao_ref_init (&r, ref);
2723 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
2724 }
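
/* Editor's sketch (illustrative, not part of GCC; assumes
   gimple-iterator.h is included): find the first statement in BB that
   may clobber REF, the kind of scan a forward-propagation pass does.  */

static gimple *
example_first_clobber_in_bb (basic_block bb, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    if (stmt_may_clobber_ref_p_1 (gsi_stmt (gsi), &r, true))
      return gsi_stmt (gsi);
  return NULL;
}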
2725
2726 /* Return true if store1 and store2, described by the corresponding tuples
2727 <BASE, OFFSET, SIZE, MAX_SIZE>, have the same size and store to the same
2728 address. */
2729
2730 static bool
2731 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
2732 poly_int64 max_size1,
2733 tree base2, poly_int64 offset2, poly_int64 size2,
2734 poly_int64 max_size2)
2735 {
2736 /* Offsets need to be 0. */
2737 if (maybe_ne (offset1, 0)
2738 || maybe_ne (offset2, 0))
2739 return false;
2740
2741 bool base1_obj_p = SSA_VAR_P (base1);
2742 bool base2_obj_p = SSA_VAR_P (base2);
2743
2744 /* We need one object. */
2745 if (base1_obj_p == base2_obj_p)
2746 return false;
2747 tree obj = base1_obj_p ? base1 : base2;
2748
2749 /* And we need one MEM_REF. */
2750 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
2751 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
2752 if (base1_memref_p == base2_memref_p)
2753 return false;
2754 tree memref = base1_memref_p ? base1 : base2;
2755
2756 /* Sizes need to be valid. */
2757 if (!known_size_p (max_size1)
2758 || !known_size_p (max_size2)
2759 || !known_size_p (size1)
2760 || !known_size_p (size2))
2761 return false;
2762
2763 /* Max_size needs to match size. */
2764 if (maybe_ne (max_size1, size1)
2765 || maybe_ne (max_size2, size2))
2766 return false;
2767
2768 /* Sizes need to match. */
2769 if (maybe_ne (size1, size2))
2770 return false;
2771
2772
2773 /* Check that memref is a store via a pointer with singleton points-to info. */
2774 if (!integer_zerop (TREE_OPERAND (memref, 1)))
2775 return false;
2776 tree ptr = TREE_OPERAND (memref, 0);
2777 if (TREE_CODE (ptr) != SSA_NAME)
2778 return false;
2779 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2780 unsigned int pt_uid;
2781 if (pi == NULL
2782 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
2783 return false;
2784
2785 /* Be conservative with non-call exceptions when the address might
2786 be NULL. */
2787 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
2788 return false;
2789
2790 /* Check that ptr points to obj. */
2791 unsigned int obj_uid = DECL_PT_UID (obj);
2792 if (obj_uid != pt_uid)
2793 return false;
2794
2795 /* Check that the object size is the same as the store size. This ensures
2796 that ptr points to the start of obj. */
2797 return (DECL_SIZE (obj)
2798 && poly_int_tree_p (DECL_SIZE (obj))
2799 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
2800 }
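
/* Editor's note: an illustrative source-level sketch (not from the GCC
   sources) of the pattern same_addr_size_stores_p recognizes.  If
   points-to analysis proves P can only point to S (a singleton
   solution) and the store through P covers all of S, the two stores
   below have the same address and size:

     struct S s;
     struct S *p = &s;   // points-to of p = { s }
     *p = x;             // MEM_REF base, offset 0, size == sizeof s
     s = y;              // DECL base, offset 0, same size

   A later must-kill query (see stmt_kills_ref_p below) can then treat
   the first store as fully overwritten.  */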
2801
2802 /* If STMT kills the memory reference REF return true, otherwise
2803 return false. */
2804
2805 bool
2806 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
2807 {
2808 if (!ao_ref_base (ref))
2809 return false;
2810
2811 if (gimple_has_lhs (stmt)
2812 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2813 /* The assignment is not necessarily carried out if it can throw
2814 and we can catch it in the current function where we could inspect
2815 the previous value.
2816 ??? We only need to care about the RHS throwing. For aggregate
2817 assignments or similar calls and non-call exceptions the LHS
2818 might throw as well. */
2819 && !stmt_can_throw_internal (cfun, stmt))
2820 {
2821 tree lhs = gimple_get_lhs (stmt);
2822 /* If LHS is literally a base of the access we are done. */
2823 if (ref->ref)
2824 {
2825 tree base = ref->ref;
2826 tree innermost_dropped_array_ref = NULL_TREE;
2827 if (handled_component_p (base))
2828 {
2829 tree saved_lhs0 = NULL_TREE;
2830 if (handled_component_p (lhs))
2831 {
2832 saved_lhs0 = TREE_OPERAND (lhs, 0);
2833 TREE_OPERAND (lhs, 0) = integer_zero_node;
2834 }
2835 do
2836 {
2837 /* Just compare the outermost handled component, if
2838 they are equal we have found a possible common
2839 base. */
2840 tree saved_base0 = TREE_OPERAND (base, 0);
2841 TREE_OPERAND (base, 0) = integer_zero_node;
2842 bool res = operand_equal_p (lhs, base, 0);
2843 TREE_OPERAND (base, 0) = saved_base0;
2844 if (res)
2845 break;
2846 /* Remember if we drop an array-ref that we need to
2847 double-check not being at struct end. */
2848 if (TREE_CODE (base) == ARRAY_REF
2849 || TREE_CODE (base) == ARRAY_RANGE_REF)
2850 innermost_dropped_array_ref = base;
2851 /* Otherwise drop handled components of the access. */
2852 base = saved_base0;
2853 }
2854 while (handled_component_p (base));
2855 if (saved_lhs0)
2856 TREE_OPERAND (lhs, 0) = saved_lhs0;
2857 }
2858 /* Finally check if the lhs has the same address and size as the
2859 base candidate of the access. Watch out if we have dropped
2860 an array-ref that was at struct end; this means ref->ref may
2861 be outside of the TYPE_SIZE of its base. */
2862 if ((! innermost_dropped_array_ref
2863 || ! array_at_struct_end_p (innermost_dropped_array_ref))
2864 && (lhs == base
2865 || (((TYPE_SIZE (TREE_TYPE (lhs))
2866 == TYPE_SIZE (TREE_TYPE (base)))
2867 || (TYPE_SIZE (TREE_TYPE (lhs))
2868 && TYPE_SIZE (TREE_TYPE (base))
2869 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
2870 TYPE_SIZE (TREE_TYPE (base)),
2871 0)))
2872 && operand_equal_p (lhs, base,
2873 OEP_ADDRESS_OF
2874 | OEP_MATCH_SIDE_EFFECTS))))
2875 return true;
2876 }
2877
2878 /* Now look for non-literal equal bases, restricting ourselves to
2879 constant offsets and sizes. */
2880 /* For a must-alias check we need to be able to constrain
2881 the access properly. */
2882 if (!ref->max_size_known_p ())
2883 return false;
2884 poly_int64 size, offset, max_size, ref_offset = ref->offset;
2885 bool reverse;
2886 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
2887 &reverse);
2888 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2889 so base == ref->base does not always hold. */
2890 if (base != ref->base)
2891 {
2892 /* Try using points-to info. */
2893 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
2894 ref->offset, ref->size, ref->max_size))
2895 return true;
2896
2897 /* If both base and ref->base are MEM_REFs, only compare the
2898 first operand, and if the second operand isn't equal constant,
2899 try to add the offsets into offset and ref_offset. */
2900 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2901 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2902 {
2903 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2904 TREE_OPERAND (ref->base, 1)))
2905 {
2906 poly_offset_int off1 = mem_ref_offset (base);
2907 off1 <<= LOG2_BITS_PER_UNIT;
2908 off1 += offset;
2909 poly_offset_int off2 = mem_ref_offset (ref->base);
2910 off2 <<= LOG2_BITS_PER_UNIT;
2911 off2 += ref_offset;
2912 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
2913 size = -1;
2914 }
2915 }
2916 else
2917 size = -1;
2918 }
2919 /* For a must-alias check we need to be able to constrain
2920 the access properly. */
2921 if (known_eq (size, max_size)
2922 && known_subrange_p (ref_offset, ref->max_size, offset, size))
2923 return true;
2924 }
2925
2926 if (is_gimple_call (stmt))
2927 {
2928 tree callee = gimple_call_fndecl (stmt);
2929 if (callee != NULL_TREE
2930 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2931 switch (DECL_FUNCTION_CODE (callee))
2932 {
2933 case BUILT_IN_FREE:
2934 {
2935 tree ptr = gimple_call_arg (stmt, 0);
2936 tree base = ao_ref_base (ref);
2937 if (base && TREE_CODE (base) == MEM_REF
2938 && TREE_OPERAND (base, 0) == ptr)
2939 return true;
2940 break;
2941 }
2942
2943 case BUILT_IN_MEMCPY:
2944 case BUILT_IN_MEMPCPY:
2945 case BUILT_IN_MEMMOVE:
2946 case BUILT_IN_MEMSET:
2947 case BUILT_IN_MEMCPY_CHK:
2948 case BUILT_IN_MEMPCPY_CHK:
2949 case BUILT_IN_MEMMOVE_CHK:
2950 case BUILT_IN_MEMSET_CHK:
2951 case BUILT_IN_STRNCPY:
2952 case BUILT_IN_STPNCPY:
2953 case BUILT_IN_CALLOC:
2954 {
2955 /* For a must-alias check we need to be able to constrain
2956 the access properly. */
2957 if (!ref->max_size_known_p ())
2958 return false;
2959 tree dest;
2960 tree len;
2961
2962 /* In execution order a calloc call will never kill
2963 anything. However, DSE will (ab)use this interface
2964 to ask if a calloc call writes the same memory locations
2965 as a later assignment, memset, etc. So handle calloc
2966 in the expected way. */
2967 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
2968 {
2969 tree arg0 = gimple_call_arg (stmt, 0);
2970 tree arg1 = gimple_call_arg (stmt, 1);
2971 if (TREE_CODE (arg0) != INTEGER_CST
2972 || TREE_CODE (arg1) != INTEGER_CST)
2973 return false;
2974
2975 dest = gimple_call_lhs (stmt);
2976 len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
2977 }
2978 else
2979 {
2980 dest = gimple_call_arg (stmt, 0);
2981 len = gimple_call_arg (stmt, 2);
2982 }
2983 if (!poly_int_tree_p (len))
2984 return false;
2985 tree rbase = ref->base;
2986 poly_offset_int roffset = ref->offset;
2987 ao_ref dref;
2988 ao_ref_init_from_ptr_and_size (&dref, dest, len);
2989 tree base = ao_ref_base (&dref);
2990 poly_offset_int offset = dref.offset;
2991 if (!base || !known_size_p (dref.size))
2992 return false;
2993 if (TREE_CODE (base) == MEM_REF)
2994 {
2995 if (TREE_CODE (rbase) != MEM_REF)
2996 return false;
2997 // Compare pointers.
2998 offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
2999 roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
3000 base = TREE_OPERAND (base, 0);
3001 rbase = TREE_OPERAND (rbase, 0);
3002 }
3003 if (base == rbase
3004 && known_subrange_p (roffset, ref->max_size, offset,
3005 wi::to_poly_offset (len)
3006 << LOG2_BITS_PER_UNIT))
3007 return true;
3008 break;
3009 }
3010
3011 case BUILT_IN_VA_END:
3012 {
3013 tree ptr = gimple_call_arg (stmt, 0);
3014 if (TREE_CODE (ptr) == ADDR_EXPR)
3015 {
3016 tree base = ao_ref_base (ref);
3017 if (TREE_OPERAND (ptr, 0) == base)
3018 return true;
3019 }
3020 break;
3021 }
3022
3023 default:;
3024 }
3025 }
3026 return false;
3027 }
3028
3029 bool
3030 stmt_kills_ref_p (gimple *stmt, tree ref)
3031 {
3032 ao_ref r;
3033 ao_ref_init (&r, ref);
3034 return stmt_kills_ref_p (stmt, &r);
3035 }
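
/* Editor's sketch (illustrative, not part of GCC): a must-kill query in
   the style of dead-store elimination.  EARLIER and LATER are assumed
   to be assignments where LATER executes after EARLIER on every path
   and no use of the stored memory occurs in between.  */

static bool
example_store_is_overwritten_p (gimple *earlier, gimple *later)
{
  tree lhs = gimple_assign_lhs (earlier);
  /* If LATER must write every byte EARLIER writes, EARLIER is dead.  */
  return TREE_CODE (lhs) != SSA_NAME && stmt_kills_ref_p (later, lhs);
}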
3036
3037
3038 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
3039 TARGET or a statement clobbering the memory reference REF, in which
3040 case false is returned. The walk starts with VUSE, one argument of PHI. */
3041
3042 static bool
3043 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
3044 ao_ref *ref, tree vuse, unsigned int &limit, bitmap *visited,
3045 bool abort_on_visited,
3046 void *(*translate)(ao_ref *, tree, void *, bool *),
3047 void *data)
3048 {
3049 basic_block bb = gimple_bb (phi);
3050
3051 if (!*visited)
3052 *visited = BITMAP_ALLOC (NULL);
3053
3054 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
3055
3056 /* Walk until we hit the target. */
3057 while (vuse != target)
3058 {
3059 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
3060 /* If we are searching for the target VUSE by walking up to
3061 TARGET_BB dominating the original PHI we are finished once
3062 we reach a default def or a definition in a block dominating
3063 that block. Update TARGET and return. */
3064 if (!target
3065 && (gimple_nop_p (def_stmt)
3066 || dominated_by_p (CDI_DOMINATORS,
3067 target_bb, gimple_bb (def_stmt))))
3068 {
3069 target = vuse;
3070 return true;
3071 }
3072
3073 /* Recurse for PHI nodes. */
3074 if (gimple_code (def_stmt) == GIMPLE_PHI)
3075 {
3076 /* An already visited PHI node ends the walk successfully. */
3077 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
3078 return !abort_on_visited;
3079 vuse = get_continuation_for_phi (def_stmt, ref, limit,
3080 visited, abort_on_visited,
3081 translate, data);
3082 if (!vuse)
3083 return false;
3084 continue;
3085 }
3086 else if (gimple_nop_p (def_stmt))
3087 return false;
3088 else
3089 {
3090 /* A clobbering statement or the end of the IL ends the walk, failing. */
3091 if ((int)limit <= 0)
3092 return false;
3093 --limit;
3094 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
3095 {
3096 bool disambiguate_only = true;
3097 if (translate
3098 && (*translate) (ref, vuse, data, &disambiguate_only) == NULL)
3099 ;
3100 else
3101 return false;
3102 }
3103 }
3104 /* If we reach a new basic-block see if we already skipped it
3105 in a previous walk that ended successfully. */
3106 if (gimple_bb (def_stmt) != bb)
3107 {
3108 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
3109 return !abort_on_visited;
3110 bb = gimple_bb (def_stmt);
3111 }
3112 vuse = gimple_vuse (def_stmt);
3113 }
3114 return true;
3115 }
3116
3117
3118 /* Starting from a PHI node for the virtual operand of the memory reference
3119 REF, find a continuation virtual operand that allows the walk to continue
3120 over statements dominating PHI, skipping only statements that cannot possibly
3121 clobber REF. Decrements LIMIT for each alias disambiguation done
3122 and aborts the walk, returning NULL_TREE if it reaches zero.
3123 Returns NULL_TREE if no suitable virtual operand can be found. */
3124
3125 tree
3126 get_continuation_for_phi (gimple *phi, ao_ref *ref,
3127 unsigned int &limit, bitmap *visited,
3128 bool abort_on_visited,
3129 void *(*translate)(ao_ref *, tree, void *, bool *),
3130 void *data)
3131 {
3132 unsigned nargs = gimple_phi_num_args (phi);
3133
3134 /* A single-argument PHI we can simply look through. */
3135 if (nargs == 1)
3136 return PHI_ARG_DEF (phi, 0);
3137
3138 /* For two or more arguments try to pairwise skip non-aliasing code
3139 until we hit the phi argument definition that dominates the others. */
3140 basic_block phi_bb = gimple_bb (phi);
3141 tree arg0, arg1;
3142 unsigned i;
3143
3144 /* Find a candidate for the virtual operand whose definition
3145 dominates those of all others. */
3146 /* First see whether any of the args themselves satisfy this. */
3147 for (i = 0; i < nargs; ++i)
3148 {
3149 arg0 = PHI_ARG_DEF (phi, i);
3150 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
3151 break;
3152 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
3153 if (def_bb != phi_bb
3154 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
3155 break;
3156 arg0 = NULL_TREE;
3157 }
3158 /* If not, see if we can reach such a candidate by walking defs
3159 until we hit the immediate dominator. maybe_skip_until will
3160 do that for us. */
3161 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
3162
3163 /* Then check against the (to be) found candidate. */
3164 for (i = 0; i < nargs; ++i)
3165 {
3166 arg1 = PHI_ARG_DEF (phi, i);
3167 if (arg1 == arg0)
3168 ;
3169 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, limit, visited,
3170 abort_on_visited,
3171 /* Do not translate when walking over
3172 backedges. */
3173 dominated_by_p
3174 (CDI_DOMINATORS,
3175 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3176 phi_bb)
3177 ? NULL : translate, data))
3178 return NULL_TREE;
3179 }
3180
3181 return arg0;
3182 }
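
/* Editor's note: an illustrative sketch (not from the GCC sources) of a
   diamond CFG as get_continuation_for_phi sees it:

          # .MEM_1 = ...             <- definition above the diamond
         /            \
     then:            else:
     # .MEM_2 = ...   (no vdef)
         \            /
      # .MEM_3 = PHI <.MEM_2(then), .MEM_1(else)>

   Starting from the PHI defining .MEM_3, .MEM_1 dominates the other
   argument and becomes the candidate; if the statement defining .MEM_2
   cannot clobber REF, maybe_skip_until succeeds and .MEM_1 is returned,
   letting the caller continue the walk above the diamond.  */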
3183
3184 /* Based on the memory reference REF and its virtual use VUSE call
3185 WALKER for each virtual use that is equivalent to VUSE, including VUSE
3186 itself. That is, for each virtual use whose defining statement
3187 does not clobber REF.
3188
3189 WALKER is called with REF, the current virtual use and DATA. If
3190 WALKER returns non-NULL the walk stops and its result is returned.
3191 At the end of a non-successful walk NULL is returned.
3192
3193 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
3194 use whose definition is a statement that may clobber REF, and DATA.
3195 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3196 If TRANSLATE returns non-NULL the walk stops and its result is returned.
3197 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3198 to adjust REF and *DATA to make that valid.
3199
3200 VALUEIZE if non-NULL is called with the next VUSE that is considered
3201 and its return value is substituted for it. This can be used to
3202 implement optimistic value-numbering, for example. Note that the
3203 VUSE argument is assumed to be valueized already.
3204
3205 LIMIT specifies the number of alias queries we are allowed to do,
3206 the walk stops when it reaches zero and NULL is returned. LIMIT
3207 is decremented by the number of alias queries (plus adjustments
3208 done by the callbacks) upon return.
3209
3210 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
3211
3212 void *
3213 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
3214 void *(*walker)(ao_ref *, tree, void *),
3215 void *(*translate)(ao_ref *, tree, void *, bool *),
3216 tree (*valueize)(tree),
3217 unsigned &limit, void *data)
3218 {
3219 bitmap visited = NULL;
3220 void *res;
3221 bool translated = false;
3222
3223 timevar_push (TV_ALIAS_STMT_WALK);
3224
3225 do
3226 {
3227 gimple *def_stmt;
3228
3229 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3230 res = (*walker) (ref, vuse, data);
3231 /* Abort walk. */
3232 if (res == (void *)-1)
3233 {
3234 res = NULL;
3235 break;
3236 }
3237 /* Lookup succeeded. */
3238 else if (res != NULL)
3239 break;
3240
3241 if (valueize)
3242 {
3243 vuse = valueize (vuse);
3244 if (!vuse)
3245 {
3246 res = NULL;
3247 break;
3248 }
3249 }
3250 def_stmt = SSA_NAME_DEF_STMT (vuse);
3251 if (gimple_nop_p (def_stmt))
3252 break;
3253 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3254 vuse = get_continuation_for_phi (def_stmt, ref, limit,
3255 &visited, translated, translate, data);
3256 else
3257 {
3258 if ((int)limit <= 0)
3259 {
3260 res = NULL;
3261 break;
3262 }
3263 --limit;
3264 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
3265 {
3266 if (!translate)
3267 break;
3268 bool disambiguate_only = false;
3269 res = (*translate) (ref, vuse, data, &disambiguate_only);
3270 /* Failed lookup and translation. */
3271 if (res == (void *)-1)
3272 {
3273 res = NULL;
3274 break;
3275 }
3276 /* Lookup succeeded. */
3277 else if (res != NULL)
3278 break;
3279 /* Translation succeeded, continue walking. */
3280 translated = translated || !disambiguate_only;
3281 }
3282 vuse = gimple_vuse (def_stmt);
3283 }
3284 }
3285 while (vuse);
3286
3287 if (visited)
3288 BITMAP_FREE (visited);
3289
3290 timevar_pop (TV_ALIAS_STMT_WALK);
3291
3292 return res;
3293 }
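
/* Editor's sketch (illustrative, not part of GCC): a minimal WALKER
   callback in the style of a value-numbering lookup.  The hypothetical
   example_cache_lookup stands in for a hash-table lookup keyed on
   (REF, VUSE); returning non-NULL stops the walk successfully, and
   returning (void *)-1 would abort it.  */

static void *
example_walker (ao_ref *ref, tree vuse, void *data)
{
  /* Return NULL to keep walking up the virtual use-def chain.  */
  return example_cache_lookup (ref, vuse, data); /* hypothetical */
}

/* A typical invocation, given a load statement STMT reading REF_TREE:

     ao_ref r;
     ao_ref_init (&r, ref_tree);
     unsigned limit = 100;   // arbitrary alias-query budget
     void *res = walk_non_aliased_vuses (&r, gimple_vuse (stmt),
                                         example_walker,
                                         NULL, NULL, limit, NULL);

   with NULL passed for both TRANSLATE and VALUEIZE.  */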
3294
3295
3296 /* Based on the memory reference REF call WALKER for each vdef whose
3297 defining statement may clobber REF, starting with VDEF. If REF
3298 is NULL_TREE, each defining statement is visited.
3299
3300 WALKER is called with REF, the current vdef and DATA. If WALKER
3301 returns true the walk is stopped, otherwise it continues.
3302
3303 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
3304 The pointer may be NULL, in which case we do not track this information.
3305
3306 At PHI nodes walk_aliased_vdefs forks into one walk for each
3307 PHI argument (but only one walk continues at merge points); the
3308 return value is true if any of the walks was successful.
3309
3310 The function returns the number of statements walked or -1 if
3311 LIMIT stmts were walked and the walk was aborted at this point.
3312 If LIMIT is zero the walk is not aborted. */
3313
3314 static int
3315 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
3316 bool (*walker)(ao_ref *, tree, void *), void *data,
3317 bitmap *visited, unsigned int cnt,
3318 bool *function_entry_reached, unsigned limit)
3319 {
3320 do
3321 {
3322 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
3323
3324 if (*visited
3325 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
3326 return cnt;
3327
3328 if (gimple_nop_p (def_stmt))
3329 {
3330 if (function_entry_reached)
3331 *function_entry_reached = true;
3332 return cnt;
3333 }
3334 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3335 {
3336 unsigned i;
3337 if (!*visited)
3338 *visited = BITMAP_ALLOC (NULL);
3339 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3340 {
3341 int res = walk_aliased_vdefs_1 (ref,
3342 gimple_phi_arg_def (def_stmt, i),
3343 walker, data, visited, cnt,
3344 function_entry_reached, limit);
3345 if (res == -1)
3346 return -1;
3347 cnt = res;
3348 }
3349 return cnt;
3350 }
3351
3352 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3353 cnt++;
3354 if (cnt == limit)
3355 return -1;
3356 if ((!ref
3357 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
3358 && (*walker) (ref, vdef, data))
3359 return cnt;
3360
3361 vdef = gimple_vuse (def_stmt);
3362 }
3363 while (1);
3364 }
3365
3366 int
3367 walk_aliased_vdefs (ao_ref *ref, tree vdef,
3368 bool (*walker)(ao_ref *, tree, void *), void *data,
3369 bitmap *visited,
3370 bool *function_entry_reached, unsigned int limit)
3371 {
3372 bitmap local_visited = NULL;
3373 int ret;
3374
3375 timevar_push (TV_ALIAS_STMT_WALK);
3376
3377 if (function_entry_reached)
3378 *function_entry_reached = false;
3379
3380 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
3381 visited ? visited : &local_visited, 0,
3382 function_entry_reached, limit);
3383 if (local_visited)
3384 BITMAP_FREE (local_visited);
3385
3386 timevar_pop (TV_ALIAS_STMT_WALK);
3387
3388 return ret;
3389 }
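
/* Editor's sketch (illustrative, not part of GCC): using the vdef walk
   above to count the statements that may clobber a reference.  */

static bool
example_count_clobbers_cb (ao_ref *, tree, void *data)
{
  ++*(unsigned *) data;
  return false;  /* Returning false continues the walk; true stops it.  */
}

/* Usage, for a statement STMT with a virtual use and a reference REF_TREE:

     ao_ref r;
     ao_ref_init (&r, ref_tree);
     unsigned n = 0;
     int walked = walk_aliased_vdefs (&r, gimple_vuse (stmt),
                                      example_count_clobbers_cb, &n,
                                      NULL, NULL, 100);

   Afterwards N holds the number of may-clobbering definitions visited,
   or WALKED is -1 if the limit of 100 statements was hit.  */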
3390