/* Alias analysis for trees.
   Copyright (C) 2004-2020 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "langhooks.h"
#include "dumpfile.h"
#include "tree-eh.h"
#include "tree-dfa.h"
#include "ipa-reference.h"
#include "varasm.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */

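/* Example usage (editorial sketch, not part of the original source):
   given two reference trees REF1 and REF2 and a statement STMT, a
   pass might consult the oracle as

     if (!refs_may_alias_p (ref1, ref2))
       ... REF1 and REF2 access independent memory ...
     if (!stmt_may_clobber_ref_p (stmt, ref1))
       ... STMT preserves the memory designated by REF1 ...

   Both entry points are documented above; the surrounding control
   flow is illustrative only.  */
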
static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
static bool nonoverlapping_component_refs_p (const_tree, const_tree);

/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  fprintf (s, "  nonoverlapping_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_component_refs_p_no_alias,
	   alias_stats.nonoverlapping_component_refs_p_no_alias
	   + alias_stats.nonoverlapping_component_refs_p_may_alias);
  fprintf (s, "  nonoverlapping_refs_since_match_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" must overlaps, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias,
	   alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_may_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
  fprintf (s, "  aliasing_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.aliasing_component_refs_p_no_alias,
	   alias_stats.aliasing_component_refs_p_no_alias
	   + alias_stats.aliasing_component_refs_p_may_alias);
  dump_alias_stats_in_alias_c (s);
}


/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}
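
/* For instance (illustrative example, not from the original source):
   given

     int a, b;
     int *p = &b;

   a query ptr_deref_may_alias_decl_p (p, a) can return false once the
   points-to set of p has been computed as { b }.  */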

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for the same
     pointer.  In this case we still want to say both pointers alias,
     so shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
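
/* E.g. (illustrative): for

     int a, b;
     int *p = &a, *q = &b;

   the points-to sets of p and q are { a } and { b }, which do not
   intersect, so *p and *q are disambiguated even though both sides
   involve pointer dereferences.  */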

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to a SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which need points-to adjustments to track them
     in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that restrict-pointed-to
	 may be in fact obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */

  return false;
}
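
/* For example (illustrative): with

     int a, b;
     int *p = &b;

   the comparison p == &a can be folded to false, because the
   points-to set of p does not include a and a is neither restrict
   based nor interposable.  */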

/* Return whether a reference based on BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}


/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}


/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  || pt->vars_contains_escaped
	  || pt->vars_contains_escaped_heap
	  || pt->vars_contains_restrict)
	{
	  const char *comma = "";
	  fprintf (file, " (");
	  if (pt->vars_contains_nonlocal)
	    {
	      fprintf (file, "nonlocal");
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped)
	    {
	      fprintf (file, "%sescaped", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped_heap)
	    {
	      fprintf (file, "%sescaped heap", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_restrict)
	    {
	      fprintf (file, "%srestrict", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_interposable)
	    fprintf (file, "%sinterposable", comma);
	  fprintf (file, ")");
	}
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}


/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}
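
/* A typical use (editorial sketch): wrap a reference tree once and
   let the base and extents be computed lazily:

     ao_ref r;
     ao_ref_init (&r, ref);
     tree base = ao_ref_base (&r);

   ao_ref_base, defined below, fills in offset, size and max_size as
   a side effect.  */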

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  if (!ref->ref)
    return 0;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
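
/* For instance (illustrative sketch): for a call like
   memset (p, 0, 32) the destination reference can be built as

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, gimple_call_arg (call, 0),
				    gimple_call_arg (call, 2));

   yielding a 32-byte access starting at *p, with both alias sets
   conservatively left at 0.  */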

/* S1 and S2 are TYPE_SIZE or DECL_SIZE.  Compare them:
   Return -1 if S1 < S2
   Return 1 if S1 > S2
   Return 0 if equal or incomparable.  */

static int
compare_sizes (tree s1, tree s2)
{
  if (!s1 || !s2)
    return 0;

  poly_uint64 size1;
  poly_uint64 size2;

  if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
    return 0;
  if (known_lt (size1, size2))
    return -1;
  if (known_lt (size2, size1))
    return 1;
  return 0;
}

/* Compare TYPE1 and TYPE2 by their size.
   Return -1 if size of TYPE1 < size of TYPE2
   Return 1 if size of TYPE1 > size of TYPE2
   Return 0 if types are of equal sizes or we cannot compare them.  */

static int
compare_type_sizes (tree type1, tree type2)
{
  /* Be conservative for arrays and vectors.  We want to support partial
     overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c.  */
  while (TREE_CODE (type1) == ARRAY_TYPE
	 || TREE_CODE (type1) == VECTOR_TYPE)
    type1 = TREE_TYPE (type1);
  while (TREE_CODE (type2) == ARRAY_TYPE
	 || TREE_CODE (type2) == VECTOR_TYPE)
    type2 = TREE_TYPE (type2);
  return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
}

/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* Handle the most common case first.  */
  if (type1 == type2)
    return 1;

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}

/* Return true if TYPE is a composite type (i.e. we may apply one of the
   handled components to it).  */

static bool
type_has_components_p (tree type)
{
  return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
	 || TREE_CODE (type) == COMPLEX_TYPE;
}

/* MATCH1 and MATCH2, which are part of the access paths of REF1 and REF2
   respectively, either point to the same address or are completely
   disjoint.  If PARTIAL_OVERLAP is true, assume that the outermost arrays
   may just partly overlap.

   Try to disambiguate using the access path starting from the match
   and return false if there is no conflict.

   Helper for aliasing_component_refs_p.  */

static bool
aliasing_matching_component_refs_p (tree match1, tree ref1,
				    poly_int64 offset1, poly_int64 max_size1,
				    tree match2, tree ref2,
				    poly_int64 offset2, poly_int64 max_size2,
				    bool partial_overlap)
{
  poly_int64 offadj, sztmp, msztmp;
  bool reverse;

  if (!partial_overlap)
    {
      get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	{
	  ++alias_stats.aliasing_component_refs_p_no_alias;
	  return false;
	}
    }

  int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
					       partial_overlap);
  if (cmp == 1
      || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
    {
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }
  ++alias_stats.aliasing_component_refs_p_may_alias;
  return true;
}

/* Return true if REF is a reference to a zero sized trailing array.  I.e.
     struct foo {int bar; int array[0];} *fooptr;
     fooptr->array.  */

static bool
component_ref_to_zero_sized_trailing_array_p (tree ref)
{
  return (TREE_CODE (ref) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
	      || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
	  && array_at_struct_end_p (ref));
}

/* Worker for aliasing_component_refs_p.  Most parameters match parameters of
   aliasing_component_refs_p.

   Walk access path REF2 and try to find a type matching TYPE1
   (which is the start of the possibly aliasing access path REF1).
   If a match is found, try to disambiguate.

   Return 0 for successful disambiguation.
   Return 1 if a match was found but disambiguation failed.
   Return -1 if there is no match.
   In this case MAYBE_MATCH is set to false if there is no type matching
   TYPE1 in access path REF2 and to true if we are not sure.  */

static int
aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
			      poly_int64 offset1, poly_int64 max_size1,
			      tree end_struct_ref1,
			      tree ref2, tree base2,
			      poly_int64 offset2, poly_int64 max_size2,
			      bool *maybe_match)
{
  tree ref = ref2;
  int same_p = 0;

  while (true)
    {
      /* We walk from inner type to the outer types.  If the type we see is
	 already too large to be part of type1, terminate the search.  */
      int cmp = compare_type_sizes (type1, TREE_TYPE (ref));

      if (cmp < 0
	  && (!end_struct_ref1
	      || compare_type_sizes (TREE_TYPE (end_struct_ref1),
				     TREE_TYPE (ref)) < 0))
	break;
      /* If types may be of same size, see if we can decide about their
	 equality.  */
      if (cmp == 0)
	{
	  same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
	  if (same_p == 1)
	    break;
	  /* In case we can't decide whether the types are the same, try to
	     continue looking for the exact match.
	     Remember however that we possibly saw a match
	     to bypass the access path continuation tests we do later.  */
	  if (same_p == -1)
	    *maybe_match = true;
	}
      if (!handled_component_p (ref))
	break;
      ref = TREE_OPERAND (ref, 0);
    }
  if (same_p == 1)
    {
      bool partial_overlap = false;

      /* We assume that arrays can overlap by a multiple of their element
	 size as tested in gcc.dg/torture/alias-2.c.
	 This partial overlap happens only when both arrays are bases of
	 the access and not contained within another component ref.
	 To be safe we also assume partial overlap for VLAs.  */
      if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (base1))
	      || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
	      || ref == base2))
	{
	  /* Setting maybe_match to true triggers the
	     nonoverlapping_component_refs_p test later that still may do
	     useful disambiguation.  */
	  *maybe_match = true;
	  partial_overlap = true;
	}
      return aliasing_matching_component_refs_p (base1, ref1,
						 offset1, max_size1,
						 ref, ref2,
						 offset2, max_size2,
						 partial_overlap);
    }
  return -1;
}

/* Consider access path1 base1....ref1 and access path2 base2...ref2.
   Return true if they can be composed to a single access path
   base1...ref1...base2...ref2.

   REF_TYPE1 is the type of REF1.  END_STRUCT_PAST_END1 is true if there is
   a trailing array access after REF1 in the non-TBAA part of the access.
   REF1_ALIAS_SET is the alias set of REF1.

   BASE_TYPE2 is the type of base2.  END_STRUCT_REF2 is non-NULL if there is
   a trailing array access in the TBAA part of access path2.
   BASE2_ALIAS_SET is the alias set of base2.  */

bool
access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
			    alias_set_type ref1_alias_set,
			    tree base_type2, tree end_struct_ref2,
			    alias_set_type base2_alias_set)
{
  /* Access paths cannot continue past types with no components.  */
  if (!type_has_components_p (ref_type1))
    return false;

  /* If the first access path ends by a type too small to hold the base of
     the second access path, typically the paths cannot continue.

     Punt if end_struct_past_end1 is true.  We want to support arbitrary
     type punning past the first COMPONENT_REF to union because redundant
     store elimination depends on this, see PR92152.  For this reason we
     cannot check the size of the reference because types may partially
     overlap.  */
  if (!end_struct_past_end1)
    {
      if (compare_type_sizes (ref_type1, base_type2) < 0)
	return false;
      /* If path2 contains a trailing array access we can strengthen the
	 check to verify that also the size of the element of the trailing
	 array fits.  In fact we could check for offset + type_size, but we
	 do not track offsets and this is quite a corner case.  */
      if (end_struct_ref2
	  && compare_type_sizes (ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
	return false;
    }
  return (base2_alias_set == ref1_alias_set
	  || alias_set_subset_of (base2_alias_set, ref1_alias_set));
}
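
/* For instance (illustrative): with

     struct A { int i; };
     struct B { struct A a; int k; };

   a path ending in a struct B reference may continue as a path based
   on struct A (as in b.a.i), because struct A fits into struct B and
   the alias set of struct A is a subset of that of struct B.  */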

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   poly_int64 offset2, poly_int64 max_size2)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  bool maybe_match = false;
  tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
  bool end_struct_past_end1 = false;
  bool end_struct_past_end2 = false;

  /* Choose bases and base types to search for.
     The access path is as follows:
       base....end_of_tbaa_ref...actual_ref
     At one place in the access path there may be a reference to a zero
     sized or trailing array.

     We generally discard the segment after end_of_tbaa_ref, however
     we need to be careful in case it contains a zero sized or trailing
     array.  These may happen after a reference to a union and in this
     case we need to not disambiguate type punning scenarios.

     We set:
	base1 to point to base

	ref1 to point to end_of_tbaa_ref

	end_struct_ref1 to point to the trailing reference (if it exists
	in range base....end_of_tbaa_ref)

	end_struct_past_end1 is true if this trailing reference occurs in
	end_of_tbaa_ref...actual_ref.  */
  base1 = ref1;
  while (handled_component_p (base1))
    {
      /* Generally access paths are monotone in the size of object.  The
	 exception are trailing arrays of structures.  I.e.
	   struct a {int array[0];};
	 or
	   struct a {int array1[0]; int array[];};
	 Such struct has size 0 but accesses to a.array may have non-zero size.
	 In this case the size of TREE_TYPE (base1) is smaller than
	 the size of TREE_TYPE (TREE_OPERAND (base1, 0)).

	 Because we compare sizes of arrays just by sizes of their elements,
	 we only need to care about zero sized array fields here.  */
      if (component_ref_to_zero_sized_trailing_array_p (base1))
	{
	  gcc_checking_assert (!end_struct_ref1);
	  end_struct_ref1 = base1;
	}
      if (ends_tbaa_access_path_p (base1))
	{
	  ref1 = TREE_OPERAND (base1, 0);
	  if (end_struct_ref1)
	    {
	      end_struct_past_end1 = true;
	      end_struct_ref1 = NULL;
	    }
	}
      base1 = TREE_OPERAND (base1, 0);
    }
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    {
      if (component_ref_to_zero_sized_trailing_array_p (base2))
	{
	  gcc_checking_assert (!end_struct_ref2);
	  end_struct_ref2 = base2;
	}
      if (ends_tbaa_access_path_p (base2))
	{
	  ref2 = TREE_OPERAND (base2, 0);
	  if (end_struct_ref2)
	    {
	      end_struct_past_end2 = true;
	      end_struct_ref2 = NULL;
	    }
	}
      base2 = TREE_OPERAND (base2, 0);
    }
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.
     This however only makes sense if type2 is big enough to hold type1.  */
  int cmp_outer = compare_type_sizes (type2, type1);

  /* If type2 is big enough to contain type1 walk its access path.
     We also need to care of arrays at the end of structs that may extend
     beyond the end of structure.  If this occurs in the TBAA part of the
     access path, we need to consider the increased type as well.  */
  if (cmp_outer >= 0
      || (end_struct_ref2
	  && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
    {
      int res = aliasing_component_refs_walk (ref1, type1, base1,
					      offset1, max_size1,
					      end_struct_ref1,
					      ref2, base2, offset2, max_size2,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* If we didn't find a common base, try the other way around.  */
  if (cmp_outer <= 0
      || (end_struct_ref1
	  && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0))
    {
      int res = aliasing_component_refs_walk (ref2, type2, base2,
					      offset2, max_size2,
					      end_struct_ref2,
					      ref1, base1, offset1, max_size1,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* In the following code we make an assumption that the types in access
     paths do not overlap and thus accesses alias only if one path can be
     a continuation of another.  If we were not able to decide about
     equivalence, we need to give up.  */
  if (maybe_match)
    {
      if (!nonoverlapping_component_refs_p (ref1, ref2))
	{
	  ++alias_stats.aliasing_component_refs_p_may_alias;
	  return true;
	}
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }

  if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
				  ref1_alias_set,
				  type2, end_struct_ref2,
				  base2_alias_set)
      || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end2,
				     ref2_alias_set,
				     type1, end_struct_ref1,
				     base1_alias_set))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  ++alias_stats.aliasing_component_refs_p_no_alias;
  return false;
}

/* FIELD1 and FIELD2 are two fields of component refs.  We assume
   that the bases of both component refs are either equivalent or
   nonoverlapping.  We do not assume that the containers of FIELD1 and
   FIELD2 are of the same type or size.

   Return 0 when, assuming the base addresses of the component_refs are
   the same, FIELD1 and FIELD2 also have the same address.  Note that
   FIELD1 and FIELD2 may not be of the same type or size.

   Return 1 if FIELD1 and FIELD2 are non-overlapping.

   Return -1 otherwise.

   The main difference between 0 and -1 is to let
   nonoverlapping_refs_since_match_p discover the semantically
   equivalent part of the access path.

   Note that this function is used even with -fno-strict-aliasing
   and makes use of no TBAA assumptions.  */

static int
nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
{
  /* If both fields are of the same type, we could save hard work of
     comparing offsets.  */
  tree type1 = DECL_CONTEXT (field1);
  tree type2 = DECL_CONTEXT (field2);

  if (TREE_CODE (type1) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field1))
    field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
  if (TREE_CODE (type2) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field2))
    field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);

  /* ??? Bitfields can overlap at RTL level so punt on them.
     FIXME: RTL expansion should be fixed by adjusting the access path
     when producing MEM_ATTRs for MEMs which are wider than
     the bitfields similarly as done in set_mem_attrs_minus_bitpos.  */
  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
    return -1;

  /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE.  */
  if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
    return field1 != field2;

  /* In the common case the offsets and bit offsets will be the same.
     However if frontends do not agree on the alignment, they may be
     different even if they actually represent the same address.
     Try the common case first and if that fails calculate the
     actual bit offset.  */
  if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
			  DECL_FIELD_OFFSET (field2))
      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
			     DECL_FIELD_BIT_OFFSET (field2)))
    return 0;

  /* Note that it may be possible to use component_ref_field_offset
     which would provide offsets as trees.  However constructing and folding
     trees is expensive and does not seem to be worth the compile time
     cost.  */

  poly_uint64 offset1, offset2;
  poly_uint64 bit_offset1, bit_offset2;

  if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
      && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
    {
      offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
      offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;

      if (known_eq (offset1, offset2))
	return 0;

      poly_uint64 size1, size2;

      if (poly_int_tree_p (DECL_SIZE (field1), &size1)
	  && poly_int_tree_p (DECL_SIZE (field2), &size2)
	  && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
	return 1;
    }
  /* Resort to slower overlap checking by looking for matching types in
     the middle of access path.  */
  return -1;
}
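
/* For example (illustrative): given

     struct S { int x; int y; };

   accesses p->x and q->y cannot overlap when *p and *q start at the
   same address, so the function returns 1 for (x, y); for (x, x) it
   returns 0 since both fields then start at the same address.  */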

/* Return the low bound of an array.  Do not produce new trees
   and thus do not care about the particular type of integer constant
   and placeholder exprs.  */

static tree
cheap_array_ref_low_bound (tree ref)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));

  /* Avoid expensive array_ref_low_bound.
     The low bound is either stored in operand 2, or it is TYPE_MIN_VALUE
     of the domain type, or it is zero.  */
  if (TREE_OPERAND (ref, 2))
    return TREE_OPERAND (ref, 2);
  else if (domain_type && TYPE_MIN_VALUE (domain_type))
    return TYPE_MIN_VALUE (domain_type);
  else
    return integer_zero_node;
}

/* REF1 and REF2 are ARRAY_REFs with either the same base address or which
   are completely disjoint.

   Return 1 if the refs are non-overlapping.
   Return 0 if they are possibly overlapping but if so the overlap again
   starts on the same address.
   Return -1 otherwise.  */

int
nonoverlapping_array_refs_p (tree ref1, tree ref2)
{
  tree index1 = TREE_OPERAND (ref1, 1);
  tree index2 = TREE_OPERAND (ref2, 1);
  tree low_bound1 = cheap_array_ref_low_bound (ref1);
  tree low_bound2 = cheap_array_ref_low_bound (ref2);

  /* Handle zero offsets first: we do not need to match type size in this
     case.  */
  if (operand_equal_p (index1, low_bound1, 0)
      && operand_equal_p (index2, low_bound2, 0))
    return 0;

  /* If type sizes are different, give up.

     Avoid expensive array_ref_element_size.
     If operand 3 is present it denotes the size in alignment units.
     Otherwise the size is TYPE_SIZE of the element type.
     Handle only common cases where types are of the same "kind".  */
  if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
    return -1;

  tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
  tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));

  if (TREE_OPERAND (ref1, 3))
    {
      if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
	  || !operand_equal_p (TREE_OPERAND (ref1, 3),
			       TREE_OPERAND (ref2, 3), 0))
	return -1;
    }
  else
    {
      if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
			    TYPE_SIZE_UNIT (elmt_type2), 0))
	return -1;
    }

  /* Since we know that type sizes are the same, there is no need to return
     -1 after this point.  Partial overlap cannot be introduced.  */

  /* We may need to fold trees in this case.
     TODO: Handle the integer constant case at least.  */
  if (!operand_equal_p (low_bound1, low_bound2, 0))
    return 0;

  if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
    {
      if (tree_int_cst_equal (index1, index2))
	return 0;
      return 1;
    }
  /* TODO: We can use VRP to further disambiguate here.  */
  return 0;
}
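
/* E.g. (illustrative): for int a[10], references a[1] and a[2] have
   equal element sizes and distinct constant indexes, so the function
   returns 1; for a[i] and a[j] with unknown indexes it returns 0,
   meaning any overlap again starts at the same address.  */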

/* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
   MATCH2 either point to the same address or are disjoint.
   MATCH1 and MATCH2 are assumed to be refs in the access paths of REF1 and
   REF2 respectively, or NULL in the case we established equivalence of
   bases.  If PARTIAL_OVERLAP is true assume that the toplevel arrays may
   actually overlap by an exact multiple of their element size.

   This test works by matching the initial segment of the access path
   and does not rely on TBAA, thus is safe for !flag_strict_aliasing if
   the match was determined without use of the TBAA oracle.

   Return 1 if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.

   Return 0 if the paths are the same and thus there is nothing more to
   disambiguate (i.e. there is a must-alias, assuming there is a must-alias
   between MATCH1 and MATCH2).

   Return -1 if we cannot determine 0 or 1 - this happens when
   non-matching types were met in the path.
   In this case it may make sense to continue with other disambiguation
   oracles.  */

static int
nonoverlapping_refs_since_match_p (tree match1, tree ref1,
				   tree match2, tree ref2,
				   bool partial_overlap)
{
  int ntbaa1 = 0, ntbaa2 = 0;
  /* Early return if there are no references to match, we do not need
     to walk the access paths.

     Do not consider this as may-alias for stats - it is more useful
     to have information how many disambiguations happened provided that
     the query was meaningful.  */

  if (match1 == ref1 || !handled_component_p (ref1)
      || match2 == ref2 || !handled_component_p (ref2))
    return -1;

  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1) && ref1 != match1)
    {
      /* We use TBAA only to re-synchronize after mismatched refs.  So we
	 do not need to truncate the access path after the TBAA part
	 ends.  */
      if (ends_tbaa_access_path_p (ref1))
	ntbaa1 = 0;
      else
	ntbaa1++;
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2) && ref2 != match2)
    {
      if (ends_tbaa_access_path_p (ref2))
	ntbaa2 = 0;
      else
	ntbaa2++;
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }

  if (!flag_strict_aliasing)
    {
      ntbaa1 = 0;
      ntbaa2 = 0;
    }

  bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
  bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;

  /* If only one of the access paths starts with MEM_REF, check that the
     offset is 0 so the address stays the same after stripping it.
     TODO: In this case we may walk the other access path until we get
     the same offset.

     If both start with MEM_REF, the offsets have to be the same.  */
  if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
      || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
      || (mem_ref1 && mem_ref2
	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
				  TREE_OPERAND (ref2, 1))))
    {
      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
      return -1;
    }

  /* TARGET_MEM_REFs are never wrapped in handled components, so we do not
     need to handle them here at all.  */
  gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
		       && TREE_CODE (ref2) != TARGET_MEM_REF);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      /* Track if we have seen an unmatched ref with non-zero offset.  In
	 this case we must look for partial overlaps.  */
      bool seen_unmatched_ref_p = false;

      /* First match ARRAY_REFs and try to disambiguate.  */
      if (!component_refs1.is_empty ()
	  && !component_refs2.is_empty ())
	{
	  unsigned int narray_refs1 = 0, narray_refs2 = 0;

	  /* We generally assume that both access paths start by the same
	     sequence of refs.  However if the numbers of array refs are
	     not in sync, try to recover and pop elts until the numbers
	     match.  This helps the case where one access path starts by
	     an array and the other by an element.  */
	  for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
	       narray_refs1++)
	    if (TREE_CODE (component_refs1 [component_refs1.length()
					    - 1 - narray_refs1]) != ARRAY_REF)
	      break;

	  for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
	       narray_refs2++)
	    if (TREE_CODE (component_refs2 [component_refs2.length()
					    - 1 - narray_refs2]) != ARRAY_REF)
	      break;
	  for (; narray_refs1 > narray_refs2; narray_refs1--)
	    {
	      ref1 = component_refs1.pop ();
	      ntbaa1--;

	      /* If the index is non-zero we need to check whether the
		 reference does not break the main invariant that bases
		 are either disjoint or equal.  Consider the example:

		   unsigned char out[][1];
		   out[1]="a";
		   out[i][0];

		 Here the bases out and out are the same, but after removing
		 the [i] index, this invariant no longer holds, because
		 out[i] points to the middle of array out.

		 TODO: If the size of the type of the skipped reference is
		 an integer multiple of the size of the type of the other
		 reference this invariant can be verified, but even then it
		 is not completely safe with !flag_strict_aliasing if the
		 other reference contains unbounded array accesses.
		 See   */

	      if (!operand_equal_p (TREE_OPERAND (ref1, 1),
				    cheap_array_ref_low_bound (ref1), 0))
		return 0;
	    }
	  for (; narray_refs2 > narray_refs1; narray_refs2--)
	    {
	      ref2 = component_refs2.pop ();
	      ntbaa2--;
	      if (!operand_equal_p (TREE_OPERAND (ref2, 1),
				    cheap_array_ref_low_bound (ref2), 0))
		return 0;
	    }
	  /* Try to disambiguate matched arrays.  */
	  for (unsigned int i = 0; i < narray_refs1; i++)
	    {
	      int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
						     component_refs2.pop ());
	      ntbaa1--;
	      ntbaa2--;
	      if (cmp == 1 && !partial_overlap)
		{
		  ++alias_stats
		    .nonoverlapping_refs_since_match_p_no_alias;
		  return 1;
		}
	      if (cmp == -1)
		{
		  seen_unmatched_ref_p = true;
		  /* We cannot maintain the invariant that bases are either
		     the same or completely disjoint.  However we can still
		     recover from type based alias analysis if we reach
		     references of the same sizes.  We do not attempt to
		     match array sizes, so just finish array walking and
		     look for component refs.  */
		  if (ntbaa1 < 0 || ntbaa2 < 0)
		    {
		      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
		      return -1;
		    }
		  for (i++; i < narray_refs1; i++)
		    {
		      component_refs1.pop ();
		      component_refs2.pop ();
		      ntbaa1--;
		      ntbaa2--;
		    }
		  break;
		}
	      partial_overlap = false;
	    }
	}

      /* Next look for component_refs.  */
      do
	{
	  if (component_refs1.is_empty ())
	    {
	      ++alias_stats
		.nonoverlapping_refs_since_match_p_must_overlap;
	      return 0;
	    }
	  ref1 = component_refs1.pop ();
	  ntbaa1--;
	  if (TREE_CODE (ref1) != COMPONENT_REF)
	    {
	      seen_unmatched_ref_p = true;
	      if (ntbaa1 < 0 || ntbaa2 < 0)
		{
		  ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
		  return -1;
		}
	    }
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    {
	      ++alias_stats
		.nonoverlapping_refs_since_match_p_must_overlap;
	      return 0;
	    }
	  ref2 = component_refs2.pop ();
	  ntbaa2--;
	  if (TREE_CODE (ref2) != COMPONENT_REF)
	    {
	      if (ntbaa1 < 0 || ntbaa2 < 0)
		{
		  ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
		  return -1;
		}
	      seen_unmatched_ref_p = true;
	    }
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
	 earlier.  */
      gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
			   && TREE_CODE (ref2) == COMPONENT_REF);

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      partial_overlap = false;

      /* If we skipped array refs on types of different sizes, we can
	 no longer be sure that there are not partial overlaps.  */
      if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
	  && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_may_alias;
	  return -1;
	}

      int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
      if (cmp == -1)
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_may_alias;
	  return -1;
	}
      else if (cmp == 1)
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_no_alias;
	  return 1;
	}
    }

  ++alias_stats.nonoverlapping_refs_since_match_p_must_overlap;
  return 0;
}
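
/* For instance (illustrative): with

     struct S { struct { int x, y; } a, b; } *p, *q;

   and MATCH1 == *p, MATCH2 == *q known to point to the same address
   or be disjoint, comparing p->a.x against q->b.y walks the matched
   COMPONENT_REFs and returns 1, because fields a and b cannot
   overlap.  */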
1682
1683 /* Return TYPE_UID which can be used to match record types we consider
1684 same for TBAA purposes. */
1685
1686 static inline int
1687 ncr_type_uid (const_tree field)
1688 {
1689 /* ??? We cannot simply use the type of operand #0 of the refs here
1690 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1691 for common blocks instead of using unions like everyone else. */
1692 tree type = DECL_FIELD_CONTEXT (field);
1693 /* With LTO types considered same_type_for_tbaa_p
1694 from different translation unit may not have same
1695 main variant. They however have same TYPE_CANONICAL. */
1696 if (TYPE_CANONICAL (type))
1697 return TYPE_UID (TYPE_CANONICAL (type));
1698 return TYPE_UID (type);
1699 }
1700
1701 /* qsort compare function to sort FIELD_DECLs after their
1702 DECL_FIELD_CONTEXT TYPE_UID. */
1703
1704 static inline int
1705 ncr_compar (const void *field1_, const void *field2_)
1706 {
1707 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1708 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1709 unsigned int uid1 = ncr_type_uid (field1);
1710 unsigned int uid2 = ncr_type_uid (field2);
1711
1712 if (uid1 < uid2)
1713 return -1;
1714 else if (uid1 > uid2)
1715 return 1;
1716 return 0;
1717 }
1718
1719 /* Return true if we can determine that the fields referenced cannot
1720 overlap for any pair of objects. This relies on TBAA. */
1721
1722 static bool
1723 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1724 {
1725 /* Early return if we have nothing to do.
1726
1727 Do not consider this as may-alias for stats - it is more useful
1728 to have information how many disambiguations happened provided that
1729 the query was meaningful. */
1730 if (!flag_strict_aliasing
1731 || !x || !y
1732 || !handled_component_p (x)
1733 || !handled_component_p (y))
1734 return false;
1735
1736 auto_vec<const_tree, 16> fieldsx;
1737 while (handled_component_p (x))
1738 {
1739 if (TREE_CODE (x) == COMPONENT_REF)
1740 {
1741 tree field = TREE_OPERAND (x, 1);
1742 tree type = DECL_FIELD_CONTEXT (field);
1743 if (TREE_CODE (type) == RECORD_TYPE)
1744 fieldsx.safe_push (field);
1745 }
1746 else if (ends_tbaa_access_path_p (x))
1747 fieldsx.truncate (0);
1748 x = TREE_OPERAND (x, 0);
1749 }
1750 if (fieldsx.length () == 0)
1751 return false;
1752 auto_vec<const_tree, 16> fieldsy;
1753 while (handled_component_p (y))
1754 {
1755 if (TREE_CODE (y) == COMPONENT_REF)
1756 {
1757 tree field = TREE_OPERAND (y, 1);
1758 tree type = DECL_FIELD_CONTEXT (field);
1759 if (TREE_CODE (type) == RECORD_TYPE)
1760 fieldsy.safe_push (TREE_OPERAND (y, 1));
1761 }
1762 else if (ends_tbaa_access_path_p (y))
1763 fieldsy.truncate (0);
1764 y = TREE_OPERAND (y, 0);
1765 }
1766 if (fieldsy.length () == 0)
1767 {
1768 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1769 return false;
1770 }
1771
1772 /* Most common case first. */
1773 if (fieldsx.length () == 1
1774 && fieldsy.length () == 1)
1775 {
1776 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
1777 DECL_FIELD_CONTEXT (fieldsy[0])) == 1
1778 && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1)
1779 {
1780 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1781 return true;
1782 }
1783 else
1784 {
1785 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1786 return false;
1787 }
1788 }
1789
1790 if (fieldsx.length () == 2)
1791 {
1792 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1793 std::swap (fieldsx[0], fieldsx[1]);
1794 }
1795 else
1796 fieldsx.qsort (ncr_compar);
1797
1798 if (fieldsy.length () == 2)
1799 {
1800 if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1801 std::swap (fieldsy[0], fieldsy[1]);
1802 }
1803 else
1804 fieldsy.qsort (ncr_compar);
1805
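  /* Merge-join over the two vectors, which are now sorted by the
     TYPE_UID of each field's context type: try to disambiguate the
     current pair of fields and otherwise advance the side with the
     smaller UID. */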
1806 unsigned i = 0, j = 0;
1807 do
1808 {
1809 const_tree fieldx = fieldsx[i];
1810 const_tree fieldy = fieldsy[j];
1811
1812 /* We're left with accessing different fields of a structure,
1813 no possible overlap. */
1814 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
1815 DECL_FIELD_CONTEXT (fieldy)) == 1
1816 && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1)
1817 {
1818 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1819 return true;
1820 }
1821
1822 if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy))
1823 {
1824 i++;
1825 if (i == fieldsx.length ())
1826 break;
1827 }
1828 else
1829 {
1830 j++;
1831 if (j == fieldsy.length ())
1832 break;
1833 }
1834 }
1835 while (1);
1836
1837 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1838 return false;
1839 }
1840
1841
1842 /* Return true if two memory references based on the variables BASE1
1843 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1844 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1845 if non-NULL are the complete memory reference trees. */
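/* A sketch of the two easy outcomes, using hypothetical code: for

     int a, b;
     ... = a;
     ... = b;

   the references are based on different decls and cannot alias, while
   two accesses both based on A alias exactly when their
   [offset, offset + max_size) ranges overlap. */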
1846
1847 static bool
1848 decl_refs_may_alias_p (tree ref1, tree base1,
1849 poly_int64 offset1, poly_int64 max_size1,
1850 poly_int64 size1,
1851 tree ref2, tree base2,
1852 poly_int64 offset2, poly_int64 max_size2,
1853 poly_int64 size2)
1854 {
1855 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
1856
1857 /* If both references are based on different variables, they cannot alias. */
1858 if (compare_base_decls (base1, base2) == 0)
1859 return false;
1860
1861 /* If both references are based on the same variable, they cannot alias if
1862 the accesses do not overlap. */
1863 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1864 return false;
1865
1866 /* If there is must alias, there is no use disambiguating further. */
1867 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
1868 return true;
1869
1870 /* For components with variable position, the above test isn't sufficient,
1871 so we disambiguate component references manually. */
1872 if (ref1 && ref2
1873 && handled_component_p (ref1) && handled_component_p (ref2)
1874 && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
1875 return false;
1876
1877 return true;
1878 }
1879
1880 /* Return true if an indirect reference based on *PTR1 constrained
1881 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
1882 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
1883 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1884 in which case they are computed on-demand. REF1 and REF2
1885 if non-NULL are the complete memory reference trees. */
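/* For intuition, a hedged example of the TBAA-based checks below:

     int i;
     float *p;
     ... = *p;

   The access *P has the alias set of float which does not conflict
   with the alias set of the decl I, so the two cannot alias under
   -fstrict-aliasing. A size-based check below similarly rules out
   accessing a decl through a pointer to a strictly larger type. */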
1886
1887 static bool
1888 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1889 poly_int64 offset1, poly_int64 max_size1,
1890 poly_int64 size1,
1891 alias_set_type ref1_alias_set,
1892 alias_set_type base1_alias_set,
1893 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1894 poly_int64 offset2, poly_int64 max_size2,
1895 poly_int64 size2,
1896 alias_set_type ref2_alias_set,
1897 alias_set_type base2_alias_set, bool tbaa_p)
1898 {
1899 tree ptr1;
1900 tree ptrtype1, dbase2;
1901
1902 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1903 || TREE_CODE (base1) == TARGET_MEM_REF)
1904 && DECL_P (base2));
1905
1906 ptr1 = TREE_OPERAND (base1, 0);
1907 poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
1908
1909 /* If only one reference is based on a variable, they cannot alias if
1910 the pointer access is beyond the extent of the variable access.
1911 (the pointer base cannot validly point to an offset less than zero
1912 of the variable).
1913 ??? IVOPTs creates bases that do not honor this restriction,
1914 so do not apply this optimization for TARGET_MEM_REFs. */
1915 if (TREE_CODE (base1) != TARGET_MEM_REF
1916 && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
1917 return false;
1918 /* They also cannot alias if the pointer may not point to the decl. */
1919 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
1920 return false;
1921
1922 /* Disambiguations that rely on strict aliasing rules follow. */
1923 if (!flag_strict_aliasing || !tbaa_p)
1924 return true;
1925
1926 /* If the alias set for a pointer access is zero all bets are off. */
1927 if (base1_alias_set == 0 || base2_alias_set == 0)
1928 return true;
1929
1930 /* When we are trying to disambiguate an access with a pointer dereference
1931 as base versus one with a decl as base we can use both the size
1932 of the decl and its dynamic type for extra disambiguation.
1933 ??? We do not know anything about the dynamic type of the decl
1934 other than that its alias-set contains base2_alias_set as a subset
1935 which does not help us here. */
1936 /* As we know nothing useful about the dynamic type of the decl just
1937 use the usual conflict check rather than a subset test.
1938 ??? We could introduce -fvery-strict-aliasing when the language
1939 does not allow decls to have a dynamic type that differs from their
1940 static type. Then we can check
1941 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
1942 if (base1_alias_set != base2_alias_set
1943 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1944 return false;
1945
1946 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1947
1948 /* If the size of the access relevant for TBAA through the pointer
1949 is bigger than the size of the decl we can't possibly access the
1950 decl via that pointer. */
1951 if (/* ??? This in turn may run afoul when a decl of type T which is
1952 a member of union type U is accessed through a pointer to
1953 type U and sizeof T is smaller than sizeof U. */
1954 TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
1955 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
1956 && compare_sizes (DECL_SIZE (base2),
1957 TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
1958 return false;
1959
1960 if (!ref2)
1961 return true;
1962
1963 /* If the decl is accessed via a MEM_REF, reconstruct the base
1964 we can use for TBAA and an appropriately adjusted offset. */
1965 dbase2 = ref2;
1966 while (handled_component_p (dbase2))
1967 dbase2 = TREE_OPERAND (dbase2, 0);
1968 poly_int64 doffset1 = offset1;
1969 poly_offset_int doffset2 = offset2;
1970 if (TREE_CODE (dbase2) == MEM_REF
1971 || TREE_CODE (dbase2) == TARGET_MEM_REF)
1972 {
1973 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
1974 tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
1975 /* If second reference is view-converted, give up now. */
1976 if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
1977 return true;
1978 }
1979
1980 /* If first reference is view-converted, give up now. */
1981 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
1982 return true;
1983
1984 /* If both references are through the same type, they do not alias
1985 if the accesses do not overlap. This does extra disambiguation
1986 for mixed/pointer accesses but requires strict aliasing.
1987 For MEM_REFs we require that the component-ref offset we computed
1988 is relative to the start of the type which we ensure by
1989 comparing rvalue and access type and disregarding the constant
1990 pointer offset.
1991
1992 But avoid treating variable length arrays as "objects", instead assume they
1993 can overlap by an exact multiple of their element size.
1994 See gcc.dg/torture/alias-2.c. */
1995 if (((TREE_CODE (base1) != TARGET_MEM_REF
1996 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1997 && (TREE_CODE (dbase2) != TARGET_MEM_REF
1998 || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
1999 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
2000 {
2001 bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
2002 && (TYPE_SIZE (TREE_TYPE (base1))
2003 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
2004 != INTEGER_CST));
2005 if (!partial_overlap
2006 && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
2007 return false;
2008 if (!ref1 || !ref2
2009 /* If there is must alias, there is no use disambiguating further. */
2010 || (!partial_overlap
2011 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2012 return true;
2013 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2014 partial_overlap);
2015 if (res == -1)
2016 return !nonoverlapping_component_refs_p (ref1, ref2);
2017 return !res;
2018 }
2019
2020 /* Do access-path based disambiguation. */
2021 if (ref1 && ref2
2022 && (handled_component_p (ref1) || handled_component_p (ref2)))
2023 return aliasing_component_refs_p (ref1,
2024 ref1_alias_set, base1_alias_set,
2025 offset1, max_size1,
2026 ref2,
2027 ref2_alias_set, base2_alias_set,
2028 offset2, max_size2);
2029
2030 return true;
2031 }
2032
2033 /* Return true if two indirect references based on *PTR1
2034 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2035 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
2036 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2037 in which case they are computed on-demand. REF1 and REF2
2038 if non-NULL are the complete memory reference trees. */
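/* A small sketch of the equal-pointer fast path handled first,
   assuming hypothetical code:

     struct S { int a; int b; } *p;
     p->a = 1;
     p->b = 2;

   Both references are based on the same SSA pointer P and the
   accessed bit ranges [0, 32) and [32, 64) do not overlap, so the
   references do not alias regardless of what P points to. */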
2039
2040 static bool
2041 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2042 poly_int64 offset1, poly_int64 max_size1,
2043 poly_int64 size1,
2044 alias_set_type ref1_alias_set,
2045 alias_set_type base1_alias_set,
2046 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2047 poly_int64 offset2, poly_int64 max_size2,
2048 poly_int64 size2,
2049 alias_set_type ref2_alias_set,
2050 alias_set_type base2_alias_set, bool tbaa_p)
2051 {
2052 tree ptr1;
2053 tree ptr2;
2054 tree ptrtype1, ptrtype2;
2055
2056 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2057 || TREE_CODE (base1) == TARGET_MEM_REF)
2058 && (TREE_CODE (base2) == MEM_REF
2059 || TREE_CODE (base2) == TARGET_MEM_REF));
2060
2061 ptr1 = TREE_OPERAND (base1, 0);
2062 ptr2 = TREE_OPERAND (base2, 0);
2063
2064 /* If both bases are based on pointers they cannot alias if they may not
2065 point to the same memory object or if they point to the same object
2066 and the accesses do not overlap. */
2067 if ((!cfun || gimple_in_ssa_p (cfun))
2068 && operand_equal_p (ptr1, ptr2, 0)
2069 && (((TREE_CODE (base1) != TARGET_MEM_REF
2070 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2071 && (TREE_CODE (base2) != TARGET_MEM_REF
2072 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
2073 || (TREE_CODE (base1) == TARGET_MEM_REF
2074 && TREE_CODE (base2) == TARGET_MEM_REF
2075 && (TMR_STEP (base1) == TMR_STEP (base2)
2076 || (TMR_STEP (base1) && TMR_STEP (base2)
2077 && operand_equal_p (TMR_STEP (base1),
2078 TMR_STEP (base2), 0)))
2079 && (TMR_INDEX (base1) == TMR_INDEX (base2)
2080 || (TMR_INDEX (base1) && TMR_INDEX (base2)
2081 && operand_equal_p (TMR_INDEX (base1),
2082 TMR_INDEX (base2), 0)))
2083 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
2084 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
2085 && operand_equal_p (TMR_INDEX2 (base1),
2086 TMR_INDEX2 (base2), 0))))))
2087 {
2088 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2089 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
2090 if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
2091 offset2 + moff2, max_size2))
2092 return false;
2093 /* If there is must alias, there is no use disambiguating further. */
2094 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2095 return true;
2096 if (ref1 && ref2)
2097 {
2098 int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
2099 false);
2100 if (res != -1)
2101 return !res;
2102 }
2103 }
2104 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
2105 return false;
2106
2107 /* Disambiguations that rely on strict aliasing rules follow. */
2108 if (!flag_strict_aliasing || !tbaa_p)
2109 return true;
2110
2111 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2112 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
2113
2114 /* If the alias set for a pointer access is zero all bets are off. */
2115 if (base1_alias_set == 0
2116 || base2_alias_set == 0)
2117 return true;
2118
2119 /* Do type-based disambiguation. */
2120 if (base1_alias_set != base2_alias_set
2121 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2122 return false;
2123
2124 /* If either reference is view-converted, give up now. */
2125 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
2126 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
2127 return true;
2128
2129 /* If both references are through the same type, they do not alias
2130 if the accesses do not overlap. This does extra disambiguation
2131 for mixed/pointer accesses but requires strict aliasing. */
2132 if ((TREE_CODE (base1) != TARGET_MEM_REF
2133 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2134 && (TREE_CODE (base2) != TARGET_MEM_REF
2135 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
2136 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
2137 TREE_TYPE (ptrtype2)) == 1)
2138 {
2139 /* But avoid treating arrays as "objects", instead assume they
2140 can overlap by an exact multiple of their element size.
2141 See gcc.dg/torture/alias-2.c. */
2142 bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
2143
2144 if (!partial_overlap
2145 && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
2146 return false;
2147 if (!ref1 || !ref2
2148 || (!partial_overlap
2149 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2150 return true;
2151 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2152 partial_overlap);
2153 if (res == -1)
2154 return !nonoverlapping_component_refs_p (ref1, ref2);
2155 return !res;
2156 }
2157
2158 /* Do access-path based disambiguation. */
2159 if (ref1 && ref2
2160 && (handled_component_p (ref1) || handled_component_p (ref2)))
2161 return aliasing_component_refs_p (ref1,
2162 ref1_alias_set, base1_alias_set,
2163 offset1, max_size1,
2164 ref2,
2165 ref2_alias_set, base2_alias_set,
2166 offset2, max_size2);
2167
2168 return true;
2169 }
2170
2171 /* Return true if the two memory references REF1 and REF2 may alias. */
2172
2173 static bool
2174 refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2175 {
2176 tree base1, base2;
2177 poly_int64 offset1 = 0, offset2 = 0;
2178 poly_int64 max_size1 = -1, max_size2 = -1;
2179 bool var1_p, var2_p, ind1_p, ind2_p;
2180
2181 gcc_checking_assert ((!ref1->ref
2182 || TREE_CODE (ref1->ref) == SSA_NAME
2183 || DECL_P (ref1->ref)
2184 || TREE_CODE (ref1->ref) == STRING_CST
2185 || handled_component_p (ref1->ref)
2186 || TREE_CODE (ref1->ref) == MEM_REF
2187 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
2188 && (!ref2->ref
2189 || TREE_CODE (ref2->ref) == SSA_NAME
2190 || DECL_P (ref2->ref)
2191 || TREE_CODE (ref2->ref) == STRING_CST
2192 || handled_component_p (ref2->ref)
2193 || TREE_CODE (ref2->ref) == MEM_REF
2194 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
2195
2196 /* Decompose the references into their base objects and the access. */
2197 base1 = ao_ref_base (ref1);
2198 offset1 = ref1->offset;
2199 max_size1 = ref1->max_size;
2200 base2 = ao_ref_base (ref2);
2201 offset2 = ref2->offset;
2202 max_size2 = ref2->max_size;
2203
2204 /* We can end up with registers or constants as bases for example from
2205 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
2206 which is seen as a struct copy. */
2207 if (TREE_CODE (base1) == SSA_NAME
2208 || TREE_CODE (base1) == CONST_DECL
2209 || TREE_CODE (base1) == CONSTRUCTOR
2210 || TREE_CODE (base1) == ADDR_EXPR
2211 || CONSTANT_CLASS_P (base1)
2212 || TREE_CODE (base2) == SSA_NAME
2213 || TREE_CODE (base2) == CONST_DECL
2214 || TREE_CODE (base2) == CONSTRUCTOR
2215 || TREE_CODE (base2) == ADDR_EXPR
2216 || CONSTANT_CLASS_P (base2))
2217 return false;
2218
2219 /* We can end up referring to code via function and label decls.
2220 As we likely do not properly track code aliases, conservatively
2221 bail out. */
2222 if (TREE_CODE (base1) == FUNCTION_DECL
2223 || TREE_CODE (base1) == LABEL_DECL
2224 || TREE_CODE (base2) == FUNCTION_DECL
2225 || TREE_CODE (base2) == LABEL_DECL)
2226 return true;
2227
2228 /* Two volatile accesses always conflict. */
2229 if (ref1->volatile_p
2230 && ref2->volatile_p)
2231 return true;
2232
2233 /* Defer to simple offset based disambiguation if we have
2234 references based on two decls. Do this before deferring to
2235 TBAA to handle must-alias cases in conformance with the
2236 GCC extension of allowing type-punning through unions. */
2237 var1_p = DECL_P (base1);
2238 var2_p = DECL_P (base2);
2239 if (var1_p && var2_p)
2240 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
2241 ref1->size,
2242 ref2->ref, base2, offset2, max_size2,
2243 ref2->size);
2244
2245 /* Handle restrict based accesses.
2246 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
2247 here. */
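/* For example, with hypothetical code such as

     void f (int *restrict p, int *restrict q) { *p = *q; }

   both dereferences carry the same MR_DEPENDENCE_CLIQUE but
   different MR_DEPENDENCE_BASEs, so the check below disambiguates
   them. */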
2248 tree rbase1 = base1;
2249 tree rbase2 = base2;
2250 if (var1_p)
2251 {
2252 rbase1 = ref1->ref;
2253 if (rbase1)
2254 while (handled_component_p (rbase1))
2255 rbase1 = TREE_OPERAND (rbase1, 0);
2256 }
2257 if (var2_p)
2258 {
2259 rbase2 = ref2->ref;
2260 if (rbase2)
2261 while (handled_component_p (rbase2))
2262 rbase2 = TREE_OPERAND (rbase2, 0);
2263 }
2264 if (rbase1 && rbase2
2265 && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
2266 && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
2267 /* If the accesses are in the same restrict clique... */
2268 && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
2269 /* But based on different pointers they do not alias. */
2270 && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
2271 return false;
2272
2273 ind1_p = (TREE_CODE (base1) == MEM_REF
2274 || TREE_CODE (base1) == TARGET_MEM_REF);
2275 ind2_p = (TREE_CODE (base2) == MEM_REF
2276 || TREE_CODE (base2) == TARGET_MEM_REF);
2277
2278 /* Canonicalize the pointer-vs-decl case. */
2279 if (ind1_p && var2_p)
2280 {
2281 std::swap (offset1, offset2);
2282 std::swap (max_size1, max_size2);
2283 std::swap (base1, base2);
2284 std::swap (ref1, ref2);
2285 var1_p = true;
2286 ind1_p = false;
2287 var2_p = false;
2288 ind2_p = true;
2289 }
2290
2291 /* First defer to TBAA if possible. */
2292 if (tbaa_p
2293 && flag_strict_aliasing
2294 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
2295 ao_ref_alias_set (ref2)))
2296 return false;
2297
2298 /* If the reference is based on a pointer that points to memory
2299 that may not be written to then the other reference cannot possibly
2300 clobber it. */
2301 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
2302 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
2303 || (ind1_p
2304 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
2305 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
2306 return false;
2307
2308 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
2309 if (var1_p && ind2_p)
2310 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
2311 offset2, max_size2, ref2->size,
2312 ao_ref_alias_set (ref2),
2313 ao_ref_base_alias_set (ref2),
2314 ref1->ref, base1,
2315 offset1, max_size1, ref1->size,
2316 ao_ref_alias_set (ref1),
2317 ao_ref_base_alias_set (ref1),
2318 tbaa_p);
2319 else if (ind1_p && ind2_p)
2320 return indirect_refs_may_alias_p (ref1->ref, base1,
2321 offset1, max_size1, ref1->size,
2322 ao_ref_alias_set (ref1),
2323 ao_ref_base_alias_set (ref1),
2324 ref2->ref, base2,
2325 offset2, max_size2, ref2->size,
2326 ao_ref_alias_set (ref2),
2327 ao_ref_base_alias_set (ref2),
2328 tbaa_p);
2329
2330 gcc_unreachable ();
2331 }
2332
2333 /* Return true if the two memory references REF1 and REF2 may alias,
2334 and update statistics. */
2335
2336 bool
2337 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2338 {
2339 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
2340 if (res)
2341 ++alias_stats.refs_may_alias_p_may_alias;
2342 else
2343 ++alias_stats.refs_may_alias_p_no_alias;
2344 return res;
2345 }
2346
2347 static bool
2348 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
2349 {
2350 ao_ref r1;
2351 ao_ref_init (&r1, ref1);
2352 return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
2353 }
2354
2355 bool
2356 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
2357 {
2358 ao_ref r1, r2;
2359 ao_ref_init (&r1, ref1);
2360 ao_ref_init (&r2, ref2);
2361 return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
2362 }
2363
2364 /* Returns true if there is an anti-dependence for the STORE that
2365 executes after the LOAD. */
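/* E.g. for the hypothetical pair

     ... = *p;   <- LOAD, executes first
     *q = ...;   <- STORE, executes after

   there is an anti-dependence whenever the two references may alias;
   note that the query below deliberately passes tbaa_p == false. */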
2366
2367 bool
2368 refs_anti_dependent_p (tree load, tree store)
2369 {
2370 ao_ref r1, r2;
2371 ao_ref_init (&r1, load);
2372 ao_ref_init (&r2, store);
2373 return refs_may_alias_p_1 (&r1, &r2, false);
2374 }
2375
2376 /* Returns true if there is an output dependence for the stores
2377 STORE1 and STORE2. */
2378
2379 bool
2380 refs_output_dependent_p (tree store1, tree store2)
2381 {
2382 ao_ref r1, r2;
2383 ao_ref_init (&r1, store1);
2384 ao_ref_init (&r2, store2);
2385 return refs_may_alias_p_1 (&r1, &r2, false);
2386 }
2387
2388 /* If the call CALL may use the memory reference REF return true,
2389 otherwise return false. */
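/* A hedged example of the builtin special-casing done below:

     memcpy (d, s, n);

   only reads the N bytes at S, so a reference REF that cannot alias
   [s, s + n) is not used by the call, even though an arbitrary call
   may read any call-used memory. */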
2390
2391 static bool
2392 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2393 {
2394 tree base, callee;
2395 unsigned i;
2396 int flags = gimple_call_flags (call);
2397
2398 /* Const functions without a static chain do not implicitly use memory. */
2399 if (!gimple_call_chain (call)
2400 && (flags & (ECF_CONST|ECF_NOVOPS)))
2401 goto process_args;
2402
2403 base = ao_ref_base (ref);
2404 if (!base)
2405 return true;
2406
2407 /* A call that is not without side-effects might involve volatile
2408 accesses and thus conflicts with all other volatile accesses. */
2409 if (ref->volatile_p)
2410 return true;
2411
2412 /* If the reference is based on a decl that is not aliased the call
2413 cannot possibly use it. */
2414 if (DECL_P (base)
2415 && !may_be_aliased (base)
2416 /* But local statics can be used through recursion. */
2417 && !is_global_var (base))
2418 goto process_args;
2419
2420 callee = gimple_call_fndecl (call);
2421
2422 /* Explicitly handle those builtin functions that do not act as
2423 escape points. See tree-ssa-structalias.c:find_func_aliases
2424 for the list of builtins we might need to handle here. */
2425 if (callee != NULL_TREE
2426 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2427 switch (DECL_FUNCTION_CODE (callee))
2428 {
2429 /* All the following functions read memory pointed to by
2430 their second argument. strcat/strncat additionally
2431 reads memory pointed to by the first argument. */
2432 case BUILT_IN_STRCAT:
2433 case BUILT_IN_STRNCAT:
2434 {
2435 ao_ref dref;
2436 ao_ref_init_from_ptr_and_size (&dref,
2437 gimple_call_arg (call, 0),
2438 NULL_TREE);
2439 if (refs_may_alias_p_1 (&dref, ref, false))
2440 return true;
2441 }
2442 /* FALLTHRU */
2443 case BUILT_IN_STRCPY:
2444 case BUILT_IN_STRNCPY:
2445 case BUILT_IN_MEMCPY:
2446 case BUILT_IN_MEMMOVE:
2447 case BUILT_IN_MEMPCPY:
2448 case BUILT_IN_STPCPY:
2449 case BUILT_IN_STPNCPY:
2450 case BUILT_IN_TM_MEMCPY:
2451 case BUILT_IN_TM_MEMMOVE:
2452 {
2453 ao_ref dref;
2454 tree size = NULL_TREE;
2455 if (gimple_call_num_args (call) == 3)
2456 size = gimple_call_arg (call, 2);
2457 ao_ref_init_from_ptr_and_size (&dref,
2458 gimple_call_arg (call, 1),
2459 size);
2460 return refs_may_alias_p_1 (&dref, ref, false);
2461 }
2462 case BUILT_IN_STRCAT_CHK:
2463 case BUILT_IN_STRNCAT_CHK:
2464 {
2465 ao_ref dref;
2466 ao_ref_init_from_ptr_and_size (&dref,
2467 gimple_call_arg (call, 0),
2468 NULL_TREE);
2469 if (refs_may_alias_p_1 (&dref, ref, false))
2470 return true;
2471 }
2472 /* FALLTHRU */
2473 case BUILT_IN_STRCPY_CHK:
2474 case BUILT_IN_STRNCPY_CHK:
2475 case BUILT_IN_MEMCPY_CHK:
2476 case BUILT_IN_MEMMOVE_CHK:
2477 case BUILT_IN_MEMPCPY_CHK:
2478 case BUILT_IN_STPCPY_CHK:
2479 case BUILT_IN_STPNCPY_CHK:
2480 {
2481 ao_ref dref;
2482 tree size = NULL_TREE;
2483 if (gimple_call_num_args (call) == 4)
2484 size = gimple_call_arg (call, 2);
2485 ao_ref_init_from_ptr_and_size (&dref,
2486 gimple_call_arg (call, 1),
2487 size);
2488 return refs_may_alias_p_1 (&dref, ref, false);
2489 }
2490 case BUILT_IN_BCOPY:
2491 {
2492 ao_ref dref;
2493 tree size = gimple_call_arg (call, 2);
2494 ao_ref_init_from_ptr_and_size (&dref,
2495 gimple_call_arg (call, 0),
2496 size);
2497 return refs_may_alias_p_1 (&dref, ref, false);
2498 }
2499
2500 /* The following functions read memory pointed to by their
2501 first argument. */
2502 CASE_BUILT_IN_TM_LOAD (1):
2503 CASE_BUILT_IN_TM_LOAD (2):
2504 CASE_BUILT_IN_TM_LOAD (4):
2505 CASE_BUILT_IN_TM_LOAD (8):
2506 CASE_BUILT_IN_TM_LOAD (FLOAT):
2507 CASE_BUILT_IN_TM_LOAD (DOUBLE):
2508 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
2509 CASE_BUILT_IN_TM_LOAD (M64):
2510 CASE_BUILT_IN_TM_LOAD (M128):
2511 CASE_BUILT_IN_TM_LOAD (M256):
2512 case BUILT_IN_TM_LOG:
2513 case BUILT_IN_TM_LOG_1:
2514 case BUILT_IN_TM_LOG_2:
2515 case BUILT_IN_TM_LOG_4:
2516 case BUILT_IN_TM_LOG_8:
2517 case BUILT_IN_TM_LOG_FLOAT:
2518 case BUILT_IN_TM_LOG_DOUBLE:
2519 case BUILT_IN_TM_LOG_LDOUBLE:
2520 case BUILT_IN_TM_LOG_M64:
2521 case BUILT_IN_TM_LOG_M128:
2522 case BUILT_IN_TM_LOG_M256:
2523 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
2524
2525 /* These read memory pointed to by the first argument. */
2526 case BUILT_IN_STRDUP:
2527 case BUILT_IN_STRNDUP:
2528 case BUILT_IN_REALLOC:
2529 {
2530 ao_ref dref;
2531 tree size = NULL_TREE;
2532 if (gimple_call_num_args (call) == 2)
2533 size = gimple_call_arg (call, 1);
2534 ao_ref_init_from_ptr_and_size (&dref,
2535 gimple_call_arg (call, 0),
2536 size);
2537 return refs_may_alias_p_1 (&dref, ref, false);
2538 }
2539 /* These read memory pointed to by the first argument. */
2540 case BUILT_IN_INDEX:
2541 case BUILT_IN_STRCHR:
2542 case BUILT_IN_STRRCHR:
2543 {
2544 ao_ref dref;
2545 ao_ref_init_from_ptr_and_size (&dref,
2546 gimple_call_arg (call, 0),
2547 NULL_TREE);
2548 return refs_may_alias_p_1 (&dref, ref, false);
2549 }
2550 /* These read memory pointed to by the first argument with size
2551 in the third argument. */
2552 case BUILT_IN_MEMCHR:
2553 {
2554 ao_ref dref;
2555 ao_ref_init_from_ptr_and_size (&dref,
2556 gimple_call_arg (call, 0),
2557 gimple_call_arg (call, 2));
2558 return refs_may_alias_p_1 (&dref, ref, false);
2559 }
2560 /* These read memory pointed to by the first and second arguments. */
2561 case BUILT_IN_STRSTR:
2562 case BUILT_IN_STRPBRK:
2563 {
2564 ao_ref dref;
2565 ao_ref_init_from_ptr_and_size (&dref,
2566 gimple_call_arg (call, 0),
2567 NULL_TREE);
2568 if (refs_may_alias_p_1 (&dref, ref, false))
2569 return true;
2570 ao_ref_init_from_ptr_and_size (&dref,
2571 gimple_call_arg (call, 1),
2572 NULL_TREE);
2573 return refs_may_alias_p_1 (&dref, ref, false);
2574 }
2575
2576 /* The following builtins do not read from memory. */
2577 case BUILT_IN_FREE:
2578 case BUILT_IN_MALLOC:
2579 case BUILT_IN_POSIX_MEMALIGN:
2580 case BUILT_IN_ALIGNED_ALLOC:
2581 case BUILT_IN_CALLOC:
2582 CASE_BUILT_IN_ALLOCA:
2583 case BUILT_IN_STACK_SAVE:
2584 case BUILT_IN_STACK_RESTORE:
2585 case BUILT_IN_MEMSET:
2586 case BUILT_IN_TM_MEMSET:
2587 case BUILT_IN_MEMSET_CHK:
2588 case BUILT_IN_FREXP:
2589 case BUILT_IN_FREXPF:
2590 case BUILT_IN_FREXPL:
2591 case BUILT_IN_GAMMA_R:
2592 case BUILT_IN_GAMMAF_R:
2593 case BUILT_IN_GAMMAL_R:
2594 case BUILT_IN_LGAMMA_R:
2595 case BUILT_IN_LGAMMAF_R:
2596 case BUILT_IN_LGAMMAL_R:
2597 case BUILT_IN_MODF:
2598 case BUILT_IN_MODFF:
2599 case BUILT_IN_MODFL:
2600 case BUILT_IN_REMQUO:
2601 case BUILT_IN_REMQUOF:
2602 case BUILT_IN_REMQUOL:
2603 case BUILT_IN_SINCOS:
2604 case BUILT_IN_SINCOSF:
2605 case BUILT_IN_SINCOSL:
2606 case BUILT_IN_ASSUME_ALIGNED:
2607 case BUILT_IN_VA_END:
2608 return false;
2609 /* __sync_* builtins and some OpenMP builtins act as threading
2610 barriers. */
2611 #undef DEF_SYNC_BUILTIN
2612 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2613 #include "sync-builtins.def"
2614 #undef DEF_SYNC_BUILTIN
2615 case BUILT_IN_GOMP_ATOMIC_START:
2616 case BUILT_IN_GOMP_ATOMIC_END:
2617 case BUILT_IN_GOMP_BARRIER:
2618 case BUILT_IN_GOMP_BARRIER_CANCEL:
2619 case BUILT_IN_GOMP_TASKWAIT:
2620 case BUILT_IN_GOMP_TASKGROUP_END:
2621 case BUILT_IN_GOMP_CRITICAL_START:
2622 case BUILT_IN_GOMP_CRITICAL_END:
2623 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2624 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2625 case BUILT_IN_GOMP_LOOP_END:
2626 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2627 case BUILT_IN_GOMP_ORDERED_START:
2628 case BUILT_IN_GOMP_ORDERED_END:
2629 case BUILT_IN_GOMP_SECTIONS_END:
2630 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2631 case BUILT_IN_GOMP_SINGLE_COPY_START:
2632 case BUILT_IN_GOMP_SINGLE_COPY_END:
2633 return true;
2634
2635 default:
2636 /* Fallthru to general call handling. */;
2637 }
2638
2639 /* Check if base is a global static variable that is not read
2640 by the function. */
2641 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2642 {
2643 struct cgraph_node *node = cgraph_node::get (callee);
2644 bitmap read;
2645 int id;
2646
2647 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2648 node yet. We should enforce that there are nodes for all decls in the
2649 IL and remove this check instead. */
2650 if (node
2651 && (id = ipa_reference_var_uid (base)) != -1
2652 && (read = ipa_reference_get_read_global (node))
2653 && !bitmap_bit_p (read, id))
2654 goto process_args;
2655 }
2656
2657 /* Check if the base variable is call-used. */
2658 if (DECL_P (base))
2659 {
2660 if (pt_solution_includes (gimple_call_use_set (call), base))
2661 return true;
2662 }
2663 else if ((TREE_CODE (base) == MEM_REF
2664 || TREE_CODE (base) == TARGET_MEM_REF)
2665 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2666 {
2667 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2668 if (!pi)
2669 return true;
2670
2671 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
2672 return true;
2673 }
2674 else
2675 return true;
2676
2677 /* Inspect call arguments for passed-by-value aliases. */
2678 process_args:
2679 for (i = 0; i < gimple_call_num_args (call); ++i)
2680 {
2681 tree op = gimple_call_arg (call, i);
2682 int flags = gimple_call_arg_flags (call, i);
2683
2684 if (flags & EAF_UNUSED)
2685 continue;
2686
2687 if (TREE_CODE (op) == WITH_SIZE_EXPR)
2688 op = TREE_OPERAND (op, 0);
2689
2690 if (TREE_CODE (op) != SSA_NAME
2691 && !is_gimple_min_invariant (op))
2692 {
2693 ao_ref r;
2694 ao_ref_init (&r, op);
2695 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
2696 return true;
2697 }
2698 }
2699
2700 return false;
2701 }
2702
2703 static bool
2704 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2705 {
2706 bool res;
2707 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2708 if (res)
2709 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2710 else
2711 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2712 return res;
2713 }
2714
2715
2716 /* If the statement STMT may use the memory reference REF return
2717 true, otherwise return false. */
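/* Two quick hypothetical examples: for

     x = a.f;

   the statement uses memory that A.F may alias, and for

     return g;

   the load of G is a use, and in addition any REF based on global
   memory is treated as used because returning makes it visible to
   the caller. */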
2718
2719 bool
2720 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2721 {
2722 if (is_gimple_assign (stmt))
2723 {
2724 tree rhs;
2725
2726 /* All memory-accessing assignments are single (GIMPLE_SINGLE_RHS). */
2727 if (!gimple_assign_single_p (stmt))
2728 return false;
2729
2730 rhs = gimple_assign_rhs1 (stmt);
2731 if (is_gimple_reg (rhs)
2732 || is_gimple_min_invariant (rhs)
2733 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2734 return false;
2735
2736 return refs_may_alias_p (rhs, ref, tbaa_p);
2737 }
2738 else if (is_gimple_call (stmt))
2739 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2740 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2741 {
2742 tree retval = gimple_return_retval (return_stmt);
2743 if (retval
2744 && TREE_CODE (retval) != SSA_NAME
2745 && !is_gimple_min_invariant (retval)
2746 && refs_may_alias_p (retval, ref, tbaa_p))
2747 return true;
2748 /* If ref escapes the function then the return acts as a use. */
2749 tree base = ao_ref_base (ref);
2750 if (!base)
2751 ;
2752 else if (DECL_P (base))
2753 return is_global_var (base);
2754 else if (TREE_CODE (base) == MEM_REF
2755 || TREE_CODE (base) == TARGET_MEM_REF)
2756 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
2757 return false;
2758 }
2759
2760 return true;
2761 }
2762
2763 bool
2764 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
2765 {
2766 ao_ref r;
2767 ao_ref_init (&r, ref);
2768 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
2769 }
2770
2771 /* If the call in statement CALL may clobber the memory reference REF
2772 return true, otherwise return false. */
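/* Sketches of the extremes handled below, using hypothetical calls:

     n = strlen (s);     pure, clobbers nothing;
     memset (p, 0, 8);   clobbers exactly the bytes [p, p + 8);
     foo (&x);           unknown function, may clobber any
                         call-clobbered memory, including X. */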
2773
2774 bool
2775 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
2776 {
2777 tree base;
2778 tree callee;
2779
2780 /* If the call is pure or const it cannot clobber anything. */
2781 if (gimple_call_flags (call)
2782 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
2783 return false;
2784 if (gimple_call_internal_p (call))
2785 switch (gimple_call_internal_fn (call))
2786 {
2787 /* Treat these internal calls like ECF_PURE for aliasing;
2788 they don't write to any memory the program should care about.
2789 They have important other side-effects, and read memory,
2790 so can't be ECF_NOVOPS. */
2791 case IFN_UBSAN_NULL:
2792 case IFN_UBSAN_BOUNDS:
2793 case IFN_UBSAN_VPTR:
2794 case IFN_UBSAN_OBJECT_SIZE:
2795 case IFN_UBSAN_PTR:
2796 case IFN_ASAN_CHECK:
2797 return false;
2798 default:
2799 break;
2800 }
2801
2802 base = ao_ref_base (ref);
2803 if (!base)
2804 return true;
2805
2806 if (TREE_CODE (base) == SSA_NAME
2807 || CONSTANT_CLASS_P (base))
2808 return false;
2809
2810 /* A call that is not without side-effects might involve volatile
2811 accesses and thus conflicts with all other volatile accesses. */
2812 if (ref->volatile_p)
2813 return true;
2814
2815 /* If the reference is based on a decl that is not aliased the call
2816 cannot possibly clobber it. */
2817 if (DECL_P (base)
2818 && !may_be_aliased (base)
2819 /* But local non-readonly statics can be modified through recursion
2820 or the call may implement a threading barrier which we must
2821 treat as may-def. */
2822 && (TREE_READONLY (base)
2823 || !is_global_var (base)))
2824 return false;
2825
2826 /* If the reference is based on a pointer that points to memory
2827 that may not be written to then the call cannot possibly clobber it. */
2828 if ((TREE_CODE (base) == MEM_REF
2829 || TREE_CODE (base) == TARGET_MEM_REF)
2830 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
2831 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
2832 return false;
2833
2834 callee = gimple_call_fndecl (call);
2835
2836 /* Explicitly handle those builtin functions that do not act as
2837 escape points. See tree-ssa-structalias.c:find_func_aliases
2838 for the list of builtins we might need to handle here. */
2839 if (callee != NULL_TREE
2840 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2841 switch (DECL_FUNCTION_CODE (callee))
2842 {
2843 /* All the following functions clobber memory pointed to by
2844 their first argument. */
2845 case BUILT_IN_STRCPY:
2846 case BUILT_IN_STRNCPY:
2847 case BUILT_IN_MEMCPY:
2848 case BUILT_IN_MEMMOVE:
2849 case BUILT_IN_MEMPCPY:
2850 case BUILT_IN_STPCPY:
2851 case BUILT_IN_STPNCPY:
2852 case BUILT_IN_STRCAT:
2853 case BUILT_IN_STRNCAT:
2854 case BUILT_IN_MEMSET:
2855 case BUILT_IN_TM_MEMSET:
2856 CASE_BUILT_IN_TM_STORE (1):
2857 CASE_BUILT_IN_TM_STORE (2):
2858 CASE_BUILT_IN_TM_STORE (4):
2859 CASE_BUILT_IN_TM_STORE (8):
2860 CASE_BUILT_IN_TM_STORE (FLOAT):
2861 CASE_BUILT_IN_TM_STORE (DOUBLE):
2862 CASE_BUILT_IN_TM_STORE (LDOUBLE):
2863 CASE_BUILT_IN_TM_STORE (M64):
2864 CASE_BUILT_IN_TM_STORE (M128):
2865 CASE_BUILT_IN_TM_STORE (M256):
2866 case BUILT_IN_TM_MEMCPY:
2867 case BUILT_IN_TM_MEMMOVE:
2868 {
2869 ao_ref dref;
2870 tree size = NULL_TREE;
2871 /* Don't pass in size for strncat, as the maximum size
2872 is strlen (dest) + n + 1 instead of n, resp.
2873 n + 1 at dest + strlen (dest), but strlen (dest) isn't
2874 known. */
2875 if (gimple_call_num_args (call) == 3
2876 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
2877 size = gimple_call_arg (call, 2);
2878 ao_ref_init_from_ptr_and_size (&dref,
2879 gimple_call_arg (call, 0),
2880 size);
2881 return refs_may_alias_p_1 (&dref, ref, false);
2882 }
2883 case BUILT_IN_STRCPY_CHK:
2884 case BUILT_IN_STRNCPY_CHK:
2885 case BUILT_IN_MEMCPY_CHK:
2886 case BUILT_IN_MEMMOVE_CHK:
2887 case BUILT_IN_MEMPCPY_CHK:
2888 case BUILT_IN_STPCPY_CHK:
2889 case BUILT_IN_STPNCPY_CHK:
2890 case BUILT_IN_STRCAT_CHK:
2891 case BUILT_IN_STRNCAT_CHK:
2892 case BUILT_IN_MEMSET_CHK:
2893 {
2894 ao_ref dref;
2895 tree size = NULL_TREE;
2896 /* Don't pass in size for __strncat_chk, as the maximum size
2897 is strlen (dest) + n + 1 instead of n, resp.
2898 n + 1 at dest + strlen (dest), but strlen (dest) isn't
2899 known. */
2900 if (gimple_call_num_args (call) == 4
2901 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
2902 size = gimple_call_arg (call, 2);
2903 ao_ref_init_from_ptr_and_size (&dref,
2904 gimple_call_arg (call, 0),
2905 size);
2906 return refs_may_alias_p_1 (&dref, ref, false);
2907 }
2908 case BUILT_IN_BCOPY:
2909 {
2910 ao_ref dref;
2911 tree size = gimple_call_arg (call, 2);
2912 ao_ref_init_from_ptr_and_size (&dref,
2913 gimple_call_arg (call, 1),
2914 size);
2915 return refs_may_alias_p_1 (&dref, ref, false);
2916 }
2917 /* Allocating memory does not have any side-effects apart from
2918 being the definition point for the pointer. */
2919 case BUILT_IN_MALLOC:
2920 case BUILT_IN_ALIGNED_ALLOC:
2921 case BUILT_IN_CALLOC:
2922 case BUILT_IN_STRDUP:
2923 case BUILT_IN_STRNDUP:
2924 /* Unix98 specifies that errno is set on allocation failure. */
2925 if (flag_errno_math
2926 && targetm.ref_may_alias_errno (ref))
2927 return true;
2928 return false;
2929 case BUILT_IN_STACK_SAVE:
2930 CASE_BUILT_IN_ALLOCA:
2931 case BUILT_IN_ASSUME_ALIGNED:
2932 return false;
2933 /* But posix_memalign stores a pointer into the memory pointed to
2934 by its first argument. */
2935 case BUILT_IN_POSIX_MEMALIGN:
2936 {
2937 tree ptrptr = gimple_call_arg (call, 0);
2938 ao_ref dref;
2939 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2940 TYPE_SIZE_UNIT (ptr_type_node));
2941 return (refs_may_alias_p_1 (&dref, ref, false)
2942 || (flag_errno_math
2943 && targetm.ref_may_alias_errno (ref)));
2944 }
2945 /* Freeing memory kills the pointed-to memory. More importantly,
2946 the call has to serve as a barrier for moving loads and stores
2947 across it. */
2948 case BUILT_IN_FREE:
2949 case BUILT_IN_VA_END:
2950 {
2951 tree ptr = gimple_call_arg (call, 0);
2952 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2953 }
2954 /* Realloc serves both as allocation point and deallocation point. */
2955 case BUILT_IN_REALLOC:
2956 {
2957 tree ptr = gimple_call_arg (call, 0);
2958 /* Unix98 specifies that errno is set on allocation failure. */
2959 return ((flag_errno_math
2960 && targetm.ref_may_alias_errno (ref))
2961 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2962 }
2963 case BUILT_IN_GAMMA_R:
2964 case BUILT_IN_GAMMAF_R:
2965 case BUILT_IN_GAMMAL_R:
2966 case BUILT_IN_LGAMMA_R:
2967 case BUILT_IN_LGAMMAF_R:
2968 case BUILT_IN_LGAMMAL_R:
2969 {
2970 tree out = gimple_call_arg (call, 1);
2971 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2972 return true;
2973 if (flag_errno_math)
2974 break;
2975 return false;
2976 }
2977 case BUILT_IN_FREXP:
2978 case BUILT_IN_FREXPF:
2979 case BUILT_IN_FREXPL:
2980 case BUILT_IN_MODF:
2981 case BUILT_IN_MODFF:
2982 case BUILT_IN_MODFL:
2983 {
2984 tree out = gimple_call_arg (call, 1);
2985 return ptr_deref_may_alias_ref_p_1 (out, ref);
2986 }
2987 case BUILT_IN_REMQUO:
2988 case BUILT_IN_REMQUOF:
2989 case BUILT_IN_REMQUOL:
2990 {
2991 tree out = gimple_call_arg (call, 2);
2992 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2993 return true;
2994 if (flag_errno_math)
2995 break;
2996 return false;
2997 }
2998 case BUILT_IN_SINCOS:
2999 case BUILT_IN_SINCOSF:
3000 case BUILT_IN_SINCOSL:
3001 {
3002 tree sin = gimple_call_arg (call, 1);
3003 tree cos = gimple_call_arg (call, 2);
3004 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
3005 || ptr_deref_may_alias_ref_p_1 (cos, ref));
3006 }
3007 /* __sync_* builtins and some OpenMP builtins act as threading
3008 barriers. */
3009 #undef DEF_SYNC_BUILTIN
3010 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
3011 #include "sync-builtins.def"
3012 #undef DEF_SYNC_BUILTIN
3013 case BUILT_IN_GOMP_ATOMIC_START:
3014 case BUILT_IN_GOMP_ATOMIC_END:
3015 case BUILT_IN_GOMP_BARRIER:
3016 case BUILT_IN_GOMP_BARRIER_CANCEL:
3017 case BUILT_IN_GOMP_TASKWAIT:
3018 case BUILT_IN_GOMP_TASKGROUP_END:
3019 case BUILT_IN_GOMP_CRITICAL_START:
3020 case BUILT_IN_GOMP_CRITICAL_END:
3021 case BUILT_IN_GOMP_CRITICAL_NAME_START:
3022 case BUILT_IN_GOMP_CRITICAL_NAME_END:
3023 case BUILT_IN_GOMP_LOOP_END:
3024 case BUILT_IN_GOMP_LOOP_END_CANCEL:
3025 case BUILT_IN_GOMP_ORDERED_START:
3026 case BUILT_IN_GOMP_ORDERED_END:
3027 case BUILT_IN_GOMP_SECTIONS_END:
3028 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
3029 case BUILT_IN_GOMP_SINGLE_COPY_START:
3030 case BUILT_IN_GOMP_SINGLE_COPY_END:
3031 return true;
3032 default:
3033 /* Fallthru to general call handling. */;
3034 }
3035
3036 /* Check if base is a global static variable that is not written
3037 by the function. */
3038 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
3039 {
3040 struct cgraph_node *node = cgraph_node::get (callee);
3041 bitmap written;
3042 int id;
3043
3044 if (node
3045 && (id = ipa_reference_var_uid (base)) != -1
3046 && (written = ipa_reference_get_written_global (node))
3047 && !bitmap_bit_p (written, id))
3048 return false;
3049 }
3050
3051 /* Check if the base variable is call-clobbered. */
3052 if (DECL_P (base))
3053 return pt_solution_includes (gimple_call_clobber_set (call), base);
3054 else if ((TREE_CODE (base) == MEM_REF
3055 || TREE_CODE (base) == TARGET_MEM_REF)
3056 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3057 {
3058 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
3059 if (!pi)
3060 return true;
3061
3062 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
3063 }
3064
3065 return true;
3066 }
3067
3068 /* If the call in statement CALL may clobber the memory reference REF
3069 return true, otherwise return false. */
3070
3071 bool
3072 call_may_clobber_ref_p (gcall *call, tree ref)
3073 {
3074 bool res;
3075 ao_ref r;
3076 ao_ref_init (&r, ref);
3077 res = call_may_clobber_ref_p_1 (call, &r);
3078 if (res)
3079 ++alias_stats.call_may_clobber_ref_p_may_alias;
3080 else
3081 ++alias_stats.call_may_clobber_ref_p_no_alias;
3082 return res;
3083 }
3084
3085
3086 /* If the statement STMT may clobber the memory reference REF return true,
3087 otherwise return false. */
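/* Dispatch sketch: an assignment may clobber through its lhs, a call
   through its lhs and its call side-effects, and a GIMPLE_ASM is
   conservatively assumed to clobber everything. */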
3088
3089 bool
3090 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
3091 {
3092 if (is_gimple_call (stmt))
3093 {
3094 tree lhs = gimple_call_lhs (stmt);
3095 if (lhs
3096 && TREE_CODE (lhs) != SSA_NAME)
3097 {
3098 ao_ref r;
3099 ao_ref_init (&r, lhs);
3100 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
3101 return true;
3102 }
3103
3104 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
3105 }
3106 else if (gimple_assign_single_p (stmt))
3107 {
3108 tree lhs = gimple_assign_lhs (stmt);
3109 if (TREE_CODE (lhs) != SSA_NAME)
3110 {
3111 ao_ref r;
3112 ao_ref_init (&r, lhs);
3113 return refs_may_alias_p_1 (ref, &r, tbaa_p);
3114 }
3115 }
3116 else if (gimple_code (stmt) == GIMPLE_ASM)
3117 return true;
3118
3119 return false;
3120 }
3121
3122 bool
3123 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
3124 {
3125 ao_ref r;
3126 ao_ref_init (&r, ref);
3127 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
3128 }
3129
3130 /* Return true if store1 and store2 described by corresponding tuples
3131 <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
3132 address. */
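/* The situation this helper recognizes, in hypothetical source form:

     struct S s;
     struct S *p = ...;   points-to of P is the singleton { s }
     *p = x;              store 1: MEM_REF base, offset 0
     s = y;               store 2: decl base, offset 0

   When both store sizes equal the size of S, the two stores must
   write exactly the same bytes. */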
3133
3134 static bool
3135 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
3136 poly_int64 max_size1,
3137 tree base2, poly_int64 offset2, poly_int64 size2,
3138 poly_int64 max_size2)
3139 {
3140 /* Offsets need to be 0. */
3141 if (maybe_ne (offset1, 0)
3142 || maybe_ne (offset2, 0))
3143 return false;
3144
3145 bool base1_obj_p = SSA_VAR_P (base1);
3146 bool base2_obj_p = SSA_VAR_P (base2);
3147
3148 /* We need exactly one object. */
3149 if (base1_obj_p == base2_obj_p)
3150 return false;
3151 tree obj = base1_obj_p ? base1 : base2;
3152
3153 /* And we need exactly one MEM_REF. */
3154 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
3155 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
3156 if (base1_memref_p == base2_memref_p)
3157 return false;
3158 tree memref = base1_memref_p ? base1 : base2;
3159
3160 /* Sizes need to be valid. */
3161 if (!known_size_p (max_size1)
3162 || !known_size_p (max_size2)
3163 || !known_size_p (size1)
3164 || !known_size_p (size2))
3165 return false;
3166
3167 /* Max_size needs to match size. */
3168 if (maybe_ne (max_size1, size1)
3169 || maybe_ne (max_size2, size2))
3170 return false;
3171
3172 /* Sizes need to match. */
3173 if (maybe_ne (size1, size2))
3174 return false;
3175
3176
3177 /* Check that memref is a store through a pointer with singleton points-to info. */
3178 if (!integer_zerop (TREE_OPERAND (memref, 1)))
3179 return false;
3180 tree ptr = TREE_OPERAND (memref, 0);
3181 if (TREE_CODE (ptr) != SSA_NAME)
3182 return false;
3183 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3184 unsigned int pt_uid;
3185 if (pi == NULL
3186 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
3187 return false;
3188
3189 /* Be conservative with non-call exceptions when the address might
3190 be NULL. */
3191 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
3192 return false;
3193
3194 /* Check that ptr points relative to obj. */
3195 unsigned int obj_uid = DECL_PT_UID (obj);
3196 if (obj_uid != pt_uid)
3197 return false;
3198
3199 /* Check that the object size is the same as the store size. That ensures
3200 that ptr points to the start of obj. */
3201 return (DECL_SIZE (obj)
3202 && poly_int_tree_p (DECL_SIZE (obj))
3203 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
3204 }
3205
3206 /* If STMT kills the memory reference REF return true, otherwise
3207 return false. */
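/* Hedged examples of kills this function can prove:

     a.x = 1;             kills the reference A.X;
     memset (p, 0, 16);   kills any REF provably contained in
                          [p, p + 16);
     free (p);            kills a REF based directly on *P. */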
3208
3209 bool
3210 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
3211 {
3212 if (!ao_ref_base (ref))
3213 return false;
3214
3215 if (gimple_has_lhs (stmt)
3216 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
3217 /* The assignment is not necessarily carried out if it can throw
3218 and we can catch it in the current function where we could inspect
3219 the previous value.
3220 ??? We only need to care about the RHS throwing. For aggregate
3221 assignments or similar calls and non-call exceptions the LHS
3222 might throw as well. */
3223 && !stmt_can_throw_internal (cfun, stmt))
3224 {
3225 tree lhs = gimple_get_lhs (stmt);
3226 /* If LHS is literally a base of the access we are done. */
3227 if (ref->ref)
3228 {
3229 tree base = ref->ref;
3230 tree innermost_dropped_array_ref = NULL_TREE;
3231 if (handled_component_p (base))
3232 {
3233 tree saved_lhs0 = NULL_TREE;
3234 if (handled_component_p (lhs))
3235 {
3236 saved_lhs0 = TREE_OPERAND (lhs, 0);
3237 TREE_OPERAND (lhs, 0) = integer_zero_node;
3238 }
3239 do
3240 {
3241 /* Just compare the outermost handled component; if
3242 they are equal we have found a possible common
3243 base. */
3244 tree saved_base0 = TREE_OPERAND (base, 0);
3245 TREE_OPERAND (base, 0) = integer_zero_node;
3246 bool res = operand_equal_p (lhs, base, 0);
3247 TREE_OPERAND (base, 0) = saved_base0;
3248 if (res)
3249 break;
3250 /* Remember if we drop an array-ref; we need to
3251 double-check later that it is not at a struct end. */
3252 if (TREE_CODE (base) == ARRAY_REF
3253 || TREE_CODE (base) == ARRAY_RANGE_REF)
3254 innermost_dropped_array_ref = base;
3255 /* Otherwise drop handled components of the access. */
3256 base = saved_base0;
3257 }
3258 while (handled_component_p (base));
3259 if (saved_lhs0)
3260 TREE_OPERAND (lhs, 0) = saved_lhs0;
3261 }
3262 /* Finally check if the lhs has the same address and size as the
3263 base candidate of the access. Watch out if we have dropped
3264 an array-ref that was at a struct end; this means ref->ref may
3265 be outside of the TYPE_SIZE of its base. */
3266 if ((! innermost_dropped_array_ref
3267 || ! array_at_struct_end_p (innermost_dropped_array_ref))
3268 && (lhs == base
3269 || (((TYPE_SIZE (TREE_TYPE (lhs))
3270 == TYPE_SIZE (TREE_TYPE (base)))
3271 || (TYPE_SIZE (TREE_TYPE (lhs))
3272 && TYPE_SIZE (TREE_TYPE (base))
3273 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
3274 TYPE_SIZE (TREE_TYPE (base)),
3275 0)))
3276 && operand_equal_p (lhs, base,
3277 OEP_ADDRESS_OF
3278 | OEP_MATCH_SIDE_EFFECTS))))
3279 return true;
3280 }
3281
3282 /* Now look for non-literal equal bases with the restriction of
3283 handling constant offset and size. */
3284 /* For a must-alias check we need to be able to constrain
3285 the access properly. */
3286 if (!ref->max_size_known_p ())
3287 return false;
3288 poly_int64 size, offset, max_size, ref_offset = ref->offset;
3289 bool reverse;
3290 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
3291 &reverse);
3292 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
3293 so base == ref->base does not always hold. */
3294 if (base != ref->base)
3295 {
3296 /* Try using points-to info. */
3297 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
3298 ref->offset, ref->size, ref->max_size))
3299 return true;
3300
3301 /* If both base and ref->base are MEM_REFs, only compare the
3302 first operand, and if the second operand isn't an equal constant,
3303 try to add the offsets into offset and ref_offset. */
3304 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
3305 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
3306 {
3307 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
3308 TREE_OPERAND (ref->base, 1)))
3309 {
3310 poly_offset_int off1 = mem_ref_offset (base);
3311 off1 <<= LOG2_BITS_PER_UNIT;
3312 off1 += offset;
3313 poly_offset_int off2 = mem_ref_offset (ref->base);
3314 off2 <<= LOG2_BITS_PER_UNIT;
3315 off2 += ref_offset;
3316 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
3317 size = -1;
3318 }
3319 }
3320 else
3321 size = -1;
3322 }
3323 /* For a must-alias check we need to be able to constrain
3324 the access properly. */
3325 if (known_eq (size, max_size)
3326 && known_subrange_p (ref_offset, ref->max_size, offset, size))
3327 return true;
3328 }
3329
3330 if (is_gimple_call (stmt))
3331 {
3332 tree callee = gimple_call_fndecl (stmt);
3333 if (callee != NULL_TREE
3334 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3335 switch (DECL_FUNCTION_CODE (callee))
3336 {
3337 case BUILT_IN_FREE:
3338 {
3339 tree ptr = gimple_call_arg (stmt, 0);
3340 tree base = ao_ref_base (ref);
3341 if (base && TREE_CODE (base) == MEM_REF
3342 && TREE_OPERAND (base, 0) == ptr)
3343 return true;
3344 break;
3345 }
3346
3347 case BUILT_IN_MEMCPY:
3348 case BUILT_IN_MEMPCPY:
3349 case BUILT_IN_MEMMOVE:
3350 case BUILT_IN_MEMSET:
3351 case BUILT_IN_MEMCPY_CHK:
3352 case BUILT_IN_MEMPCPY_CHK:
3353 case BUILT_IN_MEMMOVE_CHK:
3354 case BUILT_IN_MEMSET_CHK:
3355 case BUILT_IN_STRNCPY:
3356 case BUILT_IN_STPNCPY:
3357 case BUILT_IN_CALLOC:
3358 {
3359 /* For a must-alias check we need to be able to constrain
3360 the access properly. */
3361 if (!ref->max_size_known_p ())
3362 return false;
3363 tree dest;
3364 tree len;
3365
3366 /* In execution order a calloc call will never kill
3367 anything. However, DSE will (ab)use this interface
3368 to ask if a calloc call writes the same memory locations
3369 as a later assignment, memset, etc. So handle calloc
3370 in the expected way. */
3371 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
3372 {
3373 tree arg0 = gimple_call_arg (stmt, 0);
3374 tree arg1 = gimple_call_arg (stmt, 1);
3375 if (TREE_CODE (arg0) != INTEGER_CST
3376 || TREE_CODE (arg1) != INTEGER_CST)
3377 return false;
3378
3379 dest = gimple_call_lhs (stmt);
3380 if (!dest)
3381 return false;
3382 len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
3383 }
3384 else
3385 {
3386 dest = gimple_call_arg (stmt, 0);
3387 len = gimple_call_arg (stmt, 2);
3388 }
3389 if (!poly_int_tree_p (len))
3390 return false;
3391 tree rbase = ref->base;
3392 poly_offset_int roffset = ref->offset;
3393 ao_ref dref;
3394 ao_ref_init_from_ptr_and_size (&dref, dest, len);
3395 tree base = ao_ref_base (&dref);
3396 poly_offset_int offset = dref.offset;
3397 if (!base || !known_size_p (dref.size))
3398 return false;
3399 if (TREE_CODE (base) == MEM_REF)
3400 {
3401 if (TREE_CODE (rbase) != MEM_REF)
3402 return false;
3403 // Compare pointers.
3404 offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
3405 roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
3406 base = TREE_OPERAND (base, 0);
3407 rbase = TREE_OPERAND (rbase, 0);
3408 }
3409 if (base == rbase
3410 && known_subrange_p (roffset, ref->max_size, offset,
3411 wi::to_poly_offset (len)
3412 << LOG2_BITS_PER_UNIT))
3413 return true;
3414 break;
3415 }
3416
3417 case BUILT_IN_VA_END:
3418 {
3419 tree ptr = gimple_call_arg (stmt, 0);
3420 if (TREE_CODE (ptr) == ADDR_EXPR)
3421 {
3422 tree base = ao_ref_base (ref);
3423 if (TREE_OPERAND (ptr, 0) == base)
3424 return true;
3425 }
3426 break;
3427 }
3428
3429 default:;
3430 }
3431 }
3432 return false;
3433 }
3434
3435 bool
3436 stmt_kills_ref_p (gimple *stmt, tree ref)
3437 {
3438 ao_ref r;
3439 ao_ref_init (&r, ref);
3440 return stmt_kills_ref_p (stmt, &r);
3441 }
3442
3443
3444 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
3445 TARGET or a statement clobbering the memory reference REF, in which
3446 case false is returned. The walk starts with VUSE, one argument of PHI. */
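/* A rough picture of the walk: starting from VUSE we follow virtual
   use-def links upwards; statements that cannot clobber REF are
   skipped, a clobbering statement that TRANSLATE cannot explain away
   fails the walk, and reaching TARGET (or, with a NULL TARGET, a
   default def or a definition dominating TARGET_BB) succeeds. */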
3447
3448 static bool
3449 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
3450 ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
3451 bitmap *visited, bool abort_on_visited,
3452 void *(*translate)(ao_ref *, tree, void *, translate_flags *),
3453 translate_flags disambiguate_only,
3454 void *data)
3455 {
3456 basic_block bb = gimple_bb (phi);
3457
3458 if (!*visited)
3459 *visited = BITMAP_ALLOC (NULL);
3460
3461 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
3462
3463 /* Walk until we hit the target. */
3464 while (vuse != target)
3465 {
3466 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
3467 /* If we are searching for the target VUSE by walking up to
3468 TARGET_BB dominating the original PHI, we are finished once
3469 we reach a default def or a definition in a block dominating
3470 that block. Update TARGET and return. */
3471 if (!target
3472 && (gimple_nop_p (def_stmt)
3473 || dominated_by_p (CDI_DOMINATORS,
3474 target_bb, gimple_bb (def_stmt))))
3475 {
3476 target = vuse;
3477 return true;
3478 }
3479
3480 /* Recurse for PHI nodes. */
3481 if (gimple_code (def_stmt) == GIMPLE_PHI)
3482 {
3483 /* An already visited PHI node ends the walk successfully. */
3484 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
3485 return !abort_on_visited;
3486 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3487 visited, abort_on_visited,
3488 translate, data, disambiguate_only);
3489 if (!vuse)
3490 return false;
3491 continue;
3492 }
3493 else if (gimple_nop_p (def_stmt))
3494 return false;
3495 else
3496 {
3497 /* A clobbering statement or the end of the IL ends it failing. */
3498 if ((int)limit <= 0)
3499 return false;
3500 --limit;
3501 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3502 {
3503 translate_flags tf = disambiguate_only;
3504 if (translate
3505 && (*translate) (ref, vuse, data, &tf) == NULL)
3506 ;
3507 else
3508 return false;
3509 }
3510 }
3511 /* If we reach a new basic-block see if we already skipped it
3512 in a previous walk that ended successfully. */
3513 if (gimple_bb (def_stmt) != bb)
3514 {
3515 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
3516 return !abort_on_visited;
3517 bb = gimple_bb (def_stmt);
3518 }
3519 vuse = gimple_vuse (def_stmt);
3520 }
3521 return true;
3522 }
3523
3524
/* Starting from a PHI node for the virtual operand of the memory reference
   REF find a continuation virtual operand that allows the walk to continue
   on statements dominating PHI, skipping only statements that cannot
   possibly clobber REF.  Decrements LIMIT for each alias disambiguation
   done and aborts the walk, returning NULL_TREE, if it reaches zero.
   Also returns NULL_TREE if no suitable virtual operand can be found.  */

tree
get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
			  unsigned int &limit, bitmap *visited,
			  bool abort_on_visited,
			  void *(*translate)(ao_ref *, tree, void *,
					     translate_flags *),
			  void *data,
			  translate_flags disambiguate_only)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* A single-argument PHI can simply be looked through.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the others.  */
  basic_block phi_bb = gimple_bb (phi);
  tree arg0, arg1;
  unsigned i;

  /* Find a candidate for the virtual operand whose definition
     dominates those of all others.  */
  /* First see if any of the args themselves satisfy this.  */
  for (i = 0; i < nargs; ++i)
    {
      arg0 = PHI_ARG_DEF (phi, i);
      if (SSA_NAME_IS_DEFAULT_DEF (arg0))
	break;
      basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
      if (def_bb != phi_bb
	  && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
	break;
      arg0 = NULL_TREE;
    }
  /* If not, see whether we can reach such a candidate by walking defs
     until we hit the immediate dominator.  maybe_skip_until will
     do that for us.  */
  basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);

  /* Then check against the (to be) found candidate.  */
  for (i = 0; i < nargs; ++i)
    {
      arg1 = PHI_ARG_DEF (phi, i);
      if (arg1 == arg0)
	;
      else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
				   limit, visited,
				   abort_on_visited,
				   translate,
				   /* Do not valueize when walking over
				      backedges.  */
				   dominated_by_p
				     (CDI_DOMINATORS,
				      gimple_bb (SSA_NAME_DEF_STMT (arg1)),
				      phi_bb)
				   ? TR_DISAMBIGUATE
				   : disambiguate_only, data))
	return NULL_TREE;
    }

  return arg0;
}

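/* Illustrative sketch (hypothetical helper, not in the original file):
   driving the PHI continuation manually with translation disabled.
   With a NULL TRANSLATE the DISAMBIGUATE_ONLY argument is never
   consulted, so TR_DISAMBIGUATE is passed merely as a conservative
   placeholder.  */

static tree ATTRIBUTE_UNUSED
example_step_over_phi (gimple *phi, ao_ref *ref, unsigned int &limit,
		       bitmap *visited)
{
  /* Returns a virtual operand from which the walk may continue, or
     NULL_TREE if REF may be clobbered on some path into PHI or the
     LIMIT budget is exhausted.  */
  return get_continuation_for_phi (phi, ref, /*tbaa_p=*/true, limit,
				   visited, /*abort_on_visited=*/false,
				   /*translate=*/NULL, /*data=*/NULL,
				   TR_DISAMBIGUATE);
}
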
/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use whose defining statement does
   not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE if non-NULL is called with a pointer to REF, the virtual
   use whose defining statement may clobber REF, and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
   to adjust REF and *DATA to make that valid.

   VALUEIZE if non-NULL is called with the next VUSE that is considered
   and its return value is substituted for that.  This can be used to
   implement optimistic value-numbering, for example.  Note that the
   VUSE argument is assumed to be valueized already.

   LIMIT specifies the number of alias queries we are allowed to do,
   the walk stops when it reaches zero and NULL is returned.  LIMIT
   is decremented by the number of alias queries (plus adjustments
   done by the callbacks) upon return.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */

void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
			void *(*walker)(ao_ref *, tree, void *),
			void *(*translate)(ao_ref *, tree, void *,
					   translate_flags *),
			tree (*valueize)(tree),
			unsigned &limit, void *data)
{
  bitmap visited = NULL;
  void *res;
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple *def_stmt;

      /* ??? Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, data);
      /* Abort walk.  */
      if (res == (void *)-1)
	{
	  res = NULL;
	  break;
	}
      /* Lookup succeeded.  */
      else if (res != NULL)
	break;

      if (valueize)
	{
	  vuse = valueize (vuse);
	  if (!vuse)
	    {
	      res = NULL;
	      break;
	    }
	}
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
	break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
					 &visited, translated, translate, data);
      else
	{
	  if ((int)limit <= 0)
	    {
	      res = NULL;
	      break;
	    }
	  --limit;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
	    {
	      if (!translate)
		break;
	      translate_flags disambiguate_only = TR_TRANSLATE;
	      res = (*translate) (ref, vuse, data, &disambiguate_only);
	      /* Failed lookup and translation.  */
	      if (res == (void *)-1)
		{
		  res = NULL;
		  break;
		}
	      /* Lookup succeeded.  */
	      else if (res != NULL)
		break;
	      /* Translation succeeded, continue walking.  */
	      translated = translated || disambiguate_only == TR_TRANSLATE;
	    }
	  vuse = gimple_vuse (def_stmt);
	}
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}


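/* Illustrative sketch (hypothetical names, not in the original file):
   a WALKER callback for walk_non_aliased_vuses.  DATA is assumed to
   be a bitmap of SSA versions of vuses the caller already knows a
   value for; returning the vuse stops the walk successfully, while
   returning NULL lets it continue.  A caller would pass it as, e.g.,
     walk_non_aliased_vuses (&r, vuse, true, example_vuse_walker,
			     NULL, NULL, limit, known_vuses);
   where LIMIT is an unsigned lvalue holding the alias-query budget.  */

static void * ATTRIBUTE_UNUSED
example_vuse_walker (ao_ref *ref ATTRIBUTE_UNUSED, tree vuse, void *data)
{
  bitmap known_vuses = (bitmap) data;
  if (bitmap_bit_p (known_vuses, SSA_NAME_VERSION (vuse)))
    /* Stop the walk: an equivalent memory state was found.  */
    return vuse;
  /* Continue with the defining statement of VUSE.  */
  return NULL;
}
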
/* Based on the memory reference REF call WALKER for each vdef whose
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
   The pointer may be NULL and then we do not track this information.

   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but only one walk continues past merge points,
   thanks to the VISITED bitmap); each forked walk ends as soon as
   its WALKER invocation returns true.

   The function returns the number of statements walked or -1 if
   LIMIT stmts were walked and the walk was aborted at this point.
   If LIMIT is zero the walk is not aborted.  */

static int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
		      bool (*walker)(ao_ref *, tree, void *), void *data,
		      bitmap *visited, unsigned int cnt,
		      bool *function_entry_reached, unsigned limit)
{
  do
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

      if (*visited
	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
	return cnt;

      if (gimple_nop_p (def_stmt))
	{
	  if (function_entry_reached)
	    *function_entry_reached = true;
	  return cnt;
	}
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  unsigned i;
	  if (!*visited)
	    *visited = BITMAP_ALLOC (NULL);
	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
	    {
	      int res = walk_aliased_vdefs_1 (ref,
					      gimple_phi_arg_def (def_stmt, i),
					      walker, data, visited, cnt,
					      function_entry_reached, limit);
	      if (res == -1)
		return -1;
	      cnt = res;
	    }
	  return cnt;
	}

      /* ??? Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      if (cnt == limit)
	return -1;
      if ((!ref
	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
	  && (*walker) (ref, vdef, data))
	return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}

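/* Entry point of the vdef walk documented before walk_aliased_vdefs_1
   above; it manages the VISITED bitmap when the caller does not pass
   one.  */
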
int
walk_aliased_vdefs (ao_ref *ref, tree vdef,
		    bool (*walker)(ao_ref *, tree, void *), void *data,
		    bitmap *visited,
		    bool *function_entry_reached, unsigned int limit)
{
  bitmap local_visited = NULL;
  int ret;

  timevar_push (TV_ALIAS_STMT_WALK);

  if (function_entry_reached)
    *function_entry_reached = false;

  ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
			      visited ? visited : &local_visited, 0,
			      function_entry_reached, limit);
  if (local_visited)
    BITMAP_FREE (local_visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return ret;
}

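/* Illustrative sketch (hypothetical names, not in the original file):
   using walk_aliased_vdefs to find the first statement that may
   clobber a reference.  The callback returns true to stop the walk
   after recording the clobbering statement in *DATA.  */

static bool ATTRIBUTE_UNUSED
example_record_clobberer (ao_ref *ref ATTRIBUTE_UNUSED, tree vdef,
			  void *data)
{
  /* Remember the clobbering statement and end this fork of the walk.  */
  *(gimple **) data = SSA_NAME_DEF_STMT (vdef);
  return true;
}

/* A caller starting from the vuse of STMT could then do
     gimple *clobberer = NULL;
     walk_aliased_vdefs (&r, gimple_vuse (stmt), example_record_clobberer,
			 &clobberer, NULL, NULL, 100);
   which walks at most 100 statements before giving up with -1.  */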