middle-end: Fix wrong code caused by disagreement between FRE and access path oracle...
[gcc.git] / gcc / tree-ssa-alias.c
1 /* Alias analysis for trees.
2 Copyright (C) 2004-2020 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "alias.h"
34 #include "fold-const.h"
35 #include "langhooks.h"
36 #include "dumpfile.h"
37 #include "tree-eh.h"
38 #include "tree-dfa.h"
39 #include "ipa-reference.h"
40 #include "varasm.h"
41
42 /* Broad overview of how alias analysis on gimple works:
43
44 Statements clobbering or using memory are linked through the
45 virtual operand factored use-def chain. The virtual operand
46 is unique per function, its symbol is accessible via gimple_vop (cfun).
47 Virtual operands are used for efficiently walking memory statements
48 in the gimple IL and are useful for things like value-numbering as
49 a generation count for memory references.
50
51 SSA_NAME pointers may have associated points-to information
52 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
53 points-to information is (re-)computed by the TODO_rebuild_alias
54 pass manager todo. Points-to information is also used for more
55 precise tracking of call-clobbered and call-used variables and
56 related disambiguations.
57
58 This file contains functions for disambiguating memory references,
59 the so-called alias oracle, and tools for walking the gimple IL.
60
61 The main alias-oracle entry-points are
62
63 bool stmt_may_clobber_ref_p (gimple *, tree)
64
65 This function queries if a statement may invalidate (parts of)
66 the memory designated by the reference tree argument.
67
68 bool ref_maybe_used_by_stmt_p (gimple *, tree)
69
70 This function queries if a statement may need (parts of) the
71 memory designated by the reference tree argument.
72
73 There are variants of these functions that only handle the call
74 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
75 Note that these do not disambiguate against a possible call lhs.
76
77 bool refs_may_alias_p (tree, tree)
78
79 This function tries to disambiguate two reference trees.
80
81 bool ptr_deref_may_alias_global_p (tree)
82
83 This function queries if dereferencing a pointer variable may
84 alias global memory.
85
86 More low-level disambiguators are available and documented in
87 this file. Low-level disambiguators dealing with points-to
88 information are in tree-ssa-structalias.c. */
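
/* As an illustrative sketch (not an additional entry-point), a pass that
   wants to know whether a store statement STMT can change the memory
   accessed by a reference tree REF would typically use the predicates
   documented above like so:

     if (!stmt_may_clobber_ref_p (stmt, ref))
       ... STMT provably does not write to *REF, so the value of *REF
           before STMT is still valid after it ...

   and use ref_maybe_used_by_stmt_p (stmt, ref) to ask whether STMT may
   read *REF, or refs_may_alias_p (ref1, ref2) to check two reference
   trees against each other.  A true answer from any of these predicates
   only means "may"; false is the conservative, proven-disjoint answer.  */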
89
90 static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
91 static bool nonoverlapping_component_refs_p (const_tree, const_tree);
92
93 /* Query statistics for the different low-level disambiguators.
94 A high-level query may trigger multiple of them. */
95
96 static struct {
97 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
98 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
99 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
100 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
101 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
102 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
103 unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
104 unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
105 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
106 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
107 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
108 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
109 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
110 } alias_stats;
111
112 void
113 dump_alias_stats (FILE *s)
114 {
115 fprintf (s, "\nAlias oracle query stats:\n");
116 fprintf (s, " refs_may_alias_p: "
117 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
118 HOST_WIDE_INT_PRINT_DEC" queries\n",
119 alias_stats.refs_may_alias_p_no_alias,
120 alias_stats.refs_may_alias_p_no_alias
121 + alias_stats.refs_may_alias_p_may_alias);
122 fprintf (s, " ref_maybe_used_by_call_p: "
123 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
124 HOST_WIDE_INT_PRINT_DEC" queries\n",
125 alias_stats.ref_maybe_used_by_call_p_no_alias,
126 alias_stats.ref_maybe_used_by_call_p_no_alias
127 + alias_stats.ref_maybe_used_by_call_p_may_alias);
128 fprintf (s, " call_may_clobber_ref_p: "
129 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
130 HOST_WIDE_INT_PRINT_DEC" queries\n",
131 alias_stats.call_may_clobber_ref_p_no_alias,
132 alias_stats.call_may_clobber_ref_p_no_alias
133 + alias_stats.call_may_clobber_ref_p_may_alias);
134 fprintf (s, " nonoverlapping_component_refs_p: "
135 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
136 HOST_WIDE_INT_PRINT_DEC" queries\n",
137 alias_stats.nonoverlapping_component_refs_p_no_alias,
138 alias_stats.nonoverlapping_component_refs_p_no_alias
139 + alias_stats.nonoverlapping_component_refs_p_may_alias);
140 fprintf (s, " nonoverlapping_refs_since_match_p: "
141 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
142 HOST_WIDE_INT_PRINT_DEC" must overlaps, "
143 HOST_WIDE_INT_PRINT_DEC" queries\n",
144 alias_stats.nonoverlapping_refs_since_match_p_no_alias,
145 alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
146 alias_stats.nonoverlapping_refs_since_match_p_no_alias
147 + alias_stats.nonoverlapping_refs_since_match_p_may_alias
148 + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
149 fprintf (s, " aliasing_component_refs_p: "
150 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
151 HOST_WIDE_INT_PRINT_DEC" queries\n",
152 alias_stats.aliasing_component_refs_p_no_alias,
153 alias_stats.aliasing_component_refs_p_no_alias
154 + alias_stats.aliasing_component_refs_p_may_alias);
155 dump_alias_stats_in_alias_c (s);
156 }
157
158
159 /* Return true, if dereferencing PTR may alias with a global variable. */
160
161 bool
162 ptr_deref_may_alias_global_p (tree ptr)
163 {
164 struct ptr_info_def *pi;
165
166 /* If we end up with a pointer constant here, it may point
167 to global memory. */
168 if (TREE_CODE (ptr) != SSA_NAME)
169 return true;
170
171 pi = SSA_NAME_PTR_INFO (ptr);
172
173 /* If we do not have points-to information for this variable,
174 we have to punt. */
175 if (!pi)
176 return true;
177
178 /* ??? This does not use TBAA to prune globals ptr may not access. */
179 return pt_solution_includes_global (&pi->pt);
180 }
181
182 /* Return true if dereferencing PTR may alias DECL.
183 The caller is responsible for applying TBAA to see if PTR
184 may access DECL at all. */
185
186 static bool
187 ptr_deref_may_alias_decl_p (tree ptr, tree decl)
188 {
189 struct ptr_info_def *pi;
190
191 /* Conversions are irrelevant for points-to information and
192 data-dependence analysis can feed us those. */
193 STRIP_NOPS (ptr);
194
195 /* Anything we do not explicitly handle aliases. */
196 if ((TREE_CODE (ptr) != SSA_NAME
197 && TREE_CODE (ptr) != ADDR_EXPR
198 && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
199 || !POINTER_TYPE_P (TREE_TYPE (ptr))
200 || (!VAR_P (decl)
201 && TREE_CODE (decl) != PARM_DECL
202 && TREE_CODE (decl) != RESULT_DECL))
203 return true;
204
205 /* Disregard pointer offsetting. */
206 if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
207 {
208 do
209 {
210 ptr = TREE_OPERAND (ptr, 0);
211 }
212 while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
213 return ptr_deref_may_alias_decl_p (ptr, decl);
214 }
215
216 /* ADDR_EXPR pointers either just offset another pointer or directly
217 specify the pointed-to set. */
218 if (TREE_CODE (ptr) == ADDR_EXPR)
219 {
220 tree base = get_base_address (TREE_OPERAND (ptr, 0));
221 if (base
222 && (TREE_CODE (base) == MEM_REF
223 || TREE_CODE (base) == TARGET_MEM_REF))
224 ptr = TREE_OPERAND (base, 0);
225 else if (base
226 && DECL_P (base))
227 return compare_base_decls (base, decl) != 0;
228 else if (base
229 && CONSTANT_CLASS_P (base))
230 return false;
231 else
232 return true;
233 }
234
235 /* Non-aliased variables cannot be pointed to. */
236 if (!may_be_aliased (decl))
237 return false;
238
239 /* If we do not have useful points-to information for this pointer
240 we cannot disambiguate anything else. */
241 pi = SSA_NAME_PTR_INFO (ptr);
242 if (!pi)
243 return true;
244
245 return pt_solution_includes (&pi->pt, decl);
246 }
247
248 /* Return true if dereferenced PTR1 and PTR2 may alias.
249 The caller is responsible for applying TBAA to see if accesses
250 through PTR1 and PTR2 may conflict at all. */
251
252 bool
253 ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
254 {
255 struct ptr_info_def *pi1, *pi2;
256
257 /* Conversions are irrelevant for points-to information and
258 data-dependence analysis can feed us those. */
259 STRIP_NOPS (ptr1);
260 STRIP_NOPS (ptr2);
261
262 /* Disregard pointer offsetting. */
263 if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
264 {
265 do
266 {
267 ptr1 = TREE_OPERAND (ptr1, 0);
268 }
269 while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
270 return ptr_derefs_may_alias_p (ptr1, ptr2);
271 }
272 if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
273 {
274 do
275 {
276 ptr2 = TREE_OPERAND (ptr2, 0);
277 }
278 while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
279 return ptr_derefs_may_alias_p (ptr1, ptr2);
280 }
281
282 /* ADDR_EXPR pointers either just offset another pointer or directly
283 specify the pointed-to set. */
284 if (TREE_CODE (ptr1) == ADDR_EXPR)
285 {
286 tree base = get_base_address (TREE_OPERAND (ptr1, 0));
287 if (base
288 && (TREE_CODE (base) == MEM_REF
289 || TREE_CODE (base) == TARGET_MEM_REF))
290 return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
291 else if (base
292 && DECL_P (base))
293 return ptr_deref_may_alias_decl_p (ptr2, base);
294 else
295 return true;
296 }
297 if (TREE_CODE (ptr2) == ADDR_EXPR)
298 {
299 tree base = get_base_address (TREE_OPERAND (ptr2, 0));
300 if (base
301 && (TREE_CODE (base) == MEM_REF
302 || TREE_CODE (base) == TARGET_MEM_REF))
303 return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
304 else if (base
305 && DECL_P (base))
306 return ptr_deref_may_alias_decl_p (ptr1, base);
307 else
308 return true;
309 }
310
311 /* From here we require SSA name pointers. Anything else aliases. */
312 if (TREE_CODE (ptr1) != SSA_NAME
313 || TREE_CODE (ptr2) != SSA_NAME
314 || !POINTER_TYPE_P (TREE_TYPE (ptr1))
315 || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
316 return true;
317
318 /* We may end up with two empty points-to solutions for the very same pointer.
319 In this case we still want to say both pointers alias, so shortcut
320 that here. */
321 if (ptr1 == ptr2)
322 return true;
323
324 /* If we do not have useful points-to information for either pointer
325 we cannot disambiguate anything else. */
326 pi1 = SSA_NAME_PTR_INFO (ptr1);
327 pi2 = SSA_NAME_PTR_INFO (ptr2);
328 if (!pi1 || !pi2)
329 return true;
330
331 /* ??? This does not use TBAA to prune decls from the intersection
332 that not both pointers may access. */
333 return pt_solutions_intersect (&pi1->pt, &pi2->pt);
334 }
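
/* For illustration (a sketch only; the exact outcome depends on what the
   points-to solver computed), given

     int a, b;
     int *p = &a;
     ...
     *p = 1;
     b = 2;

   the SSA name for p carries a points-to set containing only `a', so
   ptr_deref_may_alias_decl_p (p, b) returns false (b is not even
   address-taken) and the store to b is known not to clobber *p.  */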
335
336 /* Return true if dereferencing PTR may alias *REF.
337 The caller is responsible for applying TBAA to see if PTR
338 may access *REF at all. */
339
340 static bool
341 ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
342 {
343 tree base = ao_ref_base (ref);
344
345 if (TREE_CODE (base) == MEM_REF
346 || TREE_CODE (base) == TARGET_MEM_REF)
347 return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
348 else if (DECL_P (base))
349 return ptr_deref_may_alias_decl_p (ptr, base);
350
351 return true;
352 }
353
354 /* Returns true if PTR1 and PTR2 compare unequal because of points-to. */
355
356 bool
357 ptrs_compare_unequal (tree ptr1, tree ptr2)
358 {
359 /* First resolve the pointers down to a SSA name pointer base or
360 a VAR_DECL, PARM_DECL or RESULT_DECL. This explicitly does
361 not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
362 or STRING_CSTs which need points-to adjustments to track them
363 in the points-to sets. */
364 tree obj1 = NULL_TREE;
365 tree obj2 = NULL_TREE;
366 if (TREE_CODE (ptr1) == ADDR_EXPR)
367 {
368 tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
369 if (! tem)
370 return false;
371 if (VAR_P (tem)
372 || TREE_CODE (tem) == PARM_DECL
373 || TREE_CODE (tem) == RESULT_DECL)
374 obj1 = tem;
375 else if (TREE_CODE (tem) == MEM_REF)
376 ptr1 = TREE_OPERAND (tem, 0);
377 }
378 if (TREE_CODE (ptr2) == ADDR_EXPR)
379 {
380 tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
381 if (! tem)
382 return false;
383 if (VAR_P (tem)
384 || TREE_CODE (tem) == PARM_DECL
385 || TREE_CODE (tem) == RESULT_DECL)
386 obj2 = tem;
387 else if (TREE_CODE (tem) == MEM_REF)
388 ptr2 = TREE_OPERAND (tem, 0);
389 }
390
391 /* Canonicalize ptr vs. object. */
392 if (TREE_CODE (ptr1) == SSA_NAME && obj2)
393 {
394 std::swap (ptr1, ptr2);
395 std::swap (obj1, obj2);
396 }
397
398 if (obj1 && obj2)
399 /* Other code handles this correctly, no need to duplicate it here. */;
400 else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
401 {
402 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
403 /* We may not use restrict to optimize pointer comparisons.
404 See PR71062. So we have to assume that restrict-pointed-to
405 may be in fact obj1. */
406 if (!pi
407 || pi->pt.vars_contains_restrict
408 || pi->pt.vars_contains_interposable)
409 return false;
410 if (VAR_P (obj1)
411 && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
412 {
413 varpool_node *node = varpool_node::get (obj1);
414 /* If obj1 may bind to NULL give up (see below). */
415 if (! node
416 || ! node->nonzero_address ()
417 || ! decl_binds_to_current_def_p (obj1))
418 return false;
419 }
420 return !pt_solution_includes (&pi->pt, obj1);
421 }
422
423 /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
424 but those require pt.null to be conservatively correct. */
425
426 return false;
427 }
428
429 /* Returns whether reference REF to BASE may refer to global memory. */
430
431 static bool
432 ref_may_alias_global_p_1 (tree base)
433 {
434 if (DECL_P (base))
435 return is_global_var (base);
436 else if (TREE_CODE (base) == MEM_REF
437 || TREE_CODE (base) == TARGET_MEM_REF)
438 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
439 return true;
440 }
441
442 bool
443 ref_may_alias_global_p (ao_ref *ref)
444 {
445 tree base = ao_ref_base (ref);
446 return ref_may_alias_global_p_1 (base);
447 }
448
449 bool
450 ref_may_alias_global_p (tree ref)
451 {
452 tree base = get_base_address (ref);
453 return ref_may_alias_global_p_1 (base);
454 }
455
456 /* Return true if STMT may clobber global memory. */
457
458 bool
459 stmt_may_clobber_global_p (gimple *stmt)
460 {
461 tree lhs;
462
463 if (!gimple_vdef (stmt))
464 return false;
465
466 /* ??? We can ask the oracle whether an artificial pointer
467 dereference with a pointer with points-to information covering
468 all global memory (what about non-address taken memory?) may be
469 clobbered by this call. As there is at the moment no convenient
470 way of doing that without generating garbage do some manual
471 checking instead.
472 ??? We could make a NULL ao_ref argument to the various
473 predicates special, meaning any global memory. */
474
475 switch (gimple_code (stmt))
476 {
477 case GIMPLE_ASSIGN:
478 lhs = gimple_assign_lhs (stmt);
479 return (TREE_CODE (lhs) != SSA_NAME
480 && ref_may_alias_global_p (lhs));
481 case GIMPLE_CALL:
482 return true;
483 default:
484 return true;
485 }
486 }
487
488
489 /* Dump alias information on FILE. */
490
491 void
492 dump_alias_info (FILE *file)
493 {
494 unsigned i;
495 tree ptr;
496 const char *funcname
497 = lang_hooks.decl_printable_name (current_function_decl, 2);
498 tree var;
499
500 fprintf (file, "\n\nAlias information for %s\n\n", funcname);
501
502 fprintf (file, "Aliased symbols\n\n");
503
504 FOR_EACH_LOCAL_DECL (cfun, i, var)
505 {
506 if (may_be_aliased (var))
507 dump_variable (file, var);
508 }
509
510 fprintf (file, "\nCall clobber information\n");
511
512 fprintf (file, "\nESCAPED");
513 dump_points_to_solution (file, &cfun->gimple_df->escaped);
514
515 fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
516
517 FOR_EACH_SSA_NAME (i, ptr, cfun)
518 {
519 struct ptr_info_def *pi;
520
521 if (!POINTER_TYPE_P (TREE_TYPE (ptr))
522 || SSA_NAME_IN_FREE_LIST (ptr))
523 continue;
524
525 pi = SSA_NAME_PTR_INFO (ptr);
526 if (pi)
527 dump_points_to_info_for (file, ptr);
528 }
529
530 fprintf (file, "\n");
531 }
532
533
534 /* Dump alias information on stderr. */
535
536 DEBUG_FUNCTION void
537 debug_alias_info (void)
538 {
539 dump_alias_info (stderr);
540 }
541
542
543 /* Dump the points-to set *PT into FILE. */
544
545 void
546 dump_points_to_solution (FILE *file, struct pt_solution *pt)
547 {
548 if (pt->anything)
549 fprintf (file, ", points-to anything");
550
551 if (pt->nonlocal)
552 fprintf (file, ", points-to non-local");
553
554 if (pt->escaped)
555 fprintf (file, ", points-to escaped");
556
557 if (pt->ipa_escaped)
558 fprintf (file, ", points-to unit escaped");
559
560 if (pt->null)
561 fprintf (file, ", points-to NULL");
562
563 if (pt->vars)
564 {
565 fprintf (file, ", points-to vars: ");
566 dump_decl_set (file, pt->vars);
567 if (pt->vars_contains_nonlocal
568 || pt->vars_contains_escaped
569 || pt->vars_contains_escaped_heap
570 || pt->vars_contains_restrict)
571 {
572 const char *comma = "";
573 fprintf (file, " (");
574 if (pt->vars_contains_nonlocal)
575 {
576 fprintf (file, "nonlocal");
577 comma = ", ";
578 }
579 if (pt->vars_contains_escaped)
580 {
581 fprintf (file, "%sescaped", comma);
582 comma = ", ";
583 }
584 if (pt->vars_contains_escaped_heap)
585 {
586 fprintf (file, "%sescaped heap", comma);
587 comma = ", ";
588 }
589 if (pt->vars_contains_restrict)
590 {
591 fprintf (file, "%srestrict", comma);
592 comma = ", ";
593 }
594 if (pt->vars_contains_interposable)
595 fprintf (file, "%sinterposable", comma);
596 fprintf (file, ")");
597 }
598 }
599 }
600
601
602 /* Unified dump function for pt_solution. */
603
604 DEBUG_FUNCTION void
605 debug (pt_solution &ref)
606 {
607 dump_points_to_solution (stderr, &ref);
608 }
609
610 DEBUG_FUNCTION void
611 debug (pt_solution *ptr)
612 {
613 if (ptr)
614 debug (*ptr);
615 else
616 fprintf (stderr, "<nil>\n");
617 }
618
619
620 /* Dump points-to information for SSA_NAME PTR into FILE. */
621
622 void
623 dump_points_to_info_for (FILE *file, tree ptr)
624 {
625 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
626
627 print_generic_expr (file, ptr, dump_flags);
628
629 if (pi)
630 dump_points_to_solution (file, &pi->pt);
631 else
632 fprintf (file, ", points-to anything");
633
634 fprintf (file, "\n");
635 }
636
637
638 /* Dump points-to information for VAR into stderr. */
639
640 DEBUG_FUNCTION void
641 debug_points_to_info_for (tree var)
642 {
643 dump_points_to_info_for (stderr, var);
644 }
645
646
647 /* Initializes the alias-oracle reference representation *R from REF. */
648
649 void
650 ao_ref_init (ao_ref *r, tree ref)
651 {
652 r->ref = ref;
653 r->base = NULL_TREE;
654 r->offset = 0;
655 r->size = -1;
656 r->max_size = -1;
657 r->ref_alias_set = -1;
658 r->base_alias_set = -1;
659 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
660 }
661
662 /* Returns the base object of the memory reference *REF. */
663
664 tree
665 ao_ref_base (ao_ref *ref)
666 {
667 bool reverse;
668
669 if (ref->base)
670 return ref->base;
671 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
672 &ref->max_size, &reverse);
673 return ref->base;
674 }
675
676 /* Returns the base object alias set of the memory reference *REF. */
677
678 alias_set_type
679 ao_ref_base_alias_set (ao_ref *ref)
680 {
681 tree base_ref;
682 if (ref->base_alias_set != -1)
683 return ref->base_alias_set;
684 if (!ref->ref)
685 return 0;
686 base_ref = ref->ref;
687 while (handled_component_p (base_ref))
688 base_ref = TREE_OPERAND (base_ref, 0);
689 ref->base_alias_set = get_alias_set (base_ref);
690 return ref->base_alias_set;
691 }
692
693 /* Returns the reference alias set of the memory reference *REF. */
694
695 alias_set_type
696 ao_ref_alias_set (ao_ref *ref)
697 {
698 if (ref->ref_alias_set != -1)
699 return ref->ref_alias_set;
700 ref->ref_alias_set = get_alias_set (ref->ref);
701 return ref->ref_alias_set;
702 }
703
704 /* Init an alias-oracle reference representation from a gimple pointer
705 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
706 size is assumed to be unknown. The access is assumed to be only
707 to or after the pointer target, not before it. */
708
709 void
710 ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
711 {
712 poly_int64 t, size_hwi, extra_offset = 0;
713 ref->ref = NULL_TREE;
714 if (TREE_CODE (ptr) == SSA_NAME)
715 {
716 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
717 if (gimple_assign_single_p (stmt)
718 && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
719 ptr = gimple_assign_rhs1 (stmt);
720 else if (is_gimple_assign (stmt)
721 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
722 && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
723 {
724 ptr = gimple_assign_rhs1 (stmt);
725 extra_offset *= BITS_PER_UNIT;
726 }
727 }
728
729 if (TREE_CODE (ptr) == ADDR_EXPR)
730 {
731 ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
732 if (ref->base)
733 ref->offset = BITS_PER_UNIT * t;
734 else
735 {
736 size = NULL_TREE;
737 ref->offset = 0;
738 ref->base = get_base_address (TREE_OPERAND (ptr, 0));
739 }
740 }
741 else
742 {
743 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
744 ref->base = build2 (MEM_REF, char_type_node,
745 ptr, null_pointer_node);
746 ref->offset = 0;
747 }
748 ref->offset += extra_offset;
749 if (size
750 && poly_int_tree_p (size, &size_hwi)
751 && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
752 ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
753 else
754 ref->max_size = ref->size = -1;
755 ref->ref_alias_set = 0;
756 ref->base_alias_set = 0;
757 ref->volatile_p = false;
758 }
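
/* A minimal usage sketch: to reason about the bytes written by something
   like memset (ptr, 0, sz), one can build an ao_ref from the pointer and
   size arguments,

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, ptr, sz);

   and hand DREF to the ao_ref-based disambiguators.  Because the alias
   sets are initialized to 0 above, such a reference conservatively
   conflicts with everything as far as TBAA is concerned.  */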
759
760 /* S1 and S2 are TYPE_SIZE or DECL_SIZE. Compare them:
761 Return -1 if S1 < S2
762 Return 1 if S1 > S2
763 Return 0 if equal or incomparable. */
764
765 static int
766 compare_sizes (tree s1, tree s2)
767 {
768 if (!s1 || !s2)
769 return 0;
770
771 poly_uint64 size1;
772 poly_uint64 size2;
773
774 if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
775 return 0;
776 if (known_lt (size1, size2))
777 return -1;
778 if (known_lt (size2, size1))
779 return 1;
780 return 0;
781 }
782
783 /* Compare TYPE1 and TYPE2 by their size.
784 Return -1 if size of TYPE1 < size of TYPE2
785 Return 1 if size of TYPE1 > size of TYPE2
786 Return 0 if types are of equal sizes or we can not compare them. */
787
788 static int
789 compare_type_sizes (tree type1, tree type2)
790 {
791 /* Be conservative for arrays and vectors. We want to support partial
792 overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */
793 while (TREE_CODE (type1) == ARRAY_TYPE
794 || TREE_CODE (type1) == VECTOR_TYPE)
795 type1 = TREE_TYPE (type1);
796 while (TREE_CODE (type2) == ARRAY_TYPE
797 || TREE_CODE (type2) == VECTOR_TYPE)
798 type2 = TREE_TYPE (type2);
799 return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
800 }
801
802 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
803 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
804 decide. */
805
806 static inline int
807 same_type_for_tbaa (tree type1, tree type2)
808 {
809 type1 = TYPE_MAIN_VARIANT (type1);
810 type2 = TYPE_MAIN_VARIANT (type2);
811
812 /* Handle the most common case first. */
813 if (type1 == type2)
814 return 1;
815
816 /* If we would have to do structural comparison bail out. */
817 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
818 || TYPE_STRUCTURAL_EQUALITY_P (type2))
819 return -1;
820
821 /* Compare the canonical types. */
822 if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
823 return 1;
824
825 /* ??? Array types are not properly unified in all cases as we have
826 spurious changes in the index types for example. Removing this
827 causes all sorts of problems with the Fortran frontend. */
828 if (TREE_CODE (type1) == ARRAY_TYPE
829 && TREE_CODE (type2) == ARRAY_TYPE)
830 return -1;
831
832 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
833 object of one of its constrained subtypes, e.g. when a function with an
834 unconstrained parameter passed by reference is called on an object and
835 inlined. But, even in the case of a fixed size, type and subtypes are
836 not equivalent enough as to share the same TYPE_CANONICAL, since this
837 would mean that conversions between them are useless, whereas they are
838 not (e.g. type and subtypes can have different modes). So, in the end,
839 they are only guaranteed to have the same alias set. */
840 if (get_alias_set (type1) == get_alias_set (type2))
841 return -1;
842
843 /* The types are known to be not equal. */
844 return 0;
845 }
846
847 /* Return true if TYPE is a composite type (i.e. we may apply one of the
848 handled components on it). */
849
850 static bool
851 type_has_components_p (tree type)
852 {
853 return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
854 || TREE_CODE (type) == COMPLEX_TYPE;
855 }
856
857 /* MATCH1 and MATCH2, which are part of the access paths of REF1 and REF2
858 respectively, either point to the same address or are completely
859 disjoint. If PARTIAL_OVERLAP is true, assume that outermost arrays may
860 just partly overlap.
861
862 Try to disambiguate using the access path starting from the match
863 and return false if there is no conflict.
864
865 Helper for aliasing_component_refs_p. */
866
867 static bool
868 aliasing_matching_component_refs_p (tree match1, tree ref1,
869 poly_int64 offset1, poly_int64 max_size1,
870 tree match2, tree ref2,
871 poly_int64 offset2, poly_int64 max_size2,
872 bool partial_overlap)
873 {
874 poly_int64 offadj, sztmp, msztmp;
875 bool reverse;
876
877 if (!partial_overlap)
878 {
879 get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
880 offset2 -= offadj;
881 get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
882 offset1 -= offadj;
883 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
884 {
885 ++alias_stats.aliasing_component_refs_p_no_alias;
886 return false;
887 }
888 }
889
890 int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
891 partial_overlap);
892 if (cmp == 1
893 || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
894 {
895 ++alias_stats.aliasing_component_refs_p_no_alias;
896 return false;
897 }
898 ++alias_stats.aliasing_component_refs_p_may_alias;
899 return true;
900 }
901
902 /* Return true if REF is a reference to a zero sized trailing array. I.e.
903 struct foo {int bar; int array[0];} *fooptr;
904 fooptr->array. */
905
906 static bool
907 component_ref_to_zero_sized_trailing_array_p (tree ref)
908 {
909 return (TREE_CODE (ref) == COMPONENT_REF
910 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
911 && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
912 || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
913 && array_at_struct_end_p (ref));
914 }
915
916 /* Worker for aliasing_component_refs_p. Most parameters match parameters of
917 aliasing_component_refs_p.
918
919 Walk access path REF2 and try to find type matching TYPE1
920 (which is a start of possibly aliasing access path REF1).
921 If match is found, try to disambiguate.
922
923 Return 0 for successful disambiguation.
924 Return 1 if a match was found but disambiguation failed.
925 Return -1 if there is no match.
926 In this case MAYBE_MATCH is set to false if there is no type matching TYPE1
927 in the access path of REF2 and to true if we are not sure. */
928
929 static int
930 aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
931 poly_int64 offset1, poly_int64 max_size1,
932 tree end_struct_ref1,
933 tree ref2, tree base2,
934 poly_int64 offset2, poly_int64 max_size2,
935 bool *maybe_match)
936 {
937 tree ref = ref2;
938 int same_p = 0;
939
940 while (true)
941 {
942 /* We walk from the inner type to the outer types. If the type we see is
943 already too large to be part of type1, terminate the search. */
944 int cmp = compare_type_sizes (type1, TREE_TYPE (ref));
945
946 if (cmp < 0
947 && (!end_struct_ref1
948 || compare_type_sizes (TREE_TYPE (end_struct_ref1),
949 TREE_TYPE (ref)) < 0))
950 break;
951 /* If types may be of same size, see if we can decide about their
952 equality. */
953 if (cmp == 0)
954 {
955 same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
956 if (same_p == 1)
957 break;
958 /* In case we can't decide whether the types are the same, try to
959 continue looking for an exact match.
960 Remember however that we possibly saw a match,
961 to bypass the access path continuation tests we do later. */
962 if (same_p == -1)
963 *maybe_match = true;
964 }
965 if (!handled_component_p (ref))
966 break;
967 ref = TREE_OPERAND (ref, 0);
968 }
969 if (same_p == 1)
970 {
971 bool partial_overlap = false;
972
973 /* We assume that arrays can overlap by a multiple of their element
974 size as tested in gcc.dg/torture/alias-2.c.
975 This partial overlap happens only when both arrays are bases of
976 the access and not contained within another component ref.
977 To be safe we also assume partial overlap for VLAs. */
978 if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
979 && (!TYPE_SIZE (TREE_TYPE (base1))
980 || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
981 || ref == base2))
982 {
983 /* Setting maybe_match to true triggers
984 nonoverlapping_component_refs_p test later that still may do
985 useful disambiguation. */
986 *maybe_match = true;
987 partial_overlap = true;
988 }
989 return aliasing_matching_component_refs_p (base1, ref1,
990 offset1, max_size1,
991 ref, ref2,
992 offset2, max_size2,
993 partial_overlap);
994 }
995 return -1;
996 }
997
998 /* Consider access path1 base1....ref1 and access path2 base2...ref2.
999 Return true if they can be composed to single access path
1000 base1...ref1...base2...ref2.
1001
1002 REF_TYPE1 is the type of REF1. END_STRUCT_PAST_END1 is true if there is
1003 a trailing array access after REF1 in the non-TBAA part of the access.
1004 REF1_ALIAS_SET is the alias set of REF1.
1005
1006 BASE_TYPE2 is the type of base2. END_STRUCT_REF2 is non-NULL if there is
1007 a trailing array access in the TBAA part of access path2.
1008 BASE2_ALIAS_SET is the alias set of base2. */
1009
1010 bool
1011 access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
1012 alias_set_type ref1_alias_set,
1013 tree base_type2, tree end_struct_ref2,
1014 alias_set_type base2_alias_set)
1015 {
1016 /* Access path can not continue past types with no components. */
1017 if (!type_has_components_p (ref_type1))
1018 return false;
1019
1020 /* If the first access path ends with a type too small to hold the base of
1021 the second access path, the paths typically can not continue.
1022
1023 Punt if end_struct_past_end1 is true. We want to support arbitrary
1024 type punning past the first COMPONENT_REF to a union because redundant store
1025 elimination depends on this, see PR92152. For this reason we can not
1026 check the size of the reference because types may partially overlap. */
1027 if (!end_struct_past_end1)
1028 {
1029 if (compare_type_sizes (ref_type1, base_type2) < 0)
1030 return false;
1031 /* If path2 contains a trailing array access we can strengthen the check
1032 to verify that the size of the element of the trailing array fits as well.
1033 In fact we could check for offset + type_size, but we do not track
1034 offsets and this is quite a corner case. */
1035 if (end_struct_ref2
1036 && compare_type_sizes (ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
1037 return false;
1038 }
1039 return (base2_alias_set == ref1_alias_set
1040 || alias_set_subset_of (base2_alias_set, ref1_alias_set));
1041 }
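
/* For example (an illustrative sketch, not an extra rule), with

     struct A { int i; };
     struct B { struct A a; int j; };

   an access path ending in a `struct B' may be continued by an access
   path whose base is of type `struct A' (the alias set of A is a subset
   of the alias set of B because B contains an A member), whereas a path
   ending in `int' can never be continued because int has no components
   at all.  */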
1042
1043 /* Determine if the two component references REF1 and REF2 which are
1044 based on access types TYPE1 and TYPE2 and of which at least one is based
1045 on an indirect reference may alias.
1046 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
1047 are the respective alias sets. */
1048
1049 static bool
1050 aliasing_component_refs_p (tree ref1,
1051 alias_set_type ref1_alias_set,
1052 alias_set_type base1_alias_set,
1053 poly_int64 offset1, poly_int64 max_size1,
1054 tree ref2,
1055 alias_set_type ref2_alias_set,
1056 alias_set_type base2_alias_set,
1057 poly_int64 offset2, poly_int64 max_size2)
1058 {
1059 /* If one reference is a component reference through pointers, try to find a
1060 common base and apply offset based disambiguation. This handles
1061 for example
1062 struct A { int i; int j; } *q;
1063 struct B { struct A a; int k; } *p;
1064 disambiguating q->i and p->a.j. */
1065 tree base1, base2;
1066 tree type1, type2;
1067 bool maybe_match = false;
1068 tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
1069 bool end_struct_past_end1 = false;
1070 bool end_struct_past_end2 = false;
1071
1072 /* Choose bases and base types to search for.
1073 The access path is as follows:
1074 base....end_of_tbaa_ref...actual_ref
1075 At one place in the access path there may be a reference to a zero sized or
1076 trailing array.
1077
1078 We generally discard the segment after end_of_tbaa_ref, however
1079 we need to be careful in case it contains a zero sized or trailing array.
1080 These may happen after a reference to a union and in this case we must
1081 not disambiguate type punning scenarios.
1082
1083 We set:
1084 base1 to point to base
1085
1086 ref1 to point to end_of_tbaa_ref
1087
1088 end_struct_ref1 to point to the trailing reference (if it exists
1089 in the range base....end_of_tbaa_ref)
1090
1091 end_struct_past_end1 is true if this trailing reference occurs in
1092 end_of_tbaa_ref...actual_ref. */
1093 base1 = ref1;
1094 while (handled_component_p (base1))
1095 {
1096 /* Generally access paths are monotone in the size of the object. The
1097 exception is trailing arrays of structures. I.e.
1098 struct a {int array[0];};
1099 or
1100 struct a {int array1[0]; int array[];};
1101 Such a struct has size 0 but accesses to a.array may have non-zero size.
1102 In this case the size of TREE_TYPE (base1) is smaller than the
1103 size of TREE_TYPE (TREE_OPERAND (base1, 0)).
1104
1105 Because we compare sizes of arrays just by sizes of their elements,
1106 we only need to care about zero sized array fields here. */
1107 if (component_ref_to_zero_sized_trailing_array_p (base1))
1108 {
1109 gcc_checking_assert (!end_struct_ref1);
1110 end_struct_ref1 = base1;
1111 }
1112 if (ends_tbaa_access_path_p (base1))
1113 {
1114 ref1 = TREE_OPERAND (base1, 0);
1115 if (end_struct_ref1)
1116 {
1117 end_struct_past_end1 = true;
1118 end_struct_ref1 = NULL;
1119 }
1120 }
1121 base1 = TREE_OPERAND (base1, 0);
1122 }
1123 type1 = TREE_TYPE (base1);
1124 base2 = ref2;
1125 while (handled_component_p (base2))
1126 {
1127 if (component_ref_to_zero_sized_trailing_array_p (base2))
1128 {
1129 gcc_checking_assert (!end_struct_ref2);
1130 end_struct_ref2 = base2;
1131 }
1132 if (ends_tbaa_access_path_p (base2))
1133 {
1134 ref2 = TREE_OPERAND (base2, 0);
1135 if (end_struct_ref2)
1136 {
1137 end_struct_past_end2 = true;
1138 end_struct_ref2 = NULL;
1139 }
1140 }
1141 base2 = TREE_OPERAND (base2, 0);
1142 }
1143 type2 = TREE_TYPE (base2);
1144
1145 /* Now search for the type1 in the access path of ref2. This
1146 would be a common base for doing offset based disambiguation on.
1147 This however only makes sense if type2 is big enough to hold type1. */
1148 int cmp_outer = compare_type_sizes (type2, type1);
1149
1150 /* If type2 is big enough to contain type1 walk its access path.
1151 We also need to take care of arrays at the end of structs that may extend
1152 beyond the end of structure. If this occurs in the TBAA part of the
1153 access path, we need to consider the increased type as well. */
1154 if (cmp_outer >= 0
1155 || (end_struct_ref2
1156 && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
1157 {
1158 int res = aliasing_component_refs_walk (ref1, type1, base1,
1159 offset1, max_size1,
1160 end_struct_ref1,
1161 ref2, base2, offset2, max_size2,
1162 &maybe_match);
1163 if (res != -1)
1164 return res;
1165 }
1166
1167 /* If we didn't find a common base, try the other way around. */
1168 if (cmp_outer <= 0
1169 || (end_struct_ref1
1170 && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0))
1171 {
1172 int res = aliasing_component_refs_walk (ref2, type2, base2,
1173 offset2, max_size2,
1174 end_struct_ref2,
1175 ref1, base1, offset1, max_size1,
1176 &maybe_match);
1177 if (res != -1)
1178 return res;
1179 }
1180
1181 /* In the following code we make an assumption that the types in access
1182 paths do not overlap and thus accesses alias only if one path can be a
1183 continuation of another. If we were not able to decide about equivalence,
1184 we need to give up. */
1185 if (maybe_match)
1186 {
1187 if (!nonoverlapping_component_refs_p (ref1, ref2))
1188 {
1189 ++alias_stats.aliasing_component_refs_p_may_alias;
1190 return true;
1191 }
1192 ++alias_stats.aliasing_component_refs_p_no_alias;
1193 return false;
1194 }
1195
1196 if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
1197 ref1_alias_set,
1198 type2, end_struct_ref2,
1199 base2_alias_set)
1200 || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end2,
1201 ref2_alias_set,
1202 type1, end_struct_ref1,
1203 base1_alias_set))
1204 {
1205 ++alias_stats.aliasing_component_refs_p_may_alias;
1206 return true;
1207 }
1208 ++alias_stats.aliasing_component_refs_p_no_alias;
1209 return false;
1210 }
1211
1212 /* FIELD1 and FIELD2 are two fields of component refs. We assume
1213 that bases of both component refs are either equivalent or nonoverlapping.
1214 We do not assume that the containers of FIELD1 and FIELD2 are of the
1215 same type or size.
1216
1217 Return 0 if, assuming the base addresses of the component refs are the
1218 same, FIELD1 and FIELD2 have the same address. Note that FIELD1 and FIELD2
1219 may not be of the same type or size.
1220
1221 Return 1 if FIELD1 and FIELD2 are non-overlapping.
1222
1223 Return -1 otherwise.
1224
1225 The main difference between 0 and -1 is to let
1226 nonoverlapping_refs_since_match_p discover the semantically
1227 equivalent part of the access path.
1228
1229 Note that this function is used even with -fno-strict-aliasing
1230 and makes use of no TBAA assumptions. */
1231
1232 static int
1233 nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
1234 {
1235 /* If both fields are of the same type, we could save hard work of
1236 comparing offsets. */
1237 tree type1 = DECL_CONTEXT (field1);
1238 tree type2 = DECL_CONTEXT (field2);
1239
1240 if (TREE_CODE (type1) == RECORD_TYPE
1241 && DECL_BIT_FIELD_REPRESENTATIVE (field1))
1242 field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
1243 if (TREE_CODE (type2) == RECORD_TYPE
1244 && DECL_BIT_FIELD_REPRESENTATIVE (field2))
1245 field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);
1246
1247 /* ??? Bitfields can overlap at RTL level so punt on them.
1248 FIXME: RTL expansion should be fixed by adjusting the access path
1249 when producing MEM_ATTRs for MEMs which are wider than
1250 the bitfields similarly as done in set_mem_attrs_minus_bitpos. */
1251 if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
1252 return -1;
1253
1254 /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE. */
1255 if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
1256 return field1 != field2;
1257
1258 /* In the common case the offsets and bit offsets will be the same.
1259 However if frontends do not agree on the alignment, they may be
1260 different even if they actually represent the same address.
1261 Try the common case first and if that fails calculate the
1262 actual bit offset. */
1263 if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
1264 DECL_FIELD_OFFSET (field2))
1265 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
1266 DECL_FIELD_BIT_OFFSET (field2)))
1267 return 0;
1268
1269 /* Note that it may be possible to use component_ref_field_offset
1270 which would provide offsets as trees. However constructing and folding
1271 trees is expensive and does not seem to be worth the compile time
1272 cost. */
1273
1274 poly_uint64 offset1, offset2;
1275 poly_uint64 bit_offset1, bit_offset2;
1276
1277 if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
1278 && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
1279 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
1280 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
1281 {
1282 offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
1283 offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;
1284
1285 if (known_eq (offset1, offset2))
1286 return 0;
1287
1288 poly_uint64 size1, size2;
1289
1290 if (poly_int_tree_p (DECL_SIZE (field1), &size1)
1291 && poly_int_tree_p (DECL_SIZE (field2), &size2)
1292 && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
1293 return 1;
1294 }
1295 /* Resort to slower overlap checking by looking for matching types in
1296 the middle of access path. */
1297 return -1;
1298 }
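
/* To make the return values concrete (a sketch only):

     struct S { int a; int b; } *p, *q;

   comparing the FIELD_DECLs behind p->a and q->b yields 1 (different
   fields of the same RECORD_TYPE never overlap), comparing p->a with
   q->a yields 0 (if the bases have the same address, so do the fields),
   and -1 is reserved for cases such as pairs of bit-fields or fields
   whose offsets cannot be compared at compile time.  */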
1299
1300 /* Return the low bound of an array ref. Do not produce new trees
1301 and thus do not care about the particular type of integer constant
1302 and placeholder exprs. */
1303
1304 static tree
1305 cheap_array_ref_low_bound (tree ref)
1306 {
1307 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
1308
1309 /* Avoid expensive array_ref_low_bound.
1310 The low bound is either stored in operand 2, or it is TYPE_MIN_VALUE of the
1311 domain type, or it is zero. */
1312 if (TREE_OPERAND (ref, 2))
1313 return TREE_OPERAND (ref, 2);
1314 else if (domain_type && TYPE_MIN_VALUE (domain_type))
1315 return TYPE_MIN_VALUE (domain_type);
1316 else
1317 return integer_zero_node;
1318 }
1319
1320 /* REF1 and REF2 are ARRAY_REFs with either same base address or which are
1321 completely disjoint.
1322
1323 Return 1 if the refs are non-overlapping.
1324 Return 0 if they are possibly overlapping but if so the overlap again
1325 starts on the same address.
1326 Return -1 otherwise. */
1327
1328 int
1329 nonoverlapping_array_refs_p (tree ref1, tree ref2)
1330 {
1331 tree index1 = TREE_OPERAND (ref1, 1);
1332 tree index2 = TREE_OPERAND (ref2, 1);
1333 tree low_bound1 = cheap_array_ref_low_bound(ref1);
1334 tree low_bound2 = cheap_array_ref_low_bound(ref2);
1335
1336 /* Handle zero offsets first: we do not need to match type size in this
1337 case. */
1338 if (operand_equal_p (index1, low_bound1, 0)
1339 && operand_equal_p (index2, low_bound2, 0))
1340 return 0;
1341
1342 /* If type sizes are different, give up.
1343
1344 Avoid expensive array_ref_element_size.
1345 If operand 3 is present it denotes the size in alignment units.
1346 Otherwise size is TYPE_SIZE of the element type.
1347 Handle only common cases where types are of the same "kind". */
1348 if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
1349 return -1;
1350
1351 tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
1352 tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));
1353
1354 if (TREE_OPERAND (ref1, 3))
1355 {
1356 if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
1357 || !operand_equal_p (TREE_OPERAND (ref1, 3),
1358 TREE_OPERAND (ref2, 3), 0))
1359 return -1;
1360 }
1361 else
1362 {
1363 if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
1364 TYPE_SIZE_UNIT (elmt_type2), 0))
1365 return -1;
1366 }
1367
1368 /* Since we know that type sizes are the same, there is no need to return
1369 -1 after this point. Partial overlap can not be introduced. */
1370
1371 /* We may need to fold trees in this case.
1372 TODO: Handle integer constant case at least. */
1373 if (!operand_equal_p (low_bound1, low_bound2, 0))
1374 return 0;
1375
1376 if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
1377 {
1378 if (tree_int_cst_equal (index1, index2))
1379 return 0;
1380 return 1;
1381 }
1382 /* TODO: We can use VRP to further disambiguate here. */
1383 return 0;
1384 }
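
/* A small worked example (assuming the element sizes compare equal):
   for int a[10], the refs a[1] and a[2] yield 1 (distinct constant
   indices), a[0] and a[0] yield 0, and a[i] vs a[j] with unknown i and j
   also yield 0 (they may overlap, but if so they start at the same
   address).  -1 is only returned when the element sizes cannot be proven
   equal, in which case partial overlaps are possible.  */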
1385
1386 /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
1387 MATCH2 either point to the same address or are disjoint.
1388 MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
1389 respectively or NULL in the case we established equivalence of bases.
1390 If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
1391 overlap by an exact multiple of their element size.
1392
1393 This test works by matching the initial segment of the access path
1394 and does not rely on TBAA thus is safe for !flag_strict_aliasing if
1395 match was determined without use of TBAA oracle.
1396
1397 Return 1 if we can determine that component references REF1 and REF2,
1398 that are within a common DECL, cannot overlap.
1399
1400 Return 0 if paths are same and thus there is nothing to disambiguate more
1401 (i.e. there is must alias assuming there is must alias between MATCH1 and
1402 MATCH2)
1403
1404 Return -1 if we can not determine 0 or 1 - this happens when
1405 non-matching types were met in the path.
1406 In this case it may make sense to continue by other disambiguation
1407 oracles. */
1408
1409 static int
1410 nonoverlapping_refs_since_match_p (tree match1, tree ref1,
1411 tree match2, tree ref2,
1412 bool partial_overlap)
1413 {
1414 int ntbaa1 = 0, ntbaa2 = 0;
1415 /* Early return if there are no references to match, we do not need
1416 to walk the access paths.
1417
1418 Do not consider this as may-alias for stats - it is more useful
1419 to have information how many disambiguations happened provided that
1420 the query was meaningful. */
1421
1422 if (match1 == ref1 || !handled_component_p (ref1)
1423 || match2 == ref2 || !handled_component_p (ref2))
1424 return -1;
1425
1426 auto_vec<tree, 16> component_refs1;
1427 auto_vec<tree, 16> component_refs2;
1428
1429 /* Create the stack of handled components for REF1. */
1430 while (handled_component_p (ref1) && ref1 != match1)
1431 {
1432 /* We use TBAA only to re-synchronize after mismatched refs. So we
1433 do not need to truncate access path after TBAA part ends. */
1434 if (ends_tbaa_access_path_p (ref1))
1435 ntbaa1 = 0;
1436 else
1437 ntbaa1++;
1438 component_refs1.safe_push (ref1);
1439 ref1 = TREE_OPERAND (ref1, 0);
1440 }
1441
1442 /* Create the stack of handled components for REF2. */
1443 while (handled_component_p (ref2) && ref2 != match2)
1444 {
1445 if (ends_tbaa_access_path_p (ref2))
1446 ntbaa2 = 0;
1447 else
1448 ntbaa2++;
1449 component_refs2.safe_push (ref2);
1450 ref2 = TREE_OPERAND (ref2, 0);
1451 }
1452
1453 if (!flag_strict_aliasing)
1454 {
1455 ntbaa1 = 0;
1456 ntbaa2 = 0;
1457 }
1458
1459 bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
1460 bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;
1461
1462 /* If only one of the access paths starts with MEM_REF, check that the offset
1463 is 0 so the addresses stay the same after stripping it.
1464 TODO: In this case we may walk the other access path until we get the same
1465 offset.
1466
1467 If both start with MEM_REF, the offsets have to be the same. */
1468 if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
1469 || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
1470 || (mem_ref1 && mem_ref2
1471 && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
1472 TREE_OPERAND (ref2, 1))))
1473 {
1474 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1475 return -1;
1476 }
1477
1478 /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
1479 to handle them here at all. */
1480 gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
1481 && TREE_CODE (ref2) != TARGET_MEM_REF);
1482
1483 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1484 rank. This is sufficient because we start from the same DECL and you
1485 cannot reference several fields at a time with COMPONENT_REFs (unlike
1486 with ARRAY_RANGE_REFs for arrays) so you always need the same number
1487 of them to access a sub-component, unless you're in a union, in which
1488 case the return value will precisely be false. */
1489 while (true)
1490 {
1491 /* Track if we have seen an unmatched ref with a non-zero offset. In this
1492 case we must look for partial overlaps. */
1493 bool seen_unmatched_ref_p = false;
1494
1495 /* First match ARRAY_REFs and try to disambiguate. */
1496 if (!component_refs1.is_empty ()
1497 && !component_refs2.is_empty ())
1498 {
1499 unsigned int narray_refs1=0, narray_refs2=0;
1500
1501 /* We generally assume that both access paths start with the same sequence
1502 of refs. However if the number of array refs is not in sync, try
1503 to recover and pop elts until the numbers match. This helps the case
1504 where one access path starts with the array and the other with an element. */
1505 for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
1506 narray_refs1++)
1507 if (TREE_CODE (component_refs1 [component_refs1.length()
1508 - 1 - narray_refs1]) != ARRAY_REF)
1509 break;
1510
1511 for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
1512 narray_refs2++)
1513 if (TREE_CODE (component_refs2 [component_refs2.length()
1514 - 1 - narray_refs2]) != ARRAY_REF)
1515 break;
1516 for (; narray_refs1 > narray_refs2; narray_refs1--)
1517 {
1518 ref1 = component_refs1.pop ();
1519 ntbaa1--;
1520
1521 /* If index is non-zero we need to check whether the reference
1522 does not break the main invariant that bases are either
1523 disjoint or equal. Consider the example:
1524
1525 unsigned char out[][1];
1526 out[1]="a";
1527 out[i][0];
1528
1529 Here the bases out and out are the same, but after removing the
1530 [i] index, this invariant no longer holds, because
1531 out[i] points to the middle of array out.
1532
1533 TODO: If the size of the type of the skipped reference is an integer
1534 multiple of the size of the type of the other reference this
1535 invariant can be verified, but even then it is not completely
1536 safe with !flag_strict_aliasing if the other reference contains
1537 unbounded array accesses.
1538 See */
1539
1540 if (!operand_equal_p (TREE_OPERAND (ref1, 1),
1541 cheap_array_ref_low_bound (ref1), 0))
1542 return 0;
1543 }
1544 for (; narray_refs2 > narray_refs1; narray_refs2--)
1545 {
1546 ref2 = component_refs2.pop ();
1547 ntbaa2--;
1548 if (!operand_equal_p (TREE_OPERAND (ref2, 1),
1549 cheap_array_ref_low_bound (ref2), 0))
1550 return 0;
1551 }
1552 /* Try to disambiguate matched arrays. */
1553 for (unsigned int i = 0; i < narray_refs1; i++)
1554 {
1555 int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
1556 component_refs2.pop ());
1557 ntbaa1--;
1558 ntbaa2--;
1559 if (cmp == 1 && !partial_overlap)
1560 {
1561 ++alias_stats
1562 .nonoverlapping_refs_since_match_p_no_alias;
1563 return 1;
1564 }
1565 if (cmp == -1)
1566 {
1567 seen_unmatched_ref_p = true;
1568 /* We can not maintain the invariant that bases are either the
1569 same or completely disjoint. However we can still recover
1570 from type based alias analysis if we reach references of the
1571 same sizes. We do not attempt to match array sizes, so
1572 just finish array walking and look for component refs. */
1573 if (ntbaa1 < 0 || ntbaa2 < 0)
1574 {
1575 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1576 return -1;
1577 }
1578 for (i++; i < narray_refs1; i++)
1579 {
1580 component_refs1.pop ();
1581 component_refs2.pop ();
1582 ntbaa1--;
1583 ntbaa2--;
1584 }
1585 break;
1586 }
1587 partial_overlap = false;
1588 }
1589 }
1590
1591 /* Next look for component_refs. */
1592 do
1593 {
1594 if (component_refs1.is_empty ())
1595 {
1596 ++alias_stats
1597 .nonoverlapping_refs_since_match_p_must_overlap;
1598 return 0;
1599 }
1600 ref1 = component_refs1.pop ();
1601 ntbaa1--;
1602 if (TREE_CODE (ref1) != COMPONENT_REF)
1603 {
1604 seen_unmatched_ref_p = true;
1605 if (ntbaa1 < 0 || ntbaa2 < 0)
1606 {
1607 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1608 return -1;
1609 }
1610 }
1611 }
1612 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
1613
1614 do
1615 {
1616 if (component_refs2.is_empty ())
1617 {
1618 ++alias_stats
1619 .nonoverlapping_refs_since_match_p_must_overlap;
1620 return 0;
1621 }
1622 ref2 = component_refs2.pop ();
1623 ntbaa2--;
1624 if (TREE_CODE (ref2) != COMPONENT_REF)
1625 {
1626 if (ntbaa1 < 0 || ntbaa2 < 0)
1627 {
1628 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1629 return -1;
1630 }
1631 seen_unmatched_ref_p = true;
1632 }
1633 }
1634 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
1635
1636 /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
1637 earlier. */
1638 gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
1639 && TREE_CODE (ref2) == COMPONENT_REF);
1640
1641 tree field1 = TREE_OPERAND (ref1, 1);
1642 tree field2 = TREE_OPERAND (ref2, 1);
1643
1644 /* ??? We cannot simply use the type of operand #0 of the refs here
1645 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1646 for common blocks instead of using unions like everyone else. */
1647 tree type1 = DECL_CONTEXT (field1);
1648 tree type2 = DECL_CONTEXT (field2);
1649
1650 partial_overlap = false;
1651
1652 /* If we skipped array refs on types of different sizes, we can
1653 no longer be sure that there are no partial overlaps. */
1654 if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
1655 && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
1656 {
1657 ++alias_stats
1658 .nonoverlapping_refs_since_match_p_may_alias;
1659 return -1;
1660 }
1661
1662 int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
1663 if (cmp == -1)
1664 {
1665 ++alias_stats
1666 .nonoverlapping_refs_since_match_p_may_alias;
1667 return -1;
1668 }
1669 else if (cmp == 1)
1670 {
1671 ++alias_stats
1672 .nonoverlapping_refs_since_match_p_no_alias;
1673 return 1;
1674 }
1675 }
1676
1677 ++alias_stats.nonoverlapping_refs_since_match_p_must_overlap;
1678 return 0;
1679 }
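
/* As an illustration of the tri-state result (a sketch, assuming the
   bases were already proven to be the same):

     struct S { struct { int i; int j; } x, y; } s;

   walking the access paths of s.x.i and s.y.j from the common base `s'
   yields 1 (the paths diverge at the distinct fields x and y, so the
   accesses cannot overlap), walking s.x.i against s.x.i yields 0 (the
   paths match completely, i.e. a must overlap under the assumption about
   the bases), and -1 is returned when the paths contain refs that cannot
   be matched up and type-based reasoning cannot recover, in which case
   other disambiguators may still apply.  */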
1680
1681 /* Return a TYPE_UID which can be used to match record types we consider
1682 the same for TBAA purposes. */
1683
1684 static inline int
1685 ncr_type_uid (const_tree field)
1686 {
1687 /* ??? We cannot simply use the type of operand #0 of the refs here
1688 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1689 for common blocks instead of using unions like everyone else. */
1690 tree type = DECL_FIELD_CONTEXT (field);
1691 /* With LTO, types considered the same by same_type_for_tbaa_p
1692 but coming from different translation units may not have the same
1693 main variant. They however have the same TYPE_CANONICAL. */
1694 if (TYPE_CANONICAL (type))
1695 return TYPE_UID (TYPE_CANONICAL (type));
1696 return TYPE_UID (type);
1697 }
1698
1699 /* qsort compare function to sort FIELD_DECLs by their
1700 DECL_FIELD_CONTEXT TYPE_UID. */
1701
1702 static inline int
1703 ncr_compar (const void *field1_, const void *field2_)
1704 {
1705 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1706 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1707 unsigned int uid1 = ncr_type_uid (field1);
1708 unsigned int uid2 = ncr_type_uid (field2);
1709
1710 if (uid1 < uid2)
1711 return -1;
1712 else if (uid1 > uid2)
1713 return 1;
1714 return 0;
1715 }
1716
1717 /* Return true if we can determine that the fields referenced cannot
1718 overlap for any pair of objects. This relies on TBAA. */
1719
1720 static bool
1721 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1722 {
1723 /* Early return if we have nothing to do.
1724
1725 Do not consider this as may-alias for stats - it is more useful
1726 to have information how many disambiguations happened provided that
1727 the query was meaningful. */
1728 if (!flag_strict_aliasing
1729 || !x || !y
1730 || !handled_component_p (x)
1731 || !handled_component_p (y))
1732 return false;
1733
1734 auto_vec<const_tree, 16> fieldsx;
1735 while (handled_component_p (x))
1736 {
1737 if (TREE_CODE (x) == COMPONENT_REF)
1738 {
1739 tree field = TREE_OPERAND (x, 1);
1740 tree type = DECL_FIELD_CONTEXT (field);
1741 if (TREE_CODE (type) == RECORD_TYPE)
1742 fieldsx.safe_push (field);
1743 }
1744 else if (ends_tbaa_access_path_p (x))
1745 fieldsx.truncate (0);
1746 x = TREE_OPERAND (x, 0);
1747 }
1748 if (fieldsx.length () == 0)
1749 return false;
1750 auto_vec<const_tree, 16> fieldsy;
1751 while (handled_component_p (y))
1752 {
1753 if (TREE_CODE (y) == COMPONENT_REF)
1754 {
1755 tree field = TREE_OPERAND (y, 1);
1756 tree type = DECL_FIELD_CONTEXT (field);
1757 if (TREE_CODE (type) == RECORD_TYPE)
1758 fieldsy.safe_push (TREE_OPERAND (y, 1));
1759 }
1760 else if (ends_tbaa_access_path_p (y))
1761 fieldsy.truncate (0);
1762 y = TREE_OPERAND (y, 0);
1763 }
1764 if (fieldsy.length () == 0)
1765 {
1766 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1767 return false;
1768 }
1769
1770 /* Most common case first. */
1771 if (fieldsx.length () == 1
1772 && fieldsy.length () == 1)
1773 {
1774 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
1775 DECL_FIELD_CONTEXT (fieldsy[0])) == 1
1776 && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1)
1777 {
1778 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1779 return true;
1780 }
1781 else
1782 {
1783 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1784 return false;
1785 }
1786 }
1787
1788 if (fieldsx.length () == 2)
1789 {
1790 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1791 std::swap (fieldsx[0], fieldsx[1]);
1792 }
1793 else
1794 fieldsx.qsort (ncr_compar);
1795
1796 if (fieldsy.length () == 2)
1797 {
1798 if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1799 std::swap (fieldsy[0], fieldsy[1]);
1800 }
1801 else
1802 fieldsy.qsort (ncr_compar);
1803
1804 unsigned i = 0, j = 0;
1805 do
1806 {
1807 const_tree fieldx = fieldsx[i];
1808 const_tree fieldy = fieldsy[j];
1809
1810 /* We're left with accessing different fields of a structure,
1811 no possible overlap. */
1812 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
1813 DECL_FIELD_CONTEXT (fieldy)) == 1
1814 && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1)
1815 {
1816 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1817 return true;
1818 }
1819
1820 if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy))
1821 {
1822 i++;
1823 if (i == fieldsx.length ())
1824 break;
1825 }
1826 else
1827 {
1828 j++;
1829 if (j == fieldsy.length ())
1830 break;
1831 }
1832 }
1833 while (1);
1834
1835 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1836 return false;
1837 }
1838
1839
1840 /* Return true if two memory references based on the variables BASE1
1841 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1842 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1843 if non-NULL are the complete memory reference trees. */
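/* Illustrative sketch: with declarations like

     int a[8], b[8];

   a reference to a[1] and one to b[2] are based on different decls and
   thus cannot alias; a[1] and a[2] share a base decl but their
   [offset, offset + max_size) ranges are disjoint, so they do not
   alias either.  */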
1844
1845 static bool
1846 decl_refs_may_alias_p (tree ref1, tree base1,
1847 poly_int64 offset1, poly_int64 max_size1,
1848 poly_int64 size1,
1849 tree ref2, tree base2,
1850 poly_int64 offset2, poly_int64 max_size2,
1851 poly_int64 size2)
1852 {
1853 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
1854
1855 /* If both references are based on different variables, they cannot alias. */
1856 if (compare_base_decls (base1, base2) == 0)
1857 return false;
1858
1859 /* If both references are based on the same variable, they cannot alias if
1860 the accesses do not overlap. */
1861 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1862 return false;
1863
1864 /* If there is must alias, there is no use disambiguating further. */
1865 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
1866 return true;
1867
1868 /* For components with variable position, the above test isn't sufficient,
1869 so we disambiguate component references manually. */
1870 if (ref1 && ref2
1871 && handled_component_p (ref1) && handled_component_p (ref2)
1872 && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
1873 return false;
1874
1875 return true;
1876 }
1877
1878 /* Return true if an indirect reference based on *PTR1 constrained
1879 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
1880 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
1881 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1882 in which case they are computed on-demand. REF1 and REF2
1883 if non-NULL are the complete memory reference trees. */
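/* Illustrative sketch of the disambiguations done below (assuming
   -fstrict-aliasing and plain C): with

     long l;
     float *q;

   a store to *q cannot clobber l because the alias sets of float and
   long do not conflict; and with

     short s;
     struct big { int x[4]; } *p;

   an access *p cannot touch s because the decl is smaller than the
   type accessed through the pointer.  */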
1884
1885 static bool
1886 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1887 poly_int64 offset1, poly_int64 max_size1,
1888 poly_int64 size1,
1889 alias_set_type ref1_alias_set,
1890 alias_set_type base1_alias_set,
1891 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1892 poly_int64 offset2, poly_int64 max_size2,
1893 poly_int64 size2,
1894 alias_set_type ref2_alias_set,
1895 alias_set_type base2_alias_set, bool tbaa_p)
1896 {
1897 tree ptr1;
1898 tree ptrtype1, dbase2;
1899
1900 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1901 || TREE_CODE (base1) == TARGET_MEM_REF)
1902 && DECL_P (base2));
1903
1904 ptr1 = TREE_OPERAND (base1, 0);
1905 poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
1906
1907 /* If only one reference is based on a variable, they cannot alias if
1908 the pointer access is beyond the extent of the variable access.
1909 (the pointer base cannot validly point to an offset less than zero
1910 of the variable).
1911 ??? IVOPTs creates bases that do not honor this restriction,
1912 so do not apply this optimization for TARGET_MEM_REFs. */
1913 if (TREE_CODE (base1) != TARGET_MEM_REF
1914 && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
1915 return false;
1916 /* They also cannot alias if the pointer may not point to the decl. */
1917 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
1918 return false;
1919
1920 /* Disambiguations that rely on strict aliasing rules follow. */
1921 if (!flag_strict_aliasing || !tbaa_p)
1922 return true;
1923
1924 /* If the alias set for a pointer access is zero all bets are off. */
1925 if (base1_alias_set == 0 || base2_alias_set == 0)
1926 return true;
1927
1928 /* When we are trying to disambiguate an access with a pointer dereference
1929 as base versus one with a decl as base we can use both the size
1930 of the decl and its dynamic type for extra disambiguation.
1931 ??? We do not know anything about the dynamic type of the decl
1932 other than that its alias-set contains base2_alias_set as a subset
1933 which does not help us here. */
1934 /* As we know nothing useful about the dynamic type of the decl just
1935 use the usual conflict check rather than a subset test.
1936 ??? We could introduce -fvery-strict-aliasing when the language
1937 does not allow decls to have a dynamic type that differs from their
1938 static type. Then we can check
1939 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
1940 if (base1_alias_set != base2_alias_set
1941 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1942 return false;
1943
1944 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1945
1946 /* If the size of the access relevant for TBAA through the pointer
1947 is bigger than the size of the decl we can't possibly access the
1948 decl via that pointer. */
1949 if (/* ??? This in turn may run afoul when a decl of type T which is
1950 a member of union type U is accessed through a pointer to
1951 type U and sizeof T is smaller than sizeof U. */
1952 TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
1953 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
1954 && compare_sizes (DECL_SIZE (base2),
1955 TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
1956 return false;
1957
1958 if (!ref2)
1959 return true;
1960
1961 /* If the decl is accessed via a MEM_REF, reconstruct the base
1962 we can use for TBAA and an appropriately adjusted offset. */
1963 dbase2 = ref2;
1964 while (handled_component_p (dbase2))
1965 dbase2 = TREE_OPERAND (dbase2, 0);
1966 poly_int64 doffset1 = offset1;
1967 poly_offset_int doffset2 = offset2;
1968 if (TREE_CODE (dbase2) == MEM_REF
1969 || TREE_CODE (dbase2) == TARGET_MEM_REF)
1970 {
1971 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
1972 tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
1973 /* If second reference is view-converted, give up now. */
1974 if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
1975 return true;
1976 }
1977
1978 /* If first reference is view-converted, give up now. */
1979 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
1980 return true;
1981
1982 /* If both references are through the same type, they do not alias
1983 if the accesses do not overlap. This does extra disambiguation
1984 for mixed/pointer accesses but requires strict aliasing.
1985 For MEM_REFs we require that the component-ref offset we computed
1986 is relative to the start of the type which we ensure by
1987 comparing rvalue and access type and disregarding the constant
1988 pointer offset.
1989
1990 But avoid treating variable length arrays as "objects", instead assume they
1991 can overlap by an exact multiple of their element size.
1992 See gcc.dg/torture/alias-2.c. */
1993 if (((TREE_CODE (base1) != TARGET_MEM_REF
1994 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1995 && (TREE_CODE (dbase2) != TARGET_MEM_REF
1996 || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
1997 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
1998 {
1999 bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
2000 && (TYPE_SIZE (TREE_TYPE (base1))
2001 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
2002 != INTEGER_CST));
2003 if (!partial_overlap
2004 && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
2005 return false;
2006 if (!ref1 || !ref2
2007 /* If there is must alias, there is no use disambiguating further. */
2008 || (!partial_overlap
2009 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2010 return true;
2011 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2012 partial_overlap);
2013 if (res == -1)
2014 return !nonoverlapping_component_refs_p (ref1, ref2);
2015 return !res;
2016 }
2017
2018 /* Do access-path based disambiguation. */
2019 if (ref1 && ref2
2020 && (handled_component_p (ref1) || handled_component_p (ref2)))
2021 return aliasing_component_refs_p (ref1,
2022 ref1_alias_set, base1_alias_set,
2023 offset1, max_size1,
2024 ref2,
2025 ref2_alias_set, base2_alias_set,
2026 offset2, max_size2);
2027
2028 return true;
2029 }
2030
2031 /* Return true if two indirect references based on *PTR1
2032 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2033 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
2034 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2035 in which case they are computed on-demand. REF1 and REF2
2036 if non-NULL are the complete memory reference trees. */
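/* Illustrative sketch: for two dereferences of the same SSA pointer,
   e.g. the accesses p[0] and p[1] for int *p, the bases compare equal
   and the constant MEM_REF offsets yield disjoint ranges, so the
   accesses do not alias; dereferences of unrelatedly typed pointers
   such as int * vs. float * are disambiguated by the TBAA checks
   (alias_sets_conflict_p) under -fstrict-aliasing.  */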
2037
2038 static bool
2039 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2040 poly_int64 offset1, poly_int64 max_size1,
2041 poly_int64 size1,
2042 alias_set_type ref1_alias_set,
2043 alias_set_type base1_alias_set,
2044 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2045 poly_int64 offset2, poly_int64 max_size2,
2046 poly_int64 size2,
2047 alias_set_type ref2_alias_set,
2048 alias_set_type base2_alias_set, bool tbaa_p)
2049 {
2050 tree ptr1;
2051 tree ptr2;
2052 tree ptrtype1, ptrtype2;
2053
2054 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2055 || TREE_CODE (base1) == TARGET_MEM_REF)
2056 && (TREE_CODE (base2) == MEM_REF
2057 || TREE_CODE (base2) == TARGET_MEM_REF));
2058
2059 ptr1 = TREE_OPERAND (base1, 0);
2060 ptr2 = TREE_OPERAND (base2, 0);
2061
2062 /* If both bases are based on pointers they cannot alias if they may not
2063 point to the same memory object or if they point to the same object
2064 and the accesses do not overlap. */
2065 if ((!cfun || gimple_in_ssa_p (cfun))
2066 && operand_equal_p (ptr1, ptr2, 0)
2067 && (((TREE_CODE (base1) != TARGET_MEM_REF
2068 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2069 && (TREE_CODE (base2) != TARGET_MEM_REF
2070 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
2071 || (TREE_CODE (base1) == TARGET_MEM_REF
2072 && TREE_CODE (base2) == TARGET_MEM_REF
2073 && (TMR_STEP (base1) == TMR_STEP (base2)
2074 || (TMR_STEP (base1) && TMR_STEP (base2)
2075 && operand_equal_p (TMR_STEP (base1),
2076 TMR_STEP (base2), 0)))
2077 && (TMR_INDEX (base1) == TMR_INDEX (base2)
2078 || (TMR_INDEX (base1) && TMR_INDEX (base2)
2079 && operand_equal_p (TMR_INDEX (base1),
2080 TMR_INDEX (base2), 0)))
2081 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
2082 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
2083 && operand_equal_p (TMR_INDEX2 (base1),
2084 TMR_INDEX2 (base2), 0))))))
2085 {
2086 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2087 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
2088 if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
2089 offset2 + moff2, max_size2))
2090 return false;
2091 /* If there is must alias, there is no use disambiguating further. */
2092 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2093 return true;
2094 if (ref1 && ref2)
2095 {
2096 int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
2097 false);
2098 if (res != -1)
2099 return !res;
2100 }
2101 }
2102 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
2103 return false;
2104
2105 /* Disambiguations that rely on strict aliasing rules follow. */
2106 if (!flag_strict_aliasing || !tbaa_p)
2107 return true;
2108
2109 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2110 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
2111
2112 /* If the alias set for a pointer access is zero all bets are off. */
2113 if (base1_alias_set == 0
2114 || base2_alias_set == 0)
2115 return true;
2116
2117 /* Do type-based disambiguation. */
2118 if (base1_alias_set != base2_alias_set
2119 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2120 return false;
2121
2122 /* If either reference is view-converted, give up now. */
2123 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
2124 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
2125 return true;
2126
2127 /* If both references are through the same type, they do not alias
2128 if the accesses do not overlap. This does extra disambiguation
2129 for mixed/pointer accesses but requires strict aliasing. */
2130 if ((TREE_CODE (base1) != TARGET_MEM_REF
2131 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2132 && (TREE_CODE (base2) != TARGET_MEM_REF
2133 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
2134 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
2135 TREE_TYPE (ptrtype2)) == 1)
2136 {
2137 /* But avoid treating arrays as "objects", instead assume they
2138 can overlap by an exact multiple of their element size.
2139 See gcc.dg/torture/alias-2.c. */
2140 bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
2141
2142 if (!partial_overlap
2143 && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
2144 return false;
2145 if (!ref1 || !ref2
2146 || (!partial_overlap
2147 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2148 return true;
2149 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2150 partial_overlap);
2151 if (res == -1)
2152 return !nonoverlapping_component_refs_p (ref1, ref2);
2153 return !res;
2154 }
2155
2156 /* Do access-path based disambiguation. */
2157 if (ref1 && ref2
2158 && (handled_component_p (ref1) || handled_component_p (ref2)))
2159 return aliasing_component_refs_p (ref1,
2160 ref1_alias_set, base1_alias_set,
2161 offset1, max_size1,
2162 ref2,
2163 ref2_alias_set, base2_alias_set,
2164 offset2, max_size2);
2165
2166 return true;
2167 }
2168
2169 /* Return true, if the two memory references REF1 and REF2 may alias. */
2170
2171 static bool
2172 refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2173 {
2174 tree base1, base2;
2175 poly_int64 offset1 = 0, offset2 = 0;
2176 poly_int64 max_size1 = -1, max_size2 = -1;
2177 bool var1_p, var2_p, ind1_p, ind2_p;
2178
2179 gcc_checking_assert ((!ref1->ref
2180 || TREE_CODE (ref1->ref) == SSA_NAME
2181 || DECL_P (ref1->ref)
2182 || TREE_CODE (ref1->ref) == STRING_CST
2183 || handled_component_p (ref1->ref)
2184 || TREE_CODE (ref1->ref) == MEM_REF
2185 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
2186 && (!ref2->ref
2187 || TREE_CODE (ref2->ref) == SSA_NAME
2188 || DECL_P (ref2->ref)
2189 || TREE_CODE (ref2->ref) == STRING_CST
2190 || handled_component_p (ref2->ref)
2191 || TREE_CODE (ref2->ref) == MEM_REF
2192 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
2193
2194 /* Decompose the references into their base objects and the access. */
2195 base1 = ao_ref_base (ref1);
2196 offset1 = ref1->offset;
2197 max_size1 = ref1->max_size;
2198 base2 = ao_ref_base (ref2);
2199 offset2 = ref2->offset;
2200 max_size2 = ref2->max_size;
2201
2202 /* We can end up with registers or constants as bases for example from
2203 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
2204 which is seen as a struct copy. */
2205 if (TREE_CODE (base1) == SSA_NAME
2206 || TREE_CODE (base1) == CONST_DECL
2207 || TREE_CODE (base1) == CONSTRUCTOR
2208 || TREE_CODE (base1) == ADDR_EXPR
2209 || CONSTANT_CLASS_P (base1)
2210 || TREE_CODE (base2) == SSA_NAME
2211 || TREE_CODE (base2) == CONST_DECL
2212 || TREE_CODE (base2) == CONSTRUCTOR
2213 || TREE_CODE (base2) == ADDR_EXPR
2214 || CONSTANT_CLASS_P (base2))
2215 return false;
2216
2217 /* We can end up referring to code via function and label decls.
2218      As we likely do not properly track code aliases, conservatively
2219 bail out. */
2220 if (TREE_CODE (base1) == FUNCTION_DECL
2221 || TREE_CODE (base1) == LABEL_DECL
2222 || TREE_CODE (base2) == FUNCTION_DECL
2223 || TREE_CODE (base2) == LABEL_DECL)
2224 return true;
2225
2226 /* Two volatile accesses always conflict. */
2227 if (ref1->volatile_p
2228 && ref2->volatile_p)
2229 return true;
2230
2231 /* Defer to simple offset based disambiguation if we have
2232      references based on two decls.  Do this before deferring to
2233 TBAA to handle must-alias cases in conformance with the
2234 GCC extension of allowing type-punning through unions. */
2235 var1_p = DECL_P (base1);
2236 var2_p = DECL_P (base2);
2237 if (var1_p && var2_p)
2238 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
2239 ref1->size,
2240 ref2->ref, base2, offset2, max_size2,
2241 ref2->size);
2242
2243 /* Handle restrict based accesses.
2244 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
2245 here. */
2246 tree rbase1 = base1;
2247 tree rbase2 = base2;
2248 if (var1_p)
2249 {
2250 rbase1 = ref1->ref;
2251 if (rbase1)
2252 while (handled_component_p (rbase1))
2253 rbase1 = TREE_OPERAND (rbase1, 0);
2254 }
2255 if (var2_p)
2256 {
2257 rbase2 = ref2->ref;
2258 if (rbase2)
2259 while (handled_component_p (rbase2))
2260 rbase2 = TREE_OPERAND (rbase2, 0);
2261 }
2262 if (rbase1 && rbase2
2263 && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
2264 && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
2265 /* If the accesses are in the same restrict clique... */
2266 && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
2267 /* But based on different pointers they do not alias. */
2268 && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
2269 return false;
2270
2271 ind1_p = (TREE_CODE (base1) == MEM_REF
2272 || TREE_CODE (base1) == TARGET_MEM_REF);
2273 ind2_p = (TREE_CODE (base2) == MEM_REF
2274 || TREE_CODE (base2) == TARGET_MEM_REF);
2275
2276 /* Canonicalize the pointer-vs-decl case. */
2277 if (ind1_p && var2_p)
2278 {
2279 std::swap (offset1, offset2);
2280 std::swap (max_size1, max_size2);
2281 std::swap (base1, base2);
2282 std::swap (ref1, ref2);
2283 var1_p = true;
2284 ind1_p = false;
2285 var2_p = false;
2286 ind2_p = true;
2287 }
2288
2289 /* First defer to TBAA if possible. */
2290 if (tbaa_p
2291 && flag_strict_aliasing
2292 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
2293 ao_ref_alias_set (ref2)))
2294 return false;
2295
2296 /* If the reference is based on a pointer that points to memory
2297 that may not be written to then the other reference cannot possibly
2298 clobber it. */
2299 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
2300 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
2301 || (ind1_p
2302 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
2303 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
2304 return false;
2305
2306 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
2307 if (var1_p && ind2_p)
2308 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
2309 offset2, max_size2, ref2->size,
2310 ao_ref_alias_set (ref2),
2311 ao_ref_base_alias_set (ref2),
2312 ref1->ref, base1,
2313 offset1, max_size1, ref1->size,
2314 ao_ref_alias_set (ref1),
2315 ao_ref_base_alias_set (ref1),
2316 tbaa_p);
2317 else if (ind1_p && ind2_p)
2318 return indirect_refs_may_alias_p (ref1->ref, base1,
2319 offset1, max_size1, ref1->size,
2320 ao_ref_alias_set (ref1),
2321 ao_ref_base_alias_set (ref1),
2322 ref2->ref, base2,
2323 offset2, max_size2, ref2->size,
2324 ao_ref_alias_set (ref2),
2325 ao_ref_base_alias_set (ref2),
2326 tbaa_p);
2327
2328 gcc_unreachable ();
2329 }
2330
2331 /* Return true, if the two memory references REF1 and REF2 may alias
2332 and update statistics. */
2333
2334 bool
2335 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2336 {
2337 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
2338 if (res)
2339 ++alias_stats.refs_may_alias_p_may_alias;
2340 else
2341 ++alias_stats.refs_may_alias_p_no_alias;
2342 return res;
2343 }
2344
2345 static bool
2346 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
2347 {
2348 ao_ref r1;
2349 ao_ref_init (&r1, ref1);
2350 return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
2351 }
2352
2353 bool
2354 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
2355 {
2356 ao_ref r1, r2;
2357 ao_ref_init (&r1, ref1);
2358 ao_ref_init (&r2, ref2);
2359 return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
2360 }
2361
2362 /* Returns true if there is an anti-dependence for the STORE that
2363 executes after the LOAD. */
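/* Illustrative sketch: for

     tmp = *p;   <-- the LOAD
     *q = ...;   <-- the STORE executing afterwards

   there is an anti-dependence whenever *p and *q may alias.  Note the
   query below passes tbaa_p == false, so type-based rules are not used
   for this dependence check.  */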
2364
2365 bool
2366 refs_anti_dependent_p (tree load, tree store)
2367 {
2368 ao_ref r1, r2;
2369 ao_ref_init (&r1, load);
2370 ao_ref_init (&r2, store);
2371 return refs_may_alias_p_1 (&r1, &r2, false);
2372 }
2373
2374 /* Returns true if there is an output dependence for the stores
2375 STORE1 and STORE2. */
2376
2377 bool
2378 refs_output_dependent_p (tree store1, tree store2)
2379 {
2380 ao_ref r1, r2;
2381 ao_ref_init (&r1, store1);
2382 ao_ref_init (&r2, store2);
2383 return refs_may_alias_p_1 (&r1, &r2, false);
2384 }
2385
2386 /* If the call CALL may use the memory reference REF return true,
2387 otherwise return false. */
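/* Illustrative sketch: for a call like memcpy (d, s, n) only the bytes
   in [s, s + n) are read, so a reference that provably does not
   overlap that range is not used by the call; a const function without
   a static chain reads no memory at all, and then only its by-value
   aggregate arguments need to be inspected.  */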
2388
2389 static bool
2390 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2391 {
2392 tree base, callee;
2393 unsigned i;
2394 int flags = gimple_call_flags (call);
2395
2396 /* Const functions without a static chain do not implicitly use memory. */
2397 if (!gimple_call_chain (call)
2398 && (flags & (ECF_CONST|ECF_NOVOPS)))
2399 goto process_args;
2400
2401 base = ao_ref_base (ref);
2402 if (!base)
2403 return true;
2404
2405 /* A call that is not without side-effects might involve volatile
2406 accesses and thus conflicts with all other volatile accesses. */
2407 if (ref->volatile_p)
2408 return true;
2409
2410 /* If the reference is based on a decl that is not aliased the call
2411 cannot possibly use it. */
2412 if (DECL_P (base)
2413 && !may_be_aliased (base)
2414 /* But local statics can be used through recursion. */
2415 && !is_global_var (base))
2416 goto process_args;
2417
2418 callee = gimple_call_fndecl (call);
2419
2420 /* Handle those builtin functions explicitly that do not act as
2421 escape points. See tree-ssa-structalias.c:find_func_aliases
2422 for the list of builtins we might need to handle here. */
2423 if (callee != NULL_TREE
2424 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2425 switch (DECL_FUNCTION_CODE (callee))
2426 {
2427 /* All the following functions read memory pointed to by
2428 their second argument. strcat/strncat additionally
2429 reads memory pointed to by the first argument. */
2430 case BUILT_IN_STRCAT:
2431 case BUILT_IN_STRNCAT:
2432 {
2433 ao_ref dref;
2434 ao_ref_init_from_ptr_and_size (&dref,
2435 gimple_call_arg (call, 0),
2436 NULL_TREE);
2437 if (refs_may_alias_p_1 (&dref, ref, false))
2438 return true;
2439 }
2440 /* FALLTHRU */
2441 case BUILT_IN_STRCPY:
2442 case BUILT_IN_STRNCPY:
2443 case BUILT_IN_MEMCPY:
2444 case BUILT_IN_MEMMOVE:
2445 case BUILT_IN_MEMPCPY:
2446 case BUILT_IN_STPCPY:
2447 case BUILT_IN_STPNCPY:
2448 case BUILT_IN_TM_MEMCPY:
2449 case BUILT_IN_TM_MEMMOVE:
2450 {
2451 ao_ref dref;
2452 tree size = NULL_TREE;
2453 if (gimple_call_num_args (call) == 3)
2454 size = gimple_call_arg (call, 2);
2455 ao_ref_init_from_ptr_and_size (&dref,
2456 gimple_call_arg (call, 1),
2457 size);
2458 return refs_may_alias_p_1 (&dref, ref, false);
2459 }
2460 case BUILT_IN_STRCAT_CHK:
2461 case BUILT_IN_STRNCAT_CHK:
2462 {
2463 ao_ref dref;
2464 ao_ref_init_from_ptr_and_size (&dref,
2465 gimple_call_arg (call, 0),
2466 NULL_TREE);
2467 if (refs_may_alias_p_1 (&dref, ref, false))
2468 return true;
2469 }
2470 /* FALLTHRU */
2471 case BUILT_IN_STRCPY_CHK:
2472 case BUILT_IN_STRNCPY_CHK:
2473 case BUILT_IN_MEMCPY_CHK:
2474 case BUILT_IN_MEMMOVE_CHK:
2475 case BUILT_IN_MEMPCPY_CHK:
2476 case BUILT_IN_STPCPY_CHK:
2477 case BUILT_IN_STPNCPY_CHK:
2478 {
2479 ao_ref dref;
2480 tree size = NULL_TREE;
2481 if (gimple_call_num_args (call) == 4)
2482 size = gimple_call_arg (call, 2);
2483 ao_ref_init_from_ptr_and_size (&dref,
2484 gimple_call_arg (call, 1),
2485 size);
2486 return refs_may_alias_p_1 (&dref, ref, false);
2487 }
2488 case BUILT_IN_BCOPY:
2489 {
2490 ao_ref dref;
2491 tree size = gimple_call_arg (call, 2);
2492 ao_ref_init_from_ptr_and_size (&dref,
2493 gimple_call_arg (call, 0),
2494 size);
2495 return refs_may_alias_p_1 (&dref, ref, false);
2496 }
2497
2498 /* The following functions read memory pointed to by their
2499 first argument. */
2500 CASE_BUILT_IN_TM_LOAD (1):
2501 CASE_BUILT_IN_TM_LOAD (2):
2502 CASE_BUILT_IN_TM_LOAD (4):
2503 CASE_BUILT_IN_TM_LOAD (8):
2504 CASE_BUILT_IN_TM_LOAD (FLOAT):
2505 CASE_BUILT_IN_TM_LOAD (DOUBLE):
2506 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
2507 CASE_BUILT_IN_TM_LOAD (M64):
2508 CASE_BUILT_IN_TM_LOAD (M128):
2509 CASE_BUILT_IN_TM_LOAD (M256):
2510 case BUILT_IN_TM_LOG:
2511 case BUILT_IN_TM_LOG_1:
2512 case BUILT_IN_TM_LOG_2:
2513 case BUILT_IN_TM_LOG_4:
2514 case BUILT_IN_TM_LOG_8:
2515 case BUILT_IN_TM_LOG_FLOAT:
2516 case BUILT_IN_TM_LOG_DOUBLE:
2517 case BUILT_IN_TM_LOG_LDOUBLE:
2518 case BUILT_IN_TM_LOG_M64:
2519 case BUILT_IN_TM_LOG_M128:
2520 case BUILT_IN_TM_LOG_M256:
2521 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
2522
2523 /* These read memory pointed to by the first argument. */
2524 case BUILT_IN_STRDUP:
2525 case BUILT_IN_STRNDUP:
2526 case BUILT_IN_REALLOC:
2527 {
2528 ao_ref dref;
2529 tree size = NULL_TREE;
2530 if (gimple_call_num_args (call) == 2)
2531 size = gimple_call_arg (call, 1);
2532 ao_ref_init_from_ptr_and_size (&dref,
2533 gimple_call_arg (call, 0),
2534 size);
2535 return refs_may_alias_p_1 (&dref, ref, false);
2536 }
2537 /* These read memory pointed to by the first argument. */
2538 case BUILT_IN_INDEX:
2539 case BUILT_IN_STRCHR:
2540 case BUILT_IN_STRRCHR:
2541 {
2542 ao_ref dref;
2543 ao_ref_init_from_ptr_and_size (&dref,
2544 gimple_call_arg (call, 0),
2545 NULL_TREE);
2546 return refs_may_alias_p_1 (&dref, ref, false);
2547 }
2548 /* These read memory pointed to by the first argument with size
2549 in the third argument. */
2550 case BUILT_IN_MEMCHR:
2551 {
2552 ao_ref dref;
2553 ao_ref_init_from_ptr_and_size (&dref,
2554 gimple_call_arg (call, 0),
2555 gimple_call_arg (call, 2));
2556 return refs_may_alias_p_1 (&dref, ref, false);
2557 }
2558 /* These read memory pointed to by the first and second arguments. */
2559 case BUILT_IN_STRSTR:
2560 case BUILT_IN_STRPBRK:
2561 {
2562 ao_ref dref;
2563 ao_ref_init_from_ptr_and_size (&dref,
2564 gimple_call_arg (call, 0),
2565 NULL_TREE);
2566 if (refs_may_alias_p_1 (&dref, ref, false))
2567 return true;
2568 ao_ref_init_from_ptr_and_size (&dref,
2569 gimple_call_arg (call, 1),
2570 NULL_TREE);
2571 return refs_may_alias_p_1 (&dref, ref, false);
2572 }
2573
2574 /* The following builtins do not read from memory. */
2575 case BUILT_IN_FREE:
2576 case BUILT_IN_MALLOC:
2577 case BUILT_IN_POSIX_MEMALIGN:
2578 case BUILT_IN_ALIGNED_ALLOC:
2579 case BUILT_IN_CALLOC:
2580 CASE_BUILT_IN_ALLOCA:
2581 case BUILT_IN_STACK_SAVE:
2582 case BUILT_IN_STACK_RESTORE:
2583 case BUILT_IN_MEMSET:
2584 case BUILT_IN_TM_MEMSET:
2585 case BUILT_IN_MEMSET_CHK:
2586 case BUILT_IN_FREXP:
2587 case BUILT_IN_FREXPF:
2588 case BUILT_IN_FREXPL:
2589 case BUILT_IN_GAMMA_R:
2590 case BUILT_IN_GAMMAF_R:
2591 case BUILT_IN_GAMMAL_R:
2592 case BUILT_IN_LGAMMA_R:
2593 case BUILT_IN_LGAMMAF_R:
2594 case BUILT_IN_LGAMMAL_R:
2595 case BUILT_IN_MODF:
2596 case BUILT_IN_MODFF:
2597 case BUILT_IN_MODFL:
2598 case BUILT_IN_REMQUO:
2599 case BUILT_IN_REMQUOF:
2600 case BUILT_IN_REMQUOL:
2601 case BUILT_IN_SINCOS:
2602 case BUILT_IN_SINCOSF:
2603 case BUILT_IN_SINCOSL:
2604 case BUILT_IN_ASSUME_ALIGNED:
2605 case BUILT_IN_VA_END:
2606 return false;
2607 /* __sync_* builtins and some OpenMP builtins act as threading
2608 barriers. */
2609 #undef DEF_SYNC_BUILTIN
2610 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2611 #include "sync-builtins.def"
2612 #undef DEF_SYNC_BUILTIN
2613 case BUILT_IN_GOMP_ATOMIC_START:
2614 case BUILT_IN_GOMP_ATOMIC_END:
2615 case BUILT_IN_GOMP_BARRIER:
2616 case BUILT_IN_GOMP_BARRIER_CANCEL:
2617 case BUILT_IN_GOMP_TASKWAIT:
2618 case BUILT_IN_GOMP_TASKGROUP_END:
2619 case BUILT_IN_GOMP_CRITICAL_START:
2620 case BUILT_IN_GOMP_CRITICAL_END:
2621 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2622 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2623 case BUILT_IN_GOMP_LOOP_END:
2624 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2625 case BUILT_IN_GOMP_ORDERED_START:
2626 case BUILT_IN_GOMP_ORDERED_END:
2627 case BUILT_IN_GOMP_SECTIONS_END:
2628 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2629 case BUILT_IN_GOMP_SINGLE_COPY_START:
2630 case BUILT_IN_GOMP_SINGLE_COPY_END:
2631 return true;
2632
2633 default:
2634 /* Fallthru to general call handling. */;
2635 }
2636
2637 /* Check if base is a global static variable that is not read
2638 by the function. */
2639 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2640 {
2641 struct cgraph_node *node = cgraph_node::get (callee);
2642 bitmap read;
2643 int id;
2644
2645 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2646 node yet. We should enforce that there are nodes for all decls in the
2647 IL and remove this check instead. */
2648 if (node
2649 && (id = ipa_reference_var_uid (base)) != -1
2650 && (read = ipa_reference_get_read_global (node))
2651 && !bitmap_bit_p (read, id))
2652 goto process_args;
2653 }
2654
2655 /* Check if the base variable is call-used. */
2656 if (DECL_P (base))
2657 {
2658 if (pt_solution_includes (gimple_call_use_set (call), base))
2659 return true;
2660 }
2661 else if ((TREE_CODE (base) == MEM_REF
2662 || TREE_CODE (base) == TARGET_MEM_REF)
2663 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2664 {
2665 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2666 if (!pi)
2667 return true;
2668
2669 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
2670 return true;
2671 }
2672 else
2673 return true;
2674
2675 /* Inspect call arguments for passed-by-value aliases. */
2676 process_args:
2677 for (i = 0; i < gimple_call_num_args (call); ++i)
2678 {
2679 tree op = gimple_call_arg (call, i);
2680 int flags = gimple_call_arg_flags (call, i);
2681
2682 if (flags & EAF_UNUSED)
2683 continue;
2684
2685 if (TREE_CODE (op) == WITH_SIZE_EXPR)
2686 op = TREE_OPERAND (op, 0);
2687
2688 if (TREE_CODE (op) != SSA_NAME
2689 && !is_gimple_min_invariant (op))
2690 {
2691 ao_ref r;
2692 ao_ref_init (&r, op);
2693 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
2694 return true;
2695 }
2696 }
2697
2698 return false;
2699 }
2700
2701 static bool
2702 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2703 {
2704 bool res;
2705 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2706 if (res)
2707 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2708 else
2709 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2710 return res;
2711 }
2712
2713
2714 /* If the statement STMT may use the memory reference REF return
2715 true, otherwise return false. */
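/* Illustrative sketch: an assignment x_1 = *p uses whatever *p may
   refer to, so the query reduces to refs_may_alias_p on the RHS; a
   GIMPLE return statement additionally acts as a use of REF when REF
   escapes the function, i.e. its base is a global or a dereference of
   a pointer that may point to global memory.  */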
2716
2717 bool
2718 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2719 {
2720 if (is_gimple_assign (stmt))
2721 {
2722 tree rhs;
2723
2724 /* All memory assign statements are single. */
2725 if (!gimple_assign_single_p (stmt))
2726 return false;
2727
2728 rhs = gimple_assign_rhs1 (stmt);
2729 if (is_gimple_reg (rhs)
2730 || is_gimple_min_invariant (rhs)
2731 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2732 return false;
2733
2734 return refs_may_alias_p (rhs, ref, tbaa_p);
2735 }
2736 else if (is_gimple_call (stmt))
2737 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2738 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2739 {
2740 tree retval = gimple_return_retval (return_stmt);
2741 if (retval
2742 && TREE_CODE (retval) != SSA_NAME
2743 && !is_gimple_min_invariant (retval)
2744 && refs_may_alias_p (retval, ref, tbaa_p))
2745 return true;
2746 /* If ref escapes the function then the return acts as a use. */
2747 tree base = ao_ref_base (ref);
2748 if (!base)
2749 ;
2750 else if (DECL_P (base))
2751 return is_global_var (base);
2752 else if (TREE_CODE (base) == MEM_REF
2753 || TREE_CODE (base) == TARGET_MEM_REF)
2754 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
2755 return false;
2756 }
2757
2758 return true;
2759 }
2760
2761 bool
2762 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
2763 {
2764 ao_ref r;
2765 ao_ref_init (&r, ref);
2766 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
2767 }
2768
2769 /* If the call in statement CALL may clobber the memory reference REF
2770 return true, otherwise return false. */
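/* Illustrative sketch: for memset (d, 0, n) only the bytes in
   [d, d + n) are written, so a reference that does not overlap that
   range is not clobbered; malloc and friends clobber nothing visible
   except possibly errno; for an arbitrary call the points-to based
   call-clobber set decides.  */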
2771
2772 bool
2773 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
2774 {
2775 tree base;
2776 tree callee;
2777
2778 /* If the call is pure or const it cannot clobber anything. */
2779 if (gimple_call_flags (call)
2780 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
2781 return false;
2782 if (gimple_call_internal_p (call))
2783 switch (gimple_call_internal_fn (call))
2784 {
2785 /* Treat these internal calls like ECF_PURE for aliasing,
2786 they don't write to any memory the program should care about.
2787 They have important other side-effects, and read memory,
2788 so can't be ECF_NOVOPS. */
2789 case IFN_UBSAN_NULL:
2790 case IFN_UBSAN_BOUNDS:
2791 case IFN_UBSAN_VPTR:
2792 case IFN_UBSAN_OBJECT_SIZE:
2793 case IFN_UBSAN_PTR:
2794 case IFN_ASAN_CHECK:
2795 return false;
2796 default:
2797 break;
2798 }
2799
2800 base = ao_ref_base (ref);
2801 if (!base)
2802 return true;
2803
2804 if (TREE_CODE (base) == SSA_NAME
2805 || CONSTANT_CLASS_P (base))
2806 return false;
2807
2808 /* A call that is not without side-effects might involve volatile
2809 accesses and thus conflicts with all other volatile accesses. */
2810 if (ref->volatile_p)
2811 return true;
2812
2813 /* If the reference is based on a decl that is not aliased the call
2814 cannot possibly clobber it. */
2815 if (DECL_P (base)
2816 && !may_be_aliased (base)
2817 /* But local non-readonly statics can be modified through recursion
2818 or the call may implement a threading barrier which we must
2819 treat as may-def. */
2820 && (TREE_READONLY (base)
2821 || !is_global_var (base)))
2822 return false;
2823
2824 /* If the reference is based on a pointer that points to memory
2825 that may not be written to then the call cannot possibly clobber it. */
2826 if ((TREE_CODE (base) == MEM_REF
2827 || TREE_CODE (base) == TARGET_MEM_REF)
2828 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
2829 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
2830 return false;
2831
2832 callee = gimple_call_fndecl (call);
2833
2834 /* Handle those builtin functions explicitly that do not act as
2835 escape points. See tree-ssa-structalias.c:find_func_aliases
2836 for the list of builtins we might need to handle here. */
2837 if (callee != NULL_TREE
2838 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2839 switch (DECL_FUNCTION_CODE (callee))
2840 {
2841 /* All the following functions clobber memory pointed to by
2842 their first argument. */
2843 case BUILT_IN_STRCPY:
2844 case BUILT_IN_STRNCPY:
2845 case BUILT_IN_MEMCPY:
2846 case BUILT_IN_MEMMOVE:
2847 case BUILT_IN_MEMPCPY:
2848 case BUILT_IN_STPCPY:
2849 case BUILT_IN_STPNCPY:
2850 case BUILT_IN_STRCAT:
2851 case BUILT_IN_STRNCAT:
2852 case BUILT_IN_MEMSET:
2853 case BUILT_IN_TM_MEMSET:
2854 CASE_BUILT_IN_TM_STORE (1):
2855 CASE_BUILT_IN_TM_STORE (2):
2856 CASE_BUILT_IN_TM_STORE (4):
2857 CASE_BUILT_IN_TM_STORE (8):
2858 CASE_BUILT_IN_TM_STORE (FLOAT):
2859 CASE_BUILT_IN_TM_STORE (DOUBLE):
2860 CASE_BUILT_IN_TM_STORE (LDOUBLE):
2861 CASE_BUILT_IN_TM_STORE (M64):
2862 CASE_BUILT_IN_TM_STORE (M128):
2863 CASE_BUILT_IN_TM_STORE (M256):
2864 case BUILT_IN_TM_MEMCPY:
2865 case BUILT_IN_TM_MEMMOVE:
2866 {
2867 ao_ref dref;
2868 tree size = NULL_TREE;
2869 	  /* Don't pass in the size for strncat, as the region written
2870 	     is strlen (dest) + n + 1 bytes at dest rather than n bytes,
2871 	     i.e. n + 1 bytes starting at dest + strlen (dest), and
2872 	     strlen (dest) isn't known.  */
2873 if (gimple_call_num_args (call) == 3
2874 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
2875 size = gimple_call_arg (call, 2);
2876 ao_ref_init_from_ptr_and_size (&dref,
2877 gimple_call_arg (call, 0),
2878 size);
2879 return refs_may_alias_p_1 (&dref, ref, false);
2880 }
2881 case BUILT_IN_STRCPY_CHK:
2882 case BUILT_IN_STRNCPY_CHK:
2883 case BUILT_IN_MEMCPY_CHK:
2884 case BUILT_IN_MEMMOVE_CHK:
2885 case BUILT_IN_MEMPCPY_CHK:
2886 case BUILT_IN_STPCPY_CHK:
2887 case BUILT_IN_STPNCPY_CHK:
2888 case BUILT_IN_STRCAT_CHK:
2889 case BUILT_IN_STRNCAT_CHK:
2890 case BUILT_IN_MEMSET_CHK:
2891 {
2892 ao_ref dref;
2893 tree size = NULL_TREE;
2894 	  /* Don't pass in the size for __strncat_chk, as the region written
2895 	     is strlen (dest) + n + 1 bytes at dest rather than n bytes,
2896 	     i.e. n + 1 bytes starting at dest + strlen (dest), and
2897 	     strlen (dest) isn't known.  */
2898 if (gimple_call_num_args (call) == 4
2899 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
2900 size = gimple_call_arg (call, 2);
2901 ao_ref_init_from_ptr_and_size (&dref,
2902 gimple_call_arg (call, 0),
2903 size);
2904 return refs_may_alias_p_1 (&dref, ref, false);
2905 }
2906 case BUILT_IN_BCOPY:
2907 {
2908 ao_ref dref;
2909 tree size = gimple_call_arg (call, 2);
2910 ao_ref_init_from_ptr_and_size (&dref,
2911 gimple_call_arg (call, 1),
2912 size);
2913 return refs_may_alias_p_1 (&dref, ref, false);
2914 }
2915 /* Allocating memory does not have any side-effects apart from
2916 being the definition point for the pointer. */
2917 case BUILT_IN_MALLOC:
2918 case BUILT_IN_ALIGNED_ALLOC:
2919 case BUILT_IN_CALLOC:
2920 case BUILT_IN_STRDUP:
2921 case BUILT_IN_STRNDUP:
2922 /* Unix98 specifies that errno is set on allocation failure. */
2923 if (flag_errno_math
2924 && targetm.ref_may_alias_errno (ref))
2925 return true;
2926 return false;
2927 case BUILT_IN_STACK_SAVE:
2928 CASE_BUILT_IN_ALLOCA:
2929 case BUILT_IN_ASSUME_ALIGNED:
2930 return false;
2931 /* But posix_memalign stores a pointer into the memory pointed to
2932 by its first argument. */
2933 case BUILT_IN_POSIX_MEMALIGN:
2934 {
2935 tree ptrptr = gimple_call_arg (call, 0);
2936 ao_ref dref;
2937 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2938 TYPE_SIZE_UNIT (ptr_type_node));
2939 return (refs_may_alias_p_1 (&dref, ref, false)
2940 || (flag_errno_math
2941 && targetm.ref_may_alias_errno (ref)));
2942 }
2943 /* Freeing memory kills the pointed-to memory. More importantly
2944 the call has to serve as a barrier for moving loads and stores
2945 across it. */
2946 case BUILT_IN_FREE:
2947 case BUILT_IN_VA_END:
2948 {
2949 tree ptr = gimple_call_arg (call, 0);
2950 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2951 }
2952 /* Realloc serves both as allocation point and deallocation point. */
2953 case BUILT_IN_REALLOC:
2954 {
2955 tree ptr = gimple_call_arg (call, 0);
2956 /* Unix98 specifies that errno is set on allocation failure. */
2957 return ((flag_errno_math
2958 && targetm.ref_may_alias_errno (ref))
2959 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2960 }
2961 case BUILT_IN_GAMMA_R:
2962 case BUILT_IN_GAMMAF_R:
2963 case BUILT_IN_GAMMAL_R:
2964 case BUILT_IN_LGAMMA_R:
2965 case BUILT_IN_LGAMMAF_R:
2966 case BUILT_IN_LGAMMAL_R:
2967 {
2968 tree out = gimple_call_arg (call, 1);
2969 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2970 return true;
2971 if (flag_errno_math)
2972 break;
2973 return false;
2974 }
2975 case BUILT_IN_FREXP:
2976 case BUILT_IN_FREXPF:
2977 case BUILT_IN_FREXPL:
2978 case BUILT_IN_MODF:
2979 case BUILT_IN_MODFF:
2980 case BUILT_IN_MODFL:
2981 {
2982 tree out = gimple_call_arg (call, 1);
2983 return ptr_deref_may_alias_ref_p_1 (out, ref);
2984 }
2985 case BUILT_IN_REMQUO:
2986 case BUILT_IN_REMQUOF:
2987 case BUILT_IN_REMQUOL:
2988 {
2989 tree out = gimple_call_arg (call, 2);
2990 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2991 return true;
2992 if (flag_errno_math)
2993 break;
2994 return false;
2995 }
2996 case BUILT_IN_SINCOS:
2997 case BUILT_IN_SINCOSF:
2998 case BUILT_IN_SINCOSL:
2999 {
3000 tree sin = gimple_call_arg (call, 1);
3001 tree cos = gimple_call_arg (call, 2);
3002 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
3003 || ptr_deref_may_alias_ref_p_1 (cos, ref));
3004 }
3005 /* __sync_* builtins and some OpenMP builtins act as threading
3006 barriers. */
3007 #undef DEF_SYNC_BUILTIN
3008 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
3009 #include "sync-builtins.def"
3010 #undef DEF_SYNC_BUILTIN
3011 case BUILT_IN_GOMP_ATOMIC_START:
3012 case BUILT_IN_GOMP_ATOMIC_END:
3013 case BUILT_IN_GOMP_BARRIER:
3014 case BUILT_IN_GOMP_BARRIER_CANCEL:
3015 case BUILT_IN_GOMP_TASKWAIT:
3016 case BUILT_IN_GOMP_TASKGROUP_END:
3017 case BUILT_IN_GOMP_CRITICAL_START:
3018 case BUILT_IN_GOMP_CRITICAL_END:
3019 case BUILT_IN_GOMP_CRITICAL_NAME_START:
3020 case BUILT_IN_GOMP_CRITICAL_NAME_END:
3021 case BUILT_IN_GOMP_LOOP_END:
3022 case BUILT_IN_GOMP_LOOP_END_CANCEL:
3023 case BUILT_IN_GOMP_ORDERED_START:
3024 case BUILT_IN_GOMP_ORDERED_END:
3025 case BUILT_IN_GOMP_SECTIONS_END:
3026 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
3027 case BUILT_IN_GOMP_SINGLE_COPY_START:
3028 case BUILT_IN_GOMP_SINGLE_COPY_END:
3029 return true;
3030 default:
3031 /* Fallthru to general call handling. */;
3032 }
3033
3034 /* Check if base is a global static variable that is not written
3035 by the function. */
3036 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
3037 {
3038 struct cgraph_node *node = cgraph_node::get (callee);
3039 bitmap written;
3040 int id;
3041
3042 if (node
3043 && (id = ipa_reference_var_uid (base)) != -1
3044 && (written = ipa_reference_get_written_global (node))
3045 && !bitmap_bit_p (written, id))
3046 return false;
3047 }
3048
3049 /* Check if the base variable is call-clobbered. */
3050 if (DECL_P (base))
3051 return pt_solution_includes (gimple_call_clobber_set (call), base);
3052 else if ((TREE_CODE (base) == MEM_REF
3053 || TREE_CODE (base) == TARGET_MEM_REF)
3054 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3055 {
3056 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
3057 if (!pi)
3058 return true;
3059
3060 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
3061 }
3062
3063 return true;
3064 }
3065
3066 /* If the call in statement CALL may clobber the memory reference REF
3067 return true, otherwise return false. */
3068
3069 bool
3070 call_may_clobber_ref_p (gcall *call, tree ref)
3071 {
3072 bool res;
3073 ao_ref r;
3074 ao_ref_init (&r, ref);
3075 res = call_may_clobber_ref_p_1 (call, &r);
3076 if (res)
3077 ++alias_stats.call_may_clobber_ref_p_may_alias;
3078 else
3079 ++alias_stats.call_may_clobber_ref_p_no_alias;
3080 return res;
3081 }
3082
3083
3084 /* If the statement STMT may clobber the memory reference REF return true,
3085 otherwise return false. */
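/* Illustrative sketch: a store *p = x_1 may clobber REF iff its LHS
   may alias REF; for a call both a memory LHS (if any) and the call
   itself (see call_may_clobber_ref_p_1) are considered; a GIMPLE_ASM
   is conservatively treated as clobbering everything.  */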
3086
3087 bool
3088 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
3089 {
3090 if (is_gimple_call (stmt))
3091 {
3092 tree lhs = gimple_call_lhs (stmt);
3093 if (lhs
3094 && TREE_CODE (lhs) != SSA_NAME)
3095 {
3096 ao_ref r;
3097 ao_ref_init (&r, lhs);
3098 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
3099 return true;
3100 }
3101
3102 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
3103 }
3104 else if (gimple_assign_single_p (stmt))
3105 {
3106 tree lhs = gimple_assign_lhs (stmt);
3107 if (TREE_CODE (lhs) != SSA_NAME)
3108 {
3109 ao_ref r;
3110 ao_ref_init (&r, lhs);
3111 return refs_may_alias_p_1 (ref, &r, tbaa_p);
3112 }
3113 }
3114 else if (gimple_code (stmt) == GIMPLE_ASM)
3115 return true;
3116
3117 return false;
3118 }
3119
3120 bool
3121 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
3122 {
3123 ao_ref r;
3124 ao_ref_init (&r, ref);
3125 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
3126 }
3127
3128 /* Return true if store1 and store2 described by corresponding tuples
3129 <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
3130 address. */
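/* Illustrative sketch: with

     int x;
     int *p = &x;   (the points-to set of p is the singleton { x })

   the stores "x = 1" and "*p = 2" have one decl base and one MEM_REF
   base, both at offset 0 and covering the full size of x, so the
   routine can conclude that they store to the same address.  */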
3131
3132 static bool
3133 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
3134 poly_int64 max_size1,
3135 tree base2, poly_int64 offset2, poly_int64 size2,
3136 poly_int64 max_size2)
3137 {
3138 /* Offsets need to be 0. */
3139 if (maybe_ne (offset1, 0)
3140 || maybe_ne (offset2, 0))
3141 return false;
3142
3143 bool base1_obj_p = SSA_VAR_P (base1);
3144 bool base2_obj_p = SSA_VAR_P (base2);
3145
3146   /* We need exactly one object.  */
3147 if (base1_obj_p == base2_obj_p)
3148 return false;
3149 tree obj = base1_obj_p ? base1 : base2;
3150
3151   /* And we need exactly one MEM_REF.  */
3152 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
3153 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
3154 if (base1_memref_p == base2_memref_p)
3155 return false;
3156 tree memref = base1_memref_p ? base1 : base2;
3157
3158 /* Sizes need to be valid. */
3159 if (!known_size_p (max_size1)
3160 || !known_size_p (max_size2)
3161 || !known_size_p (size1)
3162 || !known_size_p (size2))
3163 return false;
3164
3165 /* Max_size needs to match size. */
3166 if (maybe_ne (max_size1, size1)
3167 || maybe_ne (max_size2, size2))
3168 return false;
3169
3170 /* Sizes need to match. */
3171 if (maybe_ne (size1, size2))
3172 return false;
3173
3174
3175 /* Check that memref is a store to pointer with singleton points-to info. */
3176 if (!integer_zerop (TREE_OPERAND (memref, 1)))
3177 return false;
3178 tree ptr = TREE_OPERAND (memref, 0);
3179 if (TREE_CODE (ptr) != SSA_NAME)
3180 return false;
3181 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3182 unsigned int pt_uid;
3183 if (pi == NULL
3184 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
3185 return false;
3186
3187 /* Be conservative with non-call exceptions when the address might
3188 be NULL. */
3189 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
3190 return false;
3191
3192 /* Check that ptr points relative to obj. */
3193 unsigned int obj_uid = DECL_PT_UID (obj);
3194 if (obj_uid != pt_uid)
3195 return false;
3196
3197   /* Check that the object size is the same as the store size.  That ensures
3198 that ptr points to the start of obj. */
3199 return (DECL_SIZE (obj)
3200 && poly_int_tree_p (DECL_SIZE (obj))
3201 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
3202 }
3203
3204 /* If STMT kills the memory reference REF return true, otherwise
3205 return false. */
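/* Illustrative sketch: the store a.x = 1 kills a reference to a.x
   because the LHS covers the whole access; likewise
   memset (&a, 0, sizeof a) kills any reference whose byte range is
   contained in the memset destination.  This must-def information is
   what dead store elimination and friends ask for through this
   predicate.  */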
3206
3207 bool
3208 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
3209 {
3210 if (!ao_ref_base (ref))
3211 return false;
3212
3213 if (gimple_has_lhs (stmt)
3214 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
3215 /* The assignment is not necessarily carried out if it can throw
3216 and we can catch it in the current function where we could inspect
3217 the previous value.
3218 ??? We only need to care about the RHS throwing. For aggregate
3219 assignments or similar calls and non-call exceptions the LHS
3220 might throw as well. */
3221 && !stmt_can_throw_internal (cfun, stmt))
3222 {
3223 tree lhs = gimple_get_lhs (stmt);
3224 /* If LHS is literally a base of the access we are done. */
3225 if (ref->ref)
3226 {
3227 tree base = ref->ref;
3228 tree innermost_dropped_array_ref = NULL_TREE;
3229 if (handled_component_p (base))
3230 {
3231 tree saved_lhs0 = NULL_TREE;
3232 if (handled_component_p (lhs))
3233 {
3234 saved_lhs0 = TREE_OPERAND (lhs, 0);
3235 TREE_OPERAND (lhs, 0) = integer_zero_node;
3236 }
3237 do
3238 {
3239 /* Just compare the outermost handled component, if
3240 they are equal we have found a possible common
3241 base. */
3242 tree saved_base0 = TREE_OPERAND (base, 0);
3243 TREE_OPERAND (base, 0) = integer_zero_node;
3244 bool res = operand_equal_p (lhs, base, 0);
3245 TREE_OPERAND (base, 0) = saved_base0;
3246 if (res)
3247 break;
3248 /* Remember if we drop an array-ref that we need to
3249 double-check not being at struct end. */
3250 if (TREE_CODE (base) == ARRAY_REF
3251 || TREE_CODE (base) == ARRAY_RANGE_REF)
3252 innermost_dropped_array_ref = base;
3253 /* Otherwise drop handled components of the access. */
3254 base = saved_base0;
3255 }
3256 while (handled_component_p (base));
3257 if (saved_lhs0)
3258 TREE_OPERAND (lhs, 0) = saved_lhs0;
3259 }
3260 /* Finally check if the lhs has the same address and size as the
3261 base candidate of the access. Watch out if we have dropped
3262 an array-ref that was at struct end, this means ref->ref may
3263 be outside of the TYPE_SIZE of its base. */
3264 if ((! innermost_dropped_array_ref
3265 || ! array_at_struct_end_p (innermost_dropped_array_ref))
3266 && (lhs == base
3267 || (((TYPE_SIZE (TREE_TYPE (lhs))
3268 == TYPE_SIZE (TREE_TYPE (base)))
3269 || (TYPE_SIZE (TREE_TYPE (lhs))
3270 && TYPE_SIZE (TREE_TYPE (base))
3271 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
3272 TYPE_SIZE (TREE_TYPE (base)),
3273 0)))
3274 && operand_equal_p (lhs, base,
3275 OEP_ADDRESS_OF
3276 | OEP_MATCH_SIDE_EFFECTS))))
3277 return true;
3278 }
3279
3280 /* Now look for non-literal equal bases with the restriction of
3281 handling constant offset and size. */
3282 /* For a must-alias check we need to be able to constrain
3283 the access properly. */
3284 if (!ref->max_size_known_p ())
3285 return false;
3286 poly_int64 size, offset, max_size, ref_offset = ref->offset;
3287 bool reverse;
3288 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
3289 &reverse);
3290 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
3291 so base == ref->base does not always hold. */
3292 if (base != ref->base)
3293 {
3294 /* Try using points-to info. */
3295 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
3296 ref->offset, ref->size, ref->max_size))
3297 return true;
3298
3299 /* If both base and ref->base are MEM_REFs, only compare the
3300 first operand, and if the second operand isn't equal constant,
3301 try to add the offsets into offset and ref_offset. */
3302 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
3303 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
3304 {
3305 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
3306 TREE_OPERAND (ref->base, 1)))
3307 {
3308 poly_offset_int off1 = mem_ref_offset (base);
3309 off1 <<= LOG2_BITS_PER_UNIT;
3310 off1 += offset;
3311 poly_offset_int off2 = mem_ref_offset (ref->base);
3312 off2 <<= LOG2_BITS_PER_UNIT;
3313 off2 += ref_offset;
3314 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
3315 size = -1;
3316 }
3317 }
3318 else
3319 size = -1;
3320 }
3321 /* For a must-alias check we need to be able to constrain
3322 the access properly. */
3323 if (known_eq (size, max_size)
3324 && known_subrange_p (ref_offset, ref->max_size, offset, size))
3325 return true;
3326 }
3327
3328 if (is_gimple_call (stmt))
3329 {
3330 tree callee = gimple_call_fndecl (stmt);
3331 if (callee != NULL_TREE
3332 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3333 switch (DECL_FUNCTION_CODE (callee))
3334 {
3335 case BUILT_IN_FREE:
3336 {
3337 tree ptr = gimple_call_arg (stmt, 0);
3338 tree base = ao_ref_base (ref);
3339 if (base && TREE_CODE (base) == MEM_REF
3340 && TREE_OPERAND (base, 0) == ptr)
3341 return true;
3342 break;
3343 }
3344
3345 case BUILT_IN_MEMCPY:
3346 case BUILT_IN_MEMPCPY:
3347 case BUILT_IN_MEMMOVE:
3348 case BUILT_IN_MEMSET:
3349 case BUILT_IN_MEMCPY_CHK:
3350 case BUILT_IN_MEMPCPY_CHK:
3351 case BUILT_IN_MEMMOVE_CHK:
3352 case BUILT_IN_MEMSET_CHK:
3353 case BUILT_IN_STRNCPY:
3354 case BUILT_IN_STPNCPY:
3355 case BUILT_IN_CALLOC:
3356 {
3357 /* For a must-alias check we need to be able to constrain
3358 the access properly. */
3359 if (!ref->max_size_known_p ())
3360 return false;
3361 tree dest;
3362 tree len;
3363
3364 /* In execution order a calloc call will never kill
3365 anything. However, DSE will (ab)use this interface
3366 to ask if a calloc call writes the same memory locations
3367 as a later assignment, memset, etc. So handle calloc
3368 in the expected way. */
3369 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
3370 {
3371 tree arg0 = gimple_call_arg (stmt, 0);
3372 tree arg1 = gimple_call_arg (stmt, 1);
3373 if (TREE_CODE (arg0) != INTEGER_CST
3374 || TREE_CODE (arg1) != INTEGER_CST)
3375 return false;
3376
3377 dest = gimple_call_lhs (stmt);
3378 if (!dest)
3379 return false;
3380 len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
3381 }
3382 else
3383 {
3384 dest = gimple_call_arg (stmt, 0);
3385 len = gimple_call_arg (stmt, 2);
3386 }
3387 if (!poly_int_tree_p (len))
3388 return false;
3389 tree rbase = ref->base;
3390 poly_offset_int roffset = ref->offset;
3391 ao_ref dref;
3392 ao_ref_init_from_ptr_and_size (&dref, dest, len);
3393 tree base = ao_ref_base (&dref);
3394 poly_offset_int offset = dref.offset;
3395 if (!base || !known_size_p (dref.size))
3396 return false;
3397 if (TREE_CODE (base) == MEM_REF)
3398 {
3399 if (TREE_CODE (rbase) != MEM_REF)
3400 return false;
3401 // Compare pointers.
3402 offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
3403 roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
3404 base = TREE_OPERAND (base, 0);
3405 rbase = TREE_OPERAND (rbase, 0);
3406 }
3407 if (base == rbase
3408 && known_subrange_p (roffset, ref->max_size, offset,
3409 wi::to_poly_offset (len)
3410 << LOG2_BITS_PER_UNIT))
3411 return true;
3412 break;
3413 }
3414
3415 case BUILT_IN_VA_END:
3416 {
3417 tree ptr = gimple_call_arg (stmt, 0);
3418 if (TREE_CODE (ptr) == ADDR_EXPR)
3419 {
3420 tree base = ao_ref_base (ref);
3421 if (TREE_OPERAND (ptr, 0) == base)
3422 return true;
3423 }
3424 break;
3425 }
3426
3427 default:;
3428 }
3429 }
3430 return false;
3431 }
3432
3433 bool
3434 stmt_kills_ref_p (gimple *stmt, tree ref)
3435 {
3436 ao_ref r;
3437 ao_ref_init (&r, ref);
3438 return stmt_kills_ref_p (stmt, &r);
3439 }
3440
3441
3442 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
3443    TARGET or a statement clobbering the memory reference REF, in which
3444 case false is returned. The walk starts with VUSE, one argument of PHI. */
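/* Illustrative sketch: the chain followed here is formed by VDEFs, so
   only statements that may write memory are visited, e.g.

     # .MEM_7 = VDEF <.MEM_5>
     c = 1;

   If such a statement cannot clobber REF (or TRANSLATE can look
   through it), the walk continues at its VUSE; reaching TARGET ends
   the walk successfully, while a real clobber or an exhausted LIMIT
   ends it with failure.  */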
3445
3446 static bool
3447 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
3448 ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
3449 bitmap *visited, bool abort_on_visited,
3450 void *(*translate)(ao_ref *, tree, void *, translate_flags *),
3451 translate_flags disambiguate_only,
3452 void *data)
3453 {
3454 basic_block bb = gimple_bb (phi);
3455
3456 if (!*visited)
3457 *visited = BITMAP_ALLOC (NULL);
3458
3459 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
3460
3461 /* Walk until we hit the target. */
3462 while (vuse != target)
3463 {
3464 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
3465 /* If we are searching for the target VUSE by walking up to
3466 TARGET_BB dominating the original PHI we are finished once
3467 we reach a default def or a definition in a block dominating
3468 that block. Update TARGET and return. */
3469 if (!target
3470 && (gimple_nop_p (def_stmt)
3471 || dominated_by_p (CDI_DOMINATORS,
3472 target_bb, gimple_bb (def_stmt))))
3473 {
3474 target = vuse;
3475 return true;
3476 }
3477
3478 /* Recurse for PHI nodes. */
3479 if (gimple_code (def_stmt) == GIMPLE_PHI)
3480 {
3481 /* An already visited PHI node ends the walk successfully. */
3482 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
3483 return !abort_on_visited;
3484 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3485 visited, abort_on_visited,
3486 translate, data, disambiguate_only);
3487 if (!vuse)
3488 return false;
3489 continue;
3490 }
3491 else if (gimple_nop_p (def_stmt))
3492 return false;
3493 else
3494 {
3495 	  /* A clobbering statement or an exhausted LIMIT ends the walk as failed.  */
3496 if ((int)limit <= 0)
3497 return false;
3498 --limit;
3499 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3500 {
3501 translate_flags tf = disambiguate_only;
3502 if (translate
3503 && (*translate) (ref, vuse, data, &tf) == NULL)
3504 ;
3505 else
3506 return false;
3507 }
3508 }
3509 /* If we reach a new basic-block see if we already skipped it
3510 in a previous walk that ended successfully. */
3511 if (gimple_bb (def_stmt) != bb)
3512 {
3513 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
3514 return !abort_on_visited;
3515 bb = gimple_bb (def_stmt);
3516 }
3517 vuse = gimple_vuse (def_stmt);
3518 }
3519 return true;
3520 }
3521
3522
3523 /* Starting from a PHI node for the virtual operand of the memory reference
3524    REF, find a continuation virtual operand from which walking the statements
3525    dominating the PHI can continue, skipping only statements that cannot
3526    possibly clobber REF.  LIMIT is decremented for each alias disambiguation
3527    done; the walk is aborted and NULL_TREE returned once LIMIT reaches zero.
3528    NULL_TREE is also returned if no suitable virtual operand can be found.  */
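/* For example (a hypothetical sketch, not derived from a testcase):

     bb1:  # .MEM_2 = VDEF <.MEM_1>  a = 1;   <-- does not clobber REF
	   if (cond) goto bb2; else goto bb3;
     bb2:  # .MEM_3 = VDEF <.MEM_2>  b = 2;   <-- does not clobber REF
     bb3:  # .MEM_4 = PHI <.MEM_2(bb1), .MEM_3(bb2)>

   .MEM_2 is defined in bb1 which dominates the PHI in bb3, so it is the
   candidate; maybe_skip_until then walks from .MEM_3 back to .MEM_2 and
   verifies that the skipped definition of .MEM_3 does not clobber REF.
   The continuation returned for the PHI is .MEM_2.  */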
3529
3530 tree
3531 get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
3532 unsigned int &limit, bitmap *visited,
3533 bool abort_on_visited,
3534 void *(*translate)(ao_ref *, tree, void *,
3535 translate_flags *),
3536 void *data,
3537 translate_flags disambiguate_only)
3538 {
3539 unsigned nargs = gimple_phi_num_args (phi);
3540
3541   /* We can simply look through a single-argument PHI.  */
3542 if (nargs == 1)
3543 return PHI_ARG_DEF (phi, 0);
3544
3545 /* For two or more arguments try to pairwise skip non-aliasing code
3546      until we hit the PHI argument whose definition dominates the others.  */
3547 basic_block phi_bb = gimple_bb (phi);
3548 tree arg0, arg1;
3549 unsigned i;
3550
3551   /* Find a candidate for the virtual operand whose definition
3552      dominates those of all others.  */
3553   /* First check whether any of the args themselves satisfies this.  */
3554 for (i = 0; i < nargs; ++i)
3555 {
3556 arg0 = PHI_ARG_DEF (phi, i);
3557 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
3558 break;
3559 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
3560 if (def_bb != phi_bb
3561 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
3562 break;
3563 arg0 = NULL_TREE;
3564 }
3565   /* If not, see if we can reach such a candidate by walking defs
3566 until we hit the immediate dominator. maybe_skip_until will
3567 do that for us. */
3568 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
3569
3570 /* Then check against the (to be) found candidate. */
3571 for (i = 0; i < nargs; ++i)
3572 {
3573 arg1 = PHI_ARG_DEF (phi, i);
3574 if (arg1 == arg0)
3575 ;
3576 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
3577 limit, visited,
3578 abort_on_visited,
3579 translate,
3580 /* Do not valueize when walking over
3581 backedges. */
3582 dominated_by_p
3583 (CDI_DOMINATORS,
3584 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3585 phi_bb)
3586 ? TR_DISAMBIGUATE
3587 : disambiguate_only, data))
3588 return NULL_TREE;
3589 }
3590
3591 return arg0;
3592 }
3593
3594 /* Based on the memory reference REF and its virtual use VUSE call
3595 WALKER for each virtual use that is equivalent to VUSE, including VUSE
3596    itself.  That is, for each virtual use whose defining statement
3597 does not clobber REF.
3598
3599 WALKER is called with REF, the current virtual use and DATA. If
3600 WALKER returns non-NULL the walk stops and its result is returned.
3601 At the end of a non-successful walk NULL is returned.
3602
3603    TRANSLATE, if non-NULL, is called with a pointer to REF, the virtual
3604    use whose defining statement may clobber REF, and with DATA.
3605 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3606 If TRANSLATE returns non-NULL the walk stops and its result is returned.
3607 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3608 to adjust REF and *DATA to make that valid.
3609
3610    VALUEIZE, if non-NULL, is called with the next VUSE that is considered
3611    and its return value is substituted for it.  This can be used to
3612 implement optimistic value-numbering for example. Note that the
3613 VUSE argument is assumed to be valueized already.
3614
3615    LIMIT specifies the number of alias queries we are allowed to do;
3616 the walk stops when it reaches zero and NULL is returned. LIMIT
3617 is decremented by the number of alias queries (plus adjustments
3618 done by the callbacks) upon return.
3619
3620 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
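/* A minimal, hypothetical usage sketch; the callback name and the LIMIT
   value are illustrative only and not part of this file:

     static void *
     first_vuse_walker (ao_ref *, tree vuse, void *)
     {
       return vuse;   // returning non-NULL stops the walk successfully
     }

     unsigned limit = 100;   // arbitrary alias-query budget
     void *res = walk_non_aliased_vuses (ref, vuse, true,
					 first_vuse_walker, NULL, NULL,
					 limit, NULL);

   In practice a walker only returns non-NULL when, for example, a
   hash-table lookup for the REF/VUSE pair succeeds (as value-numbering
   does), and a TRANSLATE callback is passed to look through clobbers
   that can be disambiguated after adjusting REF.  */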
3621
3622 void *
3623 walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
3624 void *(*walker)(ao_ref *, tree, void *),
3625 void *(*translate)(ao_ref *, tree, void *,
3626 translate_flags *),
3627 tree (*valueize)(tree),
3628 unsigned &limit, void *data)
3629 {
3630 bitmap visited = NULL;
3631 void *res;
3632 bool translated = false;
3633
3634 timevar_push (TV_ALIAS_STMT_WALK);
3635
3636 do
3637 {
3638 gimple *def_stmt;
3639
3640 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3641 res = (*walker) (ref, vuse, data);
3642 /* Abort walk. */
3643 if (res == (void *)-1)
3644 {
3645 res = NULL;
3646 break;
3647 }
3648 /* Lookup succeeded. */
3649 else if (res != NULL)
3650 break;
3651
3652 if (valueize)
3653 {
3654 vuse = valueize (vuse);
3655 if (!vuse)
3656 {
3657 res = NULL;
3658 break;
3659 }
3660 }
3661 def_stmt = SSA_NAME_DEF_STMT (vuse);
3662 if (gimple_nop_p (def_stmt))
3663 break;
3664 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3665 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3666 &visited, translated, translate, data);
3667 else
3668 {
3669 if ((int)limit <= 0)
3670 {
3671 res = NULL;
3672 break;
3673 }
3674 --limit;
3675 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3676 {
3677 if (!translate)
3678 break;
3679 translate_flags disambiguate_only = TR_TRANSLATE;
3680 res = (*translate) (ref, vuse, data, &disambiguate_only);
3681 /* Failed lookup and translation. */
3682 if (res == (void *)-1)
3683 {
3684 res = NULL;
3685 break;
3686 }
3687 /* Lookup succeeded. */
3688 else if (res != NULL)
3689 break;
3690 /* Translation succeeded, continue walking. */
3691 translated = translated || disambiguate_only == TR_TRANSLATE;
3692 }
3693 vuse = gimple_vuse (def_stmt);
3694 }
3695 }
3696 while (vuse);
3697
3698 if (visited)
3699 BITMAP_FREE (visited);
3700
3701 timevar_pop (TV_ALIAS_STMT_WALK);
3702
3703 return res;
3704 }
3705
3706
3707 /* Based on the memory reference REF call WALKER for each vdef whose
3708 defining statement may clobber REF, starting with VDEF. If REF
3709 is NULL_TREE, each defining statement is visited.
3710
3711 WALKER is called with REF, the current vdef and DATA. If WALKER
3712 returns true the walk is stopped, otherwise it continues.
3713
3714 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
3715    The pointer may be NULL, in which case this information is not tracked.
3716
3717    At PHI nodes walk_aliased_vdefs forks into one walk for each
3718    PHI argument (but only one walk continues at merge points); the
3719    walk as a whole is considered successful if any of the forked walks was.
3720
3721 The function returns the number of statements walked or -1 if
3722 LIMIT stmts were walked and the walk was aborted at this point.
3723 If LIMIT is zero the walk is not aborted. */
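/* A minimal, hypothetical usage sketch; the callback name and the CLOBBERS
   bitmap are illustrative only and not part of this file:

     static bool
     record_clobbering_vdef (ao_ref *, tree vdef, void *data)
     {
       bitmap_set_bit ((bitmap) data, SSA_NAME_VERSION (vdef));
       return false;   // keep walking all possibly clobbering vdefs
     }

     bitmap clobbers = BITMAP_ALLOC (NULL);
     int walked = walk_aliased_vdefs (&ref, vuse, record_clobbering_vdef,
				      clobbers, NULL, NULL, 100);

   A return value of -1 means the limit of 100 walked statements was hit
   and the recorded set is incomplete.  */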
3724
3725 static int
3726 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
3727 bool (*walker)(ao_ref *, tree, void *), void *data,
3728 bitmap *visited, unsigned int cnt,
3729 bool *function_entry_reached, unsigned limit)
3730 {
3731 do
3732 {
3733 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
3734
3735 if (*visited
3736 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
3737 return cnt;
3738
3739 if (gimple_nop_p (def_stmt))
3740 {
3741 if (function_entry_reached)
3742 *function_entry_reached = true;
3743 return cnt;
3744 }
3745 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3746 {
3747 unsigned i;
3748 if (!*visited)
3749 *visited = BITMAP_ALLOC (NULL);
3750 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3751 {
3752 int res = walk_aliased_vdefs_1 (ref,
3753 gimple_phi_arg_def (def_stmt, i),
3754 walker, data, visited, cnt,
3755 function_entry_reached, limit);
3756 if (res == -1)
3757 return -1;
3758 cnt = res;
3759 }
3760 return cnt;
3761 }
3762
3763 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3764 cnt++;
3765 if (cnt == limit)
3766 return -1;
3767 if ((!ref
3768 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
3769 && (*walker) (ref, vdef, data))
3770 return cnt;
3771
3772 vdef = gimple_vuse (def_stmt);
3773 }
3774 while (1);
3775 }
3776
3777 int
3778 walk_aliased_vdefs (ao_ref *ref, tree vdef,
3779 bool (*walker)(ao_ref *, tree, void *), void *data,
3780 bitmap *visited,
3781 bool *function_entry_reached, unsigned int limit)
3782 {
3783 bitmap local_visited = NULL;
3784 int ret;
3785
3786 timevar_push (TV_ALIAS_STMT_WALK);
3787
3788 if (function_entry_reached)
3789 *function_entry_reached = false;
3790
3791 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
3792 visited ? visited : &local_visited, 0,
3793 function_entry_reached, limit);
3794 if (local_visited)
3795 BITMAP_FREE (local_visited);
3796
3797 timevar_pop (TV_ALIAS_STMT_WALK);
3798
3799 return ret;
3800 }
3801