/* Alias analysis for trees.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "langhooks.h"
#include "dumpfile.h"
#include "tree-eh.h"
#include "tree-dfa.h"
#include "ipa-reference.h"
#include "varasm.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias-oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
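
/* Illustrative sketch, added for exposition and not part of the
   original file: a typical client combines the two main entry-points
   above to ask whether STMT touches the memory designated by REFTREE
   at all.  The function name example_stmt_touches_ref_p is
   hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_stmt_touches_ref_p (gimple *stmt, tree reftree)
{
  /* STMT conflicts with the reference if it may write (parts of) it
     or may read (parts of) it.  */
  return (stmt_may_clobber_ref_p (stmt, reftree)
	  || ref_maybe_used_by_stmt_p (stmt, reftree));
}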


/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  fprintf (s, "  aliasing_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.aliasing_component_refs_p_no_alias,
	   alias_stats.aliasing_component_refs_p_no_alias
	   + alias_stats.aliasing_component_refs_p_may_alias);
  dump_alias_stats_in_alias_c (s);
}


/* Return true if dereferencing PTR may alias with a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two identical
     pointers.  In this case we still want to say both pointers alias,
     so shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to a SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which need points-to adjustments to track them
     in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that the restrict pointed-to
	 object may in fact be obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */

  return false;
}

/* Returns whether a reference based on BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}


/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}


/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  || pt->vars_contains_escaped
	  || pt->vars_contains_escaped_heap
	  || pt->vars_contains_restrict)
	{
	  const char *comma = "";
	  fprintf (file, " (");
	  if (pt->vars_contains_nonlocal)
	    {
	      fprintf (file, "nonlocal");
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped)
	    {
	      fprintf (file, "%sescaped", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped_heap)
	    {
	      fprintf (file, "%sescaped heap", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_restrict)
	    {
	      fprintf (file, "%srestrict", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_interposable)
	    fprintf (file, "%sinterposable", comma);
	  fprintf (file, ")");
	}
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}


/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
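
/* Illustrative sketch, added for exposition and not part of the
   original file: modelling the destination of a memset-style call
   whose pointer is argument 0 and whose byte count is argument 2,
   then testing it against another reference.  The function name
   example_dest_may_overlap_ref_p is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_dest_may_overlap_ref_p (gcall *call, ao_ref *ref)
{
  ao_ref dref;
  /* Build an ao_ref covering the bytes [arg0, arg0 + arg2).  */
  ao_ref_init_from_ptr_and_size (&dref,
				 gimple_call_arg (call, 0),
				 gimple_call_arg (call, 2));
  /* Disable TBAA; such calls access the bytes with effectively
     any type.  */
  return refs_may_alias_p_1 (&dref, ref, false);
}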

/* S1 and S2 are TYPE_SIZE or DECL_SIZE.  Compare them:
   Return -1 if S1 < S2
   Return 1 if S1 > S2
   Return 0 if equal or incomparable.  */

static int
compare_sizes (tree s1, tree s2)
{
  if (!s1 || !s2)
    return 0;

  poly_uint64 size1;
  poly_uint64 size2;

  if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
    return 0;
  if (known_lt (size1, size2))
    return -1;
  if (known_lt (size2, size1))
    return 1;
  return 0;
}

/* Compare TYPE1 and TYPE2 by their size.
   Return -1 if size of TYPE1 < size of TYPE2
   Return 1 if size of TYPE1 > size of TYPE2
   Return 0 if types are of equal sizes or we cannot compare them.  */

static int
compare_type_sizes (tree type1, tree type2)
{
  /* Be conservative for arrays and vectors.  We want to support partial
     overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c.  */
  while (TREE_CODE (type1) == ARRAY_TYPE
	 || TREE_CODE (type1) == VECTOR_TYPE)
    type1 = TREE_TYPE (type1);
  while (TREE_CODE (type2) == ARRAY_TYPE
	 || TREE_CODE (type2) == VECTOR_TYPE)
    type2 = TREE_TYPE (type2);
  return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
}

/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* Handle the most common case first.  */
  if (type1 == type2)
    return 1;

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}

/* Return true if TYPE is a composite type (i.e. we may apply one of the
   handled components to it).  */

static bool
type_has_components_p (tree type)
{
  return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
	 || TREE_CODE (type) == COMPLEX_TYPE;
}
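
/* Added note, not in the original sources: type_has_components_p is
   true for e.g. struct S { int i; }, int[4], vector and _Complex
   types, to which a COMPONENT_REF, ARRAY_REF or REALPART_EXPR can be
   applied, and false for scalars such as plain int or pointers.  */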

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   poly_int64 offset2, poly_int64 max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p1 = 0, same_p2 = 0;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.
     This however only makes sense if type2 is big enough to hold type1.  */
  int cmp_outer = compare_type_sizes (type2, type1);
  if (cmp_outer >= 0)
    {
      refp = &ref2;
      while (true)
	{
	  /* We walk from inner type to the outer types.  If the type we
	     see is already too large to be part of type1, terminate the
	     search.  */
	  int cmp = compare_type_sizes (type1, TREE_TYPE (*refp));
	  if (cmp < 0)
	    break;
	  /* If types may be of same size, see if we can decide about their
	     equality.  */
	  if (cmp == 0)
	    {
	      same_p2 = same_type_for_tbaa (TREE_TYPE (*refp), type1);
	      if (same_p2 != 0)
		break;
	    }
	  if (!handled_component_p (*refp))
	    break;
	  refp = &TREE_OPERAND (*refp, 0);
	}
      if (same_p2 == 1)
	{
	  poly_int64 offadj, sztmp, msztmp;
	  bool reverse;
	  get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
	  offset2 -= offadj;
	  get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse);
	  offset1 -= offadj;
	  if (ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	    {
	      ++alias_stats.aliasing_component_refs_p_may_alias;
	      return true;
	    }
	  else
	    {
	      ++alias_stats.aliasing_component_refs_p_no_alias;
	      return false;
	    }
	}
    }

  /* If we didn't find a common base, try the other way around.  */
  if (cmp_outer <= 0)
    {
      refp = &ref1;
      while (true)
	{
	  int cmp = compare_type_sizes (type2, TREE_TYPE (*refp));
	  if (cmp < 0)
	    break;
	  /* If types may be of same size, see if we can decide about their
	     equality.  */
	  if (cmp == 0)
	    {
	      same_p1 = same_type_for_tbaa (TREE_TYPE (*refp), type2);
	      if (same_p1 != 0)
		break;
	    }
	  if (!handled_component_p (*refp))
	    break;
	  refp = &TREE_OPERAND (*refp, 0);
	}
      if (same_p1 == 1)
	{
	  poly_int64 offadj, sztmp, msztmp;
	  bool reverse;

	  get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
	  offset1 -= offadj;
	  get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse);
	  offset2 -= offadj;
	  if (ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	    {
	      ++alias_stats.aliasing_component_refs_p_may_alias;
	      return true;
	    }
	  else
	    {
	      ++alias_stats.aliasing_component_refs_p_no_alias;
	      return false;
	    }
	}
    }

  /* In the following code we make an assumption that the types in access
     paths do not overlap and thus accesses alias only if one path can be
     a continuation of another.  If we were not able to decide about
     equivalence, we need to give up.  */
  if (same_p1 == -1 || same_p2 == -1)
    return true;

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (compare_type_sizes (TREE_TYPE (ref2), type1) >= 0
      && type_has_components_p (TREE_TYPE (ref2))
      && (base1_alias_set == ref2_alias_set
	  || alias_set_subset_of (base1_alias_set, ref2_alias_set)))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl
      && compare_type_sizes (TREE_TYPE (ref1), type2) >= 0
      && type_has_components_p (TREE_TYPE (ref1))
      && (base2_alias_set == ref1_alias_set
	  || alias_set_subset_of (base2_alias_set, ref1_alias_set)))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  ++alias_stats.aliasing_component_refs_p_no_alias;
  return false;
}

/* Return true if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.  */

static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	return false;
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	return false;
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* Bases must be either same or uncomparable.  */
  gcc_checking_assert (ref1 == ref2
		       || (DECL_P (ref1) && DECL_P (ref2)
			   && compare_base_decls (ref1, ref2) != 0));

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      do
	{
	  if (component_refs1.is_empty ())
	    return false;
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    return false;
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	return false;

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	return false;

      if (field1 != field2)
	{
	  /* A field and its representative need to be considered the
	     same.  */
	  if (DECL_BIT_FIELD_REPRESENTATIVE (field1) == field2
	      || DECL_BIT_FIELD_REPRESENTATIVE (field2) == field1)
	    return false;
	  /* Different fields of the same record type cannot overlap.
	     ??? Bitfields can overlap at RTL level so punt on them.  */
	  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
	    return false;
	  return true;
	}
    }

  return false;
}
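
/* Added note, not in the original sources: given
     struct S { int a; int b; } s;
   the accesses s.a and s.b pop the fields a and b at the same rank of
   the same RECORD_TYPE, so the function returns true (no overlap),
   while any union on the access paths makes it conservatively return
   false.  */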

/* qsort compare function to sort FIELD_DECLs by their
   DECL_FIELD_CONTEXT TYPE_UID.  */

static inline int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
  unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}

/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  if (!flag_strict_aliasing
      || !x || !y
      || TREE_CODE (x) != COMPONENT_REF
      || TREE_CODE (y) != COMPONENT_REF)
    return false;

  auto_vec<const_tree, 16> fieldsx;
  while (TREE_CODE (x) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (x, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsx.safe_push (field);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    return false;
  auto_vec<const_tree, 16> fieldsy;
  while (TREE_CODE (y) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (y, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsy.safe_push (field);
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    return false;

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    return ((DECL_FIELD_CONTEXT (fieldsx[0])
	     == DECL_FIELD_CONTEXT (fieldsy[0]))
	    && fieldsx[0] != fieldsy[0]
	    && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));

  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	std::swap (fieldsx[0], fieldsx[1]);
    }
  else
    fieldsx.qsort (ncr_compar);

  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	std::swap (fieldsy[0], fieldsy[1]);
    }
  else
    fieldsy.qsort (ncr_compar);

  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];
      tree typex = DECL_FIELD_CONTEXT (fieldx);
      tree typey = DECL_FIELD_CONTEXT (fieldy);
      if (typex == typey)
	{
	  /* We're left with accessing different fields of a structure,
	     no possible overlap.  */
	  if (fieldx != fieldy)
	    {
	      /* A field and its representative need to be considered the
		 same.  */
	      if (DECL_BIT_FIELD_REPRESENTATIVE (fieldx) == fieldy
		  || DECL_BIT_FIELD_REPRESENTATIVE (fieldy) == fieldx)
		return false;
	      /* Different fields of the same record type cannot overlap.
		 ??? Bitfields can overlap at RTL level so punt on them.  */
	      if (DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy))
		return false;
	      return true;
	    }
	}
      if (TYPE_UID (typex) < TYPE_UID (typey))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  return false;
}


/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       poly_int64 offset1, poly_int64 max_size1,
		       tree ref2, tree base2,
		       poly_int64 offset2, poly_int64 max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If the references are based on different variables, they cannot
     alias.  */
  if (compare_base_decls (base1, base2) == 0)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
    return false;

  return true;
}

/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       poly_int64 offset1, poly_int64 max_size1,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       poly_int64 offset2, poly_int64 max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);
  poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0)
    return true;

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (/* ??? This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && compare_sizes (DECL_SIZE (base2),
			TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  poly_int64 doffset1 = offset1;
  poly_offset_int doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}

/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   poly_int64 offset1, poly_int64 max_size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   poly_int64 offset2, poly_int64 max_size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
      poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
      return ranges_maybe_overlap_p (offset1 + moff1, max_size1,
				     offset2 + moff2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0
      || base2_alias_set == 0)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1
      /* But avoid treating arrays as "objects", instead assume they
	 can overlap by an exact multiple of their element size.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
    return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, false);

  return true;
}

/* Return true if the two memory references REF1 and REF2 may alias.  */

static bool
refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  poly_int64 offset1 = 0, offset2 = 0;
  poly_int64 max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases, conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
				  ref2->ref, base2, offset2, max_size2);

  /* Handle restrict based accesses.
     ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
  tree rbase1 = base1;
  tree rbase2 = base2;
  if (var1_p)
    {
      rbase1 = ref1->ref;
      if (rbase1)
	while (handled_component_p (rbase1))
	  rbase1 = TREE_OPERAND (rbase1, 0);
    }
  if (var2_p)
    {
      rbase2 = ref2->ref;
      if (rbase2)
	while (handled_component_p (rbase2))
	  rbase2 = TREE_OPERAND (rbase2, 0);
    }
  if (rbase1 && rbase2
      && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
      && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
      /* If the accesses are in the same restrict clique...  */
      && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
      /* But based on different pointers they do not alias.  */
      && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
    return false;

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      std::swap (offset1, offset2);
      std::swap (max_size1, max_size2);
      std::swap (base1, base2);
      std::swap (ref1, ref2);
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* If the reference is based on a pointer that points to memory
     that may not be written to then the other reference cannot possibly
     clobber it.  */
  if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
       && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
      || (ind1_p
	  && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
	  && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2,
					  ao_ref_alias_set (ref2),
					  ao_ref_base_alias_set (ref2),
					  ref1->ref, base1,
					  offset1, max_size1,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1,
				      ao_ref_alias_set (ref1),
				      ao_ref_base_alias_set (ref1),
				      ref2->ref, base2,
				      offset2, max_size2,
				      ao_ref_alias_set (ref2),
				      ao_ref_base_alias_set (ref2),
				      tbaa_p);

  gcc_unreachable ();
}

/* Return true if the two memory references REF1 and REF2 may alias
   and update statistics.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}

static bool
refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
{
  ao_ref r1;
  ao_ref_init (&r1, ref1);
  return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
}

bool
refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
}
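
/* Added note, not in the original sources: a pass holding two plain
   reference trees can query the oracle directly, e.g.

     if (!refs_may_alias_p (lhs, rhs, true))
       ... the two accesses are independent ...

   where the last argument enables TBAA-based disambiguation.  */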

/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  */

static bool
ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const functions without a static chain do not implicitly use memory.  */
  if (!gimple_call_chain (call)
      && (flags & (ECF_CONST|ECF_NOVOPS)))
    goto process_args;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (callee))
      {
      /* All the following functions read memory pointed to by
	 their second argument.  strcat/strncat additionally
	 reads memory pointed to by the first argument.  */
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
	{
	  ao_ref dref;
	  ao_ref_init_from_ptr_and_size (&dref,
					 gimple_call_arg (call, 0),
					 NULL_TREE);
	  if (refs_may_alias_p_1 (&dref, ref, false))
	    return true;
	}
	/* FALLTHRU */
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
	{
	  ao_ref dref;
	  tree size = NULL_TREE;
	  if (gimple_call_num_args (call) == 3)
	    size = gimple_call_arg (call, 2);
	  ao_ref_init_from_ptr_and_size (&dref,
					 gimple_call_arg (call, 1),
					 size);
	  return refs_may_alias_p_1 (&dref, ref, false);
	}
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
	{
	  ao_ref dref;
	  ao_ref_init_from_ptr_and_size (&dref,
					 gimple_call_arg (call, 0),
					 NULL_TREE);
	  if (refs_may_alias_p_1 (&dref, ref, false))
	    return true;
	}
	/* FALLTHRU */
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMPCPY_CHK:
      case BUILT_IN_STPCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
	{
	  ao_ref dref;
	  tree size = NULL_TREE;
	  if (gimple_call_num_args (call) == 4)
	    size = gimple_call_arg (call, 2);
	  ao_ref_init_from_ptr_and_size (&dref,
					 gimple_call_arg (call, 1),
					 size);
	  return refs_may_alias_p_1 (&dref, ref, false);
	}
      case BUILT_IN_BCOPY:
	{
	  ao_ref dref;
	  tree size = gimple_call_arg (call, 2);
	  ao_ref_init_from_ptr_and_size (&dref,
					 gimple_call_arg (call, 0),
					 size);
	  return refs_may_alias_p_1 (&dref, ref, false);
	}

      /* The following functions read memory pointed to by their
	 first argument.  */
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
      case BUILT_IN_TM_LOG:
      case BUILT_IN_TM_LOG_1:
      case BUILT_IN_TM_LOG_2:
      case BUILT_IN_TM_LOG_4:
      case BUILT_IN_TM_LOG_8:
      case BUILT_IN_TM_LOG_FLOAT:
      case BUILT_IN_TM_LOG_DOUBLE:
      case BUILT_IN_TM_LOG_LDOUBLE:
      case BUILT_IN_TM_LOG_M64:
      case BUILT_IN_TM_LOG_M128:
      case BUILT_IN_TM_LOG_M256:
	return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);

      /* These read memory pointed to by the first argument.  */
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
      case BUILT_IN_REALLOC:
	{
	  ao_ref dref;
	  tree size = NULL_TREE;
	  if (gimple_call_num_args (call) == 2)
	    size = gimple_call_arg (call, 1);
	  ao_ref_init_from_ptr_and_size (&dref,
					 gimple_call_arg (call, 0),
					 size);
	  return refs_may_alias_p_1 (&dref, ref, false);
	}
      /* These read memory pointed to by the first argument.  */
      case BUILT_IN_INDEX:
      case BUILT_IN_STRCHR:
      case BUILT_IN_STRRCHR:
	{
	  ao_ref dref;
	  ao_ref_init_from_ptr_and_size (&dref,
					 gimple_call_arg (call, 0),
					 NULL_TREE);
	  return refs_may_alias_p_1 (&dref, ref, false);
	}
      /* These read memory pointed to by the first argument with size
	 in the third argument.  */
      case BUILT_IN_MEMCHR:
	{
	  ao_ref dref;
	  ao_ref_init_from_ptr_and_size (&dref,
					 gimple_call_arg (call, 0),
					 gimple_call_arg (call, 2));
	  return refs_may_alias_p_1 (&dref, ref, false);
1878 }
1879 /* These read memory pointed to by the first and second arguments. */
1880 case BUILT_IN_STRSTR:
1881 case BUILT_IN_STRPBRK:
1882 {
1883 ao_ref dref;
1884 ao_ref_init_from_ptr_and_size (&dref,
1885 gimple_call_arg (call, 0),
1886 NULL_TREE);
1887 if (refs_may_alias_p_1 (&dref, ref, false))
1888 return true;
1889 ao_ref_init_from_ptr_and_size (&dref,
1890 gimple_call_arg (call, 1),
1891 NULL_TREE);
1892 return refs_may_alias_p_1 (&dref, ref, false);
1893 }
1894
1895 /* The following builtins do not read from memory. */
1896 case BUILT_IN_FREE:
1897 case BUILT_IN_MALLOC:
1898 case BUILT_IN_POSIX_MEMALIGN:
1899 case BUILT_IN_ALIGNED_ALLOC:
1900 case BUILT_IN_CALLOC:
1901 CASE_BUILT_IN_ALLOCA:
1902 case BUILT_IN_STACK_SAVE:
1903 case BUILT_IN_STACK_RESTORE:
1904 case BUILT_IN_MEMSET:
1905 case BUILT_IN_TM_MEMSET:
1906 case BUILT_IN_MEMSET_CHK:
1907 case BUILT_IN_FREXP:
1908 case BUILT_IN_FREXPF:
1909 case BUILT_IN_FREXPL:
1910 case BUILT_IN_GAMMA_R:
1911 case BUILT_IN_GAMMAF_R:
1912 case BUILT_IN_GAMMAL_R:
1913 case BUILT_IN_LGAMMA_R:
1914 case BUILT_IN_LGAMMAF_R:
1915 case BUILT_IN_LGAMMAL_R:
1916 case BUILT_IN_MODF:
1917 case BUILT_IN_MODFF:
1918 case BUILT_IN_MODFL:
1919 case BUILT_IN_REMQUO:
1920 case BUILT_IN_REMQUOF:
1921 case BUILT_IN_REMQUOL:
1922 case BUILT_IN_SINCOS:
1923 case BUILT_IN_SINCOSF:
1924 case BUILT_IN_SINCOSL:
1925 case BUILT_IN_ASSUME_ALIGNED:
1926 case BUILT_IN_VA_END:
1927 return false;
1928 /* __sync_* builtins and some OpenMP builtins act as threading
1929 barriers. */
1930 #undef DEF_SYNC_BUILTIN
1931 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1932 #include "sync-builtins.def"
1933 #undef DEF_SYNC_BUILTIN
1934 case BUILT_IN_GOMP_ATOMIC_START:
1935 case BUILT_IN_GOMP_ATOMIC_END:
1936 case BUILT_IN_GOMP_BARRIER:
1937 case BUILT_IN_GOMP_BARRIER_CANCEL:
1938 case BUILT_IN_GOMP_TASKWAIT:
1939 case BUILT_IN_GOMP_TASKGROUP_END:
1940 case BUILT_IN_GOMP_CRITICAL_START:
1941 case BUILT_IN_GOMP_CRITICAL_END:
1942 case BUILT_IN_GOMP_CRITICAL_NAME_START:
1943 case BUILT_IN_GOMP_CRITICAL_NAME_END:
1944 case BUILT_IN_GOMP_LOOP_END:
1945 case BUILT_IN_GOMP_LOOP_END_CANCEL:
1946 case BUILT_IN_GOMP_ORDERED_START:
1947 case BUILT_IN_GOMP_ORDERED_END:
1948 case BUILT_IN_GOMP_SECTIONS_END:
1949 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
1950 case BUILT_IN_GOMP_SINGLE_COPY_START:
1951 case BUILT_IN_GOMP_SINGLE_COPY_END:
1952 return true;
1953
1954 default:
1955 /* Fallthru to general call handling. */;
1956 }
1957
1958 /* Check if base is a global static variable that is not read
1959 by the function. */
1960 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
1961 {
1962 struct cgraph_node *node = cgraph_node::get (callee);
1963 bitmap not_read;
1964
1965 /* FIXME: Callee can be an OMP builtin that does not have a call graph
1966 node yet. We should enforce that there are nodes for all decls in the
1967 IL and remove this check instead. */
1968 if (node
1969 && (not_read = ipa_reference_get_not_read_global (node))
1970 && bitmap_bit_p (not_read, ipa_reference_var_uid (base)))
1971 goto process_args;
1972 }
1973
1974 /* Check if the base variable is call-used. */
1975 if (DECL_P (base))
1976 {
1977 if (pt_solution_includes (gimple_call_use_set (call), base))
1978 return true;
1979 }
1980 else if ((TREE_CODE (base) == MEM_REF
1981 || TREE_CODE (base) == TARGET_MEM_REF)
1982 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1983 {
1984 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1985 if (!pi)
1986 return true;
1987
1988 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
1989 return true;
1990 }
1991 else
1992 return true;
1993
1994 /* Inspect call arguments for passed-by-value aliases. */
1995 process_args:
1996 for (i = 0; i < gimple_call_num_args (call); ++i)
1997 {
1998 tree op = gimple_call_arg (call, i);
1999 int flags = gimple_call_arg_flags (call, i);
2000
2001 if (flags & EAF_UNUSED)
2002 continue;
2003
2004 if (TREE_CODE (op) == WITH_SIZE_EXPR)
2005 op = TREE_OPERAND (op, 0);
2006
2007 if (TREE_CODE (op) != SSA_NAME
2008 && !is_gimple_min_invariant (op))
2009 {
2010 ao_ref r;
2011 ao_ref_init (&r, op);
2012 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
2013 return true;
2014 }
2015 }
2016
2017 return false;
2018 }
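
/* Worked example for the builtin handling above: for a call

     memcpy (d, s, n);

   only the n bytes at s are read, so an ao_ref covering [s, s + n)
   is built with ao_ref_init_from_ptr_and_size and tested against
   REF.  A reference into the destination d is thus not reported as
   used by this function; writes to d are the business of
   call_may_clobber_ref_p_1 further down.  */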
2019
2020 static bool
2021 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2022 {
2023 bool res;
2024 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2025 if (res)
2026 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2027 else
2028 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2029 return res;
2030 }
2031
2032
2033 /* If the statement STMT may use the memory reference REF return
2034 true, otherwise return false. */
2035
2036 bool
2037 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2038 {
2039 if (is_gimple_assign (stmt))
2040 {
2041 tree rhs;
2042
2043 /* All assignments that reference memory are single, so anything else cannot use REF. */
2044 if (!gimple_assign_single_p (stmt))
2045 return false;
2046
2047 rhs = gimple_assign_rhs1 (stmt);
2048 if (is_gimple_reg (rhs)
2049 || is_gimple_min_invariant (rhs)
2050 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2051 return false;
2052
2053 return refs_may_alias_p (rhs, ref, tbaa_p);
2054 }
2055 else if (is_gimple_call (stmt))
2056 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2057 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2058 {
2059 tree retval = gimple_return_retval (return_stmt);
2060 if (retval
2061 && TREE_CODE (retval) != SSA_NAME
2062 && !is_gimple_min_invariant (retval)
2063 && refs_may_alias_p (retval, ref, tbaa_p))
2064 return true;
2065 /* If ref escapes the function then the return acts as a use. */
2066 tree base = ao_ref_base (ref);
2067 if (!base)
2068 ;
2069 else if (DECL_P (base))
2070 return is_global_var (base);
2071 else if (TREE_CODE (base) == MEM_REF
2072 || TREE_CODE (base) == TARGET_MEM_REF)
2073 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
2074 return false;
2075 }
2076
2077 return true;
2078 }
2079
2080 bool
2081 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
2082 {
2083 ao_ref r;
2084 ao_ref_init (&r, ref);
2085 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
2086 }
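
/* Example (sketch; CANDIDATE_REF and BB are hypothetical): a
   liveness-style client can scan a block for remaining uses of a
   reference as

     ao_ref r;
     ao_ref_init (&r, candidate_ref);
     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	  !gsi_end_p (gsi); gsi_next (&gsi))
       if (ref_maybe_used_by_stmt_p (gsi_stmt (gsi), &r, true))
	 ...  the reference is still read here ...  */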
2087
2088 /* If the call in statement CALL may clobber the memory reference REF
2089 return true, otherwise return false. */
2090
2091 bool
2092 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
2093 {
2094 tree base;
2095 tree callee;
2096
2097 /* If the call is pure or const it cannot clobber anything. */
2098 if (gimple_call_flags (call)
2099 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
2100 return false;
2101 if (gimple_call_internal_p (call))
2102 switch (gimple_call_internal_fn (call))
2103 {
2104 /* Treat these internal calls like ECF_PURE for aliasing: they
2105 don't write to any memory the program should care about. They
2106 have important other side-effects and read memory, so they
2107 can't be ECF_NOVOPS. */
2108 case IFN_UBSAN_NULL:
2109 case IFN_UBSAN_BOUNDS:
2110 case IFN_UBSAN_VPTR:
2111 case IFN_UBSAN_OBJECT_SIZE:
2112 case IFN_UBSAN_PTR:
2113 case IFN_ASAN_CHECK:
2114 return false;
2115 default:
2116 break;
2117 }
2118
2119 base = ao_ref_base (ref);
2120 if (!base)
2121 return true;
2122
2123 if (TREE_CODE (base) == SSA_NAME
2124 || CONSTANT_CLASS_P (base))
2125 return false;
2126
2127 /* A call that is not without side-effects might involve volatile
2128 accesses and thus conflicts with all other volatile accesses. */
2129 if (ref->volatile_p)
2130 return true;
2131
2132 /* If the reference is based on a decl that is not aliased the call
2133 cannot possibly clobber it. */
2134 if (DECL_P (base)
2135 && !may_be_aliased (base)
2136 /* But local non-readonly statics can be modified through recursion
2137 or the call may implement a threading barrier which we must
2138 treat as may-def. */
2139 && (TREE_READONLY (base)
2140 || !is_global_var (base)))
2141 return false;
2142
2143 /* If the reference is based on a pointer that points to memory
2144 that may not be written to then the call cannot possibly clobber it. */
2145 if ((TREE_CODE (base) == MEM_REF
2146 || TREE_CODE (base) == TARGET_MEM_REF)
2147 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
2148 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
2149 return false;
2150
2151 callee = gimple_call_fndecl (call);
2152
2153 /* Explicitly handle those builtin functions that do not act as
2154 escape points. See tree-ssa-structalias.c:find_func_aliases
2155 for the list of builtins we might need to handle here. */
2156 if (callee != NULL_TREE
2157 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2158 switch (DECL_FUNCTION_CODE (callee))
2159 {
2160 /* All the following functions clobber memory pointed to by
2161 their first argument. */
2162 case BUILT_IN_STRCPY:
2163 case BUILT_IN_STRNCPY:
2164 case BUILT_IN_MEMCPY:
2165 case BUILT_IN_MEMMOVE:
2166 case BUILT_IN_MEMPCPY:
2167 case BUILT_IN_STPCPY:
2168 case BUILT_IN_STPNCPY:
2169 case BUILT_IN_STRCAT:
2170 case BUILT_IN_STRNCAT:
2171 case BUILT_IN_MEMSET:
2172 case BUILT_IN_TM_MEMSET:
2173 CASE_BUILT_IN_TM_STORE (1):
2174 CASE_BUILT_IN_TM_STORE (2):
2175 CASE_BUILT_IN_TM_STORE (4):
2176 CASE_BUILT_IN_TM_STORE (8):
2177 CASE_BUILT_IN_TM_STORE (FLOAT):
2178 CASE_BUILT_IN_TM_STORE (DOUBLE):
2179 CASE_BUILT_IN_TM_STORE (LDOUBLE):
2180 CASE_BUILT_IN_TM_STORE (M64):
2181 CASE_BUILT_IN_TM_STORE (M128):
2182 CASE_BUILT_IN_TM_STORE (M256):
2183 case BUILT_IN_TM_MEMCPY:
2184 case BUILT_IN_TM_MEMMOVE:
2185 {
2186 ao_ref dref;
2187 tree size = NULL_TREE;
2188 /* Don't pass in a size for strncat: it clobbers not n bytes at
2189 dest but n + 1 bytes at dest + strlen (dest), i.e.
2190 strlen (dest) + n + 1 bytes from dest in total, and
2191 strlen (dest) isn't known. */
2192 if (gimple_call_num_args (call) == 3
2193 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
2194 size = gimple_call_arg (call, 2);
2195 ao_ref_init_from_ptr_and_size (&dref,
2196 gimple_call_arg (call, 0),
2197 size);
2198 return refs_may_alias_p_1 (&dref, ref, false);
2199 }
2200 case BUILT_IN_STRCPY_CHK:
2201 case BUILT_IN_STRNCPY_CHK:
2202 case BUILT_IN_MEMCPY_CHK:
2203 case BUILT_IN_MEMMOVE_CHK:
2204 case BUILT_IN_MEMPCPY_CHK:
2205 case BUILT_IN_STPCPY_CHK:
2206 case BUILT_IN_STPNCPY_CHK:
2207 case BUILT_IN_STRCAT_CHK:
2208 case BUILT_IN_STRNCAT_CHK:
2209 case BUILT_IN_MEMSET_CHK:
2210 {
2211 ao_ref dref;
2212 tree size = NULL_TREE;
2213 /* Don't pass in a size for __strncat_chk: it clobbers not n
2214 bytes at dest but n + 1 bytes at dest + strlen (dest), i.e.
2215 strlen (dest) + n + 1 bytes from dest in total, and
2216 strlen (dest) isn't known. */
2217 if (gimple_call_num_args (call) == 4
2218 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
2219 size = gimple_call_arg (call, 2);
2220 ao_ref_init_from_ptr_and_size (&dref,
2221 gimple_call_arg (call, 0),
2222 size);
2223 return refs_may_alias_p_1 (&dref, ref, false);
2224 }
2225 case BUILT_IN_BCOPY:
2226 {
2227 ao_ref dref;
2228 tree size = gimple_call_arg (call, 2);
2229 ao_ref_init_from_ptr_and_size (&dref,
2230 gimple_call_arg (call, 1),
2231 size);
2232 return refs_may_alias_p_1 (&dref, ref, false);
2233 }
2234 /* Allocating memory does not have any side-effects apart from
2235 being the definition point for the pointer. */
2236 case BUILT_IN_MALLOC:
2237 case BUILT_IN_ALIGNED_ALLOC:
2238 case BUILT_IN_CALLOC:
2239 case BUILT_IN_STRDUP:
2240 case BUILT_IN_STRNDUP:
2241 /* Unix98 specifies that errno is set on allocation failure. */
2242 if (flag_errno_math
2243 && targetm.ref_may_alias_errno (ref))
2244 return true;
2245 return false;
2246 case BUILT_IN_STACK_SAVE:
2247 CASE_BUILT_IN_ALLOCA:
2248 case BUILT_IN_ASSUME_ALIGNED:
2249 return false;
2250 /* But posix_memalign stores a pointer into the memory pointed to
2251 by its first argument. */
2252 case BUILT_IN_POSIX_MEMALIGN:
2253 {
2254 tree ptrptr = gimple_call_arg (call, 0);
2255 ao_ref dref;
2256 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2257 TYPE_SIZE_UNIT (ptr_type_node));
2258 return (refs_may_alias_p_1 (&dref, ref, false)
2259 || (flag_errno_math
2260 && targetm.ref_may_alias_errno (ref)));
2261 }
2262 /* Freeing memory kills the pointed-to memory. More importantly
2263 the call has to serve as a barrier for moving loads and stores
2264 across it. */
2265 case BUILT_IN_FREE:
2266 case BUILT_IN_VA_END:
2267 {
2268 tree ptr = gimple_call_arg (call, 0);
2269 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2270 }
2271 /* Realloc serves both as allocation point and deallocation point. */
2272 case BUILT_IN_REALLOC:
2273 {
2274 tree ptr = gimple_call_arg (call, 0);
2275 /* Unix98 specifies that errno is set on allocation failure. */
2276 return ((flag_errno_math
2277 && targetm.ref_may_alias_errno (ref))
2278 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2279 }
2280 case BUILT_IN_GAMMA_R:
2281 case BUILT_IN_GAMMAF_R:
2282 case BUILT_IN_GAMMAL_R:
2283 case BUILT_IN_LGAMMA_R:
2284 case BUILT_IN_LGAMMAF_R:
2285 case BUILT_IN_LGAMMAL_R:
2286 {
2287 tree out = gimple_call_arg (call, 1);
2288 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2289 return true;
2290 if (flag_errno_math)
2291 break;
2292 return false;
2293 }
2294 case BUILT_IN_FREXP:
2295 case BUILT_IN_FREXPF:
2296 case BUILT_IN_FREXPL:
2297 case BUILT_IN_MODF:
2298 case BUILT_IN_MODFF:
2299 case BUILT_IN_MODFL:
2300 {
2301 tree out = gimple_call_arg (call, 1);
2302 return ptr_deref_may_alias_ref_p_1 (out, ref);
2303 }
2304 case BUILT_IN_REMQUO:
2305 case BUILT_IN_REMQUOF:
2306 case BUILT_IN_REMQUOL:
2307 {
2308 tree out = gimple_call_arg (call, 2);
2309 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2310 return true;
2311 if (flag_errno_math)
2312 break;
2313 return false;
2314 }
2315 case BUILT_IN_SINCOS:
2316 case BUILT_IN_SINCOSF:
2317 case BUILT_IN_SINCOSL:
2318 {
2319 tree sin = gimple_call_arg (call, 1);
2320 tree cos = gimple_call_arg (call, 2);
2321 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2322 || ptr_deref_may_alias_ref_p_1 (cos, ref));
2323 }
2324 /* __sync_* builtins and some OpenMP builtins act as threading
2325 barriers. */
2326 #undef DEF_SYNC_BUILTIN
2327 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2328 #include "sync-builtins.def"
2329 #undef DEF_SYNC_BUILTIN
2330 case BUILT_IN_GOMP_ATOMIC_START:
2331 case BUILT_IN_GOMP_ATOMIC_END:
2332 case BUILT_IN_GOMP_BARRIER:
2333 case BUILT_IN_GOMP_BARRIER_CANCEL:
2334 case BUILT_IN_GOMP_TASKWAIT:
2335 case BUILT_IN_GOMP_TASKGROUP_END:
2336 case BUILT_IN_GOMP_CRITICAL_START:
2337 case BUILT_IN_GOMP_CRITICAL_END:
2338 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2339 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2340 case BUILT_IN_GOMP_LOOP_END:
2341 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2342 case BUILT_IN_GOMP_ORDERED_START:
2343 case BUILT_IN_GOMP_ORDERED_END:
2344 case BUILT_IN_GOMP_SECTIONS_END:
2345 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2346 case BUILT_IN_GOMP_SINGLE_COPY_START:
2347 case BUILT_IN_GOMP_SINGLE_COPY_END:
2348 return true;
2349 default:
2350 /* Fallthru to general call handling. */;
2351 }
2352
2353 /* Check if base is a global static variable that is not written
2354 by the function. */
2355 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2356 {
2357 struct cgraph_node *node = cgraph_node::get (callee);
2358 bitmap not_written;
2359
2360 if (node
2361 && (not_written = ipa_reference_get_not_written_global (node))
2362 && bitmap_bit_p (not_written, ipa_reference_var_uid (base)))
2363 return false;
2364 }
2365
2366 /* Check if the base variable is call-clobbered. */
2367 if (DECL_P (base))
2368 return pt_solution_includes (gimple_call_clobber_set (call), base);
2369 else if ((TREE_CODE (base) == MEM_REF
2370 || TREE_CODE (base) == TARGET_MEM_REF)
2371 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2372 {
2373 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2374 if (!pi)
2375 return true;
2376
2377 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2378 }
2379
2380 return true;
2381 }
2382
2383 /* If the call in statement CALL may clobber the memory reference REF
2384 return true, otherwise return false. */
2385
2386 bool
2387 call_may_clobber_ref_p (gcall *call, tree ref)
2388 {
2389 bool res;
2390 ao_ref r;
2391 ao_ref_init (&r, ref);
2392 res = call_may_clobber_ref_p_1 (call, &r);
2393 if (res)
2394 ++alias_stats.call_may_clobber_ref_p_may_alias;
2395 else
2396 ++alias_stats.call_may_clobber_ref_p_no_alias;
2397 return res;
2398 }
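
/* Example (sketch; CALL and VAR are hypothetical): to check whether
   a variable live across a call is preserved by it:

     if (!call_may_clobber_ref_p (call, var))
       ...  VAR provably survives the call unmodified ...

   Const and pure calls answer false immediately; for other calls
   the answer comes from the builtin special-casing above and from
   the call's points-to clobber set.  */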
2399
2400
2401 /* If the statement STMT may clobber the memory reference REF return true,
2402 otherwise return false. */
2403
2404 bool
2405 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
2406 {
2407 if (is_gimple_call (stmt))
2408 {
2409 tree lhs = gimple_call_lhs (stmt);
2410 if (lhs
2411 && TREE_CODE (lhs) != SSA_NAME)
2412 {
2413 ao_ref r;
2414 ao_ref_init (&r, lhs);
2415 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
2416 return true;
2417 }
2418
2419 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
2420 }
2421 else if (gimple_assign_single_p (stmt))
2422 {
2423 tree lhs = gimple_assign_lhs (stmt);
2424 if (TREE_CODE (lhs) != SSA_NAME)
2425 {
2426 ao_ref r;
2427 ao_ref_init (&r, lhs);
2428 return refs_may_alias_p_1 (ref, &r, tbaa_p);
2429 }
2430 }
2431 else if (gimple_code (stmt) == GIMPLE_ASM)
2432 return true;
2433
2434 return false;
2435 }
2436
2437 bool
2438 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
2439 {
2440 ao_ref r;
2441 ao_ref_init (&r, ref);
2442 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
2443 }
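
/* Example (sketch; LOAD_STMT is hypothetical): a single step of the
   walk that looks for the statement providing the current value of
   a load:

     ao_ref r;
     ao_ref_init (&r, gimple_assign_rhs1 (load_stmt));
     gimple *def = SSA_NAME_DEF_STMT (gimple_vuse (load_stmt));
     if (!gimple_nop_p (def) && !stmt_may_clobber_ref_p_1 (def, &r, true))
       ...  DEF does not affect the load, keep walking upwards ...

   The full machinery for such walks is walk_non_aliased_vuses
   below.  */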
2444
2445 /* Return true if store1 and store2 described by corresponding tuples
2446 <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
2447 address. */
2448
2449 static bool
2450 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
2451 poly_int64 max_size1,
2452 tree base2, poly_int64 offset2, poly_int64 size2,
2453 poly_int64 max_size2)
2454 {
2455 /* Offsets need to be 0. */
2456 if (maybe_ne (offset1, 0)
2457 || maybe_ne (offset2, 0))
2458 return false;
2459
2460 bool base1_obj_p = SSA_VAR_P (base1);
2461 bool base2_obj_p = SSA_VAR_P (base2);
2462
2463 /* We need one object. */
2464 if (base1_obj_p == base2_obj_p)
2465 return false;
2466 tree obj = base1_obj_p ? base1 : base2;
2467
2468 /* And we need one MEM_REF. */
2469 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
2470 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
2471 if (base1_memref_p == base2_memref_p)
2472 return false;
2473 tree memref = base1_memref_p ? base1 : base2;
2474
2475 /* Sizes need to be valid. */
2476 if (!known_size_p (max_size1)
2477 || !known_size_p (max_size2)
2478 || !known_size_p (size1)
2479 || !known_size_p (size2))
2480 return false;
2481
2482 /* Max_size needs to match size. */
2483 if (maybe_ne (max_size1, size1)
2484 || maybe_ne (max_size2, size2))
2485 return false;
2486
2487 /* Sizes need to match. */
2488 if (maybe_ne (size1, size2))
2489 return false;
2490
2491
2492 /* Check that memref is a store through a pointer with singleton points-to info. */
2493 if (!integer_zerop (TREE_OPERAND (memref, 1)))
2494 return false;
2495 tree ptr = TREE_OPERAND (memref, 0);
2496 if (TREE_CODE (ptr) != SSA_NAME)
2497 return false;
2498 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2499 unsigned int pt_uid;
2500 if (pi == NULL
2501 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
2502 return false;
2503
2504 /* Be conservative with non-call exceptions when the address might
2505 be NULL. */
2506 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
2507 return false;
2508
2509 /* Check that ptr points relative to obj. */
2510 unsigned int obj_uid = DECL_PT_UID (obj);
2511 if (obj_uid != pt_uid)
2512 return false;
2513
2514 /* Check that the object size is the same as the store size. That ensures
2515 that ptr points to the start of obj. */
2516 return (DECL_SIZE (obj)
2517 && poly_int_tree_p (DECL_SIZE (obj))
2518 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
2519 }
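
/* Illustrative source pattern the above recognizes: given

     int x;
     int *p = &x;   (points-to of p is the singleton { x })
     *p = 0;        (store with MEM_REF base, offset 0, size 32)
     x = 1;         (store with decl base, offset 0, size 32)

   the two stores have matching sizes and zero offsets, and the
   pointer provably points to the start of the decl, so they write
   the same bytes even though their bases differ syntactically.  */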
2520
2521 /* If STMT kills the memory reference REF return true, otherwise
2522 return false. */
2523
2524 bool
2525 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
2526 {
2527 if (!ao_ref_base (ref))
2528 return false;
2529
2530 if (gimple_has_lhs (stmt)
2531 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2532 /* The assignment is not necessarily carried out if it can throw
2533 and we can catch it in the current function, where we could then
2534 inspect the previous value.
2535 ??? We only need to care about the RHS throwing. For aggregate
2536 assignments or similar calls and non-call exceptions the LHS
2537 might throw as well. */
2538 && !stmt_can_throw_internal (cfun, stmt))
2539 {
2540 tree lhs = gimple_get_lhs (stmt);
2541 /* If LHS is literally a base of the access we are done. */
2542 if (ref->ref)
2543 {
2544 tree base = ref->ref;
2545 tree innermost_dropped_array_ref = NULL_TREE;
2546 if (handled_component_p (base))
2547 {
2548 tree saved_lhs0 = NULL_TREE;
2549 if (handled_component_p (lhs))
2550 {
2551 saved_lhs0 = TREE_OPERAND (lhs, 0);
2552 TREE_OPERAND (lhs, 0) = integer_zero_node;
2553 }
2554 do
2555 {
2556 /* Just compare the outermost handled component; if
2557 they are equal we have found a possible common
2558 base. */
2559 tree saved_base0 = TREE_OPERAND (base, 0);
2560 TREE_OPERAND (base, 0) = integer_zero_node;
2561 bool res = operand_equal_p (lhs, base, 0);
2562 TREE_OPERAND (base, 0) = saved_base0;
2563 if (res)
2564 break;
2565 /* Remember any array-ref we drop; we need to double-check
2566 later that it is not at struct end. */
2567 if (TREE_CODE (base) == ARRAY_REF
2568 || TREE_CODE (base) == ARRAY_RANGE_REF)
2569 innermost_dropped_array_ref = base;
2570 /* Otherwise drop handled components of the access. */
2571 base = saved_base0;
2572 }
2573 while (handled_component_p (base));
2574 if (saved_lhs0)
2575 TREE_OPERAND (lhs, 0) = saved_lhs0;
2576 }
2577 /* Finally check if the lhs has the same address and size as the
2578 base candidate of the access. Watch out if we have dropped
2579 an array-ref that was at struct end; this means ref->ref may
2580 be outside of the TYPE_SIZE of its base. */
2581 if ((! innermost_dropped_array_ref
2582 || ! array_at_struct_end_p (innermost_dropped_array_ref))
2583 && (lhs == base
2584 || (((TYPE_SIZE (TREE_TYPE (lhs))
2585 == TYPE_SIZE (TREE_TYPE (base)))
2586 || (TYPE_SIZE (TREE_TYPE (lhs))
2587 && TYPE_SIZE (TREE_TYPE (base))
2588 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
2589 TYPE_SIZE (TREE_TYPE (base)),
2590 0)))
2591 && operand_equal_p (lhs, base,
2592 OEP_ADDRESS_OF
2593 | OEP_MATCH_SIDE_EFFECTS))))
2594 return true;
2595 }
2596
2597 /* Now look for non-literal equal bases, restricted to
2598 handling constant offset and size. */
2599 /* For a must-alias check we need to be able to constrain
2600 the access properly. */
2601 if (!ref->max_size_known_p ())
2602 return false;
2603 poly_int64 size, offset, max_size, ref_offset = ref->offset;
2604 bool reverse;
2605 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
2606 &reverse);
2607 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2608 so base == ref->base does not always hold. */
2609 if (base != ref->base)
2610 {
2611 /* Try using points-to info. */
2612 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
2613 ref->offset, ref->size, ref->max_size))
2614 return true;
2615
2616 /* If both base and ref->base are MEM_REFs, only compare the
2617 first operand, and if the second operand isn't an equal constant,
2618 try to add the offsets into offset and ref_offset. */
2619 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2620 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2621 {
2622 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2623 TREE_OPERAND (ref->base, 1)))
2624 {
2625 poly_offset_int off1 = mem_ref_offset (base);
2626 off1 <<= LOG2_BITS_PER_UNIT;
2627 off1 += offset;
2628 poly_offset_int off2 = mem_ref_offset (ref->base);
2629 off2 <<= LOG2_BITS_PER_UNIT;
2630 off2 += ref_offset;
2631 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
2632 size = -1;
2633 }
2634 }
2635 else
2636 size = -1;
2637 }
2638 /* For a must-alias check we need to be able to constrain
2639 the access properly. */
2640 if (known_eq (size, max_size)
2641 && known_subrange_p (ref_offset, ref->max_size, offset, size))
2642 return true;
2643 }
2644
2645 if (is_gimple_call (stmt))
2646 {
2647 tree callee = gimple_call_fndecl (stmt);
2648 if (callee != NULL_TREE
2649 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2650 switch (DECL_FUNCTION_CODE (callee))
2651 {
2652 case BUILT_IN_FREE:
2653 {
2654 tree ptr = gimple_call_arg (stmt, 0);
2655 tree base = ao_ref_base (ref);
2656 if (base && TREE_CODE (base) == MEM_REF
2657 && TREE_OPERAND (base, 0) == ptr)
2658 return true;
2659 break;
2660 }
2661
2662 case BUILT_IN_MEMCPY:
2663 case BUILT_IN_MEMPCPY:
2664 case BUILT_IN_MEMMOVE:
2665 case BUILT_IN_MEMSET:
2666 case BUILT_IN_MEMCPY_CHK:
2667 case BUILT_IN_MEMPCPY_CHK:
2668 case BUILT_IN_MEMMOVE_CHK:
2669 case BUILT_IN_MEMSET_CHK:
2670 case BUILT_IN_STRNCPY:
2671 case BUILT_IN_STPNCPY:
2672 {
2673 /* For a must-alias check we need to be able to constrain
2674 the access properly. */
2675 if (!ref->max_size_known_p ())
2676 return false;
2677 tree dest = gimple_call_arg (stmt, 0);
2678 tree len = gimple_call_arg (stmt, 2);
2679 if (!poly_int_tree_p (len))
2680 return false;
2681 tree rbase = ref->base;
2682 poly_offset_int roffset = ref->offset;
2683 ao_ref dref;
2684 ao_ref_init_from_ptr_and_size (&dref, dest, len);
2685 tree base = ao_ref_base (&dref);
2686 poly_offset_int offset = dref.offset;
2687 if (!base || !known_size_p (dref.size))
2688 return false;
2689 if (TREE_CODE (base) == MEM_REF)
2690 {
2691 if (TREE_CODE (rbase) != MEM_REF)
2692 return false;
2693 // Compare pointers.
2694 offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
2695 roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
2696 base = TREE_OPERAND (base, 0);
2697 rbase = TREE_OPERAND (rbase, 0);
2698 }
2699 if (base == rbase
2700 && known_subrange_p (roffset, ref->max_size, offset,
2701 wi::to_poly_offset (len)
2702 << LOG2_BITS_PER_UNIT))
2703 return true;
2704 break;
2705 }
2706
2707 case BUILT_IN_VA_END:
2708 {
2709 tree ptr = gimple_call_arg (stmt, 0);
2710 if (TREE_CODE (ptr) == ADDR_EXPR)
2711 {
2712 tree base = ao_ref_base (ref);
2713 if (TREE_OPERAND (ptr, 0) == base)
2714 return true;
2715 }
2716 break;
2717 }
2718
2719 default:;
2720 }
2721 }
2722 return false;
2723 }
2724
2725 bool
2726 stmt_kills_ref_p (gimple *stmt, tree ref)
2727 {
2728 ao_ref r;
2729 ao_ref_init (&r, ref);
2730 return stmt_kills_ref_p (stmt, &r);
2731 }
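
/* Example (sketch; EARLIER_STORE and LATER_STMT are hypothetical):
   dead store elimination asks whether a later statement completely
   overwrites an earlier store:

     ao_ref r;
     ao_ref_init (&r, gimple_assign_lhs (earlier_store));
     if (stmt_kills_ref_p (later_stmt, &r))
       ...  every byte of the earlier store is overwritten ...

   Unlike the may-alias queries this is a must-alias question, which
   is why the code above insists on known offsets and sizes.  */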
2732
2733
2734 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2735 TARGET or a statement clobbering the memory reference REF, in which
2736 case false is returned. The walk starts with VUSE, one argument of PHI. */
2737
2738 static bool
2739 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
2740 ao_ref *ref, tree vuse, unsigned int &limit, bitmap *visited,
2741 bool abort_on_visited,
2742 void *(*translate)(ao_ref *, tree, void *, bool *),
2743 void *data)
2744 {
2745 basic_block bb = gimple_bb (phi);
2746
2747 if (!*visited)
2748 *visited = BITMAP_ALLOC (NULL);
2749
2750 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2751
2752 /* Walk until we hit the target. */
2753 while (vuse != target)
2754 {
2755 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2756 /* If we are searching for the target VUSE by walking up to
2757 TARGET_BB dominating the original PHI we are finished once
2758 we reach a default def or a definition in a block dominating
2759 that block. Update TARGET and return. */
2760 if (!target
2761 && (gimple_nop_p (def_stmt)
2762 || dominated_by_p (CDI_DOMINATORS,
2763 target_bb, gimple_bb (def_stmt))))
2764 {
2765 target = vuse;
2766 return true;
2767 }
2768
2769 /* Recurse for PHI nodes. */
2770 if (gimple_code (def_stmt) == GIMPLE_PHI)
2771 {
2772 /* An already visited PHI node ends the walk successfully. */
2773 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2774 return !abort_on_visited;
2775 vuse = get_continuation_for_phi (def_stmt, ref, limit,
2776 visited, abort_on_visited,
2777 translate, data);
2778 if (!vuse)
2779 return false;
2780 continue;
2781 }
2782 else if (gimple_nop_p (def_stmt))
2783 return false;
2784 else
2785 {
2786 /* A clobbering statement or the end of the IL ends the walk, failing. */
2787 if ((int)limit <= 0)
2788 return false;
2789 --limit;
2790 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2791 {
2792 bool disambiguate_only = true;
2793 if (translate
2794 && (*translate) (ref, vuse, data, &disambiguate_only) == NULL)
2795 ;
2796 else
2797 return false;
2798 }
2799 }
2800 /* If we reach a new basic-block, see if we already skipped it
2801 in a previous walk that ended successfully. */
2802 if (gimple_bb (def_stmt) != bb)
2803 {
2804 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2805 return !abort_on_visited;
2806 bb = gimple_bb (def_stmt);
2807 }
2808 vuse = gimple_vuse (def_stmt);
2809 }
2810 return true;
2811 }
2812
2813
2814 /* Starting from a PHI node for the virtual operand of the memory reference
2815 REF, find a continuation virtual operand that allows us to continue
2816 walking statements dominating PHI, skipping only statements that cannot
2817 possibly clobber REF. Decrements LIMIT for each alias disambiguation done
2818 and aborts the walk, returning NULL_TREE if it reaches zero.
2819 Returns NULL_TREE if no suitable virtual operand can be found. */
2820
2821 tree
2822 get_continuation_for_phi (gimple *phi, ao_ref *ref,
2823 unsigned int &limit, bitmap *visited,
2824 bool abort_on_visited,
2825 void *(*translate)(ao_ref *, tree, void *, bool *),
2826 void *data)
2827 {
2828 unsigned nargs = gimple_phi_num_args (phi);
2829
2830 /* A single-argument PHI can simply be looked through. */
2831 if (nargs == 1)
2832 return PHI_ARG_DEF (phi, 0);
2833
2834 /* For two or more arguments try to pairwise skip non-aliasing code
2835 until we hit the phi argument definition that dominates the others. */
2836 basic_block phi_bb = gimple_bb (phi);
2837 tree arg0, arg1;
2838 unsigned i;
2839
2840 /* Find a candidate for the virtual operand whose definition
2841 dominates those of all others. */
2842 /* First check whether any of the args themselves satisfy this. */
2843 for (i = 0; i < nargs; ++i)
2844 {
2845 arg0 = PHI_ARG_DEF (phi, i);
2846 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
2847 break;
2848 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
2849 if (def_bb != phi_bb
2850 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
2851 break;
2852 arg0 = NULL_TREE;
2853 }
2854 /* If not, look whether we can reach such a candidate by walking
2855 defs until we hit the immediate dominator. maybe_skip_until will
2856 do that for us. */
2857 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
2858
2859 /* Then check against the (to be) found candidate. */
2860 for (i = 0; i < nargs; ++i)
2861 {
2862 arg1 = PHI_ARG_DEF (phi, i);
2863 if (arg1 == arg0)
2864 ;
2865 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, limit, visited,
2866 abort_on_visited,
2867 /* Do not translate when walking over
2868 backedges. */
2869 dominated_by_p
2870 (CDI_DOMINATORS,
2871 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
2872 phi_bb)
2873 ? NULL : translate, data))
2874 return NULL_TREE;
2875 }
2876
2877 return arg0;
2878 }
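
/* Illustrative CFG situation handled above (sketch):

	    BB1   .MEM_1 = VDEF <.MEM_0>   (dominates BB2, BB3, BB4)
	   /   \
	 BB2   BB3   .MEM_2 = VDEF <.MEM_1>
	   \   /
	    BB4   .MEM_3 = PHI <.MEM_1(BB2), .MEM_2(BB3)>

   Here .MEM_1 is the candidate whose definition dominates the other
   argument; the walk can continue at .MEM_1 provided maybe_skip_until
   shows that the definition of .MEM_2 does not clobber REF.  */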
2879
2880 /* Based on the memory reference REF and its virtual use VUSE call
2881 WALKER for each virtual use that is equivalent to VUSE, including VUSE
2882 itself. That is, for each virtual use whose defining statement
2883 does not clobber REF.
2884
2885 WALKER is called with REF, the current virtual use and DATA. If
2886 WALKER returns non-NULL the walk stops and its result is returned.
2887 At the end of a non-successful walk NULL is returned.
2888
2889 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
2890 use whose defining statement may clobber REF, and DATA.
2891 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2892 If TRANSLATE returns non-NULL the walk stops and its result is returned.
2893 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
2894 to adjust REF and *DATA to make that valid.
2895
2896 VALUEIZE if non-NULL is called with the next VUSE that is considered
2897 and its return value is substituted for that VUSE. This can be used to
2898 implement optimistic value-numbering for example. Note that the
2899 VUSE argument is assumed to be valueized already.
2900
2901 LIMIT specifies the number of alias queries we are allowed to do;
2902 the walk stops when it reaches zero and NULL is returned. LIMIT
2903 is decremented by the number of alias queries (plus adjustments
2904 done by the callbacks) upon return.
2905
2906 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
2907
2908 void *
2909 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
2910 void *(*walker)(ao_ref *, tree, void *),
2911 void *(*translate)(ao_ref *, tree, void *, bool *),
2912 tree (*valueize)(tree),
2913 unsigned &limit, void *data)
2914 {
2915 bitmap visited = NULL;
2916 void *res;
2917 bool translated = false;
2918
2919 timevar_push (TV_ALIAS_STMT_WALK);
2920
2921 do
2922 {
2923 gimple *def_stmt;
2924
2925 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2926 res = (*walker) (ref, vuse, data);
2927 /* Abort walk. */
2928 if (res == (void *)-1)
2929 {
2930 res = NULL;
2931 break;
2932 }
2933 /* Lookup succeeded. */
2934 else if (res != NULL)
2935 break;
2936
2937 if (valueize)
2938 {
2939 vuse = valueize (vuse);
2940 if (!vuse)
2941 {
2942 res = NULL;
2943 break;
2944 }
2945 }
2946 def_stmt = SSA_NAME_DEF_STMT (vuse);
2947 if (gimple_nop_p (def_stmt))
2948 break;
2949 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2950 vuse = get_continuation_for_phi (def_stmt, ref, limit,
2951 &visited, translated, translate, data);
2952 else
2953 {
2954 if ((int)limit <= 0)
2955 {
2956 res = NULL;
2957 break;
2958 }
2959 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2960 {
2961 if (!translate)
2962 break;
2963 bool disambiguate_only = false;
2964 res = (*translate) (ref, vuse, data, &disambiguate_only);
2965 /* Failed lookup and translation. */
2966 if (res == (void *)-1)
2967 {
2968 res = NULL;
2969 break;
2970 }
2971 /* Lookup succeeded. */
2972 else if (res != NULL)
2973 break;
2974 /* Translation succeeded, continue walking. */
2975 translated = translated || !disambiguate_only;
2976 }
2977 vuse = gimple_vuse (def_stmt);
2978 }
2979 }
2980 while (vuse);
2981
2982 if (visited)
2983 BITMAP_FREE (visited);
2984
2985 timevar_pop (TV_ALIAS_STMT_WALK);
2986
2987 return res;
2988 }
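
/* Example walker (sketch; MY_WALKER and the budget are hypothetical):
   a lookup that wants the first vuse at which something interesting
   is known could be set up as

     static void *
     my_walker (ao_ref *ref, tree vuse, void *data)
     {
       ... return non-NULL to stop the walk with that result,
	   (void *)-1 to abort, or NULL to continue ...
     }

     unsigned limit = 1000;   (arbitrary alias-query budget)
     void *res = walk_non_aliased_vuses (&r, vuse, my_walker,
					 NULL, NULL, limit, data);

   tree-ssa-sccvn.c uses this interface for value-numbering memory
   references.  */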
2989
2990
2991 /* Based on the memory reference REF call WALKER for each vdef whose
2992 defining statement may clobber REF, starting with VDEF. If REF
2993 is NULL_TREE, each defining statement is visited.
2994
2995 WALKER is called with REF, the current vdef and DATA. If WALKER
2996 returns true the walk is stopped, otherwise it continues.
2997
2998 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
2999 The pointer may be NULL, in which case this information is not tracked.
3000
3001 At PHI nodes walk_aliased_vdefs forks into one walk for each
3002 PHI argument (but only one walk continues at merge points); the
3003 walk is considered successful if any of the forked walks was.
3004
3005 The function returns the number of statements walked or -1 if
3006 LIMIT stmts were walked and the walk was aborted at this point.
3007 If LIMIT is zero the walk is not aborted. */
3008
3009 static int
3010 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
3011 bool (*walker)(ao_ref *, tree, void *), void *data,
3012 bitmap *visited, unsigned int cnt,
3013 bool *function_entry_reached, unsigned limit)
3014 {
3015 do
3016 {
3017 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
3018
3019 if (*visited
3020 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
3021 return cnt;
3022
3023 if (gimple_nop_p (def_stmt))
3024 {
3025 if (function_entry_reached)
3026 *function_entry_reached = true;
3027 return cnt;
3028 }
3029 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3030 {
3031 unsigned i;
3032 if (!*visited)
3033 *visited = BITMAP_ALLOC (NULL);
3034 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3035 {
3036 int res = walk_aliased_vdefs_1 (ref,
3037 gimple_phi_arg_def (def_stmt, i),
3038 walker, data, visited, cnt,
3039 function_entry_reached, limit);
3040 if (res == -1)
3041 return -1;
3042 cnt = res;
3043 }
3044 return cnt;
3045 }
3046
3047 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3048 cnt++;
3049 if (cnt == limit)
3050 return -1;
3051 if ((!ref
3052 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
3053 && (*walker) (ref, vdef, data))
3054 return cnt;
3055
3056 vdef = gimple_vuse (def_stmt);
3057 }
3058 while (1);
3059 }
3060
3061 int
3062 walk_aliased_vdefs (ao_ref *ref, tree vdef,
3063 bool (*walker)(ao_ref *, tree, void *), void *data,
3064 bitmap *visited,
3065 bool *function_entry_reached, unsigned int limit)
3066 {
3067 bitmap local_visited = NULL;
3068 int ret;
3069
3070 timevar_push (TV_ALIAS_STMT_WALK);
3071
3072 if (function_entry_reached)
3073 *function_entry_reached = false;
3074
3075 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
3076 visited ? visited : &local_visited, 0,
3077 function_entry_reached, limit);
3078 if (local_visited)
3079 BITMAP_FREE (local_visited);
3080
3081 timevar_pop (TV_ALIAS_STMT_WALK);
3082
3083 return ret;
3084 }
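
/* Example (sketch; MARK_CLOBBERER and USE_STMT are hypothetical):
   collecting all may-clobbering definitions of a reference within
   a statement budget:

     static bool
     mark_clobberer (ao_ref *ref, tree vdef, void *data)
     {
       ... record SSA_NAME_DEF_STMT (vdef); return false to keep
	   walking, true to stop the walk ...
     }

     int n = walk_aliased_vdefs (&r, gimple_vuse (use_stmt),
				 mark_clobberer, NULL, NULL,
				 NULL, 100);
     if (n == -1)
       ...  budget of 100 statements exhausted, be conservative ...  */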
3085