/* Alias analysis for trees.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "langhooks.h"
#include "dumpfile.h"
#include "tree-eh.h"
#include "tree-dfa.h"
#include "ipa-reference.h"
#include "varasm.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

    bool stmt_may_clobber_ref_p (gimple *, tree)

      This function queries if a statement may invalidate (parts of)
      the memory designated by the reference tree argument.

    bool ref_maybe_used_by_stmt_p (gimple *, tree)

      This function queries if a statement may need (parts of) the
      memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

    bool refs_may_alias_p (tree, tree)

      This function tries to disambiguate two reference trees.

    bool ptr_deref_may_alias_global_p (tree)

      This function queries if dereferencing a pointer variable may
      alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */

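/* A minimal usage sketch (editor's addition, not from the original
   sources): a pass holding a statement STMT and a reference tree REF
   would typically consult the oracle via the entry-points above, e.g.

     if (!stmt_may_clobber_ref_p (stmt, ref)
	 && !ref_maybe_used_by_stmt_p (stmt, ref))
       ;  /* STMT neither writes nor reads *REF.  */

   Both answers are conservative: a "true" result only means the
   disambiguators could not prove independence.  */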

/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_of_decl_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_of_decl_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  fprintf (s, "  nonoverlapping_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_component_refs_p_no_alias,
	   alias_stats.nonoverlapping_component_refs_p_no_alias
	   + alias_stats.nonoverlapping_component_refs_p_may_alias);
  fprintf (s, "  nonoverlapping_component_refs_of_decl_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_component_refs_of_decl_p_no_alias,
	   alias_stats.nonoverlapping_component_refs_of_decl_p_no_alias
	   + alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias);
  fprintf (s, "  aliasing_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.aliasing_component_refs_p_no_alias,
	   alias_stats.aliasing_component_refs_p_no_alias
	   + alias_stats.aliasing_component_refs_p_may_alias);
  dump_alias_stats_in_alias_c (s);
}


/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

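/* Example (editor's sketch, not from the original sources): given

     int a;
     void f (int *p) { *p = 1; a = 2; }

   ptr_deref_may_alias_decl_p (p_1, a) can return false either because
   a is never aliased (its address is not taken) or because the
   points-to set of p_1 does not include a.  */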
/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two identical
     pointers.  In this case we still want to say both pointers alias, so
     shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}

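/* Example (editor's sketch): data-dependence analysis may feed us an
   expression like p_1 + 4 rather than a plain SSA name.  The
   POINTER_PLUS_EXPR stripping above means a query about p_1 + 4
   versus q_3 reduces to ptr_derefs_may_alias_p (p_1, q_3); only the
   points-to solutions of the SSA name bases matter, offsets are dealt
   with by the callers.  */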
/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to an SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs, which would need points-to adjustments to track
     them in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that the restrict pointed-to
	 object may in fact be obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */

  return false;
}

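/* Example (editor's sketch): for

     static int a;
     int *f (int *p) { return p == &a ? 0 : p; }

   the comparison can be folded if the points-to set of p_1 provably
   excludes a.  Note the guards above: restrict tags, interposable
   variables and addresses that may bind to NULL all force the
   conservative "don't know" answer.  */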
/* Return true if a memory reference with base BASE may refer to
   global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We could ask the oracle whether an artificial pointer
     dereference, with points-to information covering all global
     memory (what about non-address-taken memory?), may be clobbered
     by this call.  As there is at the moment no convenient way of
     doing that without generating garbage, do some manual checking
     instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}


/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}

/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  || pt->vars_contains_escaped
	  || pt->vars_contains_escaped_heap
	  || pt->vars_contains_restrict)
	{
	  const char *comma = "";
	  fprintf (file, " (");
	  if (pt->vars_contains_nonlocal)
	    {
	      fprintf (file, "nonlocal");
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped)
	    {
	      fprintf (file, "%sescaped", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped_heap)
	    {
	      fprintf (file, "%sescaped heap", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_restrict)
	    {
	      fprintf (file, "%srestrict", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_interposable)
	    fprintf (file, "%sinterposable", comma);
	  fprintf (file, ")");
	}
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}

/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}

/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be at or
   after the pointer target, never before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}

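/* Usage sketch (editor's addition): for a call like memset (p, 0, n)
   a caller can build the reference clobbered by the call with

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, gimple_call_arg (call, 0),
				    gimple_call_arg (call, 2));

   If the size argument is not a known constant, the extent simply
   stays unknown (size == max_size == -1).  */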
/* S1 and S2 are TYPE_SIZE or DECL_SIZE.  Compare them:
   Return -1 if S1 < S2
   Return 1 if S1 > S2
   Return 0 if equal or incomparable.  */

static int
compare_sizes (tree s1, tree s2)
{
  if (!s1 || !s2)
    return 0;

  poly_uint64 size1;
  poly_uint64 size2;

  if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
    return 0;
  if (known_lt (size1, size2))
    return -1;
  if (known_lt (size2, size1))
    return 1;
  return 0;
}

/* Compare TYPE1 and TYPE2 by their sizes.
   Return -1 if size of TYPE1 < size of TYPE2
   Return 1 if size of TYPE1 > size of TYPE2
   Return 0 if types are of equal sizes or we cannot compare them.  */

static int
compare_type_sizes (tree type1, tree type2)
{
  /* Be conservative for arrays and vectors.  We want to support partial
     overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c.  */
  while (TREE_CODE (type1) == ARRAY_TYPE
	 || TREE_CODE (type1) == VECTOR_TYPE)
    type1 = TREE_TYPE (type1);
  while (TREE_CODE (type2) == ARRAY_TYPE
	 || TREE_CODE (type2) == VECTOR_TYPE)
    type2 = TREE_TYPE (type2);
  return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
}

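/* Example (editor's sketch): compare_type_sizes (int[3], int) peels
   the array on the left and ends up comparing int with int, returning
   0 ("equal or incomparable").  This is what keeps two int[3] accesses
   comparable even though they may partially overlap by a multiple of
   the element size (gcc.dg/torture/alias-2.c).  */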
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* Handle the most common case first.  */
  if (type1 == type2)
    return 1;

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}

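/* Example (editor's sketch): "const int" and "int" share their main
   variant, so same_type_for_tbaa returns 1 for them.  Structurally
   identical RECORD_TYPEs from different translation units are matched
   via TYPE_CANONICAL, while array types and the Ada subtype situation
   described above land in the -1 ("cannot decide") bucket.  */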
/* Return true if TYPE is a composite type (i.e. we may apply one of the
   handled component references to it).  */

static bool
type_has_components_p (tree type)
{
  return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
	 || TREE_CODE (type) == COMPLEX_TYPE;
}

/* Determine if the two component references REF1 and REF2, which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference, may alias.  REF2 is the only one that can
   be a decl, in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   poly_int64 offset2, poly_int64 max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p1 = 0, same_p2 = 0;
  bool maybe_match = false;
  tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    {
      /* Generally access paths are monotone in the size of object.  The
	 exception are trailing arrays of structures, i.e.
	   struct a {int array[0];};
	 or
	   struct a {int array1[0]; int array[];};
	 Such a struct has size 0 but accesses to a.array may have non-zero
	 size.  In this case the size of TREE_TYPE (base1) is smaller than
	 the size of TREE_TYPE (TREE_OPERAND (base1, 0)).

	 Because we compare sizes of arrays just by sizes of their elements,
	 we only need to care about zero-sized array fields here.  */
      if (TREE_CODE (base1) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (base1, 1))) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base1, 1)))
	      || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base1, 1)))))
	  && array_at_struct_end_p (base1))
	{
	  gcc_checking_assert (!end_struct_ref1);
	  end_struct_ref1 = base1;
	}
      base1 = TREE_OPERAND (base1, 0);
    }
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    {
      if (TREE_CODE (base2) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (base2, 1))) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base2, 1)))
	      || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base2, 1)))))
	  && array_at_struct_end_p (base2))
	{
	  gcc_checking_assert (!end_struct_ref2);
	  end_struct_ref2 = base2;
	}
      base2 = TREE_OPERAND (base2, 0);
    }
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.
     This however only makes sense if type2 is big enough to hold type1.  */
  int cmp_outer = compare_type_sizes (type2, type1);

  /* If type2 is big enough to contain type1 walk its access path.
     We also need to take care of arrays at the end of structs that may
     extend beyond the end of the structure.  */
  if (cmp_outer >= 0
      || (end_struct_ref2
	  && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
    {
      refp = &ref2;
      while (true)
	{
	  /* We walk from inner type to the outer types.  If the type we
	     see is already too large to be part of type1, terminate the
	     search.  */
	  int cmp = compare_type_sizes (type1, TREE_TYPE (*refp));

	  if (cmp < 0
	      && (!end_struct_ref1
		  || compare_type_sizes (TREE_TYPE (end_struct_ref1),
					 TREE_TYPE (*refp)) < 0))
	    break;
	  /* If types may be of the same size, see if we can decide about
	     their equality.  */
	  if (cmp == 0)
	    {
	      same_p2 = same_type_for_tbaa (TREE_TYPE (*refp), type1);
	      if (same_p2 == 1)
		break;
	      /* If we can't decide whether the types are the same, continue
		 looking for an exact match.
		 Remember however that we possibly saw a match, to bypass
		 the access path continuation tests we do later.  */
	      if (same_p2 == -1)
		maybe_match = true;
	    }
	  if (!handled_component_p (*refp))
	    break;
	  refp = &TREE_OPERAND (*refp, 0);
	}
      if (same_p2 == 1)
	{
	  poly_int64 offadj, sztmp, msztmp;
	  bool reverse;

	  /* We assume that arrays can overlap by a multiple of their element
	     size as tested in gcc.dg/torture/alias-2.c.
	     This partial overlap happens only when both arrays are bases of
	     the access and not contained within another component ref.
	     To be safe we also assume partial overlap for VLAs.  */
	  if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
	      && (!TYPE_SIZE (TREE_TYPE (base1))
		  || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
		  || (*refp == base2 && !ref2_is_decl)))
	    {
	      ++alias_stats.aliasing_component_refs_p_may_alias;
	      return true;
	    }

	  get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
	  offset2 -= offadj;
	  get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse);
	  offset1 -= offadj;
	  if (ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	    {
	      ++alias_stats.aliasing_component_refs_p_may_alias;
	      return true;
	    }
	  else
	    {
	      ++alias_stats.aliasing_component_refs_p_no_alias;
	      return false;
	    }
	}
    }

  /* If we didn't find a common base, try the other way around.  */
  if (cmp_outer <= 0
      || (end_struct_ref1
	  && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0))
    {
      refp = &ref1;
      while (true)
	{
	  int cmp = compare_type_sizes (type2, TREE_TYPE (*refp));
	  if (cmp < 0
	      && (!end_struct_ref2
		  || compare_type_sizes (TREE_TYPE (end_struct_ref2),
					 TREE_TYPE (*refp)) < 0))
	    break;
	  /* If types may be of the same size, see if we can decide about
	     their equality.  */
	  if (cmp == 0)
	    {
	      same_p1 = same_type_for_tbaa (TREE_TYPE (*refp), type2);
	      if (same_p1 == 1)
		break;
	      if (same_p1 == -1)
		maybe_match = true;
	    }
	  if (!handled_component_p (*refp))
	    break;
	  refp = &TREE_OPERAND (*refp, 0);
	}
      if (same_p1 == 1)
	{
	  poly_int64 offadj, sztmp, msztmp;
	  bool reverse;

	  if (TREE_CODE (TREE_TYPE (base2)) == ARRAY_TYPE
	      && (!TYPE_SIZE (TREE_TYPE (base2))
		  || TREE_CODE (TYPE_SIZE (TREE_TYPE (base2))) != INTEGER_CST
		  || (*refp == base1 && !ref2_is_decl)))
	    {
	      ++alias_stats.aliasing_component_refs_p_may_alias;
	      return true;
	    }

	  get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
	  offset1 -= offadj;
	  get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse);
	  offset2 -= offadj;
	  if (ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	    {
	      ++alias_stats.aliasing_component_refs_p_may_alias;
	      return true;
	    }
	  else
	    {
	      ++alias_stats.aliasing_component_refs_p_no_alias;
	      return false;
	    }
	}
    }

  /* In the following code we make the assumption that the types in access
     paths do not overlap and thus accesses alias only if one path can be
     a continuation of another.  If we were not able to decide about type
     equivalence, we need to give up.  */
  if (maybe_match)
    return true;

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 in the
     tail of path2.  */
  if (compare_type_sizes (TREE_TYPE (ref2), type1) >= 0
      && (!end_struct_ref1
	  || compare_type_sizes (TREE_TYPE (ref2),
				 TREE_TYPE (end_struct_ref1)) >= 0)
      && type_has_components_p (TREE_TYPE (ref2))
      && (base1_alias_set == ref2_alias_set
	  || alias_set_subset_of (base1_alias_set, ref2_alias_set)))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl
      && compare_type_sizes (TREE_TYPE (ref1), type2) >= 0
      && (!end_struct_ref2
	  || compare_type_sizes (TREE_TYPE (ref1),
				 TREE_TYPE (end_struct_ref2)) >= 0)
      && type_has_components_p (TREE_TYPE (ref1))
      && (base2_alias_set == ref1_alias_set
	  || alias_set_subset_of (base2_alias_set, ref1_alias_set)))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  ++alias_stats.aliasing_component_refs_p_no_alias;
  return false;
}

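/* Worked example (editor's addition) for the struct A/struct B pair
   in the comment at the top of aliasing_component_refs_p: for q->i
   and p->a.j the walk finds type A on the access path of p->a.j,
   rebases both offsets relative to that common A, and compares the
   ranges of A.i and A.j, which do not overlap, so the two accesses
   are disambiguated.  */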
/* Return true if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.  */

static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	{
	  ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
	  return false;
	}
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	{
	  ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
	  return false;
	}
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* Bases must be either the same or incomparable.  */
  gcc_checking_assert (ref1 == ref2
		       || (DECL_P (ref1) && DECL_P (ref2)
			   && compare_base_decls (ref1, ref2) != 0));

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      do
	{
	  if (component_refs1.is_empty ())
	    {
	      ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
	      return false;
	    }
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    {
	      ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
	      return false;
	    }
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	{
	  ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
	  return false;
	}

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	{
	  ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
	  return false;
	}

      if (field1 != field2)
	{
	  /* A field and its representative need to be considered the
	     same.  */
	  if (DECL_BIT_FIELD_REPRESENTATIVE (field1) == field2
	      || DECL_BIT_FIELD_REPRESENTATIVE (field2) == field1)
	    {
	      ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
	      return false;
	    }
	  /* Different fields of the same record type cannot overlap.
	     ??? Bitfields can overlap at RTL level so punt on them.  */
	  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
	    {
	      ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
	      return false;
	    }
	  ++alias_stats.nonoverlapping_component_refs_of_decl_p_no_alias;
	  return true;
	}
    }

  ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
  return false;
}

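/* Example (editor's sketch): with

     struct S { int a; int b; } s;

   s.a vs. s.b pops two COMPONENT_REFs of the same RECORD_TYPE with
   different non-bitfield fields and returns true.  With

     union U { int a; float b; } u;

   u.a vs. u.b fails the RECORD_TYPE check and conservatively returns
   false.  */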
/* qsort compare function to sort FIELD_DECLs by their
   DECL_FIELD_CONTEXT TYPE_UID.  */

static inline int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
  unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}

/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  if (!flag_strict_aliasing
      || !x || !y
      || !handled_component_p (x)
      || !handled_component_p (y))
    {
      ++alias_stats.nonoverlapping_component_refs_p_may_alias;
      return false;
    }

  auto_vec<const_tree, 16> fieldsx;
  while (handled_component_p (x))
    {
      if (TREE_CODE (x) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (x, 1);
	  tree type = DECL_FIELD_CONTEXT (field);
	  if (TREE_CODE (type) == RECORD_TYPE)
	    fieldsx.safe_push (field);
	}
      else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
	       || TREE_CODE (x) == BIT_FIELD_REF)
	fieldsx.truncate (0);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    {
      ++alias_stats.nonoverlapping_component_refs_p_may_alias;
      return false;
    }
  auto_vec<const_tree, 16> fieldsy;
  while (handled_component_p (y))
    {
      if (TREE_CODE (y) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (y, 1);
	  tree type = DECL_FIELD_CONTEXT (field);
	  if (TREE_CODE (type) == RECORD_TYPE)
	    fieldsy.safe_push (field);
	}
      else if (TREE_CODE (y) == VIEW_CONVERT_EXPR
	       || TREE_CODE (y) == BIT_FIELD_REF)
	fieldsy.truncate (0);
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    {
      ++alias_stats.nonoverlapping_component_refs_p_may_alias;
      return false;
    }

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    {
      if ((DECL_FIELD_CONTEXT (fieldsx[0])
	   == DECL_FIELD_CONTEXT (fieldsy[0]))
	  && fieldsx[0] != fieldsy[0]
	  && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])))
	{
	  ++alias_stats.nonoverlapping_component_refs_p_no_alias;
	  return true;
	}
      else
	{
	  ++alias_stats.nonoverlapping_component_refs_p_may_alias;
	  return false;
	}
    }

  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	std::swap (fieldsx[0], fieldsx[1]);
    }
  else
    fieldsx.qsort (ncr_compar);

  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	std::swap (fieldsy[0], fieldsy[1]);
    }
  else
    fieldsy.qsort (ncr_compar);

  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];
      tree typex = DECL_FIELD_CONTEXT (fieldx);
      tree typey = DECL_FIELD_CONTEXT (fieldy);
      if (typex == typey)
	{
	  /* We're left with accessing different fields of a structure,
	     no possible overlap.  */
	  if (fieldx != fieldy)
	    {
	      /* A field and its representative need to be considered the
		 same.  */
	      if (DECL_BIT_FIELD_REPRESENTATIVE (fieldx) == fieldy
		  || DECL_BIT_FIELD_REPRESENTATIVE (fieldy) == fieldx)
		{
		  ++alias_stats.nonoverlapping_component_refs_p_may_alias;
		  return false;
		}
	      /* Different fields of the same record type cannot overlap.
		 ??? Bitfields can overlap at RTL level so punt on them.  */
	      if (DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy))
		{
		  ++alias_stats.nonoverlapping_component_refs_p_may_alias;
		  return false;
		}
	      ++alias_stats.nonoverlapping_component_refs_p_no_alias;
	      return true;
	    }
	}
      if (TYPE_UID (typex) < TYPE_UID (typey))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  ++alias_stats.nonoverlapping_component_refs_p_may_alias;
  return false;
}

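/* Example (editor's sketch): for two pointer-based accesses p->a.x
   and q->b.y the stacks collect the FIELD_DECLs on each path; if some
   pair shares its containing RECORD_TYPE but differs in the field,
   the accesses cannot overlap no matter where p and q point, provided
   neither path contains a VIEW_CONVERT_EXPR or BIT_FIELD_REF (which
   reset the stacks above).  */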

/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       poly_int64 offset1, poly_int64 max_size1,
		       tree ref2, tree base2,
		       poly_int64 offset2, poly_int64 max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot
     alias.  */
  if (compare_base_decls (base1, base2) == 0)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
    return false;

  return true;
}

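/* Example (editor's sketch): with

     int a[4], b[4];

   a[i] vs. b[j] fails compare_base_decls and never aliases; a[0] vs.
   a[1] is resolved by the offset ranges; and accesses with variable
   positions within the same decl are left to
   nonoverlapping_component_refs_of_decl_p.  */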
/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       poly_int64 offset1, poly_int64 max_size1,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       poly_int64 offset2, poly_int64 max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);
  poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0)
    return true;

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (/* ??? This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && compare_sizes (DECL_SIZE (base2),
			TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  poly_int64 doffset1 = offset1;
  poly_offset_int doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
      tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
      /* If second reference is view-converted, give up now.  */
      if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
	return true;
    }

  /* If first reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.

     But avoid treating variable length arrays as "objects", instead
     assume they can overlap by an exact multiple of their element size.
     See gcc.dg/torture/alias-2.c.  */
  if (((TREE_CODE (base1) != TARGET_MEM_REF
	|| (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
       && (TREE_CODE (dbase2) != TARGET_MEM_REF
	   || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1
      && (TREE_CODE (TREE_TYPE (base1)) != ARRAY_TYPE
	  || (TYPE_SIZE (TREE_TYPE (base1))
	      && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) == INTEGER_CST)))
    return ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2,
				      /* Only if the other reference is an
					 actual decl can we safely check just
					 the toplevel part of access path 1.  */
				      same_type_for_tbaa (TREE_TYPE (dbase2),
							  TREE_TYPE (base2))
				      == 1);

  return true;
}

/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   poly_int64 offset1, poly_int64 max_size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   poly_int64 offset2, poly_int64 max_size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
      poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
      return ranges_maybe_overlap_p (offset1 + moff1, max_size1,
				     offset2 + moff2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0
      || base2_alias_set == 0)
    return true;

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1
      /* But avoid treating arrays as "objects", instead assume they
	 can overlap by an exact multiple of their element size.
	 See gcc.dg/torture/alias-2.c.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
    return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, false);

  return true;
}

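/* Example (editor's sketch): for

     void f (int *p, float *q) { *p = 1; *q = 2.0f; }

   the alias sets of int and float do not conflict, so the type-based
   check above disambiguates the two stores under -fstrict-aliasing;
   with -fno-strict-aliasing only the points-to based checks remain.  */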
1678 /* Return true, if the two memory references REF1 and REF2 may alias. */
1679
1680 static bool
1681 refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1682 {
1683 tree base1, base2;
1684 poly_int64 offset1 = 0, offset2 = 0;
1685 poly_int64 max_size1 = -1, max_size2 = -1;
1686 bool var1_p, var2_p, ind1_p, ind2_p;
1687
1688 gcc_checking_assert ((!ref1->ref
1689 || TREE_CODE (ref1->ref) == SSA_NAME
1690 || DECL_P (ref1->ref)
1691 || TREE_CODE (ref1->ref) == STRING_CST
1692 || handled_component_p (ref1->ref)
1693 || TREE_CODE (ref1->ref) == MEM_REF
1694 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
1695 && (!ref2->ref
1696 || TREE_CODE (ref2->ref) == SSA_NAME
1697 || DECL_P (ref2->ref)
1698 || TREE_CODE (ref2->ref) == STRING_CST
1699 || handled_component_p (ref2->ref)
1700 || TREE_CODE (ref2->ref) == MEM_REF
1701 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
1702
1703 /* Decompose the references into their base objects and the access. */
1704 base1 = ao_ref_base (ref1);
1705 offset1 = ref1->offset;
1706 max_size1 = ref1->max_size;
1707 base2 = ao_ref_base (ref2);
1708 offset2 = ref2->offset;
1709 max_size2 = ref2->max_size;
1710
1711 /* We can end up with registers or constants as bases for example from
1712 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
1713 which is seen as a struct copy. */
1714 if (TREE_CODE (base1) == SSA_NAME
1715 || TREE_CODE (base1) == CONST_DECL
1716 || TREE_CODE (base1) == CONSTRUCTOR
1717 || TREE_CODE (base1) == ADDR_EXPR
1718 || CONSTANT_CLASS_P (base1)
1719 || TREE_CODE (base2) == SSA_NAME
1720 || TREE_CODE (base2) == CONST_DECL
1721 || TREE_CODE (base2) == CONSTRUCTOR
1722 || TREE_CODE (base2) == ADDR_EXPR
1723 || CONSTANT_CLASS_P (base2))
1724 return false;
1725
1726 /* We can end up referring to code via function and label decls.
1727      As we likely do not properly track code aliases, conservatively
1728      bail out.  */
1729 if (TREE_CODE (base1) == FUNCTION_DECL
1730 || TREE_CODE (base1) == LABEL_DECL
1731 || TREE_CODE (base2) == FUNCTION_DECL
1732 || TREE_CODE (base2) == LABEL_DECL)
1733 return true;
1734
1735 /* Two volatile accesses always conflict. */
1736 if (ref1->volatile_p
1737 && ref2->volatile_p)
1738 return true;
1739
1740   /* Defer to simple offset-based disambiguation if we have
1741      references based on two decls.  Do this before deferring to
1742 TBAA to handle must-alias cases in conformance with the
1743 GCC extension of allowing type-punning through unions. */
1744 var1_p = DECL_P (base1);
1745 var2_p = DECL_P (base2);
1746 if (var1_p && var2_p)
1747 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
1748 ref2->ref, base2, offset2, max_size2);
1749
1750 /* Handle restrict based accesses.
1751 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
1752 here. */
1753 tree rbase1 = base1;
1754 tree rbase2 = base2;
1755 if (var1_p)
1756 {
1757 rbase1 = ref1->ref;
1758 if (rbase1)
1759 while (handled_component_p (rbase1))
1760 rbase1 = TREE_OPERAND (rbase1, 0);
1761 }
1762 if (var2_p)
1763 {
1764 rbase2 = ref2->ref;
1765 if (rbase2)
1766 while (handled_component_p (rbase2))
1767 rbase2 = TREE_OPERAND (rbase2, 0);
1768 }
1769 if (rbase1 && rbase2
1770 && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
1771 && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
1772 /* If the accesses are in the same restrict clique... */
1773 && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
1774 /* But based on different pointers they do not alias. */
1775 && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
1776 return false;
1777
1778 ind1_p = (TREE_CODE (base1) == MEM_REF
1779 || TREE_CODE (base1) == TARGET_MEM_REF);
1780 ind2_p = (TREE_CODE (base2) == MEM_REF
1781 || TREE_CODE (base2) == TARGET_MEM_REF);
1782
1783 /* Canonicalize the pointer-vs-decl case. */
1784 if (ind1_p && var2_p)
1785 {
1786 std::swap (offset1, offset2);
1787 std::swap (max_size1, max_size2);
1788 std::swap (base1, base2);
1789 std::swap (ref1, ref2);
1790 var1_p = true;
1791 ind1_p = false;
1792 var2_p = false;
1793 ind2_p = true;
1794 }
1795
1796 /* First defer to TBAA if possible. */
1797 if (tbaa_p
1798 && flag_strict_aliasing
1799 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
1800 ao_ref_alias_set (ref2)))
1801 return false;
1802
1803 /* If the reference is based on a pointer that points to memory
1804      that may not be written to, then the other reference cannot possibly
1805 clobber it. */
1806 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
1807 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
1808 || (ind1_p
1809 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
1810 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
1811 return false;
1812
1813 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
1814 if (var1_p && ind2_p)
1815 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
1816 offset2, max_size2,
1817 ao_ref_alias_set (ref2),
1818 ao_ref_base_alias_set (ref2),
1819 ref1->ref, base1,
1820 offset1, max_size1,
1821 ao_ref_alias_set (ref1),
1822 ao_ref_base_alias_set (ref1),
1823 tbaa_p);
1824 else if (ind1_p && ind2_p)
1825 return indirect_refs_may_alias_p (ref1->ref, base1,
1826 offset1, max_size1,
1827 ao_ref_alias_set (ref1),
1828 ao_ref_base_alias_set (ref1),
1829 ref2->ref, base2,
1830 offset2, max_size2,
1831 ao_ref_alias_set (ref2),
1832 ao_ref_base_alias_set (ref2),
1833 tbaa_p);
1834
1835 gcc_unreachable ();
1836 }
1837
1838 /* Return true if the two memory references REF1 and REF2 may alias,
1839    and update the query statistics.  */
1840
1841 bool
1842 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1843 {
1844 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
1845 if (res)
1846 ++alias_stats.refs_may_alias_p_may_alias;
1847 else
1848 ++alias_stats.refs_may_alias_p_no_alias;
1849 return res;
1850 }
1851
1852 static bool
1853 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
1854 {
1855 ao_ref r1;
1856 ao_ref_init (&r1, ref1);
1857 return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
1858 }
1859
1860 bool
1861 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
1862 {
1863 ao_ref r1, r2;
1864 ao_ref_init (&r1, ref1);
1865 ao_ref_init (&r2, ref2);
1866 return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
1867 }
1868
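/* A minimal usage sketch (ref1 and ref2 stand for reference trees
   taken from some statement pair):

     if (!refs_may_alias_p (ref1, ref2, true))
       ... the two accesses are independent ...

   Passing TBAA_P = false, as the dependence queries below do, asks
   the same question without relying on type-based rules.  */
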
1869 /* Returns true if there is an anti-dependence for the STORE that
1870 executes after the LOAD. */
1871
1872 bool
1873 refs_anti_dependent_p (tree load, tree store)
1874 {
1875 ao_ref r1, r2;
1876 ao_ref_init (&r1, load);
1877 ao_ref_init (&r2, store);
1878 return refs_may_alias_p_1 (&r1, &r2, false);
1879 }
1880
1881 /* Returns true if there is an output dependence for the stores
1882 STORE1 and STORE2. */
1883
1884 bool
1885 refs_output_dependent_p (tree store1, tree store2)
1886 {
1887 ao_ref r1, r2;
1888 ao_ref_init (&r1, store1);
1889 ao_ref_init (&r2, store2);
1890 return refs_may_alias_p_1 (&r1, &r2, false);
1891 }
1892
1893 /* If the call CALL may use the memory reference REF return true,
1894 otherwise return false. */
1895
1896 static bool
1897 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
1898 {
1899 tree base, callee;
1900 unsigned i;
1901 int flags = gimple_call_flags (call);
1902
1903 /* Const functions without a static chain do not implicitly use memory. */
1904 if (!gimple_call_chain (call)
1905 && (flags & (ECF_CONST|ECF_NOVOPS)))
1906 goto process_args;
1907
1908 base = ao_ref_base (ref);
1909 if (!base)
1910 return true;
1911
1912 /* A call that is not without side-effects might involve volatile
1913 accesses and thus conflicts with all other volatile accesses. */
1914 if (ref->volatile_p)
1915 return true;
1916
1917 /* If the reference is based on a decl that is not aliased the call
1918 cannot possibly use it. */
1919 if (DECL_P (base)
1920 && !may_be_aliased (base)
1921 /* But local statics can be used through recursion. */
1922 && !is_global_var (base))
1923 goto process_args;
1924
1925 callee = gimple_call_fndecl (call);
1926
1927   /* Explicitly handle those builtin functions that do not act as
1928 escape points. See tree-ssa-structalias.c:find_func_aliases
1929 for the list of builtins we might need to handle here. */
1930 if (callee != NULL_TREE
1931 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1932 switch (DECL_FUNCTION_CODE (callee))
1933 {
1934 /* All the following functions read memory pointed to by
1935 their second argument. strcat/strncat additionally
1936 	 read memory pointed to by the first argument.  */
1937 case BUILT_IN_STRCAT:
1938 case BUILT_IN_STRNCAT:
1939 {
1940 ao_ref dref;
1941 ao_ref_init_from_ptr_and_size (&dref,
1942 gimple_call_arg (call, 0),
1943 NULL_TREE);
1944 if (refs_may_alias_p_1 (&dref, ref, false))
1945 return true;
1946 }
1947 /* FALLTHRU */
1948 case BUILT_IN_STRCPY:
1949 case BUILT_IN_STRNCPY:
1950 case BUILT_IN_MEMCPY:
1951 case BUILT_IN_MEMMOVE:
1952 case BUILT_IN_MEMPCPY:
1953 case BUILT_IN_STPCPY:
1954 case BUILT_IN_STPNCPY:
1955 case BUILT_IN_TM_MEMCPY:
1956 case BUILT_IN_TM_MEMMOVE:
1957 {
1958 ao_ref dref;
1959 tree size = NULL_TREE;
1960 if (gimple_call_num_args (call) == 3)
1961 size = gimple_call_arg (call, 2);
1962 ao_ref_init_from_ptr_and_size (&dref,
1963 gimple_call_arg (call, 1),
1964 size);
1965 return refs_may_alias_p_1 (&dref, ref, false);
1966 }
1967 case BUILT_IN_STRCAT_CHK:
1968 case BUILT_IN_STRNCAT_CHK:
1969 {
1970 ao_ref dref;
1971 ao_ref_init_from_ptr_and_size (&dref,
1972 gimple_call_arg (call, 0),
1973 NULL_TREE);
1974 if (refs_may_alias_p_1 (&dref, ref, false))
1975 return true;
1976 }
1977 /* FALLTHRU */
1978 case BUILT_IN_STRCPY_CHK:
1979 case BUILT_IN_STRNCPY_CHK:
1980 case BUILT_IN_MEMCPY_CHK:
1981 case BUILT_IN_MEMMOVE_CHK:
1982 case BUILT_IN_MEMPCPY_CHK:
1983 case BUILT_IN_STPCPY_CHK:
1984 case BUILT_IN_STPNCPY_CHK:
1985 {
1986 ao_ref dref;
1987 tree size = NULL_TREE;
1988 if (gimple_call_num_args (call) == 4)
1989 size = gimple_call_arg (call, 2);
1990 ao_ref_init_from_ptr_and_size (&dref,
1991 gimple_call_arg (call, 1),
1992 size);
1993 return refs_may_alias_p_1 (&dref, ref, false);
1994 }
1995 case BUILT_IN_BCOPY:
1996 {
1997 ao_ref dref;
1998 tree size = gimple_call_arg (call, 2);
1999 ao_ref_init_from_ptr_and_size (&dref,
2000 gimple_call_arg (call, 0),
2001 size);
2002 return refs_may_alias_p_1 (&dref, ref, false);
2003 }
2004
2005 /* The following functions read memory pointed to by their
2006 first argument. */
2007 CASE_BUILT_IN_TM_LOAD (1):
2008 CASE_BUILT_IN_TM_LOAD (2):
2009 CASE_BUILT_IN_TM_LOAD (4):
2010 CASE_BUILT_IN_TM_LOAD (8):
2011 CASE_BUILT_IN_TM_LOAD (FLOAT):
2012 CASE_BUILT_IN_TM_LOAD (DOUBLE):
2013 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
2014 CASE_BUILT_IN_TM_LOAD (M64):
2015 CASE_BUILT_IN_TM_LOAD (M128):
2016 CASE_BUILT_IN_TM_LOAD (M256):
2017 case BUILT_IN_TM_LOG:
2018 case BUILT_IN_TM_LOG_1:
2019 case BUILT_IN_TM_LOG_2:
2020 case BUILT_IN_TM_LOG_4:
2021 case BUILT_IN_TM_LOG_8:
2022 case BUILT_IN_TM_LOG_FLOAT:
2023 case BUILT_IN_TM_LOG_DOUBLE:
2024 case BUILT_IN_TM_LOG_LDOUBLE:
2025 case BUILT_IN_TM_LOG_M64:
2026 case BUILT_IN_TM_LOG_M128:
2027 case BUILT_IN_TM_LOG_M256:
2028 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
2029
2030 /* These read memory pointed to by the first argument. */
2031 case BUILT_IN_STRDUP:
2032 case BUILT_IN_STRNDUP:
2033 case BUILT_IN_REALLOC:
2034 {
2035 ao_ref dref;
2036 tree size = NULL_TREE;
2037 if (gimple_call_num_args (call) == 2)
2038 size = gimple_call_arg (call, 1);
2039 ao_ref_init_from_ptr_and_size (&dref,
2040 gimple_call_arg (call, 0),
2041 size);
2042 return refs_may_alias_p_1 (&dref, ref, false);
2043 }
2044 /* These read memory pointed to by the first argument. */
2045 case BUILT_IN_INDEX:
2046 case BUILT_IN_STRCHR:
2047 case BUILT_IN_STRRCHR:
2048 {
2049 ao_ref dref;
2050 ao_ref_init_from_ptr_and_size (&dref,
2051 gimple_call_arg (call, 0),
2052 NULL_TREE);
2053 return refs_may_alias_p_1 (&dref, ref, false);
2054 }
2055       /* This one reads memory pointed to by the first argument, with
2056 	 the size given by the third argument.  */
2057 case BUILT_IN_MEMCHR:
2058 {
2059 ao_ref dref;
2060 ao_ref_init_from_ptr_and_size (&dref,
2061 gimple_call_arg (call, 0),
2062 gimple_call_arg (call, 2));
2063 return refs_may_alias_p_1 (&dref, ref, false);
2064 }
2065 /* These read memory pointed to by the first and second arguments. */
2066 case BUILT_IN_STRSTR:
2067 case BUILT_IN_STRPBRK:
2068 {
2069 ao_ref dref;
2070 ao_ref_init_from_ptr_and_size (&dref,
2071 gimple_call_arg (call, 0),
2072 NULL_TREE);
2073 if (refs_may_alias_p_1 (&dref, ref, false))
2074 return true;
2075 ao_ref_init_from_ptr_and_size (&dref,
2076 gimple_call_arg (call, 1),
2077 NULL_TREE);
2078 return refs_may_alias_p_1 (&dref, ref, false);
2079 }
2080
2081 /* The following builtins do not read from memory. */
2082 case BUILT_IN_FREE:
2083 case BUILT_IN_MALLOC:
2084 case BUILT_IN_POSIX_MEMALIGN:
2085 case BUILT_IN_ALIGNED_ALLOC:
2086 case BUILT_IN_CALLOC:
2087 CASE_BUILT_IN_ALLOCA:
2088 case BUILT_IN_STACK_SAVE:
2089 case BUILT_IN_STACK_RESTORE:
2090 case BUILT_IN_MEMSET:
2091 case BUILT_IN_TM_MEMSET:
2092 case BUILT_IN_MEMSET_CHK:
2093 case BUILT_IN_FREXP:
2094 case BUILT_IN_FREXPF:
2095 case BUILT_IN_FREXPL:
2096 case BUILT_IN_GAMMA_R:
2097 case BUILT_IN_GAMMAF_R:
2098 case BUILT_IN_GAMMAL_R:
2099 case BUILT_IN_LGAMMA_R:
2100 case BUILT_IN_LGAMMAF_R:
2101 case BUILT_IN_LGAMMAL_R:
2102 case BUILT_IN_MODF:
2103 case BUILT_IN_MODFF:
2104 case BUILT_IN_MODFL:
2105 case BUILT_IN_REMQUO:
2106 case BUILT_IN_REMQUOF:
2107 case BUILT_IN_REMQUOL:
2108 case BUILT_IN_SINCOS:
2109 case BUILT_IN_SINCOSF:
2110 case BUILT_IN_SINCOSL:
2111 case BUILT_IN_ASSUME_ALIGNED:
2112 case BUILT_IN_VA_END:
2113 return false;
2114 /* __sync_* builtins and some OpenMP builtins act as threading
2115 barriers. */
2116 #undef DEF_SYNC_BUILTIN
2117 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2118 #include "sync-builtins.def"
2119 #undef DEF_SYNC_BUILTIN
2120 case BUILT_IN_GOMP_ATOMIC_START:
2121 case BUILT_IN_GOMP_ATOMIC_END:
2122 case BUILT_IN_GOMP_BARRIER:
2123 case BUILT_IN_GOMP_BARRIER_CANCEL:
2124 case BUILT_IN_GOMP_TASKWAIT:
2125 case BUILT_IN_GOMP_TASKGROUP_END:
2126 case BUILT_IN_GOMP_CRITICAL_START:
2127 case BUILT_IN_GOMP_CRITICAL_END:
2128 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2129 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2130 case BUILT_IN_GOMP_LOOP_END:
2131 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2132 case BUILT_IN_GOMP_ORDERED_START:
2133 case BUILT_IN_GOMP_ORDERED_END:
2134 case BUILT_IN_GOMP_SECTIONS_END:
2135 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2136 case BUILT_IN_GOMP_SINGLE_COPY_START:
2137 case BUILT_IN_GOMP_SINGLE_COPY_END:
2138 return true;
2139
2140 default:
2141 /* Fallthru to general call handling. */;
2142 }
2143
2144 /* Check if base is a global static variable that is not read
2145 by the function. */
2146 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2147 {
2148 struct cgraph_node *node = cgraph_node::get (callee);
2149 bitmap not_read;
2150
2151 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2152 node yet. We should enforce that there are nodes for all decls in the
2153 IL and remove this check instead. */
2154 if (node
2155 && (not_read = ipa_reference_get_not_read_global (node))
2156 && bitmap_bit_p (not_read, ipa_reference_var_uid (base)))
2157 goto process_args;
2158 }
2159
2160 /* Check if the base variable is call-used. */
2161 if (DECL_P (base))
2162 {
2163 if (pt_solution_includes (gimple_call_use_set (call), base))
2164 return true;
2165 }
2166 else if ((TREE_CODE (base) == MEM_REF
2167 || TREE_CODE (base) == TARGET_MEM_REF)
2168 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2169 {
2170 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2171 if (!pi)
2172 return true;
2173
2174 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
2175 return true;
2176 }
2177 else
2178 return true;
2179
2180 /* Inspect call arguments for passed-by-value aliases. */
2181 process_args:
2182 for (i = 0; i < gimple_call_num_args (call); ++i)
2183 {
2184 tree op = gimple_call_arg (call, i);
2185 int flags = gimple_call_arg_flags (call, i);
2186
2187 if (flags & EAF_UNUSED)
2188 continue;
2189
2190 if (TREE_CODE (op) == WITH_SIZE_EXPR)
2191 op = TREE_OPERAND (op, 0);
2192
2193 if (TREE_CODE (op) != SSA_NAME
2194 && !is_gimple_min_invariant (op))
2195 {
2196 ao_ref r;
2197 ao_ref_init (&r, op);
2198 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
2199 return true;
2200 }
2201 }
2202
2203 return false;
2204 }
2205
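/* For example (an illustrative sketch), for the call

     __builtin_memcpy (dst_1, src_2, n_3);

   only the source bytes [src_2, src_2 + n_3) count as a use here;
   whether the destination is clobbered is the business of
   call_may_clobber_ref_p_1 below.  */
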
2206 static bool
2207 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2208 {
2209 bool res;
2210 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2211 if (res)
2212 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2213 else
2214 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2215 return res;
2216 }
2217
2218
2219 /* If the statement STMT may use the memory reference REF return
2220 true, otherwise return false. */
2221
2222 bool
2223 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2224 {
2225 if (is_gimple_assign (stmt))
2226 {
2227 tree rhs;
2228
2229       /* All assign statements that access memory are single
2230 	 (have a single RHS).  */
2230 if (!gimple_assign_single_p (stmt))
2231 return false;
2232
2233 rhs = gimple_assign_rhs1 (stmt);
2234 if (is_gimple_reg (rhs)
2235 || is_gimple_min_invariant (rhs)
2236 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2237 return false;
2238
2239 return refs_may_alias_p (rhs, ref, tbaa_p);
2240 }
2241 else if (is_gimple_call (stmt))
2242 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2243 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2244 {
2245 tree retval = gimple_return_retval (return_stmt);
2246 if (retval
2247 && TREE_CODE (retval) != SSA_NAME
2248 && !is_gimple_min_invariant (retval)
2249 && refs_may_alias_p (retval, ref, tbaa_p))
2250 return true;
2251 /* If ref escapes the function then the return acts as a use. */
2252 tree base = ao_ref_base (ref);
2253 if (!base)
2254 ;
2255 else if (DECL_P (base))
2256 return is_global_var (base);
2257 else if (TREE_CODE (base) == MEM_REF
2258 || TREE_CODE (base) == TARGET_MEM_REF)
2259 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
2260 return false;
2261 }
2262
2263 return true;
2264 }
2265
2266 bool
2267 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
2268 {
2269 ao_ref r;
2270 ao_ref_init (&r, ref);
2271 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
2272 }
2273
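/* Usage sketch (stmt1 and stmt2 are hypothetical statements): a pass
   that wants to sink the store STMT1 past a later statement STMT2
   can check

     if (!ref_maybe_used_by_stmt_p (stmt2, gimple_assign_lhs (stmt1), true))
       ... stmt2 does not read the stored memory ...  */
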
2274 /* If the call in statement CALL may clobber the memory reference REF
2275 return true, otherwise return false. */
2276
2277 bool
2278 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
2279 {
2280 tree base;
2281 tree callee;
2282
2283 /* If the call is pure or const it cannot clobber anything. */
2284 if (gimple_call_flags (call)
2285 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
2286 return false;
2287 if (gimple_call_internal_p (call))
2288 switch (gimple_call_internal_fn (call))
2289 {
2290       /* Treat these internal calls like ECF_PURE for aliasing:
2291 	 they don't write to any memory the program should care about.
2292 	 They have other important side-effects and read memory,
2293 	 so they can't be ECF_NOVOPS.  */
2294 case IFN_UBSAN_NULL:
2295 case IFN_UBSAN_BOUNDS:
2296 case IFN_UBSAN_VPTR:
2297 case IFN_UBSAN_OBJECT_SIZE:
2298 case IFN_UBSAN_PTR:
2299 case IFN_ASAN_CHECK:
2300 return false;
2301 default:
2302 break;
2303 }
2304
2305 base = ao_ref_base (ref);
2306 if (!base)
2307 return true;
2308
2309 if (TREE_CODE (base) == SSA_NAME
2310 || CONSTANT_CLASS_P (base))
2311 return false;
2312
2313 /* A call that is not without side-effects might involve volatile
2314 accesses and thus conflicts with all other volatile accesses. */
2315 if (ref->volatile_p)
2316 return true;
2317
2318 /* If the reference is based on a decl that is not aliased the call
2319 cannot possibly clobber it. */
2320 if (DECL_P (base)
2321 && !may_be_aliased (base)
2322 /* But local non-readonly statics can be modified through recursion
2323 or the call may implement a threading barrier which we must
2324 treat as may-def. */
2325 && (TREE_READONLY (base)
2326 || !is_global_var (base)))
2327 return false;
2328
2329 /* If the reference is based on a pointer that points to memory
2330      that may not be written to, then the call cannot possibly clobber it.  */
2331 if ((TREE_CODE (base) == MEM_REF
2332 || TREE_CODE (base) == TARGET_MEM_REF)
2333 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
2334 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
2335 return false;
2336
2337 callee = gimple_call_fndecl (call);
2338
2339   /* Explicitly handle those builtin functions that do not act as
2340 escape points. See tree-ssa-structalias.c:find_func_aliases
2341 for the list of builtins we might need to handle here. */
2342 if (callee != NULL_TREE
2343 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2344 switch (DECL_FUNCTION_CODE (callee))
2345 {
2346 /* All the following functions clobber memory pointed to by
2347 their first argument. */
2348 case BUILT_IN_STRCPY:
2349 case BUILT_IN_STRNCPY:
2350 case BUILT_IN_MEMCPY:
2351 case BUILT_IN_MEMMOVE:
2352 case BUILT_IN_MEMPCPY:
2353 case BUILT_IN_STPCPY:
2354 case BUILT_IN_STPNCPY:
2355 case BUILT_IN_STRCAT:
2356 case BUILT_IN_STRNCAT:
2357 case BUILT_IN_MEMSET:
2358 case BUILT_IN_TM_MEMSET:
2359 CASE_BUILT_IN_TM_STORE (1):
2360 CASE_BUILT_IN_TM_STORE (2):
2361 CASE_BUILT_IN_TM_STORE (4):
2362 CASE_BUILT_IN_TM_STORE (8):
2363 CASE_BUILT_IN_TM_STORE (FLOAT):
2364 CASE_BUILT_IN_TM_STORE (DOUBLE):
2365 CASE_BUILT_IN_TM_STORE (LDOUBLE):
2366 CASE_BUILT_IN_TM_STORE (M64):
2367 CASE_BUILT_IN_TM_STORE (M128):
2368 CASE_BUILT_IN_TM_STORE (M256):
2369 case BUILT_IN_TM_MEMCPY:
2370 case BUILT_IN_TM_MEMMOVE:
2371 {
2372 ao_ref dref;
2373 tree size = NULL_TREE;
2374 	/* Don't pass in a size for strncat: the store is of size
2375 	   n + 1 at offset strlen (dest), so the maximum size is
2376 	   strlen (dest) + n + 1 rather than n, but strlen (dest)
2377 	   isn't known.  */
2378 if (gimple_call_num_args (call) == 3
2379 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
2380 size = gimple_call_arg (call, 2);
2381 ao_ref_init_from_ptr_and_size (&dref,
2382 gimple_call_arg (call, 0),
2383 size);
2384 return refs_may_alias_p_1 (&dref, ref, false);
2385 }
2386 case BUILT_IN_STRCPY_CHK:
2387 case BUILT_IN_STRNCPY_CHK:
2388 case BUILT_IN_MEMCPY_CHK:
2389 case BUILT_IN_MEMMOVE_CHK:
2390 case BUILT_IN_MEMPCPY_CHK:
2391 case BUILT_IN_STPCPY_CHK:
2392 case BUILT_IN_STPNCPY_CHK:
2393 case BUILT_IN_STRCAT_CHK:
2394 case BUILT_IN_STRNCAT_CHK:
2395 case BUILT_IN_MEMSET_CHK:
2396 {
2397 ao_ref dref;
2398 tree size = NULL_TREE;
2399 	/* Don't pass in a size for __strncat_chk: the store is of
2400 	   size n + 1 at offset strlen (dest), so the maximum size is
2401 	   strlen (dest) + n + 1 rather than n, but strlen (dest)
2402 	   isn't known.  */
2403 if (gimple_call_num_args (call) == 4
2404 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
2405 size = gimple_call_arg (call, 2);
2406 ao_ref_init_from_ptr_and_size (&dref,
2407 gimple_call_arg (call, 0),
2408 size);
2409 return refs_may_alias_p_1 (&dref, ref, false);
2410 }
2411 case BUILT_IN_BCOPY:
2412 {
2413 ao_ref dref;
2414 tree size = gimple_call_arg (call, 2);
2415 ao_ref_init_from_ptr_and_size (&dref,
2416 gimple_call_arg (call, 1),
2417 size);
2418 return refs_may_alias_p_1 (&dref, ref, false);
2419 }
2420 /* Allocating memory does not have any side-effects apart from
2421 being the definition point for the pointer. */
2422 case BUILT_IN_MALLOC:
2423 case BUILT_IN_ALIGNED_ALLOC:
2424 case BUILT_IN_CALLOC:
2425 case BUILT_IN_STRDUP:
2426 case BUILT_IN_STRNDUP:
2427 /* Unix98 specifies that errno is set on allocation failure. */
2428 if (flag_errno_math
2429 && targetm.ref_may_alias_errno (ref))
2430 return true;
2431 return false;
2432 case BUILT_IN_STACK_SAVE:
2433 CASE_BUILT_IN_ALLOCA:
2434 case BUILT_IN_ASSUME_ALIGNED:
2435 return false;
2436 /* But posix_memalign stores a pointer into the memory pointed to
2437 by its first argument. */
2438 case BUILT_IN_POSIX_MEMALIGN:
2439 {
2440 tree ptrptr = gimple_call_arg (call, 0);
2441 ao_ref dref;
2442 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2443 TYPE_SIZE_UNIT (ptr_type_node));
2444 return (refs_may_alias_p_1 (&dref, ref, false)
2445 || (flag_errno_math
2446 && targetm.ref_may_alias_errno (ref)));
2447 }
2448       /* Freeing memory kills the pointed-to memory.  More importantly,
2449 the call has to serve as a barrier for moving loads and stores
2450 across it. */
2451 case BUILT_IN_FREE:
2452 case BUILT_IN_VA_END:
2453 {
2454 tree ptr = gimple_call_arg (call, 0);
2455 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2456 }
2457 /* Realloc serves both as allocation point and deallocation point. */
2458 case BUILT_IN_REALLOC:
2459 {
2460 tree ptr = gimple_call_arg (call, 0);
2461 /* Unix98 specifies that errno is set on allocation failure. */
2462 return ((flag_errno_math
2463 && targetm.ref_may_alias_errno (ref))
2464 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2465 }
2466 case BUILT_IN_GAMMA_R:
2467 case BUILT_IN_GAMMAF_R:
2468 case BUILT_IN_GAMMAL_R:
2469 case BUILT_IN_LGAMMA_R:
2470 case BUILT_IN_LGAMMAF_R:
2471 case BUILT_IN_LGAMMAL_R:
2472 {
2473 tree out = gimple_call_arg (call, 1);
2474 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2475 return true;
2476 if (flag_errno_math)
2477 break;
2478 return false;
2479 }
2480 case BUILT_IN_FREXP:
2481 case BUILT_IN_FREXPF:
2482 case BUILT_IN_FREXPL:
2483 case BUILT_IN_MODF:
2484 case BUILT_IN_MODFF:
2485 case BUILT_IN_MODFL:
2486 {
2487 tree out = gimple_call_arg (call, 1);
2488 return ptr_deref_may_alias_ref_p_1 (out, ref);
2489 }
2490 case BUILT_IN_REMQUO:
2491 case BUILT_IN_REMQUOF:
2492 case BUILT_IN_REMQUOL:
2493 {
2494 tree out = gimple_call_arg (call, 2);
2495 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2496 return true;
2497 if (flag_errno_math)
2498 break;
2499 return false;
2500 }
2501 case BUILT_IN_SINCOS:
2502 case BUILT_IN_SINCOSF:
2503 case BUILT_IN_SINCOSL:
2504 {
2505 tree sin = gimple_call_arg (call, 1);
2506 tree cos = gimple_call_arg (call, 2);
2507 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2508 || ptr_deref_may_alias_ref_p_1 (cos, ref));
2509 }
2510 /* __sync_* builtins and some OpenMP builtins act as threading
2511 barriers. */
2512 #undef DEF_SYNC_BUILTIN
2513 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2514 #include "sync-builtins.def"
2515 #undef DEF_SYNC_BUILTIN
2516 case BUILT_IN_GOMP_ATOMIC_START:
2517 case BUILT_IN_GOMP_ATOMIC_END:
2518 case BUILT_IN_GOMP_BARRIER:
2519 case BUILT_IN_GOMP_BARRIER_CANCEL:
2520 case BUILT_IN_GOMP_TASKWAIT:
2521 case BUILT_IN_GOMP_TASKGROUP_END:
2522 case BUILT_IN_GOMP_CRITICAL_START:
2523 case BUILT_IN_GOMP_CRITICAL_END:
2524 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2525 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2526 case BUILT_IN_GOMP_LOOP_END:
2527 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2528 case BUILT_IN_GOMP_ORDERED_START:
2529 case BUILT_IN_GOMP_ORDERED_END:
2530 case BUILT_IN_GOMP_SECTIONS_END:
2531 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2532 case BUILT_IN_GOMP_SINGLE_COPY_START:
2533 case BUILT_IN_GOMP_SINGLE_COPY_END:
2534 return true;
2535 default:
2536 /* Fallthru to general call handling. */;
2537 }
2538
2539 /* Check if base is a global static variable that is not written
2540 by the function. */
2541 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2542 {
2543 struct cgraph_node *node = cgraph_node::get (callee);
2544 bitmap not_written;
2545
2546 if (node
2547 && (not_written = ipa_reference_get_not_written_global (node))
2548 && bitmap_bit_p (not_written, ipa_reference_var_uid (base)))
2549 return false;
2550 }
2551
2552 /* Check if the base variable is call-clobbered. */
2553 if (DECL_P (base))
2554 return pt_solution_includes (gimple_call_clobber_set (call), base);
2555 else if ((TREE_CODE (base) == MEM_REF
2556 || TREE_CODE (base) == TARGET_MEM_REF)
2557 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2558 {
2559 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2560 if (!pi)
2561 return true;
2562
2563 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2564 }
2565
2566 return true;
2567 }
2568
2569 /* If the call in statement CALL may clobber the memory reference REF
2570 return true, otherwise return false. */
2571
2572 bool
2573 call_may_clobber_ref_p (gcall *call, tree ref)
2574 {
2575 bool res;
2576 ao_ref r;
2577 ao_ref_init (&r, ref);
2578 res = call_may_clobber_ref_p_1 (call, &r);
2579 if (res)
2580 ++alias_stats.call_may_clobber_ref_p_may_alias;
2581 else
2582 ++alias_stats.call_may_clobber_ref_p_no_alias;
2583 return res;
2584 }
2585
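/* For example (sketch), for

     p_1 = __builtin_malloc (n_2);

   call_may_clobber_ref_p returns false for any REF that cannot alias
   errno, so loads and stores of user memory may be moved across the
   allocation.  */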
2586
2587 /* If the statement STMT may clobber the memory reference REF return true,
2588 otherwise return false. */
2589
2590 bool
2591 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
2592 {
2593 if (is_gimple_call (stmt))
2594 {
2595 tree lhs = gimple_call_lhs (stmt);
2596 if (lhs
2597 && TREE_CODE (lhs) != SSA_NAME)
2598 {
2599 ao_ref r;
2600 ao_ref_init (&r, lhs);
2601 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
2602 return true;
2603 }
2604
2605 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
2606 }
2607 else if (gimple_assign_single_p (stmt))
2608 {
2609 tree lhs = gimple_assign_lhs (stmt);
2610 if (TREE_CODE (lhs) != SSA_NAME)
2611 {
2612 ao_ref r;
2613 ao_ref_init (&r, lhs);
2614 return refs_may_alias_p_1 (ref, &r, tbaa_p);
2615 }
2616 }
2617 else if (gimple_code (stmt) == GIMPLE_ASM)
2618 return true;
2619
2620 return false;
2621 }
2622
2623 bool
2624 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
2625 {
2626 ao_ref r;
2627 ao_ref_init (&r, ref);
2628 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
2629 }
2630
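/* The typical pattern (a sketch of how the walkers at the end of
   this file use it): walking the virtual use-def chain upwards from
   a load, stop at the first definition for which

     stmt_may_clobber_ref_p_1 (def_stmt, &ref, true)

   holds, since the loaded memory may be redefined there.  */
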
2631 /* Return true if store1 and store2, described by the corresponding tuples
2632    <BASE, OFFSET, SIZE, MAX_SIZE>, have the same size and store to the same
2633 address. */
2634
2635 static bool
2636 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
2637 poly_int64 max_size1,
2638 tree base2, poly_int64 offset2, poly_int64 size2,
2639 poly_int64 max_size2)
2640 {
2641 /* Offsets need to be 0. */
2642 if (maybe_ne (offset1, 0)
2643 || maybe_ne (offset2, 0))
2644 return false;
2645
2646 bool base1_obj_p = SSA_VAR_P (base1);
2647 bool base2_obj_p = SSA_VAR_P (base2);
2648
2649   /* We need exactly one of the bases to be an object.  */
2650 if (base1_obj_p == base2_obj_p)
2651 return false;
2652 tree obj = base1_obj_p ? base1 : base2;
2653
2654   /* And exactly one of the bases to be a MEM_REF.  */
2655 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
2656 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
2657 if (base1_memref_p == base2_memref_p)
2658 return false;
2659 tree memref = base1_memref_p ? base1 : base2;
2660
2661 /* Sizes need to be valid. */
2662 if (!known_size_p (max_size1)
2663 || !known_size_p (max_size2)
2664 || !known_size_p (size1)
2665 || !known_size_p (size2))
2666 return false;
2667
2668 /* Max_size needs to match size. */
2669 if (maybe_ne (max_size1, size1)
2670 || maybe_ne (max_size2, size2))
2671 return false;
2672
2673 /* Sizes need to match. */
2674 if (maybe_ne (size1, size2))
2675 return false;
2676
2677
2678   /* Check that memref is a store through a pointer with singleton
2679      points-to info.  */
2679 if (!integer_zerop (TREE_OPERAND (memref, 1)))
2680 return false;
2681 tree ptr = TREE_OPERAND (memref, 0);
2682 if (TREE_CODE (ptr) != SSA_NAME)
2683 return false;
2684 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2685 unsigned int pt_uid;
2686 if (pi == NULL
2687 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
2688 return false;
2689
2690 /* Be conservative with non-call exceptions when the address might
2691 be NULL. */
2692 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
2693 return false;
2694
2695   /* Check that ptr's singleton points-to target is obj.  */
2696 unsigned int obj_uid = DECL_PT_UID (obj);
2697 if (obj_uid != pt_uid)
2698 return false;
2699
2700   /* Check that the object size is the same as the store size.  That ensures
2701 that ptr points to the start of obj. */
2702 return (DECL_SIZE (obj)
2703 && poly_int_tree_p (DECL_SIZE (obj))
2704 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
2705 }
2706
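/* This matches, for example (illustrative GIMPLE, with p_1 having
   the singleton points-to set { s }):

     s = ...;     // tuple <s, 0, size, size>, size == DECL_SIZE (s)
     *p_1 = ...;  // tuple <MEM[p_1], 0, size, size>

   proving that both stores write exactly the bytes of s.  */
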
2707 /* If STMT kills the memory reference REF return true, otherwise
2708 return false. */
2709
2710 bool
2711 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
2712 {
2713 if (!ao_ref_base (ref))
2714 return false;
2715
2716 if (gimple_has_lhs (stmt)
2717 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2718 /* The assignment is not necessarily carried out if it can throw
2719 and we can catch it in the current function where we could inspect
2720 the previous value.
2721 ??? We only need to care about the RHS throwing. For aggregate
2722 assignments or similar calls and non-call exceptions the LHS
2723 might throw as well. */
2724 && !stmt_can_throw_internal (cfun, stmt))
2725 {
2726 tree lhs = gimple_get_lhs (stmt);
2727 /* If LHS is literally a base of the access we are done. */
2728 if (ref->ref)
2729 {
2730 tree base = ref->ref;
2731 tree innermost_dropped_array_ref = NULL_TREE;
2732 if (handled_component_p (base))
2733 {
2734 tree saved_lhs0 = NULL_TREE;
2735 if (handled_component_p (lhs))
2736 {
2737 saved_lhs0 = TREE_OPERAND (lhs, 0);
2738 TREE_OPERAND (lhs, 0) = integer_zero_node;
2739 }
2740 do
2741 {
2742 	      /* Just compare the outermost handled component; if
2743 they are equal we have found a possible common
2744 base. */
2745 tree saved_base0 = TREE_OPERAND (base, 0);
2746 TREE_OPERAND (base, 0) = integer_zero_node;
2747 bool res = operand_equal_p (lhs, base, 0);
2748 TREE_OPERAND (base, 0) = saved_base0;
2749 if (res)
2750 break;
2751 	      /* Remember if we drop an array-ref; we need to double-check
2752 		 that it is not at struct end.  */
2753 if (TREE_CODE (base) == ARRAY_REF
2754 || TREE_CODE (base) == ARRAY_RANGE_REF)
2755 innermost_dropped_array_ref = base;
2756 /* Otherwise drop handled components of the access. */
2757 base = saved_base0;
2758 }
2759 while (handled_component_p (base));
2760 if (saved_lhs0)
2761 TREE_OPERAND (lhs, 0) = saved_lhs0;
2762 }
2763 /* Finally check if the lhs has the same address and size as the
2764 base candidate of the access. Watch out if we have dropped
2765 	 an array-ref that was at struct end; this means ref->ref may
2766 be outside of the TYPE_SIZE of its base. */
2767 if ((! innermost_dropped_array_ref
2768 || ! array_at_struct_end_p (innermost_dropped_array_ref))
2769 && (lhs == base
2770 || (((TYPE_SIZE (TREE_TYPE (lhs))
2771 == TYPE_SIZE (TREE_TYPE (base)))
2772 || (TYPE_SIZE (TREE_TYPE (lhs))
2773 && TYPE_SIZE (TREE_TYPE (base))
2774 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
2775 TYPE_SIZE (TREE_TYPE (base)),
2776 0)))
2777 && operand_equal_p (lhs, base,
2778 OEP_ADDRESS_OF
2779 | OEP_MATCH_SIDE_EFFECTS))))
2780 return true;
2781 }
2782
2783       /* Now look for non-literal equal bases, restricted to constant
2784 	 offset and size.  */
2785 /* For a must-alias check we need to be able to constrain
2786 the access properly. */
2787 if (!ref->max_size_known_p ())
2788 return false;
2789 poly_int64 size, offset, max_size, ref_offset = ref->offset;
2790 bool reverse;
2791 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
2792 &reverse);
2793 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2794 so base == ref->base does not always hold. */
2795 if (base != ref->base)
2796 {
2797 /* Try using points-to info. */
2798 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
2799 ref->offset, ref->size, ref->max_size))
2800 return true;
2801
2802 /* If both base and ref->base are MEM_REFs, only compare the
2803 	     first operand, and if the second operands aren't equal constants,
2804 try to add the offsets into offset and ref_offset. */
2805 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2806 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2807 {
2808 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2809 TREE_OPERAND (ref->base, 1)))
2810 {
2811 poly_offset_int off1 = mem_ref_offset (base);
2812 off1 <<= LOG2_BITS_PER_UNIT;
2813 off1 += offset;
2814 poly_offset_int off2 = mem_ref_offset (ref->base);
2815 off2 <<= LOG2_BITS_PER_UNIT;
2816 off2 += ref_offset;
2817 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
2818 size = -1;
2819 }
2820 }
2821 else
2822 size = -1;
2823 }
2824 /* For a must-alias check we need to be able to constrain
2825 the access properly. */
2826 if (known_eq (size, max_size)
2827 && known_subrange_p (ref_offset, ref->max_size, offset, size))
2828 return true;
2829 }
2830
2831 if (is_gimple_call (stmt))
2832 {
2833 tree callee = gimple_call_fndecl (stmt);
2834 if (callee != NULL_TREE
2835 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2836 switch (DECL_FUNCTION_CODE (callee))
2837 {
2838 case BUILT_IN_FREE:
2839 {
2840 tree ptr = gimple_call_arg (stmt, 0);
2841 tree base = ao_ref_base (ref);
2842 if (base && TREE_CODE (base) == MEM_REF
2843 && TREE_OPERAND (base, 0) == ptr)
2844 return true;
2845 break;
2846 }
2847
2848 case BUILT_IN_MEMCPY:
2849 case BUILT_IN_MEMPCPY:
2850 case BUILT_IN_MEMMOVE:
2851 case BUILT_IN_MEMSET:
2852 case BUILT_IN_MEMCPY_CHK:
2853 case BUILT_IN_MEMPCPY_CHK:
2854 case BUILT_IN_MEMMOVE_CHK:
2855 case BUILT_IN_MEMSET_CHK:
2856 case BUILT_IN_STRNCPY:
2857 case BUILT_IN_STPNCPY:
2858 {
2859 /* For a must-alias check we need to be able to constrain
2860 the access properly. */
2861 if (!ref->max_size_known_p ())
2862 return false;
2863 tree dest = gimple_call_arg (stmt, 0);
2864 tree len = gimple_call_arg (stmt, 2);
2865 if (!poly_int_tree_p (len))
2866 return false;
2867 tree rbase = ref->base;
2868 poly_offset_int roffset = ref->offset;
2869 ao_ref dref;
2870 ao_ref_init_from_ptr_and_size (&dref, dest, len);
2871 tree base = ao_ref_base (&dref);
2872 poly_offset_int offset = dref.offset;
2873 if (!base || !known_size_p (dref.size))
2874 return false;
2875 if (TREE_CODE (base) == MEM_REF)
2876 {
2877 if (TREE_CODE (rbase) != MEM_REF)
2878 return false;
2879 // Compare pointers.
2880 offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
2881 roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
2882 base = TREE_OPERAND (base, 0);
2883 rbase = TREE_OPERAND (rbase, 0);
2884 }
2885 if (base == rbase
2886 && known_subrange_p (roffset, ref->max_size, offset,
2887 wi::to_poly_offset (len)
2888 << LOG2_BITS_PER_UNIT))
2889 return true;
2890 break;
2891 }
2892
2893 case BUILT_IN_VA_END:
2894 {
2895 tree ptr = gimple_call_arg (stmt, 0);
2896 if (TREE_CODE (ptr) == ADDR_EXPR)
2897 {
2898 tree base = ao_ref_base (ref);
2899 if (TREE_OPERAND (ptr, 0) == base)
2900 return true;
2901 }
2902 break;
2903 }
2904
2905 default:;
2906 }
2907 }
2908 return false;
2909 }
2910
2911 bool
2912 stmt_kills_ref_p (gimple *stmt, tree ref)
2913 {
2914 ao_ref r;
2915 ao_ref_init (&r, ref);
2916 return stmt_kills_ref_p (stmt, &r);
2917 }
2918
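/* For example (sketch), dead-store elimination can use this to prove
   that in

     a.x = ...;
     __builtin_memset (&a, 0, sizeof a);

   the first store is dead: the memset covers the whole of a and
   therefore the subrange a.x.  */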
2919
2920 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2921    TARGET or a statement clobbering the memory reference REF, in which
2922 case false is returned. The walk starts with VUSE, one argument of PHI. */
2923
2924 static bool
2925 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
2926 ao_ref *ref, tree vuse, unsigned int &limit, bitmap *visited,
2927 bool abort_on_visited,
2928 void *(*translate)(ao_ref *, tree, void *, bool *),
2929 void *data)
2930 {
2931 basic_block bb = gimple_bb (phi);
2932
2933 if (!*visited)
2934 *visited = BITMAP_ALLOC (NULL);
2935
2936 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2937
2938 /* Walk until we hit the target. */
2939 while (vuse != target)
2940 {
2941 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2942 /* If we are searching for the target VUSE by walking up to
2943 	 TARGET_BB dominating the original PHI, we are finished once
2944 we reach a default def or a definition in a block dominating
2945 that block. Update TARGET and return. */
2946 if (!target
2947 && (gimple_nop_p (def_stmt)
2948 || dominated_by_p (CDI_DOMINATORS,
2949 target_bb, gimple_bb (def_stmt))))
2950 {
2951 target = vuse;
2952 return true;
2953 }
2954
2955 /* Recurse for PHI nodes. */
2956 if (gimple_code (def_stmt) == GIMPLE_PHI)
2957 {
2958 /* An already visited PHI node ends the walk successfully. */
2959 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2960 return !abort_on_visited;
2961 vuse = get_continuation_for_phi (def_stmt, ref, limit,
2962 visited, abort_on_visited,
2963 translate, data);
2964 if (!vuse)
2965 return false;
2966 continue;
2967 }
2968 else if (gimple_nop_p (def_stmt))
2969 return false;
2970 else
2971 {
2972 	  /* A clobbering statement or the end of the IL ends the walk
2973 	     unsuccessfully.  */
2973 if ((int)limit <= 0)
2974 return false;
2975 --limit;
2976 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2977 {
2978 bool disambiguate_only = true;
2979 if (translate
2980 && (*translate) (ref, vuse, data, &disambiguate_only) == NULL)
2981 ;
2982 else
2983 return false;
2984 }
2985 }
2986 /* If we reach a new basic-block see if we already skipped it
2987 in a previous walk that ended successfully. */
2988       /* If we reach a new basic-block, see if we already skipped it
2989 {
2990 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2991 return !abort_on_visited;
2992 bb = gimple_bb (def_stmt);
2993 }
2994 vuse = gimple_vuse (def_stmt);
2995 }
2996 return true;
2997 }
2998
2999
3000 /* Starting from a PHI node for the virtual operand of the memory reference
3001    REF find a continuation virtual operand that allows us to continue walking
3002 statements dominating PHI skipping only statements that cannot possibly
3003 clobber REF. Decrements LIMIT for each alias disambiguation done
3004 and aborts the walk, returning NULL_TREE if it reaches zero.
3005 Returns NULL_TREE if no suitable virtual operand can be found. */
3006
3007 tree
3008 get_continuation_for_phi (gimple *phi, ao_ref *ref,
3009 unsigned int &limit, bitmap *visited,
3010 bool abort_on_visited,
3011 void *(*translate)(ao_ref *, tree, void *, bool *),
3012 void *data)
3013 {
3014 unsigned nargs = gimple_phi_num_args (phi);
3015
3016   /* We can simply look through a single-argument PHI.  */
3017 if (nargs == 1)
3018 return PHI_ARG_DEF (phi, 0);
3019
3020 /* For two or more arguments try to pairwise skip non-aliasing code
3021      until we hit the phi argument definition that dominates the others.  */
3022 basic_block phi_bb = gimple_bb (phi);
3023 tree arg0, arg1;
3024 unsigned i;
3025
3026   /* Find a candidate for the virtual operand whose definition
3027 dominates those of all others. */
3028   /* First check whether any of the args themselves satisfies this.  */
3029 for (i = 0; i < nargs; ++i)
3030 {
3031 arg0 = PHI_ARG_DEF (phi, i);
3032 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
3033 break;
3034 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
3035 if (def_bb != phi_bb
3036 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
3037 break;
3038 arg0 = NULL_TREE;
3039 }
3040   /* If not, check whether we can reach such a candidate by walking defs
3041 until we hit the immediate dominator. maybe_skip_until will
3042 do that for us. */
3043 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
3044
3045 /* Then check against the (to be) found candidate. */
3046 for (i = 0; i < nargs; ++i)
3047 {
3048 arg1 = PHI_ARG_DEF (phi, i);
3049 if (arg1 == arg0)
3050 ;
3051 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, limit, visited,
3052 abort_on_visited,
3053 /* Do not translate when walking over
3054 backedges. */
3055 dominated_by_p
3056 (CDI_DOMINATORS,
3057 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3058 phi_bb)
3059 ? NULL : translate, data))
3060 return NULL_TREE;
3061 }
3062
3063 return arg0;
3064 }
3065
3066 /* Based on the memory reference REF and its virtual use VUSE call
3067 WALKER for each virtual use that is equivalent to VUSE, including VUSE
3068    itself.  That is, for each virtual use whose defining statement
3069 does not clobber REF.
3070
3071 WALKER is called with REF, the current virtual use and DATA. If
3072 WALKER returns non-NULL the walk stops and its result is returned.
3073 At the end of a non-successful walk NULL is returned.
3074
3075 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
3076    use whose definition is a statement that may clobber REF, and DATA.
3077 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3078 If TRANSLATE returns non-NULL the walk stops and its result is returned.
3079 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3080 to adjust REF and *DATA to make that valid.
3081
3082 VALUEIZE if non-NULL is called with the next VUSE that is considered
3083    and its return value is substituted for that.  This can be used to
3084 implement optimistic value-numbering for example. Note that the
3085 VUSE argument is assumed to be valueized already.
3086
3087    LIMIT specifies the number of alias queries we are allowed to do;
3088 the walk stops when it reaches zero and NULL is returned. LIMIT
3089 is decremented by the number of alias queries (plus adjustments
3090 done by the callbacks) upon return.
3091
3092 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
3093
3094 void *
3095 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
3096 void *(*walker)(ao_ref *, tree, void *),
3097 void *(*translate)(ao_ref *, tree, void *, bool *),
3098 tree (*valueize)(tree),
3099 unsigned &limit, void *data)
3100 {
3101 bitmap visited = NULL;
3102 void *res;
3103 bool translated = false;
3104
3105 timevar_push (TV_ALIAS_STMT_WALK);
3106
3107 do
3108 {
3109 gimple *def_stmt;
3110
3111 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3112 res = (*walker) (ref, vuse, data);
3113 /* Abort walk. */
3114 if (res == (void *)-1)
3115 {
3116 res = NULL;
3117 break;
3118 }
3119 /* Lookup succeeded. */
3120 else if (res != NULL)
3121 break;
3122
3123 if (valueize)
3124 {
3125 vuse = valueize (vuse);
3126 if (!vuse)
3127 {
3128 res = NULL;
3129 break;
3130 }
3131 }
3132 def_stmt = SSA_NAME_DEF_STMT (vuse);
3133 if (gimple_nop_p (def_stmt))
3134 break;
3135 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3136 vuse = get_continuation_for_phi (def_stmt, ref, limit,
3137 &visited, translated, translate, data);
3138 else
3139 {
3140 if ((int)limit <= 0)
3141 {
3142 res = NULL;
3143 break;
3144 }
3145 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
3146 {
3147 if (!translate)
3148 break;
3149 bool disambiguate_only = false;
3150 res = (*translate) (ref, vuse, data, &disambiguate_only);
3151 /* Failed lookup and translation. */
3152 if (res == (void *)-1)
3153 {
3154 res = NULL;
3155 break;
3156 }
3157 /* Lookup succeeded. */
3158 else if (res != NULL)
3159 break;
3160 /* Translation succeeded, continue walking. */
3161 translated = translated || !disambiguate_only;
3162 }
3163 vuse = gimple_vuse (def_stmt);
3164 }
3165 }
3166 while (vuse);
3167
3168 if (visited)
3169 BITMAP_FREE (visited);
3170
3171 timevar_pop (TV_ALIAS_STMT_WALK);
3172
3173 return res;
3174 }
3175
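/* A minimal caller sketch; my_walker and the lookup it performs are
   hypothetical, but this is the shape value-numbering style users
   take:

     static void *
     my_walker (ao_ref *ref, tree vuse, void *data)
     {
       // Return a recorded value for (REF, VUSE), (void *) -1 to
       // abort the walk, or NULL to keep walking.
       return NULL;
     }

     ao_ref r;
     ao_ref_init (&r, load_expr);
     unsigned limit = 32;  // arbitrary query budget for the sketch
     void *val = walk_non_aliased_vuses (&r, gimple_vuse (stmt),
                                         my_walker, NULL, NULL,
                                         limit, NULL);  */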
3176
3177 /* Based on the memory reference REF call WALKER for each vdef which
3178 /* Based on the memory reference REF call WALKER for each vdef whose
3179 is NULL_TREE, each defining statement is visited.
3180
3181 WALKER is called with REF, the current vdef and DATA. If WALKER
3182 returns true the walk is stopped, otherwise it continues.
3183
3184 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
3185    The pointer may be NULL, in which case this information is not tracked.
3186
3187    At PHI nodes walk_aliased_vdefs forks into one walk for each
3188    PHI argument (but only one walk continues on merge points);
3189    the statement counts of the forked walks accumulate.
3190
3191 The function returns the number of statements walked or -1 if
3192 LIMIT stmts were walked and the walk was aborted at this point.
3193 If LIMIT is zero the walk is not aborted. */
3194
3195 static int
3196 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
3197 bool (*walker)(ao_ref *, tree, void *), void *data,
3198 bitmap *visited, unsigned int cnt,
3199 bool *function_entry_reached, unsigned limit)
3200 {
3201 do
3202 {
3203 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
3204
3205 if (*visited
3206 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
3207 return cnt;
3208
3209 if (gimple_nop_p (def_stmt))
3210 {
3211 if (function_entry_reached)
3212 *function_entry_reached = true;
3213 return cnt;
3214 }
3215 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3216 {
3217 unsigned i;
3218 if (!*visited)
3219 *visited = BITMAP_ALLOC (NULL);
3220 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3221 {
3222 int res = walk_aliased_vdefs_1 (ref,
3223 gimple_phi_arg_def (def_stmt, i),
3224 walker, data, visited, cnt,
3225 function_entry_reached, limit);
3226 if (res == -1)
3227 return -1;
3228 cnt = res;
3229 }
3230 return cnt;
3231 }
3232
3233 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3234 cnt++;
3235 if (cnt == limit)
3236 return -1;
3237 if ((!ref
3238 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
3239 && (*walker) (ref, vdef, data))
3240 return cnt;
3241
3242 vdef = gimple_vuse (def_stmt);
3243 }
3244 while (1);
3245 }
3246
3247 int
3248 walk_aliased_vdefs (ao_ref *ref, tree vdef,
3249 bool (*walker)(ao_ref *, tree, void *), void *data,
3250 bitmap *visited,
3251 bool *function_entry_reached, unsigned int limit)
3252 {
3253 bitmap local_visited = NULL;
3254 int ret;
3255
3256 timevar_push (TV_ALIAS_STMT_WALK);
3257
3258 if (function_entry_reached)
3259 *function_entry_reached = false;
3260
3261 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
3262 visited ? visited : &local_visited, 0,
3263 function_entry_reached, limit);
3264 if (local_visited)
3265 BITMAP_FREE (local_visited);
3266
3267 timevar_pop (TV_ALIAS_STMT_WALK);
3268
3269 return ret;
3270 }
3271
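/* Usage sketch (find_clobber and its bookkeeping are hypothetical):

     static bool
     find_clobber (ao_ref *ref, tree vdef, void *data)
     {
       // Record SSA_NAME_DEF_STMT (vdef); returning true stops
       // this walk.
       return true;
     }

     ao_ref r;
     ao_ref_init (&r, mem_expr);
     bool entry_reached;
     int n = walk_aliased_vdefs (&r, gimple_vuse (use_stmt),
                                 find_clobber, NULL, NULL,
                                 &entry_reached, 100);
     if (n == -1)
       ... gave up after walking 100 statements ...  */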