1 /* Alias analysis for trees.
2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "alias.h"
34 #include "fold-const.h"
35 #include "langhooks.h"
36 #include "dumpfile.h"
37 #include "tree-eh.h"
38 #include "tree-dfa.h"
39 #include "ipa-reference.h"
40 #include "varasm.h"
41
42 /* Broad overview of how alias analysis on gimple works:
43
44 Statements clobbering or using memory are linked through the
45 virtual operand factored use-def chain. The virtual operand
46 is unique per function, its symbol is accessible via gimple_vop (cfun).
47 Virtual operands are used for efficiently walking memory statements
48 in the gimple IL and are useful for things like value-numbering as
49 a generation count for memory references.
50
51 SSA_NAME pointers may have associated points-to information
52 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
53 points-to information is (re-)computed by the TODO_rebuild_alias
54 pass manager todo. Points-to information is also used for more
55 precise tracking of call-clobbered and call-used variables and
56 related disambiguations.
57
58 This file contains functions for disambiguating memory references,
59 the so-called alias-oracle, and tools for walking the gimple IL.
60
61 The main alias-oracle entry-points are
62
63 bool stmt_may_clobber_ref_p (gimple *, tree)
64
65 This function queries if a statement may invalidate (parts of)
66 the memory designated by the reference tree argument.
67
68 bool ref_maybe_used_by_stmt_p (gimple *, tree)
69
70 This function queries if a statement may need (parts of) the
71 memory designated by the reference tree argument.
72
73 There are variants of these functions that only handle the call
74 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
75 Note that these do not disambiguate against a possible call lhs.
76
77 bool refs_may_alias_p (tree, tree)
78
79 This function tries to disambiguate two reference trees.
80
81 bool ptr_deref_may_alias_global_p (tree)
82
83 This function queries if dereferencing a pointer variable may
84 alias global memory.
85
86 More low-level disambiguators are available and documented in
87 this file. Low-level disambiguators dealing with points-to
88 information are in tree-ssa-structalias.c. */
89
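/* As an illustration (a hypothetical sketch of a client pass, not code
   used by this file), a pass that wants to move a load REF across a
   statement STMT could combine the entry-points above:

     if (!stmt_may_clobber_ref_p (stmt, ref)
         && !ref_maybe_used_by_stmt_p (stmt, ref))
       move_load_across_stmt (ref, stmt);

   where move_load_across_stmt is a made-up helper of that pass.
   Clients issuing many queries against one reference should prefer the
   ao_ref based variants below, which cache base and extent data.  */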
90
91 /* Query statistics for the different low-level disambiguators.
92 A high-level query may trigger multiple of them. */
93
94 static struct {
95 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
96 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
97 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
98 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
99 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
100 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
101 unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
102 unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
103 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
104 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
105 unsigned HOST_WIDE_INT nonoverlapping_component_refs_of_decl_p_may_alias;
106 unsigned HOST_WIDE_INT nonoverlapping_component_refs_of_decl_p_no_alias;
107 } alias_stats;
108
109 void
110 dump_alias_stats (FILE *s)
111 {
112 fprintf (s, "\nAlias oracle query stats:\n");
113 fprintf (s, " refs_may_alias_p: "
114 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
115 HOST_WIDE_INT_PRINT_DEC" queries\n",
116 alias_stats.refs_may_alias_p_no_alias,
117 alias_stats.refs_may_alias_p_no_alias
118 + alias_stats.refs_may_alias_p_may_alias);
119 fprintf (s, " ref_maybe_used_by_call_p: "
120 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
121 HOST_WIDE_INT_PRINT_DEC" queries\n",
122 alias_stats.ref_maybe_used_by_call_p_no_alias,
123 alias_stats.ref_maybe_used_by_call_p_no_alias
124 + alias_stats.ref_maybe_used_by_call_p_may_alias);
125 fprintf (s, " call_may_clobber_ref_p: "
126 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
127 HOST_WIDE_INT_PRINT_DEC" queries\n",
128 alias_stats.call_may_clobber_ref_p_no_alias,
129 alias_stats.call_may_clobber_ref_p_no_alias
130 + alias_stats.call_may_clobber_ref_p_may_alias);
131 fprintf (s, " nonoverlapping_component_refs_p: "
132 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
133 HOST_WIDE_INT_PRINT_DEC" queries\n",
134 alias_stats.nonoverlapping_component_refs_p_no_alias,
135 alias_stats.nonoverlapping_component_refs_p_no_alias
136 + alias_stats.nonoverlapping_component_refs_p_may_alias);
137 fprintf (s, " nonoverlapping_component_refs_of_decl_p: "
138 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
139 HOST_WIDE_INT_PRINT_DEC" queries\n",
140 alias_stats.nonoverlapping_component_refs_of_decl_p_no_alias,
141 alias_stats.nonoverlapping_component_refs_of_decl_p_no_alias
142 + alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias);
143 fprintf (s, " aliasing_component_refs_p: "
144 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
145 HOST_WIDE_INT_PRINT_DEC" queries\n",
146 alias_stats.aliasing_component_refs_p_no_alias,
147 alias_stats.aliasing_component_refs_p_no_alias
148 + alias_stats.aliasing_component_refs_p_may_alias);
149 dump_alias_stats_in_alias_c (s);
150 }
151
152
153 /* Return true if dereferencing PTR may alias a global variable. */
154
155 bool
156 ptr_deref_may_alias_global_p (tree ptr)
157 {
158 struct ptr_info_def *pi;
159
160 /* If we end up with a pointer constant here, it may point
161 to global memory. */
162 if (TREE_CODE (ptr) != SSA_NAME)
163 return true;
164
165 pi = SSA_NAME_PTR_INFO (ptr);
166
167 /* If we do not have points-to information for this variable,
168 we have to punt. */
169 if (!pi)
170 return true;
171
172 /* ??? This does not use TBAA to prune globals ptr may not access. */
173 return pt_solution_includes_global (&pi->pt);
174 }
175
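/* E.g. (an illustrative sketch): if points-to analysis computed that
   p_1 only points to a non-escaping local, i.e. `p_1 = &local', the
   solution contains no global bits and the predicate above returns
   false, so accesses through p_1 cannot touch global memory.  */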
176 /* Return true if dereferencing PTR may alias DECL.
177 The caller is responsible for applying TBAA to see if PTR
178 may access DECL at all. */
179
180 static bool
181 ptr_deref_may_alias_decl_p (tree ptr, tree decl)
182 {
183 struct ptr_info_def *pi;
184
185 /* Conversions are irrelevant for points-to information, and
186 data-dependence analysis can feed us those. */
187 STRIP_NOPS (ptr);
188
189 /* Anything we do not explicitly handle aliases. */
190 if ((TREE_CODE (ptr) != SSA_NAME
191 && TREE_CODE (ptr) != ADDR_EXPR
192 && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
193 || !POINTER_TYPE_P (TREE_TYPE (ptr))
194 || (!VAR_P (decl)
195 && TREE_CODE (decl) != PARM_DECL
196 && TREE_CODE (decl) != RESULT_DECL))
197 return true;
198
199 /* Disregard pointer offsetting. */
200 if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
201 {
202 do
203 {
204 ptr = TREE_OPERAND (ptr, 0);
205 }
206 while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
207 return ptr_deref_may_alias_decl_p (ptr, decl);
208 }
209
210 /* ADDR_EXPR pointers either just offset another pointer or directly
211 specify the pointed-to set. */
212 if (TREE_CODE (ptr) == ADDR_EXPR)
213 {
214 tree base = get_base_address (TREE_OPERAND (ptr, 0));
215 if (base
216 && (TREE_CODE (base) == MEM_REF
217 || TREE_CODE (base) == TARGET_MEM_REF))
218 ptr = TREE_OPERAND (base, 0);
219 else if (base
220 && DECL_P (base))
221 return compare_base_decls (base, decl) != 0;
222 else if (base
223 && CONSTANT_CLASS_P (base))
224 return false;
225 else
226 return true;
227 }
228
229 /* Non-aliased variables cannot be pointed to. */
230 if (!may_be_aliased (decl))
231 return false;
232
233 /* If we do not have useful points-to information for this pointer
234 we cannot disambiguate anything else. */
235 pi = SSA_NAME_PTR_INFO (ptr);
236 if (!pi)
237 return true;
238
239 return pt_solution_includes (&pi->pt, decl);
240 }
241
242 /* Return true if dereferenced PTR1 and PTR2 may alias.
243 The caller is responsible for applying TBAA to see if accesses
244 through PTR1 and PTR2 may conflict at all. */
245
246 bool
247 ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
248 {
249 struct ptr_info_def *pi1, *pi2;
250
251 /* Conversions are irrelevant for points-to information, and
252 data-dependence analysis can feed us those. */
253 STRIP_NOPS (ptr1);
254 STRIP_NOPS (ptr2);
255
256 /* Disregard pointer offsetting. */
257 if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
258 {
259 do
260 {
261 ptr1 = TREE_OPERAND (ptr1, 0);
262 }
263 while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
264 return ptr_derefs_may_alias_p (ptr1, ptr2);
265 }
266 if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
267 {
268 do
269 {
270 ptr2 = TREE_OPERAND (ptr2, 0);
271 }
272 while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
273 return ptr_derefs_may_alias_p (ptr1, ptr2);
274 }
275
276 /* ADDR_EXPR pointers either just offset another pointer or directly
277 specify the pointed-to set. */
278 if (TREE_CODE (ptr1) == ADDR_EXPR)
279 {
280 tree base = get_base_address (TREE_OPERAND (ptr1, 0));
281 if (base
282 && (TREE_CODE (base) == MEM_REF
283 || TREE_CODE (base) == TARGET_MEM_REF))
284 return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
285 else if (base
286 && DECL_P (base))
287 return ptr_deref_may_alias_decl_p (ptr2, base);
288 else
289 return true;
290 }
291 if (TREE_CODE (ptr2) == ADDR_EXPR)
292 {
293 tree base = get_base_address (TREE_OPERAND (ptr2, 0));
294 if (base
295 && (TREE_CODE (base) == MEM_REF
296 || TREE_CODE (base) == TARGET_MEM_REF))
297 return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
298 else if (base
299 && DECL_P (base))
300 return ptr_deref_may_alias_decl_p (ptr1, base);
301 else
302 return true;
303 }
304
305 /* From here we require SSA name pointers. Anything else aliases. */
306 if (TREE_CODE (ptr1) != SSA_NAME
307 || TREE_CODE (ptr2) != SSA_NAME
308 || !POINTER_TYPE_P (TREE_TYPE (ptr1))
309 || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
310 return true;
311
312 /* We may end up with two empty points-to solutions for two identical pointers.
313 In this case we still want to say both pointers alias, so shortcut
314 that here. */
315 if (ptr1 == ptr2)
316 return true;
317
318 /* If we do not have useful points-to information for either pointer
319 we cannot disambiguate anything else. */
320 pi1 = SSA_NAME_PTR_INFO (ptr1);
321 pi2 = SSA_NAME_PTR_INFO (ptr2);
322 if (!pi1 || !pi2)
323 return true;
324
325 /* ??? This does not use TBAA to prune decls from the intersection
326 that not both pointers may access. */
327 return pt_solutions_intersect (&pi1->pt, &pi2->pt);
328 }
329
330 /* Return true if dereferencing PTR may alias *REF.
331 The caller is responsible for applying TBAA to see if PTR
332 may access *REF at all. */
333
334 static bool
335 ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
336 {
337 tree base = ao_ref_base (ref);
338
339 if (TREE_CODE (base) == MEM_REF
340 || TREE_CODE (base) == TARGET_MEM_REF)
341 return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
342 else if (DECL_P (base))
343 return ptr_deref_may_alias_decl_p (ptr, base);
344
345 return true;
346 }
347
348 /* Returns true if PTR1 and PTR2 compare unequal because of points-to. */
349
350 bool
351 ptrs_compare_unequal (tree ptr1, tree ptr2)
352 {
353 /* First resolve the pointers down to a SSA name pointer base or
354 a VAR_DECL, PARM_DECL or RESULT_DECL. This explicitly does
355 not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
356 or STRING_CSTs, which would need points-to adjustments to track
357 them in the points-to sets. */
358 tree obj1 = NULL_TREE;
359 tree obj2 = NULL_TREE;
360 if (TREE_CODE (ptr1) == ADDR_EXPR)
361 {
362 tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
363 if (! tem)
364 return false;
365 if (VAR_P (tem)
366 || TREE_CODE (tem) == PARM_DECL
367 || TREE_CODE (tem) == RESULT_DECL)
368 obj1 = tem;
369 else if (TREE_CODE (tem) == MEM_REF)
370 ptr1 = TREE_OPERAND (tem, 0);
371 }
372 if (TREE_CODE (ptr2) == ADDR_EXPR)
373 {
374 tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
375 if (! tem)
376 return false;
377 if (VAR_P (tem)
378 || TREE_CODE (tem) == PARM_DECL
379 || TREE_CODE (tem) == RESULT_DECL)
380 obj2 = tem;
381 else if (TREE_CODE (tem) == MEM_REF)
382 ptr2 = TREE_OPERAND (tem, 0);
383 }
384
385 /* Canonicalize ptr vs. object. */
386 if (TREE_CODE (ptr1) == SSA_NAME && obj2)
387 {
388 std::swap (ptr1, ptr2);
389 std::swap (obj1, obj2);
390 }
391
392 if (obj1 && obj2)
393 /* Other code handles this correctly, no need to duplicate it here. */;
394 else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
395 {
396 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
397 /* We may not use restrict to optimize pointer comparisons.
398 See PR71062. So we have to assume that the restrict-pointed-to
399 object may in fact be obj1. */
400 if (!pi
401 || pi->pt.vars_contains_restrict
402 || pi->pt.vars_contains_interposable)
403 return false;
404 if (VAR_P (obj1)
405 && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
406 {
407 varpool_node *node = varpool_node::get (obj1);
408 /* If obj1 may bind to NULL give up (see below). */
409 if (! node
410 || ! node->nonzero_address ()
411 || ! decl_binds_to_current_def_p (obj1))
412 return false;
413 }
414 return !pt_solution_includes (&pi->pt, obj1);
415 }
416
417 /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
418 but those require pt.null to be conservatively correct. */
419
420 return false;
421 }
422
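/* For example (an illustrative sketch, not code from this file):

     int a, b, c;
     int *p = cond ? &b : &c;

   the comparison &a != p can be folded to true by the above, because
   the points-to set of p does not include a, provided that set
   carries no restrict or interposable tags.  */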
423 /* Return true if a reference with base BASE may refer to global memory. */
424
425 static bool
426 ref_may_alias_global_p_1 (tree base)
427 {
428 if (DECL_P (base))
429 return is_global_var (base);
430 else if (TREE_CODE (base) == MEM_REF
431 || TREE_CODE (base) == TARGET_MEM_REF)
432 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
433 return true;
434 }
435
436 bool
437 ref_may_alias_global_p (ao_ref *ref)
438 {
439 tree base = ao_ref_base (ref);
440 return ref_may_alias_global_p_1 (base);
441 }
442
443 bool
444 ref_may_alias_global_p (tree ref)
445 {
446 tree base = get_base_address (ref);
447 return ref_may_alias_global_p_1 (base);
448 }
449
450 /* Return true if STMT may clobber global memory. */
451
452 bool
453 stmt_may_clobber_global_p (gimple *stmt)
454 {
455 tree lhs;
456
457 if (!gimple_vdef (stmt))
458 return false;
459
460 /* ??? We can ask the oracle whether an artificial pointer
461 dereference with a pointer with points-to information covering
462 all global memory (what about non-address taken memory?) may be
463 clobbered by this call. As there is at the moment no convenient
464 way of doing that without generating garbage, do some manual
465 checking instead.
466 ??? We could make a NULL ao_ref argument to the various
467 predicates special, meaning any global memory. */
468
469 switch (gimple_code (stmt))
470 {
471 case GIMPLE_ASSIGN:
472 lhs = gimple_assign_lhs (stmt);
473 return (TREE_CODE (lhs) != SSA_NAME
474 && ref_may_alias_global_p (lhs));
475 case GIMPLE_CALL:
476 return true;
477 default:
478 return true;
479 }
480 }
481
482
483 /* Dump alias information on FILE. */
484
485 void
486 dump_alias_info (FILE *file)
487 {
488 unsigned i;
489 tree ptr;
490 const char *funcname
491 = lang_hooks.decl_printable_name (current_function_decl, 2);
492 tree var;
493
494 fprintf (file, "\n\nAlias information for %s\n\n", funcname);
495
496 fprintf (file, "Aliased symbols\n\n");
497
498 FOR_EACH_LOCAL_DECL (cfun, i, var)
499 {
500 if (may_be_aliased (var))
501 dump_variable (file, var);
502 }
503
504 fprintf (file, "\nCall clobber information\n");
505
506 fprintf (file, "\nESCAPED");
507 dump_points_to_solution (file, &cfun->gimple_df->escaped);
508
509 fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
510
511 FOR_EACH_SSA_NAME (i, ptr, cfun)
512 {
513 struct ptr_info_def *pi;
514
515 if (!POINTER_TYPE_P (TREE_TYPE (ptr))
516 || SSA_NAME_IN_FREE_LIST (ptr))
517 continue;
518
519 pi = SSA_NAME_PTR_INFO (ptr);
520 if (pi)
521 dump_points_to_info_for (file, ptr);
522 }
523
524 fprintf (file, "\n");
525 }
526
527
528 /* Dump alias information on stderr. */
529
530 DEBUG_FUNCTION void
531 debug_alias_info (void)
532 {
533 dump_alias_info (stderr);
534 }
535
536
537 /* Dump the points-to set *PT into FILE. */
538
539 void
540 dump_points_to_solution (FILE *file, struct pt_solution *pt)
541 {
542 if (pt->anything)
543 fprintf (file, ", points-to anything");
544
545 if (pt->nonlocal)
546 fprintf (file, ", points-to non-local");
547
548 if (pt->escaped)
549 fprintf (file, ", points-to escaped");
550
551 if (pt->ipa_escaped)
552 fprintf (file, ", points-to unit escaped");
553
554 if (pt->null)
555 fprintf (file, ", points-to NULL");
556
557 if (pt->vars)
558 {
559 fprintf (file, ", points-to vars: ");
560 dump_decl_set (file, pt->vars);
561 if (pt->vars_contains_nonlocal
562 || pt->vars_contains_escaped
563 || pt->vars_contains_escaped_heap
564 || pt->vars_contains_restrict)
565 {
566 const char *comma = "";
567 fprintf (file, " (");
568 if (pt->vars_contains_nonlocal)
569 {
570 fprintf (file, "nonlocal");
571 comma = ", ";
572 }
573 if (pt->vars_contains_escaped)
574 {
575 fprintf (file, "%sescaped", comma);
576 comma = ", ";
577 }
578 if (pt->vars_contains_escaped_heap)
579 {
580 fprintf (file, "%sescaped heap", comma);
581 comma = ", ";
582 }
583 if (pt->vars_contains_restrict)
584 {
585 fprintf (file, "%srestrict", comma);
586 comma = ", ";
587 }
588 if (pt->vars_contains_interposable)
589 fprintf (file, "%sinterposable", comma);
590 fprintf (file, ")");
591 }
592 }
593 }
594
595
596 /* Unified dump function for pt_solution. */
597
598 DEBUG_FUNCTION void
599 debug (pt_solution &ref)
600 {
601 dump_points_to_solution (stderr, &ref);
602 }
603
604 DEBUG_FUNCTION void
605 debug (pt_solution *ptr)
606 {
607 if (ptr)
608 debug (*ptr);
609 else
610 fprintf (stderr, "<nil>\n");
611 }
612
613
614 /* Dump points-to information for SSA_NAME PTR into FILE. */
615
616 void
617 dump_points_to_info_for (FILE *file, tree ptr)
618 {
619 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
620
621 print_generic_expr (file, ptr, dump_flags);
622
623 if (pi)
624 dump_points_to_solution (file, &pi->pt);
625 else
626 fprintf (file, ", points-to anything");
627
628 fprintf (file, "\n");
629 }
630
631
632 /* Dump points-to information for VAR into stderr. */
633
634 DEBUG_FUNCTION void
635 debug_points_to_info_for (tree var)
636 {
637 dump_points_to_info_for (stderr, var);
638 }
639
640
641 /* Initializes the alias-oracle reference representation *R from REF. */
642
643 void
644 ao_ref_init (ao_ref *r, tree ref)
645 {
646 r->ref = ref;
647 r->base = NULL_TREE;
648 r->offset = 0;
649 r->size = -1;
650 r->max_size = -1;
651 r->ref_alias_set = -1;
652 r->base_alias_set = -1;
653 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
654 }
655
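/* A minimal usage sketch (illustrative only):

     ao_ref r;
     ao_ref_init (&r, ref);
     tree base = ao_ref_base (&r);
     alias_set_type set = ao_ref_alias_set (&r);

   ao_ref_base lazily fills in base, offset, size and max_size;
   keeping the ao_ref around amortizes this work and the alias set
   computation over repeated oracle queries.  */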
656 /* Returns the base object of the memory reference *REF. */
657
658 tree
659 ao_ref_base (ao_ref *ref)
660 {
661 bool reverse;
662
663 if (ref->base)
664 return ref->base;
665 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
666 &ref->max_size, &reverse);
667 return ref->base;
668 }
669
670 /* Returns the base object alias set of the memory reference *REF. */
671
672 alias_set_type
673 ao_ref_base_alias_set (ao_ref *ref)
674 {
675 tree base_ref;
676 if (ref->base_alias_set != -1)
677 return ref->base_alias_set;
678 if (!ref->ref)
679 return 0;
680 base_ref = ref->ref;
681 while (handled_component_p (base_ref))
682 base_ref = TREE_OPERAND (base_ref, 0);
683 ref->base_alias_set = get_alias_set (base_ref);
684 return ref->base_alias_set;
685 }
686
687 /* Returns the reference alias set of the memory reference *REF. */
688
689 alias_set_type
690 ao_ref_alias_set (ao_ref *ref)
691 {
692 if (ref->ref_alias_set != -1)
693 return ref->ref_alias_set;
694 ref->ref_alias_set = get_alias_set (ref->ref);
695 return ref->ref_alias_set;
696 }
697
698 /* Init an alias-oracle reference representation from a gimple pointer
699 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
700 size is assumed to be unknown. The access is assumed to be only
701 to or after the pointer target, not before it. */
702
703 void
704 ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
705 {
706 poly_int64 t, size_hwi, extra_offset = 0;
707 ref->ref = NULL_TREE;
708 if (TREE_CODE (ptr) == SSA_NAME)
709 {
710 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
711 if (gimple_assign_single_p (stmt)
712 && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
713 ptr = gimple_assign_rhs1 (stmt);
714 else if (is_gimple_assign (stmt)
715 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
716 && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
717 {
718 ptr = gimple_assign_rhs1 (stmt);
719 extra_offset *= BITS_PER_UNIT;
720 }
721 }
722
723 if (TREE_CODE (ptr) == ADDR_EXPR)
724 {
725 ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
726 if (ref->base)
727 ref->offset = BITS_PER_UNIT * t;
728 else
729 {
730 size = NULL_TREE;
731 ref->offset = 0;
732 ref->base = get_base_address (TREE_OPERAND (ptr, 0));
733 }
734 }
735 else
736 {
737 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
738 ref->base = build2 (MEM_REF, char_type_node,
739 ptr, null_pointer_node);
740 ref->offset = 0;
741 }
742 ref->offset += extra_offset;
743 if (size
744 && poly_int_tree_p (size, &size_hwi)
745 && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
746 ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
747 else
748 ref->max_size = ref->size = -1;
749 ref->ref_alias_set = 0;
750 ref->base_alias_set = 0;
751 ref->volatile_p = false;
752 }
753
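/* For instance (an illustrative sketch), to disambiguate the
   destination of a call like memset (p, 0, 16) against another
   reference one would set up

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref,
                                    gimple_call_arg (call, 0),
                                    gimple_call_arg (call, 2));

   which leaves dref.ref NULL and both alias sets at 0, i.e. the
   result is TBAA-neutral.  */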
754 /* S1 and S2 are TYPE_SIZE or DECL_SIZE. Compare them:
755 Return -1 if S1 < S2
756 Return 1 if S1 > S2
757 Return 0 if equal or incomparable. */
758
759 static int
760 compare_sizes (tree s1, tree s2)
761 {
762 if (!s1 || !s2)
763 return 0;
764
765 poly_uint64 size1;
766 poly_uint64 size2;
767
768 if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
769 return 0;
770 if (known_lt (size1, size2))
771 return -1;
772 if (known_lt (size2, size1))
773 return 1;
774 return 0;
775 }
776
777 /* Compare TYPE1 and TYPE2 by their sizes.
778 Return -1 if size of TYPE1 < size of TYPE2
779 Return 1 if size of TYPE1 > size of TYPE2
780 Return 0 if the types are of equal size or we cannot compare them. */
781
782 static int
783 compare_type_sizes (tree type1, tree type2)
784 {
785 /* Be conservative for arrays and vectors. We want to support partial
786 overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */
787 while (TREE_CODE (type1) == ARRAY_TYPE
788 || TREE_CODE (type1) == VECTOR_TYPE)
789 type1 = TREE_TYPE (type1);
790 while (TREE_CODE (type2) == ARRAY_TYPE
791 || TREE_CODE (type2) == VECTOR_TYPE)
792 type2 = TREE_TYPE (type2);
793 return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
794 }
795
796 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
797 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
798 decide. */
799
800 static inline int
801 same_type_for_tbaa (tree type1, tree type2)
802 {
803 type1 = TYPE_MAIN_VARIANT (type1);
804 type2 = TYPE_MAIN_VARIANT (type2);
805
806 /* Handle the most common case first. */
807 if (type1 == type2)
808 return 1;
809
810 /* If we would have to do structural comparison bail out. */
811 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
812 || TYPE_STRUCTURAL_EQUALITY_P (type2))
813 return -1;
814
815 /* Compare the canonical types. */
816 if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
817 return 1;
818
819 /* ??? Array types are not properly unified in all cases as we have
820 spurious changes in the index types for example. Removing this
821 causes all sorts of problems with the Fortran frontend. */
822 if (TREE_CODE (type1) == ARRAY_TYPE
823 && TREE_CODE (type2) == ARRAY_TYPE)
824 return -1;
825
826 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
827 object of one of its constrained subtypes, e.g. when a function with an
828 unconstrained parameter passed by reference is called on an object and
829 inlined. But, even in the case of a fixed size, type and subtypes are
830 not equivalent enough as to share the same TYPE_CANONICAL, since this
831 would mean that conversions between them are useless, whereas they are
832 not (e.g. type and subtypes can have different modes). So, in the end,
833 they are only guaranteed to have the same alias set. */
834 if (get_alias_set (type1) == get_alias_set (type2))
835 return -1;
836
837 /* The types are known to be not equal. */
838 return 0;
839 }
840
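/* E.g. (illustrative): `const int' vs. `int' share a main variant and
   yield 1; `int' vs. `float' have distinct canonical types and
   distinct alias sets and yield 0; two distinct ARRAY_TYPEs of int
   yield -1, deferring the decision to the callers.  */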
841 /* Return true if TYPE is a composite type (i.e. we may apply one of the
842 handled components to it). */
843
844 static bool
845 type_has_components_p (tree type)
846 {
847 return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
848 || TREE_CODE (type) == COMPLEX_TYPE;
849 }
850
851 /* Determine if the two component references REF1 and REF2 which are
852 based on access types TYPE1 and TYPE2 and of which at least one is based
853 on an indirect reference may alias. REF2 is the only one that can
854 be a decl, in which case REF2_IS_DECL is true.
855 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
856 are the respective alias sets. */
857
858 static bool
859 aliasing_component_refs_p (tree ref1,
860 alias_set_type ref1_alias_set,
861 alias_set_type base1_alias_set,
862 poly_int64 offset1, poly_int64 max_size1,
863 tree ref2,
864 alias_set_type ref2_alias_set,
865 alias_set_type base2_alias_set,
866 poly_int64 offset2, poly_int64 max_size2,
867 bool ref2_is_decl)
868 {
869 /* If one reference is a component reference through pointers, try to find a
870 common base and apply offset-based disambiguation. This handles
871 for example
872 struct A { int i; int j; } *q;
873 struct B { struct A a; int k; } *p;
874 disambiguating q->i and p->a.j. */
875 tree base1, base2;
876 tree type1, type2;
877 int same_p1 = 0, same_p2 = 0;
878 bool maybe_match = false;
879 tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
880
881 /* Choose bases and base types to search for. */
882 base1 = ref1;
883 while (handled_component_p (base1))
884 {
885 /* Generally access paths are monotone in the size of the object. The
886 exception is trailing arrays of structures, i.e.
887 struct a {int array[0];};
888 or
889 struct a {int array1[0]; int array[];};
890 Such struct has size 0 but accesses to a.array may have non-zero size.
891 In this case the size of TREE_TYPE (base1) is smaller than
892 the size of TREE_TYPE (TREE_OPERAND (base1, 0)).
893
894 Because we compare sizes of arrays just by sizes of their elements,
895 we only need to care about zero sized array fields here. */
896 if (TREE_CODE (base1) == COMPONENT_REF
897 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base1, 1))) == ARRAY_TYPE
898 && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base1, 1)))
899 || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base1, 1)))))
900 && array_at_struct_end_p (base1))
901 {
902 gcc_checking_assert (!end_struct_ref1);
903 end_struct_ref1 = base1;
904 }
905 if (TREE_CODE (base1) == VIEW_CONVERT_EXPR
906 || TREE_CODE (base1) == BIT_FIELD_REF)
907 ref1 = TREE_OPERAND (base1, 0);
908 base1 = TREE_OPERAND (base1, 0);
909 }
910 type1 = TREE_TYPE (base1);
911 base2 = ref2;
912 while (handled_component_p (base2))
913 {
914 if (TREE_CODE (base2) == COMPONENT_REF
915 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base2, 1))) == ARRAY_TYPE
916 && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base2, 1)))
917 || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base2, 1)))))
918 && array_at_struct_end_p (base2))
919 {
920 gcc_checking_assert (!end_struct_ref2);
921 end_struct_ref2 = base2;
922 }
923 if (TREE_CODE (base2) == VIEW_CONVERT_EXPR
924 || TREE_CODE (base2) == BIT_FIELD_REF)
925 ref2 = TREE_OPERAND (base2, 0);
926 base2 = TREE_OPERAND (base2, 0);
927 }
928 type2 = TREE_TYPE (base2);
929
930 /* Now search for type1 in the access path of ref2. This
931 would be a common base for doing offset based disambiguation on.
932 This however only makes sense if type2 is big enough to hold type1. */
933 int cmp_outer = compare_type_sizes (type2, type1);
934
935 /* If type2 is big enough to contain type1 walk its access path.
936 We also need to take care of arrays at the end of structs that may extend
937 beyond the end of the structure. */
938 if (cmp_outer >= 0
939 || (end_struct_ref2
940 && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
941 {
942 tree ref = ref2;
943 while (true)
944 {
945 /* We walk from the inner type to the outer types. If the type we see is
946 already too large to be part of type1, terminate the search. */
947 int cmp = compare_type_sizes (type1, TREE_TYPE (ref));
948
949 if (cmp < 0
950 && (!end_struct_ref1
951 || compare_type_sizes (TREE_TYPE (end_struct_ref1),
952 TREE_TYPE (ref)) < 0))
953 break;
954 /* If the types may be of the same size, see if we can decide about their
955 equality. */
956 if (cmp == 0)
957 {
958 same_p2 = same_type_for_tbaa (TREE_TYPE (ref), type1);
959 if (same_p2 == 1)
960 break;
961 /* In case we can't decide whether the types are the same,
962 continue looking for an exact match.
963 Remember however that we possibly saw a match,
964 to bypass the access-path continuation tests we do later. */
965 if (same_p2 == -1)
966 maybe_match = true;
967 }
968 if (!handled_component_p (ref))
969 break;
970 ref = TREE_OPERAND (ref, 0);
971 }
972 if (same_p2 == 1)
973 {
974 poly_int64 offadj, sztmp, msztmp;
975 bool reverse;
976
977 /* We assume that arrays can overlap by a multiple of their element
978 size as tested in gcc.dg/torture/alias-2.c.
979 This partial overlap happens only when both arrays are bases of
980 the access and not contained within another component ref.
981 To be safe we also assume partial overlap for VLAs. */
982 if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
983 && (!TYPE_SIZE (TREE_TYPE (base1))
984 || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
985 || (ref == base2 && !ref2_is_decl)))
986 {
987 ++alias_stats.aliasing_component_refs_p_may_alias;
988 return true;
989 }
990
991 get_ref_base_and_extent (ref, &offadj, &sztmp, &msztmp, &reverse);
992 offset2 -= offadj;
993 get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse);
994 offset1 -= offadj;
995 if (ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
996 {
997 ++alias_stats.aliasing_component_refs_p_may_alias;
998 return true;
999 }
1000 else
1001 {
1002 ++alias_stats.aliasing_component_refs_p_no_alias;
1003 return false;
1004 }
1005 }
1006 }
1007
1008 /* If we didn't find a common base, try the other way around. */
1009 if (cmp_outer <= 0
1010 || (end_struct_ref1
1011 && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0))
1012 {
1013 tree ref = ref1;
1014 while (true)
1015 {
1016 int cmp = compare_type_sizes (type2, TREE_TYPE (ref));
1017 if (cmp < 0
1018 && (!end_struct_ref2
1019 || compare_type_sizes (TREE_TYPE (end_struct_ref2),
1020 TREE_TYPE (ref)) < 0))
1021 break;
1022 /* If types may be of same size, see if we can decide about their
1023 equality. */
1024 if (cmp == 0)
1025 {
1026 same_p1 = same_type_for_tbaa (TREE_TYPE (ref), type2);
1027 if (same_p1 == 1)
1028 break;
1029 if (same_p1 == -1)
1030 maybe_match = true;
1031 }
1032 if (!handled_component_p (ref))
1033 break;
1034 ref = TREE_OPERAND (ref, 0);
1035 }
1036 if (same_p1 == 1)
1037 {
1038 poly_int64 offadj, sztmp, msztmp;
1039 bool reverse;
1040
1041 if (TREE_CODE (TREE_TYPE (base2)) == ARRAY_TYPE
1042 && (!TYPE_SIZE (TREE_TYPE (base2))
1043 || TREE_CODE (TYPE_SIZE (TREE_TYPE (base2))) != INTEGER_CST
1044 || (ref == base1 && !ref2_is_decl)))
1045 {
1046 ++alias_stats.aliasing_component_refs_p_may_alias;
1047 return true;
1048 }
1049
1050 get_ref_base_and_extent (ref, &offadj, &sztmp, &msztmp, &reverse);
1051 offset1 -= offadj;
1052 get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse);
1053 offset2 -= offadj;
1054 if (ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1055 {
1056 ++alias_stats.aliasing_component_refs_p_may_alias;
1057 return true;
1058 }
1059 else
1060 {
1061 ++alias_stats.aliasing_component_refs_p_no_alias;
1062 return false;
1063 }
1064 }
1065 }
1066
1067 /* In the following code we make an assumption that the types in access
1068 paths do not overlap and thus accesses alias only if one path can be
1069 a continuation of another. If we were not able to decide about equivalence,
1070 we need to give up. */
1071 if (maybe_match)
1072 return true;
1073
1074 /* If we have two type access paths B1.path1 and B2.path2 they may
1075 only alias if either B1 is in B2.path2 or B2 is in B1.path1.
1076 But we can still have a path that goes B1.path1...B2.path2 with
1077 a part that we do not see. So we can only disambiguate now
1078 if there is no B2 in the tail of path1 and no B1 on the
1079 tail of path2. */
1080 if (compare_type_sizes (TREE_TYPE (ref2), type1) >= 0
1081 && (!end_struct_ref1
1082 || compare_type_sizes (TREE_TYPE (ref2),
1083 TREE_TYPE (end_struct_ref1)) >= 0)
1084 && type_has_components_p (TREE_TYPE (ref2))
1085 && (base1_alias_set == ref2_alias_set
1086 || alias_set_subset_of (base1_alias_set, ref2_alias_set)))
1087 {
1088 ++alias_stats.aliasing_component_refs_p_may_alias;
1089 return true;
1090 }
1091 /* If this is ptr vs. decl then we know there is no ptr ... decl path. */
1092 if (!ref2_is_decl
1093 && compare_type_sizes (TREE_TYPE (ref1), type2) >= 0
1094 && (!end_struct_ref2
1095 || compare_type_sizes (TREE_TYPE (ref1),
1096 TREE_TYPE (end_struct_ref2)) >= 0)
1097 && type_has_components_p (TREE_TYPE (ref1))
1098 && (base2_alias_set == ref1_alias_set
1099 || alias_set_subset_of (base2_alias_set, ref1_alias_set)))
1100 {
1101 ++alias_stats.aliasing_component_refs_p_may_alias;
1102 return true;
1103 }
1104 ++alias_stats.aliasing_component_refs_p_no_alias;
1105 return false;
1106 }
1107
1108 /* Return true if we can determine that component references REF1 and REF2,
1109 that are within a common DECL, cannot overlap. */
1110
1111 static bool
1112 nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
1113 {
1114 auto_vec<tree, 16> component_refs1;
1115 auto_vec<tree, 16> component_refs2;
1116
1117 /* Create the stack of handled components for REF1. */
1118 while (handled_component_p (ref1))
1119 {
1120 component_refs1.safe_push (ref1);
1121 ref1 = TREE_OPERAND (ref1, 0);
1122 }
1123 if (TREE_CODE (ref1) == MEM_REF)
1124 {
1125 if (!integer_zerop (TREE_OPERAND (ref1, 1)))
1126 {
1127 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1128 return false;
1129 }
1130 ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
1131 }
1132
1133 /* Create the stack of handled components for REF2. */
1134 while (handled_component_p (ref2))
1135 {
1136 component_refs2.safe_push (ref2);
1137 ref2 = TREE_OPERAND (ref2, 0);
1138 }
1139 if (TREE_CODE (ref2) == MEM_REF)
1140 {
1141 if (!integer_zerop (TREE_OPERAND (ref2, 1)))
1142 {
1143 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1144 return false;
1145 }
1146 ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
1147 }
1148
1149 /* Bases must be either the same or uncomparable. */
1150 gcc_checking_assert (ref1 == ref2
1151 || (DECL_P (ref1) && DECL_P (ref2)
1152 && compare_base_decls (ref1, ref2) != 0));
1153
1154 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1155 rank. This is sufficient because we start from the same DECL and you
1156 cannot reference several fields at a time with COMPONENT_REFs (unlike
1157 with ARRAY_RANGE_REFs for arrays) so you always need the same number
1158 of them to access a sub-component, unless you're in a union, in which
1159 case the return value will precisely be false. */
1160 while (true)
1161 {
1162 do
1163 {
1164 if (component_refs1.is_empty ())
1165 {
1166 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1167 return false;
1168 }
1169 ref1 = component_refs1.pop ();
1170 }
1171 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
1172
1173 do
1174 {
1175 if (component_refs2.is_empty ())
1176 {
1177 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1178 return false;
1179 }
1180 ref2 = component_refs2.pop ();
1181 }
1182 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
1183
1184 /* Beware of BIT_FIELD_REF. */
1185 if (TREE_CODE (ref1) != COMPONENT_REF
1186 || TREE_CODE (ref2) != COMPONENT_REF)
1187 {
1188 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1189 return false;
1190 }
1191
1192 tree field1 = TREE_OPERAND (ref1, 1);
1193 tree field2 = TREE_OPERAND (ref2, 1);
1194
1195 /* ??? We cannot simply use the type of operand #0 of the refs here
1196 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1197 for common blocks instead of using unions like everyone else. */
1198 tree type1 = DECL_CONTEXT (field1);
1199 tree type2 = DECL_CONTEXT (field2);
1200
1201 /* We cannot disambiguate fields in a union or qualified union. */
1202 if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
1203 {
1204 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1205 return false;
1206 }
1207
1208 if (field1 != field2)
1209 {
1210 /* A field and its representative need to be considered the
1211 same. */
1212 if (DECL_BIT_FIELD_REPRESENTATIVE (field1) == field2
1213 || DECL_BIT_FIELD_REPRESENTATIVE (field2) == field1)
1214 {
1215 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1216 return false;
1217 }
1218 /* Different fields of the same record type cannot overlap.
1219 ??? Bitfields can overlap at RTL level so punt on them. */
1220 if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
1221 {
1222 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1223 return false;
1224 }
1225 ++alias_stats.nonoverlapping_component_refs_of_decl_p_no_alias;
1226 return true;
1227 }
1228 }
1229
1230 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1231 return false;
1232 }
1233
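/* Worked example (illustrative): for

     struct S { struct { int i; int j; } a, b; } s;

   the references s.a.i and s.b.j pop COMPONENT_REFs of equal rank
   until the fields a and b of the same RECORD_TYPE are reached and
   the function returns true; s.a.i vs. s.a.j only differ at the
   innermost rank and are disambiguated there in the same way.  */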
1234 /* qsort compare function to sort FIELD_DECLs by their
1235 DECL_FIELD_CONTEXT TYPE_UID. */
1236
1237 static inline int
1238 ncr_compar (const void *field1_, const void *field2_)
1239 {
1240 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1241 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1242 unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
1243 unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
1244 if (uid1 < uid2)
1245 return -1;
1246 else if (uid1 > uid2)
1247 return 1;
1248 return 0;
1249 }
1250
1251 /* Return true if we can determine that the fields referenced cannot
1252 overlap for any pair of objects. */
1253
1254 static bool
1255 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1256 {
1257 if (!flag_strict_aliasing
1258 || !x || !y
1259 || !handled_component_p (x)
1260 || !handled_component_p (y))
1261 {
1262 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1263 return false;
1264 }
1265
1266 auto_vec<const_tree, 16> fieldsx;
1267 while (handled_component_p (x))
1268 {
1269 if (TREE_CODE (x) == COMPONENT_REF)
1270 {
1271 tree field = TREE_OPERAND (x, 1);
1272 tree type = DECL_FIELD_CONTEXT (field);
1273 if (TREE_CODE (type) == RECORD_TYPE)
1274 fieldsx.safe_push (field);
1275 }
1276 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
1277 || TREE_CODE (x) == BIT_FIELD_REF)
1278 fieldsx.truncate (0);
1279 x = TREE_OPERAND (x, 0);
1280 }
1281 if (fieldsx.length () == 0)
1282 return false;
1283 auto_vec<const_tree, 16> fieldsy;
1284 while (handled_component_p (y))
1285 {
1286 if (TREE_CODE (y) == COMPONENT_REF)
1287 {
1288 tree field = TREE_OPERAND (y, 1);
1289 tree type = DECL_FIELD_CONTEXT (field);
1290 if (TREE_CODE (type) == RECORD_TYPE)
1291 fieldsy.safe_push (TREE_OPERAND (y, 1));
1292 }
1293 else if (TREE_CODE (y) == VIEW_CONVERT_EXPR
1294 || TREE_CODE (y) == BIT_FIELD_REF)
1295 fieldsy.truncate (0);
1296 y = TREE_OPERAND (y, 0);
1297 }
1298 if (fieldsy.length () == 0)
1299 {
1300 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1301 return false;
1302 }
1303
1304 /* Most common case first. */
1305 if (fieldsx.length () == 1
1306 && fieldsy.length () == 1)
1307 {
1308 if ((DECL_FIELD_CONTEXT (fieldsx[0])
1309 == DECL_FIELD_CONTEXT (fieldsy[0]))
1310 && fieldsx[0] != fieldsy[0]
1311 && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])))
1312 {
1313 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1314 return true;
1315 }
1316 else
1317 {
1318 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1319 return false;
1320 }
1321 }
1322
1323 if (fieldsx.length () == 2)
1324 {
1325 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1326 std::swap (fieldsx[0], fieldsx[1]);
1327 }
1328 else
1329 fieldsx.qsort (ncr_compar);
1330
1331 if (fieldsy.length () == 2)
1332 {
1333 if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1334 std::swap (fieldsy[0], fieldsy[1]);
1335 }
1336 else
1337 fieldsy.qsort (ncr_compar);
1338
1339 unsigned i = 0, j = 0;
1340 do
1341 {
1342 const_tree fieldx = fieldsx[i];
1343 const_tree fieldy = fieldsy[j];
1344 tree typex = DECL_FIELD_CONTEXT (fieldx);
1345 tree typey = DECL_FIELD_CONTEXT (fieldy);
1346 if (typex == typey)
1347 {
1348 /* We're left with accessing different fields of a structure,
1349 no possible overlap. */
1350 if (fieldx != fieldy)
1351 {
1352 /* A field and its representative need to be considered the
1353 same. */
1354 if (DECL_BIT_FIELD_REPRESENTATIVE (fieldx) == fieldy
1355 || DECL_BIT_FIELD_REPRESENTATIVE (fieldy) == fieldx)
1356 {
1357 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1358 return false;
1359 }
1360 /* Different fields of the same record type cannot overlap.
1361 ??? Bitfields can overlap at RTL level so punt on them. */
1362 if (DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy))
1363 {
1364 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1365 return false;
1366 }
1367 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1368 return true;
1369 }
1370 }
1371 if (TYPE_UID (typex) < TYPE_UID (typey))
1372 {
1373 i++;
1374 if (i == fieldsx.length ())
1375 break;
1376 }
1377 else
1378 {
1379 j++;
1380 if (j == fieldsy.length ())
1381 break;
1382 }
1383 }
1384 while (1);
1385
1386 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1387 return false;
1388 }
1389
1390
1391 /* Return true if two memory references based on the variables BASE1
1392 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1393 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1394 if non-NULL are the complete memory reference trees. */
1395
1396 static bool
1397 decl_refs_may_alias_p (tree ref1, tree base1,
1398 poly_int64 offset1, poly_int64 max_size1,
1399 tree ref2, tree base2,
1400 poly_int64 offset2, poly_int64 max_size2)
1401 {
1402 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
1403
1404 /* If both references are based on different variables, they cannot alias. */
1405 if (compare_base_decls (base1, base2) == 0)
1406 return false;
1407
1408 /* If both references are based on the same variable, they cannot alias if
1409 the accesses do not overlap. */
1410 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1411 return false;
1412
1413 /* For components with variable position, the above test isn't sufficient,
1414 so we disambiguate component references manually. */
1415 if (ref1 && ref2
1416 && handled_component_p (ref1) && handled_component_p (ref2)
1417 && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
1418 return false;
1419
1420 return true;
1421 }
1422
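/* E.g. (an illustrative sketch, assuming 32-bit int): for `int a[8];'
   the references a[0] and a[4] share the base decl but occupy the bit
   ranges [0, 32) and [128, 160), so the ranges_maybe_overlap_p check
   above returns false and the accesses are disambiguated.  */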
1423 /* Return true if an indirect reference based on *PTR1 constrained
1424 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
1425 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
1426 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1427 in which case they are computed on-demand. REF1 and REF2
1428 if non-NULL are the complete memory reference trees. */
1429
1430 static bool
1431 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1432 poly_int64 offset1, poly_int64 max_size1,
1433 alias_set_type ref1_alias_set,
1434 alias_set_type base1_alias_set,
1435 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1436 poly_int64 offset2, poly_int64 max_size2,
1437 alias_set_type ref2_alias_set,
1438 alias_set_type base2_alias_set, bool tbaa_p)
1439 {
1440 tree ptr1;
1441 tree ptrtype1, dbase2;
1442
1443 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1444 || TREE_CODE (base1) == TARGET_MEM_REF)
1445 && DECL_P (base2));
1446
1447 ptr1 = TREE_OPERAND (base1, 0);
1448 poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
1449
1450 /* If only one reference is based on a variable, they cannot alias if
1451 the pointer access is beyond the extent of the variable access.
1452 (the pointer base cannot validly point to an offset less than zero
1453 of the variable).
1454 ??? IVOPTs creates bases that do not honor this restriction,
1455 so do not apply this optimization for TARGET_MEM_REFs. */
1456 if (TREE_CODE (base1) != TARGET_MEM_REF
1457 && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
1458 return false;
1459 /* They also cannot alias if the pointer may not point to the decl. */
1460 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
1461 return false;
1462
1463 /* Disambiguations that rely on strict aliasing rules follow. */
1464 if (!flag_strict_aliasing || !tbaa_p)
1465 return true;
1466
1467 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1468
1469 /* If the alias set for a pointer access is zero all bets are off. */
1470 if (base1_alias_set == 0)
1471 return true;
1472
1473 /* When we are trying to disambiguate an access with a pointer dereference
1474 as base versus one with a decl as base we can use both the size
1475 of the decl and its dynamic type for extra disambiguation.
1476 ??? We do not know anything about the dynamic type of the decl
1477 other than that its alias-set contains base2_alias_set as a subset
1478 which does not help us here. */
1479 /* As we know nothing useful about the dynamic type of the decl just
1480 use the usual conflict check rather than a subset test.
1481 ??? We could introduce -fvery-strict-aliasing when the language
1482 does not allow decls to have a dynamic type that differs from their
1483 static type. Then we can check
1484 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
1485 if (base1_alias_set != base2_alias_set
1486 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1487 return false;
1488 /* If the size of the access relevant for TBAA through the pointer
1489 is bigger than the size of the decl we can't possibly access the
1490 decl via that pointer. */
1491 if (/* ??? This in turn may run afoul when a decl of type T which is
1492 a member of union type U is accessed through a pointer to
1493 type U and sizeof T is smaller than sizeof U. */
1494 TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
1495 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
1496 && compare_sizes (DECL_SIZE (base2),
1497 TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
1498 return false;
1499
1500 if (!ref2)
1501 return true;
1502
1503 /* If the decl is accessed via a MEM_REF, reconstruct the base
1504 we can use for TBAA and an appropriately adjusted offset. */
1505 dbase2 = ref2;
1506 while (handled_component_p (dbase2))
1507 dbase2 = TREE_OPERAND (dbase2, 0);
1508 poly_int64 doffset1 = offset1;
1509 poly_offset_int doffset2 = offset2;
1510 if (TREE_CODE (dbase2) == MEM_REF
1511 || TREE_CODE (dbase2) == TARGET_MEM_REF)
1512 {
1513 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
1514 tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
1515 /* If second reference is view-converted, give up now. */
1516 if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
1517 return true;
1518 }
1519
1520 /* If first reference is view-converted, give up now. */
1521 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
1522 return true;
1523
1524 /* If both references are through the same type, they do not alias
1525 if the accesses do not overlap. This does extra disambiguation
1526 for mixed/pointer accesses but requires strict aliasing.
1527 For MEM_REFs we require that the component-ref offset we computed
1528 is relative to the start of the type which we ensure by
1529 comparing rvalue and access type and disregarding the constant
1530 pointer offset.
1531
1532 But avoid treating variable length arrays as "objects", instead assume they
1533 can overlap by an exact multiple of their element size.
1534 See gcc.dg/torture/alias-2.c. */
1535 if (((TREE_CODE (base1) != TARGET_MEM_REF
1536 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1537 && (TREE_CODE (dbase2) != TARGET_MEM_REF
1538 || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
1539 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1
1540 && (TREE_CODE (TREE_TYPE (base1)) != ARRAY_TYPE
1541 || (TYPE_SIZE (TREE_TYPE (base1))
1542 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) == INTEGER_CST)))
1543 return ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2);
1544
1545 if (ref1 && ref2
1546 && nonoverlapping_component_refs_p (ref1, ref2))
1547 return false;
1548
1549 /* Do access-path based disambiguation. */
1550 if (ref1 && ref2
1551 && (handled_component_p (ref1) || handled_component_p (ref2)))
1552 return aliasing_component_refs_p (ref1,
1553 ref1_alias_set, base1_alias_set,
1554 offset1, max_size1,
1555 ref2,
1556 ref2_alias_set, base2_alias_set,
1557 offset2, max_size2,
1558 /* Only if the other reference is an actual
1559 decl can we safely check just the toplevel
1560 part of access path 1. */
1561 same_type_for_tbaa (TREE_TYPE (dbase2),
1562 TREE_TYPE (base2))
1563 == 1);
1564
1565 return true;
1566 }
1567
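/* E.g. (illustrative): with `double d; int *p;', an access *p cannot
   alias d under strict aliasing: the alias sets of int and double do
   not conflict, so the function above already returns false at the
   alias_sets_conflict_p check.  */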
1568 /* Return true if two indirect references based on *PTR1
1569 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1570 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
1571 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1572 in which case they are computed on-demand. REF1 and REF2
1573 if non-NULL are the complete memory reference trees. */
1574
1575 static bool
1576 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1577 poly_int64 offset1, poly_int64 max_size1,
1578 alias_set_type ref1_alias_set,
1579 alias_set_type base1_alias_set,
1580 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1581 poly_int64 offset2, poly_int64 max_size2,
1582 alias_set_type ref2_alias_set,
1583 alias_set_type base2_alias_set, bool tbaa_p)
1584 {
1585 tree ptr1;
1586 tree ptr2;
1587 tree ptrtype1, ptrtype2;
1588
1589 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1590 || TREE_CODE (base1) == TARGET_MEM_REF)
1591 && (TREE_CODE (base2) == MEM_REF
1592 || TREE_CODE (base2) == TARGET_MEM_REF));
1593
1594 ptr1 = TREE_OPERAND (base1, 0);
1595 ptr2 = TREE_OPERAND (base2, 0);
1596
1597 /* If both bases are based on pointers they cannot alias if they may not
1598 point to the same memory object or if they point to the same object
1599 and the accesses do not overlap. */
1600 if ((!cfun || gimple_in_ssa_p (cfun))
1601 && operand_equal_p (ptr1, ptr2, 0)
1602 && (((TREE_CODE (base1) != TARGET_MEM_REF
1603 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1604 && (TREE_CODE (base2) != TARGET_MEM_REF
1605 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
1606 || (TREE_CODE (base1) == TARGET_MEM_REF
1607 && TREE_CODE (base2) == TARGET_MEM_REF
1608 && (TMR_STEP (base1) == TMR_STEP (base2)
1609 || (TMR_STEP (base1) && TMR_STEP (base2)
1610 && operand_equal_p (TMR_STEP (base1),
1611 TMR_STEP (base2), 0)))
1612 && (TMR_INDEX (base1) == TMR_INDEX (base2)
1613 || (TMR_INDEX (base1) && TMR_INDEX (base2)
1614 && operand_equal_p (TMR_INDEX (base1),
1615 TMR_INDEX (base2), 0)))
1616 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
1617 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
1618 && operand_equal_p (TMR_INDEX2 (base1),
1619 TMR_INDEX2 (base2), 0))))))
1620 {
1621 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
1622 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
1623 return ranges_maybe_overlap_p (offset1 + moff1, max_size1,
1624 offset2 + moff2, max_size2);
1625 }
1626 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
1627 return false;
1628
1629 /* Disambiguations that rely on strict aliasing rules follow. */
1630 if (!flag_strict_aliasing || !tbaa_p)
1631 return true;
1632
1633 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1634 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
1635
1636 /* If the alias set for a pointer access is zero all bets are off. */
1637 if (base1_alias_set == 0
1638 || base2_alias_set == 0)
1639 return true;
1640
1641 /* Do type-based disambiguation. */
1642 if (base1_alias_set != base2_alias_set
1643 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1644 return false;
1645
1646 /* If either reference is view-converted, give up now. */
1647 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
1648 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
1649 return true;
1650
1651 /* If both references are through the same type, they do not alias
1652 if the accesses do not overlap. This does extra disambiguation
1653 for mixed/pointer accesses but requires strict aliasing. */
1654 if ((TREE_CODE (base1) != TARGET_MEM_REF
1655 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1656 && (TREE_CODE (base2) != TARGET_MEM_REF
1657 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
1658 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
1659 TREE_TYPE (ptrtype2)) == 1
1660 /* But avoid treating arrays as "objects", instead assume they
1661 can overlap by an exact multiple of their element size.
1662 See gcc.dg/torture/alias-2.c. */
1663 && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
1664 return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);
1665
1666 if (ref1 && ref2
1667 && nonoverlapping_component_refs_p (ref1, ref2))
1668 return false;
1669
1670 /* Do access-path based disambiguation. */
1671 if (ref1 && ref2
1672 && (handled_component_p (ref1) || handled_component_p (ref2)))
1673 return aliasing_component_refs_p (ref1,
1674 ref1_alias_set, base1_alias_set,
1675 offset1, max_size1,
1676 ref2,
1677 ref2_alias_set, base2_alias_set,
1678 offset2, max_size2, false);
1679
1680 return true;
1681 }
1682
1683 /* Return true if the two memory references REF1 and REF2 may alias. */
1684
1685 static bool
1686 refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1687 {
1688 tree base1, base2;
1689 poly_int64 offset1 = 0, offset2 = 0;
1690 poly_int64 max_size1 = -1, max_size2 = -1;
1691 bool var1_p, var2_p, ind1_p, ind2_p;
1692
1693 gcc_checking_assert ((!ref1->ref
1694 || TREE_CODE (ref1->ref) == SSA_NAME
1695 || DECL_P (ref1->ref)
1696 || TREE_CODE (ref1->ref) == STRING_CST
1697 || handled_component_p (ref1->ref)
1698 || TREE_CODE (ref1->ref) == MEM_REF
1699 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
1700 && (!ref2->ref
1701 || TREE_CODE (ref2->ref) == SSA_NAME
1702 || DECL_P (ref2->ref)
1703 || TREE_CODE (ref2->ref) == STRING_CST
1704 || handled_component_p (ref2->ref)
1705 || TREE_CODE (ref2->ref) == MEM_REF
1706 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
1707
1708 /* Decompose the references into their base objects and the access. */
1709 base1 = ao_ref_base (ref1);
1710 offset1 = ref1->offset;
1711 max_size1 = ref1->max_size;
1712 base2 = ao_ref_base (ref2);
1713 offset2 = ref2->offset;
1714 max_size2 = ref2->max_size;
1715
1716 /* We can end up with registers or constants as bases for example from
1717 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
1718 which is seen as a struct copy. */
1719 if (TREE_CODE (base1) == SSA_NAME
1720 || TREE_CODE (base1) == CONST_DECL
1721 || TREE_CODE (base1) == CONSTRUCTOR
1722 || TREE_CODE (base1) == ADDR_EXPR
1723 || CONSTANT_CLASS_P (base1)
1724 || TREE_CODE (base2) == SSA_NAME
1725 || TREE_CODE (base2) == CONST_DECL
1726 || TREE_CODE (base2) == CONSTRUCTOR
1727 || TREE_CODE (base2) == ADDR_EXPR
1728 || CONSTANT_CLASS_P (base2))
1729 return false;
1730
1731 /* We can end up referring to code via function and label decls.
1732 As we likely do not properly track code aliases, conservatively
1733 bail out. */
1734 if (TREE_CODE (base1) == FUNCTION_DECL
1735 || TREE_CODE (base1) == LABEL_DECL
1736 || TREE_CODE (base2) == FUNCTION_DECL
1737 || TREE_CODE (base2) == LABEL_DECL)
1738 return true;
1739
1740 /* Two volatile accesses always conflict. */
1741 if (ref1->volatile_p
1742 && ref2->volatile_p)
1743 return true;
1744
1745 /* Defer to simple offset-based disambiguation if we have
1746 references based on two decls. Do this before deferring to
1747 TBAA to handle must-alias cases in conformance with the
1748 GCC extension of allowing type-punning through unions. */
1749 var1_p = DECL_P (base1);
1750 var2_p = DECL_P (base2);
1751 if (var1_p && var2_p)
1752 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
1753 ref2->ref, base2, offset2, max_size2);
1754
1755 /* Handle restrict-based accesses.
1756 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
1757 here. */
1758 tree rbase1 = base1;
1759 tree rbase2 = base2;
1760 if (var1_p)
1761 {
1762 rbase1 = ref1->ref;
1763 if (rbase1)
1764 while (handled_component_p (rbase1))
1765 rbase1 = TREE_OPERAND (rbase1, 0);
1766 }
1767 if (var2_p)
1768 {
1769 rbase2 = ref2->ref;
1770 if (rbase2)
1771 while (handled_component_p (rbase2))
1772 rbase2 = TREE_OPERAND (rbase2, 0);
1773 }
1774 if (rbase1 && rbase2
1775 && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
1776 && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
1777 /* If the accesses are in the same restrict clique... */
1778 && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
1779 /* But based on different pointers they do not alias. */
1780 && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
1781 return false;
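/* This disambiguation typically stems from C99 restrict, e.g.
   (illustrative):

     void f (int *restrict p, int *restrict q) { *p = 1; *q = 2; }

   Both dereferences end up in the same dependence clique but with
   different dependence bases, so they are disambiguated here
   regardless of their types and offsets.  */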
1782
1783 ind1_p = (TREE_CODE (base1) == MEM_REF
1784 || TREE_CODE (base1) == TARGET_MEM_REF);
1785 ind2_p = (TREE_CODE (base2) == MEM_REF
1786 || TREE_CODE (base2) == TARGET_MEM_REF);
1787
1788 /* Canonicalize the pointer-vs-decl case. */
1789 if (ind1_p && var2_p)
1790 {
1791 std::swap (offset1, offset2);
1792 std::swap (max_size1, max_size2);
1793 std::swap (base1, base2);
1794 std::swap (ref1, ref2);
1795 var1_p = true;
1796 ind1_p = false;
1797 var2_p = false;
1798 ind2_p = true;
1799 }
1800
1801 /* First defer to TBAA if possible. */
1802 if (tbaa_p
1803 && flag_strict_aliasing
1804 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
1805 ao_ref_alias_set (ref2)))
1806 return false;
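/* E.g. (illustrative; assumes -fstrict-aliasing):

     void f (int *p, float *q) { *p = 1; *q = 2.0f; }

   The int and float accesses live in alias sets that do not
   conflict, so the two stores are disambiguated here without
   looking at the access paths at all.  */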
1807
1808 /* If the reference is based on a pointer that points to memory
1809 that may not be written to, then the other reference cannot possibly
1810 clobber it. */
1811 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
1812 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
1813 || (ind1_p
1814 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
1815 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
1816 return false;
1817
1818 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
1819 if (var1_p && ind2_p)
1820 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
1821 offset2, max_size2,
1822 ao_ref_alias_set (ref2),
1823 ao_ref_base_alias_set (ref2),
1824 ref1->ref, base1,
1825 offset1, max_size1,
1826 ao_ref_alias_set (ref1),
1827 ao_ref_base_alias_set (ref1),
1828 tbaa_p);
1829 else if (ind1_p && ind2_p)
1830 return indirect_refs_may_alias_p (ref1->ref, base1,
1831 offset1, max_size1,
1832 ao_ref_alias_set (ref1),
1833 ao_ref_base_alias_set (ref1),
1834 ref2->ref, base2,
1835 offset2, max_size2,
1836 ao_ref_alias_set (ref2),
1837 ao_ref_base_alias_set (ref2),
1838 tbaa_p);
1839
1840 gcc_unreachable ();
1841 }
1842
1843 /* Return true, if the two memory references REF1 and REF2 may alias
1844 and update statistics. */
1845
1846 bool
1847 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1848 {
1849 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
1850 if (res)
1851 ++alias_stats.refs_may_alias_p_may_alias;
1852 else
1853 ++alias_stats.refs_may_alias_p_no_alias;
1854 return res;
1855 }
1856
1857 static bool
1858 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
1859 {
1860 ao_ref r1;
1861 ao_ref_init (&r1, ref1);
1862 return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
1863 }
1864
1865 bool
1866 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
1867 {
1868 ao_ref r1, r2;
1869 ao_ref_init (&r1, ref1);
1870 ao_ref_init (&r2, ref2);
1871 return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
1872 }
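/* A minimal usage sketch (hypothetical caller, for illustration):

     if (!refs_may_alias_p (gimple_assign_lhs (s1),
			    gimple_assign_lhs (s2), true))
       reorder_stores (s1, s2);

   where reorder_stores stands in for whatever transform the caller
   wants to justify.  Passing false for TBAA_P disables type-based
   disambiguation, as the dependence helpers below do.  */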
1873
1874 /* Returns true if there is an anti-dependence for the STORE that
1875 executes after the LOAD. */
1876
1877 bool
1878 refs_anti_dependent_p (tree load, tree store)
1879 {
1880 ao_ref r1, r2;
1881 ao_ref_init (&r1, load);
1882 ao_ref_init (&r2, store);
1883 return refs_may_alias_p_1 (&r1, &r2, false);
1884 }
1885
1886 /* Returns true if there is an output dependence for the stores
1887 STORE1 and STORE2. */
1888
1889 bool
1890 refs_output_dependent_p (tree store1, tree store2)
1891 {
1892 ao_ref r1, r2;
1893 ao_ref_init (&r1, store1);
1894 ao_ref_init (&r2, store2);
1895 return refs_may_alias_p_1 (&r1, &r2, false);
1896 }
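/* For example (illustrative): if a load ... = *p executes before a
   store *q = ..., the store is anti-dependent on the load whenever
   the two references may alias.  Note that both dependence helpers
   above pass false for TBAA_P, i.e. they deliberately avoid
   type-based disambiguation for dependence queries.  */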
1897
1898 /* If the call CALL may use the memory reference REF return true,
1899 otherwise return false. */
1900
1901 static bool
1902 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
1903 {
1904 tree base, callee;
1905 unsigned i;
1906 int flags = gimple_call_flags (call);
1907
1908 /* Const functions without a static chain do not implicitly use memory. */
1909 if (!gimple_call_chain (call)
1910 && (flags & (ECF_CONST|ECF_NOVOPS)))
1911 goto process_args;
1912
1913 base = ao_ref_base (ref);
1914 if (!base)
1915 return true;
1916
1917 /* A call that has side-effects might involve volatile
1918 accesses and thus conflicts with all other volatile accesses. */
1919 if (ref->volatile_p)
1920 return true;
1921
1922 /* If the reference is based on a decl that is not aliased the call
1923 cannot possibly use it. */
1924 if (DECL_P (base)
1925 && !may_be_aliased (base)
1926 /* But local statics can be used through recursion. */
1927 && !is_global_var (base))
1928 goto process_args;
1929
1930 callee = gimple_call_fndecl (call);
1931
1932 /* Explicitly handle those builtin functions that do not act as
1933 escape points. See tree-ssa-structalias.c:find_func_aliases
1934 for the list of builtins we might need to handle here. */
1935 if (callee != NULL_TREE
1936 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1937 switch (DECL_FUNCTION_CODE (callee))
1938 {
1939 /* All the following functions read memory pointed to by
1940 their second argument. strcat/strncat additionally
1941 reads memory pointed to by the first argument. */
1942 case BUILT_IN_STRCAT:
1943 case BUILT_IN_STRNCAT:
1944 {
1945 ao_ref dref;
1946 ao_ref_init_from_ptr_and_size (&dref,
1947 gimple_call_arg (call, 0),
1948 NULL_TREE);
1949 if (refs_may_alias_p_1 (&dref, ref, false))
1950 return true;
1951 }
1952 /* FALLTHRU */
1953 case BUILT_IN_STRCPY:
1954 case BUILT_IN_STRNCPY:
1955 case BUILT_IN_MEMCPY:
1956 case BUILT_IN_MEMMOVE:
1957 case BUILT_IN_MEMPCPY:
1958 case BUILT_IN_STPCPY:
1959 case BUILT_IN_STPNCPY:
1960 case BUILT_IN_TM_MEMCPY:
1961 case BUILT_IN_TM_MEMMOVE:
1962 {
1963 ao_ref dref;
1964 tree size = NULL_TREE;
1965 if (gimple_call_num_args (call) == 3)
1966 size = gimple_call_arg (call, 2);
1967 ao_ref_init_from_ptr_and_size (&dref,
1968 gimple_call_arg (call, 1),
1969 size);
1970 return refs_may_alias_p_1 (&dref, ref, false);
1971 }
1972 case BUILT_IN_STRCAT_CHK:
1973 case BUILT_IN_STRNCAT_CHK:
1974 {
1975 ao_ref dref;
1976 ao_ref_init_from_ptr_and_size (&dref,
1977 gimple_call_arg (call, 0),
1978 NULL_TREE);
1979 if (refs_may_alias_p_1 (&dref, ref, false))
1980 return true;
1981 }
1982 /* FALLTHRU */
1983 case BUILT_IN_STRCPY_CHK:
1984 case BUILT_IN_STRNCPY_CHK:
1985 case BUILT_IN_MEMCPY_CHK:
1986 case BUILT_IN_MEMMOVE_CHK:
1987 case BUILT_IN_MEMPCPY_CHK:
1988 case BUILT_IN_STPCPY_CHK:
1989 case BUILT_IN_STPNCPY_CHK:
1990 {
1991 ao_ref dref;
1992 tree size = NULL_TREE;
1993 if (gimple_call_num_args (call) == 4)
1994 size = gimple_call_arg (call, 2);
1995 ao_ref_init_from_ptr_and_size (&dref,
1996 gimple_call_arg (call, 1),
1997 size);
1998 return refs_may_alias_p_1 (&dref, ref, false);
1999 }
2000 case BUILT_IN_BCOPY:
2001 {
2002 ao_ref dref;
2003 tree size = gimple_call_arg (call, 2);
2004 ao_ref_init_from_ptr_and_size (&dref,
2005 gimple_call_arg (call, 0),
2006 size);
2007 return refs_may_alias_p_1 (&dref, ref, false);
2008 }
2009
2010 /* The following functions read memory pointed to by their
2011 first argument. */
2012 CASE_BUILT_IN_TM_LOAD (1):
2013 CASE_BUILT_IN_TM_LOAD (2):
2014 CASE_BUILT_IN_TM_LOAD (4):
2015 CASE_BUILT_IN_TM_LOAD (8):
2016 CASE_BUILT_IN_TM_LOAD (FLOAT):
2017 CASE_BUILT_IN_TM_LOAD (DOUBLE):
2018 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
2019 CASE_BUILT_IN_TM_LOAD (M64):
2020 CASE_BUILT_IN_TM_LOAD (M128):
2021 CASE_BUILT_IN_TM_LOAD (M256):
2022 case BUILT_IN_TM_LOG:
2023 case BUILT_IN_TM_LOG_1:
2024 case BUILT_IN_TM_LOG_2:
2025 case BUILT_IN_TM_LOG_4:
2026 case BUILT_IN_TM_LOG_8:
2027 case BUILT_IN_TM_LOG_FLOAT:
2028 case BUILT_IN_TM_LOG_DOUBLE:
2029 case BUILT_IN_TM_LOG_LDOUBLE:
2030 case BUILT_IN_TM_LOG_M64:
2031 case BUILT_IN_TM_LOG_M128:
2032 case BUILT_IN_TM_LOG_M256:
2033 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
2034
2035 /* These read memory pointed to by the first argument. */
2036 case BUILT_IN_STRDUP:
2037 case BUILT_IN_STRNDUP:
2038 case BUILT_IN_REALLOC:
2039 {
2040 ao_ref dref;
2041 tree size = NULL_TREE;
2042 if (gimple_call_num_args (call) == 2)
2043 size = gimple_call_arg (call, 1);
2044 ao_ref_init_from_ptr_and_size (&dref,
2045 gimple_call_arg (call, 0),
2046 size);
2047 return refs_may_alias_p_1 (&dref, ref, false);
2048 }
2049 /* These read memory pointed to by the first argument. */
2050 case BUILT_IN_INDEX:
2051 case BUILT_IN_STRCHR:
2052 case BUILT_IN_STRRCHR:
2053 {
2054 ao_ref dref;
2055 ao_ref_init_from_ptr_and_size (&dref,
2056 gimple_call_arg (call, 0),
2057 NULL_TREE);
2058 return refs_may_alias_p_1 (&dref, ref, false);
2059 }
2060 /* These read memory pointed to by the first argument with size
2061 in the third argument. */
2062 case BUILT_IN_MEMCHR:
2063 {
2064 ao_ref dref;
2065 ao_ref_init_from_ptr_and_size (&dref,
2066 gimple_call_arg (call, 0),
2067 gimple_call_arg (call, 2));
2068 return refs_may_alias_p_1 (&dref, ref, false);
2069 }
2070 /* These read memory pointed to by the first and second arguments. */
2071 case BUILT_IN_STRSTR:
2072 case BUILT_IN_STRPBRK:
2073 {
2074 ao_ref dref;
2075 ao_ref_init_from_ptr_and_size (&dref,
2076 gimple_call_arg (call, 0),
2077 NULL_TREE);
2078 if (refs_may_alias_p_1 (&dref, ref, false))
2079 return true;
2080 ao_ref_init_from_ptr_and_size (&dref,
2081 gimple_call_arg (call, 1),
2082 NULL_TREE);
2083 return refs_may_alias_p_1 (&dref, ref, false);
2084 }
2085
2086 /* The following builtins do not read from memory. */
2087 case BUILT_IN_FREE:
2088 case BUILT_IN_MALLOC:
2089 case BUILT_IN_POSIX_MEMALIGN:
2090 case BUILT_IN_ALIGNED_ALLOC:
2091 case BUILT_IN_CALLOC:
2092 CASE_BUILT_IN_ALLOCA:
2093 case BUILT_IN_STACK_SAVE:
2094 case BUILT_IN_STACK_RESTORE:
2095 case BUILT_IN_MEMSET:
2096 case BUILT_IN_TM_MEMSET:
2097 case BUILT_IN_MEMSET_CHK:
2098 case BUILT_IN_FREXP:
2099 case BUILT_IN_FREXPF:
2100 case BUILT_IN_FREXPL:
2101 case BUILT_IN_GAMMA_R:
2102 case BUILT_IN_GAMMAF_R:
2103 case BUILT_IN_GAMMAL_R:
2104 case BUILT_IN_LGAMMA_R:
2105 case BUILT_IN_LGAMMAF_R:
2106 case BUILT_IN_LGAMMAL_R:
2107 case BUILT_IN_MODF:
2108 case BUILT_IN_MODFF:
2109 case BUILT_IN_MODFL:
2110 case BUILT_IN_REMQUO:
2111 case BUILT_IN_REMQUOF:
2112 case BUILT_IN_REMQUOL:
2113 case BUILT_IN_SINCOS:
2114 case BUILT_IN_SINCOSF:
2115 case BUILT_IN_SINCOSL:
2116 case BUILT_IN_ASSUME_ALIGNED:
2117 case BUILT_IN_VA_END:
2118 return false;
2119 /* __sync_* builtins and some OpenMP builtins act as threading
2120 barriers. */
2121 #undef DEF_SYNC_BUILTIN
2122 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2123 #include "sync-builtins.def"
2124 #undef DEF_SYNC_BUILTIN
2125 case BUILT_IN_GOMP_ATOMIC_START:
2126 case BUILT_IN_GOMP_ATOMIC_END:
2127 case BUILT_IN_GOMP_BARRIER:
2128 case BUILT_IN_GOMP_BARRIER_CANCEL:
2129 case BUILT_IN_GOMP_TASKWAIT:
2130 case BUILT_IN_GOMP_TASKGROUP_END:
2131 case BUILT_IN_GOMP_CRITICAL_START:
2132 case BUILT_IN_GOMP_CRITICAL_END:
2133 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2134 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2135 case BUILT_IN_GOMP_LOOP_END:
2136 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2137 case BUILT_IN_GOMP_ORDERED_START:
2138 case BUILT_IN_GOMP_ORDERED_END:
2139 case BUILT_IN_GOMP_SECTIONS_END:
2140 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2141 case BUILT_IN_GOMP_SINGLE_COPY_START:
2142 case BUILT_IN_GOMP_SINGLE_COPY_END:
2143 return true;
2144
2145 default:
2146 /* Fallthru to general call handling. */;
2147 }
2148
2149 /* Check if base is a global static variable that is not read
2150 by the function. */
2151 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2152 {
2153 struct cgraph_node *node = cgraph_node::get (callee);
2154 bitmap not_read;
2155
2156 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2157 node yet. We should enforce that there are nodes for all decls in the
2158 IL and remove this check instead. */
2159 if (node
2160 && (not_read = ipa_reference_get_not_read_global (node))
2161 && bitmap_bit_p (not_read, ipa_reference_var_uid (base)))
2162 goto process_args;
2163 }
2164
2165 /* Check if the base variable is call-used. */
2166 if (DECL_P (base))
2167 {
2168 if (pt_solution_includes (gimple_call_use_set (call), base))
2169 return true;
2170 }
2171 else if ((TREE_CODE (base) == MEM_REF
2172 || TREE_CODE (base) == TARGET_MEM_REF)
2173 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2174 {
2175 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2176 if (!pi)
2177 return true;
2178
2179 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
2180 return true;
2181 }
2182 else
2183 return true;
2184
2185 /* Inspect call arguments for passed-by-value aliases. */
2186 process_args:
2187 for (i = 0; i < gimple_call_num_args (call); ++i)
2188 {
2189 tree op = gimple_call_arg (call, i);
2190 int flags = gimple_call_arg_flags (call, i);
2191
2192 if (flags & EAF_UNUSED)
2193 continue;
2194
2195 if (TREE_CODE (op) == WITH_SIZE_EXPR)
2196 op = TREE_OPERAND (op, 0);
2197
2198 if (TREE_CODE (op) != SSA_NAME
2199 && !is_gimple_min_invariant (op))
2200 {
2201 ao_ref r;
2202 ao_ref_init (&r, op);
2203 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
2204 return true;
2205 }
2206 }
2207
2208 return false;
2209 }
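/* Note the asymmetry implemented above: for, say (illustrative),

     memcpy (dst, src, n);

   only the n bytes at src count as used here; the write to dst is
   the business of call_may_clobber_ref_p_1 further below.  */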
2210
2211 static bool
2212 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2213 {
2214 bool res;
2215 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2216 if (res)
2217 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2218 else
2219 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2220 return res;
2221 }
2222
2223
2224 /* If the statement STMT may use the memory reference REF return
2225 true, otherwise return false. */
2226
2227 bool
2228 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2229 {
2230 if (is_gimple_assign (stmt))
2231 {
2232 tree rhs;
2233
2234 /* Only "single" assignments (one RHS operand) can reference memory. */
2235 if (!gimple_assign_single_p (stmt))
2236 return false;
2237
2238 rhs = gimple_assign_rhs1 (stmt);
2239 if (is_gimple_reg (rhs)
2240 || is_gimple_min_invariant (rhs)
2241 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2242 return false;
2243
2244 return refs_may_alias_p (rhs, ref, tbaa_p);
2245 }
2246 else if (is_gimple_call (stmt))
2247 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2248 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2249 {
2250 tree retval = gimple_return_retval (return_stmt);
2251 if (retval
2252 && TREE_CODE (retval) != SSA_NAME
2253 && !is_gimple_min_invariant (retval)
2254 && refs_may_alias_p (retval, ref, tbaa_p))
2255 return true;
2256 /* If ref escapes the function then the return acts as a use. */
2257 tree base = ao_ref_base (ref);
2258 if (!base)
2259 ;
2260 else if (DECL_P (base))
2261 return is_global_var (base);
2262 else if (TREE_CODE (base) == MEM_REF
2263 || TREE_CODE (base) == TARGET_MEM_REF)
2264 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
2265 return false;
2266 }
2267
2268 return true;
2269 }
2270
2271 bool
2272 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
2273 {
2274 ao_ref r;
2275 ao_ref_init (&r, ref);
2276 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
2277 }
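/* Usage sketch (hypothetical): a dead-store-elimination style test
   whether USE_STMT can observe memory written by an earlier store
   to LHS:

     if (!ref_maybe_used_by_stmt_p (use_stmt, lhs, true))
       note_store_unread (lhs);

   where note_store_unread is a made-up caller action.  */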
2278
2279 /* If the call in statement CALL may clobber the memory reference REF
2280 return true, otherwise return false. */
2281
2282 bool
2283 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
2284 {
2285 tree base;
2286 tree callee;
2287
2288 /* If the call is pure or const it cannot clobber anything. */
2289 if (gimple_call_flags (call)
2290 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
2291 return false;
2292 if (gimple_call_internal_p (call))
2293 switch (gimple_call_internal_fn (call))
2294 {
2295 /* Treat these internal calls like ECF_PURE for aliasing;
2296 they don't write to any memory the program should care about.
2297 They have important other side-effects, and read memory,
2298 so can't be ECF_NOVOPS. */
2299 case IFN_UBSAN_NULL:
2300 case IFN_UBSAN_BOUNDS:
2301 case IFN_UBSAN_VPTR:
2302 case IFN_UBSAN_OBJECT_SIZE:
2303 case IFN_UBSAN_PTR:
2304 case IFN_ASAN_CHECK:
2305 return false;
2306 default:
2307 break;
2308 }
2309
2310 base = ao_ref_base (ref);
2311 if (!base)
2312 return true;
2313
2314 if (TREE_CODE (base) == SSA_NAME
2315 || CONSTANT_CLASS_P (base))
2316 return false;
2317
2318 /* A call that has side-effects might involve volatile
2319 accesses and thus conflicts with all other volatile accesses. */
2320 if (ref->volatile_p)
2321 return true;
2322
2323 /* If the reference is based on a decl that is not aliased the call
2324 cannot possibly clobber it. */
2325 if (DECL_P (base)
2326 && !may_be_aliased (base)
2327 /* But local non-readonly statics can be modified through recursion
2328 or the call may implement a threading barrier, which we must
2329 treat as a may-def. */
2330 && (TREE_READONLY (base)
2331 || !is_global_var (base)))
2332 return false;
2333
2334 /* If the reference is based on a pointer that points to memory
2335 that may not be written to, then the call cannot possibly clobber it. */
2336 if ((TREE_CODE (base) == MEM_REF
2337 || TREE_CODE (base) == TARGET_MEM_REF)
2338 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
2339 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
2340 return false;
2341
2342 callee = gimple_call_fndecl (call);
2343
2344 /* Explicitly handle those builtin functions that do not act as
2345 escape points. See tree-ssa-structalias.c:find_func_aliases
2346 for the list of builtins we might need to handle here. */
2347 if (callee != NULL_TREE
2348 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2349 switch (DECL_FUNCTION_CODE (callee))
2350 {
2351 /* All the following functions clobber memory pointed to by
2352 their first argument. */
2353 case BUILT_IN_STRCPY:
2354 case BUILT_IN_STRNCPY:
2355 case BUILT_IN_MEMCPY:
2356 case BUILT_IN_MEMMOVE:
2357 case BUILT_IN_MEMPCPY:
2358 case BUILT_IN_STPCPY:
2359 case BUILT_IN_STPNCPY:
2360 case BUILT_IN_STRCAT:
2361 case BUILT_IN_STRNCAT:
2362 case BUILT_IN_MEMSET:
2363 case BUILT_IN_TM_MEMSET:
2364 CASE_BUILT_IN_TM_STORE (1):
2365 CASE_BUILT_IN_TM_STORE (2):
2366 CASE_BUILT_IN_TM_STORE (4):
2367 CASE_BUILT_IN_TM_STORE (8):
2368 CASE_BUILT_IN_TM_STORE (FLOAT):
2369 CASE_BUILT_IN_TM_STORE (DOUBLE):
2370 CASE_BUILT_IN_TM_STORE (LDOUBLE):
2371 CASE_BUILT_IN_TM_STORE (M64):
2372 CASE_BUILT_IN_TM_STORE (M128):
2373 CASE_BUILT_IN_TM_STORE (M256):
2374 case BUILT_IN_TM_MEMCPY:
2375 case BUILT_IN_TM_MEMMOVE:
2376 {
2377 ao_ref dref;
2378 tree size = NULL_TREE;
2379 /* Don't pass in size for strncat, as the maximum size
2380 is strlen (dest) + n + 1 instead of n; equivalently,
2381 n + 1 bytes at dest + strlen (dest), but strlen (dest)
2382 isn't known. */
2383 if (gimple_call_num_args (call) == 3
2384 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
2385 size = gimple_call_arg (call, 2);
2386 ao_ref_init_from_ptr_and_size (&dref,
2387 gimple_call_arg (call, 0),
2388 size);
2389 return refs_may_alias_p_1 (&dref, ref, false);
2390 }
2391 case BUILT_IN_STRCPY_CHK:
2392 case BUILT_IN_STRNCPY_CHK:
2393 case BUILT_IN_MEMCPY_CHK:
2394 case BUILT_IN_MEMMOVE_CHK:
2395 case BUILT_IN_MEMPCPY_CHK:
2396 case BUILT_IN_STPCPY_CHK:
2397 case BUILT_IN_STPNCPY_CHK:
2398 case BUILT_IN_STRCAT_CHK:
2399 case BUILT_IN_STRNCAT_CHK:
2400 case BUILT_IN_MEMSET_CHK:
2401 {
2402 ao_ref dref;
2403 tree size = NULL_TREE;
2404 /* Don't pass in size for __strncat_chk, as the maximum size
2405 is strlen (dest) + n + 1 instead of n; equivalently,
2406 n + 1 bytes at dest + strlen (dest), but strlen (dest)
2407 isn't known. */
2408 if (gimple_call_num_args (call) == 4
2409 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
2410 size = gimple_call_arg (call, 2);
2411 ao_ref_init_from_ptr_and_size (&dref,
2412 gimple_call_arg (call, 0),
2413 size);
2414 return refs_may_alias_p_1 (&dref, ref, false);
2415 }
2416 case BUILT_IN_BCOPY:
2417 {
2418 ao_ref dref;
2419 tree size = gimple_call_arg (call, 2);
2420 ao_ref_init_from_ptr_and_size (&dref,
2421 gimple_call_arg (call, 1),
2422 size);
2423 return refs_may_alias_p_1 (&dref, ref, false);
2424 }
2425 /* Allocating memory does not have any side-effects apart from
2426 being the definition point for the pointer. */
2427 case BUILT_IN_MALLOC:
2428 case BUILT_IN_ALIGNED_ALLOC:
2429 case BUILT_IN_CALLOC:
2430 case BUILT_IN_STRDUP:
2431 case BUILT_IN_STRNDUP:
2432 /* Unix98 specifies that errno is set on allocation failure. */
2433 if (flag_errno_math
2434 && targetm.ref_may_alias_errno (ref))
2435 return true;
2436 return false;
2437 case BUILT_IN_STACK_SAVE:
2438 CASE_BUILT_IN_ALLOCA:
2439 case BUILT_IN_ASSUME_ALIGNED:
2440 return false;
2441 /* But posix_memalign stores a pointer into the memory pointed to
2442 by its first argument. */
2443 case BUILT_IN_POSIX_MEMALIGN:
2444 {
2445 tree ptrptr = gimple_call_arg (call, 0);
2446 ao_ref dref;
2447 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2448 TYPE_SIZE_UNIT (ptr_type_node));
2449 return (refs_may_alias_p_1 (&dref, ref, false)
2450 || (flag_errno_math
2451 && targetm.ref_may_alias_errno (ref)));
2452 }
2453 /* Freeing memory kills the pointed-to memory. More importantly,
2454 the call has to serve as a barrier for moving loads and stores
2455 across it. */
2456 case BUILT_IN_FREE:
2457 case BUILT_IN_VA_END:
2458 {
2459 tree ptr = gimple_call_arg (call, 0);
2460 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2461 }
2462 /* Realloc serves both as allocation point and deallocation point. */
2463 case BUILT_IN_REALLOC:
2464 {
2465 tree ptr = gimple_call_arg (call, 0);
2466 /* Unix98 specifies that errno is set on allocation failure. */
2467 return ((flag_errno_math
2468 && targetm.ref_may_alias_errno (ref))
2469 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2470 }
2471 case BUILT_IN_GAMMA_R:
2472 case BUILT_IN_GAMMAF_R:
2473 case BUILT_IN_GAMMAL_R:
2474 case BUILT_IN_LGAMMA_R:
2475 case BUILT_IN_LGAMMAF_R:
2476 case BUILT_IN_LGAMMAL_R:
2477 {
2478 tree out = gimple_call_arg (call, 1);
2479 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2480 return true;
2481 if (flag_errno_math)
2482 break;
2483 return false;
2484 }
2485 case BUILT_IN_FREXP:
2486 case BUILT_IN_FREXPF:
2487 case BUILT_IN_FREXPL:
2488 case BUILT_IN_MODF:
2489 case BUILT_IN_MODFF:
2490 case BUILT_IN_MODFL:
2491 {
2492 tree out = gimple_call_arg (call, 1);
2493 return ptr_deref_may_alias_ref_p_1 (out, ref);
2494 }
2495 case BUILT_IN_REMQUO:
2496 case BUILT_IN_REMQUOF:
2497 case BUILT_IN_REMQUOL:
2498 {
2499 tree out = gimple_call_arg (call, 2);
2500 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2501 return true;
2502 if (flag_errno_math)
2503 break;
2504 return false;
2505 }
2506 case BUILT_IN_SINCOS:
2507 case BUILT_IN_SINCOSF:
2508 case BUILT_IN_SINCOSL:
2509 {
2510 tree sin = gimple_call_arg (call, 1);
2511 tree cos = gimple_call_arg (call, 2);
2512 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2513 || ptr_deref_may_alias_ref_p_1 (cos, ref));
2514 }
2515 /* __sync_* builtins and some OpenMP builtins act as threading
2516 barriers. */
2517 #undef DEF_SYNC_BUILTIN
2518 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2519 #include "sync-builtins.def"
2520 #undef DEF_SYNC_BUILTIN
2521 case BUILT_IN_GOMP_ATOMIC_START:
2522 case BUILT_IN_GOMP_ATOMIC_END:
2523 case BUILT_IN_GOMP_BARRIER:
2524 case BUILT_IN_GOMP_BARRIER_CANCEL:
2525 case BUILT_IN_GOMP_TASKWAIT:
2526 case BUILT_IN_GOMP_TASKGROUP_END:
2527 case BUILT_IN_GOMP_CRITICAL_START:
2528 case BUILT_IN_GOMP_CRITICAL_END:
2529 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2530 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2531 case BUILT_IN_GOMP_LOOP_END:
2532 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2533 case BUILT_IN_GOMP_ORDERED_START:
2534 case BUILT_IN_GOMP_ORDERED_END:
2535 case BUILT_IN_GOMP_SECTIONS_END:
2536 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2537 case BUILT_IN_GOMP_SINGLE_COPY_START:
2538 case BUILT_IN_GOMP_SINGLE_COPY_END:
2539 return true;
2540 default:
2541 /* Fallthru to general call handling. */;
2542 }
2543
2544 /* Check if base is a global static variable that is not written
2545 by the function. */
2546 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2547 {
2548 struct cgraph_node *node = cgraph_node::get (callee);
2549 bitmap not_written;
2550
2551 if (node
2552 && (not_written = ipa_reference_get_not_written_global (node))
2553 && bitmap_bit_p (not_written, ipa_reference_var_uid (base)))
2554 return false;
2555 }
2556
2557 /* Check if the base variable is call-clobbered. */
2558 if (DECL_P (base))
2559 return pt_solution_includes (gimple_call_clobber_set (call), base);
2560 else if ((TREE_CODE (base) == MEM_REF
2561 || TREE_CODE (base) == TARGET_MEM_REF)
2562 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2563 {
2564 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2565 if (!pi)
2566 return true;
2567
2568 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2569 }
2570
2571 return true;
2572 }
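/* For example (illustrative), for

     p = malloc (n);

   the only caller-visible memory the call may clobber is errno, so
   the BUILT_IN_MALLOC case above answers false unless REF may be
   errno and -fmath-errno is in effect.  */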
2573
2574 /* If the call in statement CALL may clobber the memory reference REF
2575 return true, otherwise return false. */
2576
2577 bool
2578 call_may_clobber_ref_p (gcall *call, tree ref)
2579 {
2580 bool res;
2581 ao_ref r;
2582 ao_ref_init (&r, ref);
2583 res = call_may_clobber_ref_p_1 (call, &r);
2584 if (res)
2585 ++alias_stats.call_may_clobber_ref_p_may_alias;
2586 else
2587 ++alias_stats.call_may_clobber_ref_p_no_alias;
2588 return res;
2589 }
2590
2591
2592 /* If the statement STMT may clobber the memory reference REF return true,
2593 otherwise return false. */
2594
2595 bool
2596 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
2597 {
2598 if (is_gimple_call (stmt))
2599 {
2600 tree lhs = gimple_call_lhs (stmt);
2601 if (lhs
2602 && TREE_CODE (lhs) != SSA_NAME)
2603 {
2604 ao_ref r;
2605 ao_ref_init (&r, lhs);
2606 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
2607 return true;
2608 }
2609
2610 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
2611 }
2612 else if (gimple_assign_single_p (stmt))
2613 {
2614 tree lhs = gimple_assign_lhs (stmt);
2615 if (TREE_CODE (lhs) != SSA_NAME)
2616 {
2617 ao_ref r;
2618 ao_ref_init (&r, lhs);
2619 return refs_may_alias_p_1 (ref, &r, tbaa_p);
2620 }
2621 }
2622 else if (gimple_code (stmt) == GIMPLE_ASM)
2623 return true;
2624
2625 return false;
2626 }
2627
2628 bool
2629 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
2630 {
2631 ao_ref r;
2632 ao_ref_init (&r, ref);
2633 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
2634 }
2635
2636 /* Return true if store1 and store2, described by the corresponding
2637 tuples <BASE, OFFSET, SIZE, MAX_SIZE>, have the same size and store
2638 to the same address. */
2639
2640 static bool
2641 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
2642 poly_int64 max_size1,
2643 tree base2, poly_int64 offset2, poly_int64 size2,
2644 poly_int64 max_size2)
2645 {
2646 /* Offsets need to be 0. */
2647 if (maybe_ne (offset1, 0)
2648 || maybe_ne (offset2, 0))
2649 return false;
2650
2651 bool base1_obj_p = SSA_VAR_P (base1);
2652 bool base2_obj_p = SSA_VAR_P (base2);
2653
2654 /* We need exactly one object. */
2655 if (base1_obj_p == base2_obj_p)
2656 return false;
2657 tree obj = base1_obj_p ? base1 : base2;
2658
2659 /* And exactly one MEM_REF. */
2660 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
2661 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
2662 if (base1_memref_p == base2_memref_p)
2663 return false;
2664 tree memref = base1_memref_p ? base1 : base2;
2665
2666 /* Sizes need to be valid. */
2667 if (!known_size_p (max_size1)
2668 || !known_size_p (max_size2)
2669 || !known_size_p (size1)
2670 || !known_size_p (size2))
2671 return false;
2672
2673 /* Max_size needs to match size. */
2674 if (maybe_ne (max_size1, size1)
2675 || maybe_ne (max_size2, size2))
2676 return false;
2677
2678 /* Sizes need to match. */
2679 if (maybe_ne (size1, size2))
2680 return false;
2681
2682
2683 /* Check that memref is a store through a pointer with singleton points-to info. */
2684 if (!integer_zerop (TREE_OPERAND (memref, 1)))
2685 return false;
2686 tree ptr = TREE_OPERAND (memref, 0);
2687 if (TREE_CODE (ptr) != SSA_NAME)
2688 return false;
2689 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2690 unsigned int pt_uid;
2691 if (pi == NULL
2692 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
2693 return false;
2694
2695 /* Be conservative with non-call exceptions when the address might
2696 be NULL. */
2697 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
2698 return false;
2699
2700 /* Check that ptr points relative to obj. */
2701 unsigned int obj_uid = DECL_PT_UID (obj);
2702 if (obj_uid != pt_uid)
2703 return false;
2704
2705 /* Check that the object size is the same as the store size. That ensures
2706 that ptr points to the start of obj. */
2707 return (DECL_SIZE (obj)
2708 && poly_int_tree_p (DECL_SIZE (obj))
2709 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
2710 }
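/* Schematically, the pattern recognized above is (illustrative):

     struct S s;
     p_1 = &s;
     *p_1 = ...;
     s = ...;

   where s plays the role of OBJ with DECL_SIZE equal to the store
   size, p_1 has singleton points-to information {s}, and both
   stores are at offset zero, so they cover exactly the same
   bytes.  */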
2711
2712 /* If STMT kills the memory reference REF return true, otherwise
2713 return false. */
2714
2715 bool
2716 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
2717 {
2718 if (!ao_ref_base (ref))
2719 return false;
2720
2721 if (gimple_has_lhs (stmt)
2722 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2723 /* The assignment is not necessarily carried out if it can throw
2724 and we can catch it in the current function where we could inspect
2725 the previous value.
2726 ??? We only need to care about the RHS throwing. For aggregate
2727 assignments or similar calls and non-call exceptions the LHS
2728 might throw as well. */
2729 && !stmt_can_throw_internal (cfun, stmt))
2730 {
2731 tree lhs = gimple_get_lhs (stmt);
2732 /* If LHS is literally a base of the access we are done. */
2733 if (ref->ref)
2734 {
2735 tree base = ref->ref;
2736 tree innermost_dropped_array_ref = NULL_TREE;
2737 if (handled_component_p (base))
2738 {
2739 tree saved_lhs0 = NULL_TREE;
2740 if (handled_component_p (lhs))
2741 {
2742 saved_lhs0 = TREE_OPERAND (lhs, 0);
2743 TREE_OPERAND (lhs, 0) = integer_zero_node;
2744 }
2745 do
2746 {
2747 /* Just compare the outermost handled component; if
2748 they are equal we have found a possible common
2749 base. */
2750 tree saved_base0 = TREE_OPERAND (base, 0);
2751 TREE_OPERAND (base, 0) = integer_zero_node;
2752 bool res = operand_equal_p (lhs, base, 0);
2753 TREE_OPERAND (base, 0) = saved_base0;
2754 if (res)
2755 break;
2756 /* Remember if we drop an array-ref that we need to
2757 double-check not being at struct end. */
2758 if (TREE_CODE (base) == ARRAY_REF
2759 || TREE_CODE (base) == ARRAY_RANGE_REF)
2760 innermost_dropped_array_ref = base;
2761 /* Otherwise drop handled components of the access. */
2762 base = saved_base0;
2763 }
2764 while (handled_component_p (base));
2765 if (saved_lhs0)
2766 TREE_OPERAND (lhs, 0) = saved_lhs0;
2767 }
2768 /* Finally check if the lhs has the same address and size as the
2769 base candidate of the access. Watch out if we have dropped
2770 an array-ref that was at struct end, this means ref->ref may
2771 be outside of the TYPE_SIZE of its base. */
2772 if ((! innermost_dropped_array_ref
2773 || ! array_at_struct_end_p (innermost_dropped_array_ref))
2774 && (lhs == base
2775 || (((TYPE_SIZE (TREE_TYPE (lhs))
2776 == TYPE_SIZE (TREE_TYPE (base)))
2777 || (TYPE_SIZE (TREE_TYPE (lhs))
2778 && TYPE_SIZE (TREE_TYPE (base))
2779 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
2780 TYPE_SIZE (TREE_TYPE (base)),
2781 0)))
2782 && operand_equal_p (lhs, base,
2783 OEP_ADDRESS_OF
2784 | OEP_MATCH_SIDE_EFFECTS))))
2785 return true;
2786 }
2787
2788 /* Now look for non-literal equal bases, restricted to
2789 constant offsets and sizes. */
2790 /* For a must-alias check we need to be able to constrain
2791 the access properly. */
2792 if (!ref->max_size_known_p ())
2793 return false;
2794 poly_int64 size, offset, max_size, ref_offset = ref->offset;
2795 bool reverse;
2796 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
2797 &reverse);
2798 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2799 so base == ref->base does not always hold. */
2800 if (base != ref->base)
2801 {
2802 /* Try using points-to info. */
2803 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
2804 ref->offset, ref->size, ref->max_size))
2805 return true;
2806
2807 /* If both base and ref->base are MEM_REFs, only compare the
2808 first operand, and if the second operands aren't equal constants,
2809 try to add the offsets into offset and ref_offset. */
2810 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2811 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2812 {
2813 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2814 TREE_OPERAND (ref->base, 1)))
2815 {
2816 poly_offset_int off1 = mem_ref_offset (base);
2817 off1 <<= LOG2_BITS_PER_UNIT;
2818 off1 += offset;
2819 poly_offset_int off2 = mem_ref_offset (ref->base);
2820 off2 <<= LOG2_BITS_PER_UNIT;
2821 off2 += ref_offset;
2822 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
2823 size = -1;
2824 }
2825 }
2826 else
2827 size = -1;
2828 }
2829 /* For a must-alias check we need to be able to constrain
2830 the access properly. */
2831 if (known_eq (size, max_size)
2832 && known_subrange_p (ref_offset, ref->max_size, offset, size))
2833 return true;
2834 }
2835
2836 if (is_gimple_call (stmt))
2837 {
2838 tree callee = gimple_call_fndecl (stmt);
2839 if (callee != NULL_TREE
2840 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2841 switch (DECL_FUNCTION_CODE (callee))
2842 {
2843 case BUILT_IN_FREE:
2844 {
2845 tree ptr = gimple_call_arg (stmt, 0);
2846 tree base = ao_ref_base (ref);
2847 if (base && TREE_CODE (base) == MEM_REF
2848 && TREE_OPERAND (base, 0) == ptr)
2849 return true;
2850 break;
2851 }
2852
2853 case BUILT_IN_MEMCPY:
2854 case BUILT_IN_MEMPCPY:
2855 case BUILT_IN_MEMMOVE:
2856 case BUILT_IN_MEMSET:
2857 case BUILT_IN_MEMCPY_CHK:
2858 case BUILT_IN_MEMPCPY_CHK:
2859 case BUILT_IN_MEMMOVE_CHK:
2860 case BUILT_IN_MEMSET_CHK:
2861 case BUILT_IN_STRNCPY:
2862 case BUILT_IN_STPNCPY:
2863 {
2864 /* For a must-alias check we need to be able to constrain
2865 the access properly. */
2866 if (!ref->max_size_known_p ())
2867 return false;
2868 tree dest = gimple_call_arg (stmt, 0);
2869 tree len = gimple_call_arg (stmt, 2);
2870 if (!poly_int_tree_p (len))
2871 return false;
2872 tree rbase = ref->base;
2873 poly_offset_int roffset = ref->offset;
2874 ao_ref dref;
2875 ao_ref_init_from_ptr_and_size (&dref, dest, len);
2876 tree base = ao_ref_base (&dref);
2877 poly_offset_int offset = dref.offset;
2878 if (!base || !known_size_p (dref.size))
2879 return false;
2880 if (TREE_CODE (base) == MEM_REF)
2881 {
2882 if (TREE_CODE (rbase) != MEM_REF)
2883 return false;
2884 // Compare pointers.
2885 offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
2886 roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
2887 base = TREE_OPERAND (base, 0);
2888 rbase = TREE_OPERAND (rbase, 0);
2889 }
2890 if (base == rbase
2891 && known_subrange_p (roffset, ref->max_size, offset,
2892 wi::to_poly_offset (len)
2893 << LOG2_BITS_PER_UNIT))
2894 return true;
2895 break;
2896 }
2897
2898 case BUILT_IN_VA_END:
2899 {
2900 tree ptr = gimple_call_arg (stmt, 0);
2901 if (TREE_CODE (ptr) == ADDR_EXPR)
2902 {
2903 tree base = ao_ref_base (ref);
2904 if (TREE_OPERAND (ptr, 0) == base)
2905 return true;
2906 }
2907 break;
2908 }
2909
2910 default:;
2911 }
2912 }
2913 return false;
2914 }
2915
2916 bool
2917 stmt_kills_ref_p (gimple *stmt, tree ref)
2918 {
2919 ao_ref r;
2920 ao_ref_init (&r, ref);
2921 return stmt_kills_ref_p (stmt, &r);
2922 }
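/* For example (illustrative), a call

     memset (&a.x, 0, sizeof (a.x));

   kills an earlier store to a.x because the written range covers
   the reference entirely, which dead store elimination can exploit
   through this predicate.  */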
2923
2924
2925 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2926 TARGET or a statement clobbering the memory reference REF, in which
2927 case false is returned. The walk starts with VUSE, one argument of PHI. */
2928
2929 static bool
2930 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
2931 ao_ref *ref, tree vuse, unsigned int &limit, bitmap *visited,
2932 bool abort_on_visited,
2933 void *(*translate)(ao_ref *, tree, void *, bool *),
2934 void *data)
2935 {
2936 basic_block bb = gimple_bb (phi);
2937
2938 if (!*visited)
2939 *visited = BITMAP_ALLOC (NULL);
2940
2941 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2942
2943 /* Walk until we hit the target. */
2944 while (vuse != target)
2945 {
2946 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2947 /* If we are searching for the target VUSE by walking up to
2948 TARGET_BB dominating the original PHI we are finished once
2949 we reach a default def or a definition in a block dominating
2950 that block. Update TARGET and return. */
2951 if (!target
2952 && (gimple_nop_p (def_stmt)
2953 || dominated_by_p (CDI_DOMINATORS,
2954 target_bb, gimple_bb (def_stmt))))
2955 {
2956 target = vuse;
2957 return true;
2958 }
2959
2960 /* Recurse for PHI nodes. */
2961 if (gimple_code (def_stmt) == GIMPLE_PHI)
2962 {
2963 /* An already visited PHI node ends the walk successfully. */
2964 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2965 return !abort_on_visited;
2966 vuse = get_continuation_for_phi (def_stmt, ref, limit,
2967 visited, abort_on_visited,
2968 translate, data);
2969 if (!vuse)
2970 return false;
2971 continue;
2972 }
2973 else if (gimple_nop_p (def_stmt))
2974 return false;
2975 else
2976 {
2977 /* A clobbering statement or the end of the IL ends the walk unsuccessfully. */
2978 if ((int)limit <= 0)
2979 return false;
2980 --limit;
2981 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2982 {
2983 bool disambiguate_only = true;
2984 if (translate
2985 && (*translate) (ref, vuse, data, &disambiguate_only) == NULL)
2986 ;
2987 else
2988 return false;
2989 }
2990 }
2991 /* If we reach a new basic-block, see if we already skipped it
2992 in a previous walk that ended successfully. */
2993 if (gimple_bb (def_stmt) != bb)
2994 {
2995 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2996 return !abort_on_visited;
2997 bb = gimple_bb (def_stmt);
2998 }
2999 vuse = gimple_vuse (def_stmt);
3000 }
3001 return true;
3002 }
3003
3004
3005 /* Starting from a PHI node for the virtual operand of the memory reference
3006 REF, find a continuation virtual operand that allows us to continue walking
3007 statements dominating PHI, skipping only statements that cannot possibly
3008 clobber REF. Decrements LIMIT for each alias disambiguation done
3009 and aborts the walk, returning NULL_TREE if it reaches zero.
3010 Returns NULL_TREE if no suitable virtual operand can be found. */
3011
3012 tree
3013 get_continuation_for_phi (gimple *phi, ao_ref *ref,
3014 unsigned int &limit, bitmap *visited,
3015 bool abort_on_visited,
3016 void *(*translate)(ao_ref *, tree, void *, bool *),
3017 void *data)
3018 {
3019 unsigned nargs = gimple_phi_num_args (phi);
3020
3021 /* A single-argument PHI we can simply look through. */
3022 if (nargs == 1)
3023 return PHI_ARG_DEF (phi, 0);
3024
3025 /* For two or more arguments try to pairwise skip non-aliasing code
3026 until we hit the phi argument definition that dominates the other one. */
3027 basic_block phi_bb = gimple_bb (phi);
3028 tree arg0, arg1;
3029 unsigned i;
3030
3031 /* Find a candidate for the virtual operand whose definition
3032 dominates those of all others. */
3033 /* First see whether any of the args themselves satisfy this. */
3034 for (i = 0; i < nargs; ++i)
3035 {
3036 arg0 = PHI_ARG_DEF (phi, i);
3037 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
3038 break;
3039 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
3040 if (def_bb != phi_bb
3041 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
3042 break;
3043 arg0 = NULL_TREE;
3044 }
3045 /* If not, see whether we can reach such a candidate by walking defs
3046 until we hit the immediate dominator. maybe_skip_until will
3047 do that for us. */
3048 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
3049
3050 /* Then check against the (to be) found candidate. */
3051 for (i = 0; i < nargs; ++i)
3052 {
3053 arg1 = PHI_ARG_DEF (phi, i);
3054 if (arg1 == arg0)
3055 ;
3056 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, limit, visited,
3057 abort_on_visited,
3058 /* Do not translate when walking over
3059 backedges. */
3060 dominated_by_p
3061 (CDI_DOMINATORS,
3062 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3063 phi_bb)
3064 ? NULL : translate, data))
3065 return NULL_TREE;
3066 }
3067
3068 return arg0;
3069 }
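/* For a diamond (illustrative) where neither arm clobbers REF:

       bb1:  defines .MEM_1
      /    \
    bb2     bb3    each arm stores something that cannot clobber
      \    /       REF, defining .MEM_2 and .MEM_3 respectively
       bb4:  .MEM_4 = PHI <.MEM_2, .MEM_3>

   the walk skips both harmless definitions and returns .MEM_1 as
   the continuation virtual operand.  */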
3070
3071 /* Based on the memory reference REF and its virtual use VUSE call
3072 WALKER for each virtual use that is equivalent to VUSE, including VUSE
3073 itself. That is, for each virtual use whose defining statement
3074 does not clobber REF.
3075
3076 WALKER is called with REF, the current virtual use and DATA. If
3077 WALKER returns non-NULL the walk stops and its result is returned.
3078 At the end of a non-successful walk NULL is returned.
3079
3080 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
3081 use whose definition is a statement that may clobber REF, and DATA.
3082 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3083 If TRANSLATE returns non-NULL the walk stops and its result is returned.
3084 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3085 to adjust REF and *DATA to make that valid.
3086
3087 VALUEIZE if non-NULL is called with the next VUSE that is considered
3088 and its return value is substituted for that. This can be used to
3089 implement optimistic value-numbering for example. Note that the
3090 VUSE argument is assumed to be valueized already.
3091
3092 LIMIT specifies the number of alias queries we are allowed to do;
3093 the walk stops when it reaches zero and NULL is returned. LIMIT
3094 is decremented by the number of alias queries (plus adjustments
3095 done by the callbacks) upon return.
3096
3097 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
3098
3099 void *
3100 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
3101 void *(*walker)(ao_ref *, tree, void *),
3102 void *(*translate)(ao_ref *, tree, void *, bool *),
3103 tree (*valueize)(tree),
3104 unsigned &limit, void *data)
3105 {
3106 bitmap visited = NULL;
3107 void *res;
3108 bool translated = false;
3109
3110 timevar_push (TV_ALIAS_STMT_WALK);
3111
3112 do
3113 {
3114 gimple *def_stmt;
3115
3116 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3117 res = (*walker) (ref, vuse, data);
3118 /* Abort walk. */
3119 if (res == (void *)-1)
3120 {
3121 res = NULL;
3122 break;
3123 }
3124 /* Lookup succeeded. */
3125 else if (res != NULL)
3126 break;
3127
3128 if (valueize)
3129 {
3130 vuse = valueize (vuse);
3131 if (!vuse)
3132 {
3133 res = NULL;
3134 break;
3135 }
3136 }
3137 def_stmt = SSA_NAME_DEF_STMT (vuse);
3138 if (gimple_nop_p (def_stmt))
3139 break;
3140 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3141 vuse = get_continuation_for_phi (def_stmt, ref, limit,
3142 &visited, translated, translate, data);
3143 else
3144 {
3145 if ((int)limit <= 0)
3146 {
3147 res = NULL;
3148 break;
3149 }
3150 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
3151 {
3152 if (!translate)
3153 break;
3154 bool disambiguate_only = false;
3155 res = (*translate) (ref, vuse, data, &disambiguate_only);
3156 /* Failed lookup and translation. */
3157 if (res == (void *)-1)
3158 {
3159 res = NULL;
3160 break;
3161 }
3162 /* Lookup succeeded. */
3163 else if (res != NULL)
3164 break;
3165 /* Translation succeeded, continue walking. */
3166 translated = translated || !disambiguate_only;
3167 }
3168 vuse = gimple_vuse (def_stmt);
3169 }
3170 }
3171 while (vuse);
3172
3173 if (visited)
3174 BITMAP_FREE (visited);
3175
3176 timevar_pop (TV_ALIAS_STMT_WALK);
3177
3178 return res;
3179 }
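/* Usage sketch (hypothetical walker; r is an initialized ao_ref),
   in the style of a value-numbering lookup:

     static void *
     my_walker (ao_ref *ref, tree vuse, void *data)
     {
       return NULL;
     }

     unsigned limit = 1000;
     void *res = walk_non_aliased_vuses (&r, gimple_vuse (stmt),
					 my_walker, NULL, NULL,
					 limit, NULL);

   where my_walker returns NULL to keep walking, (void *)-1 to abort
   or any other pointer to stop with that result, and the limit of
   1000 is an arbitrary budget for the sketch.  */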
3180
3181
3182 /* Based on the memory reference REF call WALKER for each vdef whose
3183 defining statement may clobber REF, starting with VDEF. If REF
3184 is NULL_TREE, each defining statement is visited.
3185
3186 WALKER is called with REF, the current vdef and DATA. If WALKER
3187 returns true the walk is stopped, otherwise it continues.
3188
3189 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
3190 The pointer may be NULL, in which case we do not track this information.
3191
3192 At PHI nodes walk_aliased_vdefs forks into one walk for each
3193 PHI argument (but only one walk continues on merge points); the
3194 return value is true if any of the walks was successful.
3195
3196 The function returns the number of statements walked or -1 if
3197 LIMIT stmts were walked and the walk was aborted at this point.
3198 If LIMIT is zero the walk is not aborted. */
3199
3200 static int
3201 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
3202 bool (*walker)(ao_ref *, tree, void *), void *data,
3203 bitmap *visited, unsigned int cnt,
3204 bool *function_entry_reached, unsigned limit)
3205 {
3206 do
3207 {
3208 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
3209
3210 if (*visited
3211 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
3212 return cnt;
3213
3214 if (gimple_nop_p (def_stmt))
3215 {
3216 if (function_entry_reached)
3217 *function_entry_reached = true;
3218 return cnt;
3219 }
3220 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3221 {
3222 unsigned i;
3223 if (!*visited)
3224 *visited = BITMAP_ALLOC (NULL);
3225 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3226 {
3227 int res = walk_aliased_vdefs_1 (ref,
3228 gimple_phi_arg_def (def_stmt, i),
3229 walker, data, visited, cnt,
3230 function_entry_reached, limit);
3231 if (res == -1)
3232 return -1;
3233 cnt = res;
3234 }
3235 return cnt;
3236 }
3237
3238 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3239 cnt++;
3240 if (cnt == limit)
3241 return -1;
3242 if ((!ref
3243 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
3244 && (*walker) (ref, vdef, data))
3245 return cnt;
3246
3247 vdef = gimple_vuse (def_stmt);
3248 }
3249 while (1);
3250 }
3251
3252 int
3253 walk_aliased_vdefs (ao_ref *ref, tree vdef,
3254 bool (*walker)(ao_ref *, tree, void *), void *data,
3255 bitmap *visited,
3256 bool *function_entry_reached, unsigned int limit)
3257 {
3258 bitmap local_visited = NULL;
3259 int ret;
3260
3261 timevar_push (TV_ALIAS_STMT_WALK);
3262
3263 if (function_entry_reached)
3264 *function_entry_reached = false;
3265
3266 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
3267 visited ? visited : &local_visited, 0,
3268 function_entry_reached, limit);
3269 if (local_visited)
3270 BITMAP_FREE (local_visited);
3271
3272 timevar_pop (TV_ALIAS_STMT_WALK);
3273
3274 return ret;
3275 }
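/* Usage sketch (hypothetical walker; r is an initialized ao_ref):
   count the statements that may clobber r:

     static bool
     count_clobbers (ao_ref *ref, tree vdef, void *data)
     {
       ++*(unsigned *) data;
       return false;
     }

     unsigned n = 0;
     walk_aliased_vdefs (&r, gimple_vuse (stmt), count_clobbers, &n,
			 NULL, NULL, 100);

   where returning false from the walker continues the walk and the
   limit of 100 is an arbitrary budget for the sketch.  */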
3276