1 /* Alias analysis for trees.
2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "alias.h"
34 #include "fold-const.h"
35 #include "langhooks.h"
36 #include "dumpfile.h"
37 #include "tree-eh.h"
38 #include "tree-dfa.h"
39 #include "ipa-reference.h"
40 #include "varasm.h"
41
42 /* Broad overview of how alias analysis on gimple works:
43
44 Statements clobbering or using memory are linked through the
45 virtual operand factored use-def chain. The virtual operand
46 is unique per function, its symbol is accessible via gimple_vop (cfun).
47 Virtual operands are used for efficiently walking memory statements
48 in the gimple IL and are useful for things like value-numbering as
49 a generation count for memory references.
50
51 SSA_NAME pointers may have associated points-to information
52 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
53 points-to information is (re-)computed by the TODO_rebuild_alias
54 pass manager todo. Points-to information is also used for more
55 precise tracking of call-clobbered and call-used variables and
56 related disambiguations.
57
58 This file contains functions for disambiguating memory references,
59 the so-called alias oracle, and tools for walking the gimple IL.
60
61 The main alias-oracle entry-points are
62
63 bool stmt_may_clobber_ref_p (gimple *, tree)
64
65 This function queries if a statement may invalidate (parts of)
66 the memory designated by the reference tree argument.
67
68 bool ref_maybe_used_by_stmt_p (gimple *, tree)
69
70 This function queries if a statement may need (parts of) the
71 memory designated by the reference tree argument.
72
73 There are variants of these functions that only handle the call
74 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
75 Note that these do not disambiguate against a possible call lhs.
76
77 bool refs_may_alias_p (tree, tree)
78
79 This function tries to disambiguate two reference trees.
80
81 bool ptr_deref_may_alias_global_p (tree)
82
83 This function queries if dereferencing a pointer variable may
84 alias global memory.
85
86 More low-level disambiguators are available and documented in
87 this file. Low-level disambiguators dealing with points-to
88 information are in tree-ssa-structalias.c. */
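
/* As an illustrative sketch (hypothetical client code, not part of the
   original sources), a pass that wants to know whether a statement
   STMT can invalidate or read the memory designated by a tree
   reference REF could simply ask:

     if (stmt_may_clobber_ref_p (stmt, ref))
       ... STMT may write (parts of) *REF ...
     if (ref_maybe_used_by_stmt_p (stmt, ref))
       ... STMT may read (parts of) *REF ...

   All of these predicates err on the side of returning true when in
   doubt.  */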
89
90
91 /* Query statistics for the different low-level disambiguators.
92 A high-level query may trigger multiple of them. */
93
94 static struct {
95 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
96 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
97 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
98 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
99 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
100 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
101 unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
102 unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
103 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
104 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
105 unsigned HOST_WIDE_INT nonoverlapping_component_refs_of_decl_p_may_alias;
106 unsigned HOST_WIDE_INT nonoverlapping_component_refs_of_decl_p_no_alias;
107 } alias_stats;
108
109 void
110 dump_alias_stats (FILE *s)
111 {
112 fprintf (s, "\nAlias oracle query stats:\n");
113 fprintf (s, " refs_may_alias_p: "
114 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
115 HOST_WIDE_INT_PRINT_DEC" queries\n",
116 alias_stats.refs_may_alias_p_no_alias,
117 alias_stats.refs_may_alias_p_no_alias
118 + alias_stats.refs_may_alias_p_may_alias);
119 fprintf (s, " ref_maybe_used_by_call_p: "
120 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
121 HOST_WIDE_INT_PRINT_DEC" queries\n",
122 alias_stats.ref_maybe_used_by_call_p_no_alias,
123 alias_stats.ref_maybe_used_by_call_p_no_alias
124 + alias_stats.ref_maybe_used_by_call_p_may_alias);
125 fprintf (s, " call_may_clobber_ref_p: "
126 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
127 HOST_WIDE_INT_PRINT_DEC" queries\n",
128 alias_stats.call_may_clobber_ref_p_no_alias,
129 alias_stats.call_may_clobber_ref_p_no_alias
130 + alias_stats.call_may_clobber_ref_p_may_alias);
131 fprintf (s, " nonoverlapping_component_refs_p: "
132 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
133 HOST_WIDE_INT_PRINT_DEC" queries\n",
134 alias_stats.nonoverlapping_component_refs_p_no_alias,
135 alias_stats.nonoverlapping_component_refs_p_no_alias
136 + alias_stats.nonoverlapping_component_refs_p_may_alias);
137 fprintf (s, " nonoverlapping_component_refs_of_decl_p: "
138 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
139 HOST_WIDE_INT_PRINT_DEC" queries\n",
140 alias_stats.nonoverlapping_component_refs_of_decl_p_no_alias,
141 alias_stats.nonoverlapping_component_refs_of_decl_p_no_alias
142 + alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias);
143 fprintf (s, " aliasing_component_refs_p: "
144 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
145 HOST_WIDE_INT_PRINT_DEC" queries\n",
146 alias_stats.aliasing_component_refs_p_no_alias,
147 alias_stats.aliasing_component_refs_p_no_alias
148 + alias_stats.aliasing_component_refs_p_may_alias);
149 dump_alias_stats_in_alias_c (s);
150 }
151
152
153 /* Return true if dereferencing PTR may alias a global variable. */
154
155 bool
156 ptr_deref_may_alias_global_p (tree ptr)
157 {
158 struct ptr_info_def *pi;
159
160 /* If we end up with a pointer constant here, it may point
161 to global memory. */
162 if (TREE_CODE (ptr) != SSA_NAME)
163 return true;
164
165 pi = SSA_NAME_PTR_INFO (ptr);
166
167 /* If we do not have points-to information for this variable,
168 we have to punt. */
169 if (!pi)
170 return true;
171
172 /* ??? This does not use TBAA to prune globals ptr may not access. */
173 return pt_solution_includes_global (&pi->pt);
174 }
175
176 /* Return true if dereferencing PTR may alias DECL.
177 The caller is responsible for applying TBAA to see if PTR
178 may access DECL at all. */
179
180 static bool
181 ptr_deref_may_alias_decl_p (tree ptr, tree decl)
182 {
183 struct ptr_info_def *pi;
184
185 /* Conversions are irrelevant for points-to information and
186 data-dependence analysis can feed us those. */
187 STRIP_NOPS (ptr);
188
189 /* Anything we do not explicitly handle aliases. */
190 if ((TREE_CODE (ptr) != SSA_NAME
191 && TREE_CODE (ptr) != ADDR_EXPR
192 && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
193 || !POINTER_TYPE_P (TREE_TYPE (ptr))
194 || (!VAR_P (decl)
195 && TREE_CODE (decl) != PARM_DECL
196 && TREE_CODE (decl) != RESULT_DECL))
197 return true;
198
199 /* Disregard pointer offsetting. */
200 if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
201 {
202 do
203 {
204 ptr = TREE_OPERAND (ptr, 0);
205 }
206 while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
207 return ptr_deref_may_alias_decl_p (ptr, decl);
208 }
209
210 /* ADDR_EXPR pointers either just offset another pointer or directly
211 specify the pointed-to set. */
212 if (TREE_CODE (ptr) == ADDR_EXPR)
213 {
214 tree base = get_base_address (TREE_OPERAND (ptr, 0));
215 if (base
216 && (TREE_CODE (base) == MEM_REF
217 || TREE_CODE (base) == TARGET_MEM_REF))
218 ptr = TREE_OPERAND (base, 0);
219 else if (base
220 && DECL_P (base))
221 return compare_base_decls (base, decl) != 0;
222 else if (base
223 && CONSTANT_CLASS_P (base))
224 return false;
225 else
226 return true;
227 }
228
229 /* Non-aliased variables cannot be pointed to. */
230 if (!may_be_aliased (decl))
231 return false;
232
233 /* If we do not have useful points-to information for this pointer
234 we cannot disambiguate anything else. */
235 pi = SSA_NAME_PTR_INFO (ptr);
236 if (!pi)
237 return true;
238
239 return pt_solution_includes (&pi->pt, decl);
240 }
241
242 /* Return true if dereferenced PTR1 and PTR2 may alias.
243 The caller is responsible for applying TBAA to see if accesses
244 through PTR1 and PTR2 may conflict at all. */
245
246 bool
247 ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
248 {
249 struct ptr_info_def *pi1, *pi2;
250
251 /* Conversions are irrelevant for points-to information and
252 data-dependence analysis can feed us those. */
253 STRIP_NOPS (ptr1);
254 STRIP_NOPS (ptr2);
255
256 /* Disregard pointer offsetting. */
257 if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
258 {
259 do
260 {
261 ptr1 = TREE_OPERAND (ptr1, 0);
262 }
263 while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
264 return ptr_derefs_may_alias_p (ptr1, ptr2);
265 }
266 if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
267 {
268 do
269 {
270 ptr2 = TREE_OPERAND (ptr2, 0);
271 }
272 while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
273 return ptr_derefs_may_alias_p (ptr1, ptr2);
274 }
275
276 /* ADDR_EXPR pointers either just offset another pointer or directly
277 specify the pointed-to set. */
278 if (TREE_CODE (ptr1) == ADDR_EXPR)
279 {
280 tree base = get_base_address (TREE_OPERAND (ptr1, 0));
281 if (base
282 && (TREE_CODE (base) == MEM_REF
283 || TREE_CODE (base) == TARGET_MEM_REF))
284 return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
285 else if (base
286 && DECL_P (base))
287 return ptr_deref_may_alias_decl_p (ptr2, base);
288 else
289 return true;
290 }
291 if (TREE_CODE (ptr2) == ADDR_EXPR)
292 {
293 tree base = get_base_address (TREE_OPERAND (ptr2, 0));
294 if (base
295 && (TREE_CODE (base) == MEM_REF
296 || TREE_CODE (base) == TARGET_MEM_REF))
297 return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
298 else if (base
299 && DECL_P (base))
300 return ptr_deref_may_alias_decl_p (ptr1, base);
301 else
302 return true;
303 }
304
305 /* From here we require SSA name pointers. Anything else aliases. */
306 if (TREE_CODE (ptr1) != SSA_NAME
307 || TREE_CODE (ptr2) != SSA_NAME
308 || !POINTER_TYPE_P (TREE_TYPE (ptr1))
309 || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
310 return true;
311
312 /* We may end up with two empty points-to solutions for the very same
313 pointer. In this case we still want to say both pointers alias, so
314 shortcut that here. */
315 if (ptr1 == ptr2)
316 return true;
317
318 /* If we do not have useful points-to information for either pointer
319 we cannot disambiguate anything else. */
320 pi1 = SSA_NAME_PTR_INFO (ptr1);
321 pi2 = SSA_NAME_PTR_INFO (ptr2);
322 if (!pi1 || !pi2)
323 return true;
324
325 /* ??? This does not use TBAA to prune decls from the intersection
326 that not both pointers may access. */
327 return pt_solutions_intersect (&pi1->pt, &pi2->pt);
328 }
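
/* For example (a sketch): if PTR1 is the ADDR_EXPR &a of a variable a,
   the function above reduces the query to
   ptr_deref_may_alias_decl_p (ptr2, a), i.e. to asking whether 'a' may
   be aliased at all and whether the points-to solution of PTR2
   contains it.  */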
329
330 /* Return true if dereferencing PTR may alias *REF.
331 The caller is responsible for applying TBAA to see if PTR
332 may access *REF at all. */
333
334 static bool
335 ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
336 {
337 tree base = ao_ref_base (ref);
338
339 if (TREE_CODE (base) == MEM_REF
340 || TREE_CODE (base) == TARGET_MEM_REF)
341 return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
342 else if (DECL_P (base))
343 return ptr_deref_may_alias_decl_p (ptr, base);
344
345 return true;
346 }
347
348 /* Returns true if PTR1 and PTR2 compare unequal because of points-to. */
349
350 bool
351 ptrs_compare_unequal (tree ptr1, tree ptr2)
352 {
353 /* First resolve the pointers down to an SSA name pointer base or
354 a VAR_DECL, PARM_DECL or RESULT_DECL. This explicitly does
355 not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
356 or STRING_CSTs, which need points-to adjustments to track them
357 in the points-to sets. */
358 tree obj1 = NULL_TREE;
359 tree obj2 = NULL_TREE;
360 if (TREE_CODE (ptr1) == ADDR_EXPR)
361 {
362 tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
363 if (! tem)
364 return false;
365 if (VAR_P (tem)
366 || TREE_CODE (tem) == PARM_DECL
367 || TREE_CODE (tem) == RESULT_DECL)
368 obj1 = tem;
369 else if (TREE_CODE (tem) == MEM_REF)
370 ptr1 = TREE_OPERAND (tem, 0);
371 }
372 if (TREE_CODE (ptr2) == ADDR_EXPR)
373 {
374 tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
375 if (! tem)
376 return false;
377 if (VAR_P (tem)
378 || TREE_CODE (tem) == PARM_DECL
379 || TREE_CODE (tem) == RESULT_DECL)
380 obj2 = tem;
381 else if (TREE_CODE (tem) == MEM_REF)
382 ptr2 = TREE_OPERAND (tem, 0);
383 }
384
385 /* Canonicalize ptr vs. object. */
386 if (TREE_CODE (ptr1) == SSA_NAME && obj2)
387 {
388 std::swap (ptr1, ptr2);
389 std::swap (obj1, obj2);
390 }
391
392 if (obj1 && obj2)
393 /* Other code handles this correctly, no need to duplicate it here. */;
394 else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
395 {
396 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
397 /* We may not use restrict to optimize pointer comparisons.
398 See PR71062. So we have to assume that restrict-pointed-to
399 may be in fact obj1. */
400 if (!pi
401 || pi->pt.vars_contains_restrict
402 || pi->pt.vars_contains_interposable)
403 return false;
404 if (VAR_P (obj1)
405 && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
406 {
407 varpool_node *node = varpool_node::get (obj1);
408 /* If obj1 may bind to NULL give up (see below). */
409 if (! node
410 || ! node->nonzero_address ()
411 || ! decl_binds_to_current_def_p (obj1))
412 return false;
413 }
414 return !pt_solution_includes (&pi->pt, obj1);
415 }
416
417 /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
418 but those require pt.null to be conservatively correct. */
419
420 return false;
421 }
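
/* An illustrative sketch (hypothetical GIMPLE, not from the original
   sources): given a local

     int a;

   and an SSA pointer p_1 whose points-to set is { b } and carries
   neither the restrict nor the interposable flag, the function above
   returns true for p_1 and &a, so a comparison p_1 != &a can be
   folded to true.  */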
422
423 /* Return true if a reference with base BASE may refer to global memory. */
424
425 static bool
426 ref_may_alias_global_p_1 (tree base)
427 {
428 if (DECL_P (base))
429 return is_global_var (base);
430 else if (TREE_CODE (base) == MEM_REF
431 || TREE_CODE (base) == TARGET_MEM_REF)
432 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
433 return true;
434 }
435
436 bool
437 ref_may_alias_global_p (ao_ref *ref)
438 {
439 tree base = ao_ref_base (ref);
440 return ref_may_alias_global_p_1 (base);
441 }
442
443 bool
444 ref_may_alias_global_p (tree ref)
445 {
446 tree base = get_base_address (ref);
447 return ref_may_alias_global_p_1 (base);
448 }
449
450 /* Return true if STMT may clobber global memory. */
451
452 bool
453 stmt_may_clobber_global_p (gimple *stmt)
454 {
455 tree lhs;
456
457 if (!gimple_vdef (stmt))
458 return false;
459
460 /* ??? We could ask the oracle whether an artificial pointer
461 dereference with points-to information covering all global
462 memory (what about non-address-taken memory?) may be
463 clobbered by this statement. As there is at the moment no
464 convenient way of doing that without generating garbage, do some
465 manual checking instead.
466 ??? We could make a NULL ao_ref argument to the various
467 predicates special, meaning any global memory. */
468
469 switch (gimple_code (stmt))
470 {
471 case GIMPLE_ASSIGN:
472 lhs = gimple_assign_lhs (stmt);
473 return (TREE_CODE (lhs) != SSA_NAME
474 && ref_may_alias_global_p (lhs));
475 case GIMPLE_CALL:
476 return true;
477 default:
478 return true;
479 }
480 }
481
482
483 /* Dump alias information on FILE. */
484
485 void
486 dump_alias_info (FILE *file)
487 {
488 unsigned i;
489 tree ptr;
490 const char *funcname
491 = lang_hooks.decl_printable_name (current_function_decl, 2);
492 tree var;
493
494 fprintf (file, "\n\nAlias information for %s\n\n", funcname);
495
496 fprintf (file, "Aliased symbols\n\n");
497
498 FOR_EACH_LOCAL_DECL (cfun, i, var)
499 {
500 if (may_be_aliased (var))
501 dump_variable (file, var);
502 }
503
504 fprintf (file, "\nCall clobber information\n");
505
506 fprintf (file, "\nESCAPED");
507 dump_points_to_solution (file, &cfun->gimple_df->escaped);
508
509 fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
510
511 FOR_EACH_SSA_NAME (i, ptr, cfun)
512 {
513 struct ptr_info_def *pi;
514
515 if (!POINTER_TYPE_P (TREE_TYPE (ptr))
516 || SSA_NAME_IN_FREE_LIST (ptr))
517 continue;
518
519 pi = SSA_NAME_PTR_INFO (ptr);
520 if (pi)
521 dump_points_to_info_for (file, ptr);
522 }
523
524 fprintf (file, "\n");
525 }
526
527
528 /* Dump alias information on stderr. */
529
530 DEBUG_FUNCTION void
531 debug_alias_info (void)
532 {
533 dump_alias_info (stderr);
534 }
535
536
537 /* Dump the points-to set *PT into FILE. */
538
539 void
540 dump_points_to_solution (FILE *file, struct pt_solution *pt)
541 {
542 if (pt->anything)
543 fprintf (file, ", points-to anything");
544
545 if (pt->nonlocal)
546 fprintf (file, ", points-to non-local");
547
548 if (pt->escaped)
549 fprintf (file, ", points-to escaped");
550
551 if (pt->ipa_escaped)
552 fprintf (file, ", points-to unit escaped");
553
554 if (pt->null)
555 fprintf (file, ", points-to NULL");
556
557 if (pt->vars)
558 {
559 fprintf (file, ", points-to vars: ");
560 dump_decl_set (file, pt->vars);
561 if (pt->vars_contains_nonlocal
562 || pt->vars_contains_escaped
563 || pt->vars_contains_escaped_heap
564 || pt->vars_contains_restrict)
565 {
566 const char *comma = "";
567 fprintf (file, " (");
568 if (pt->vars_contains_nonlocal)
569 {
570 fprintf (file, "nonlocal");
571 comma = ", ";
572 }
573 if (pt->vars_contains_escaped)
574 {
575 fprintf (file, "%sescaped", comma);
576 comma = ", ";
577 }
578 if (pt->vars_contains_escaped_heap)
579 {
580 fprintf (file, "%sescaped heap", comma);
581 comma = ", ";
582 }
583 if (pt->vars_contains_restrict)
584 {
585 fprintf (file, "%srestrict", comma);
586 comma = ", ";
587 }
588 if (pt->vars_contains_interposable)
589 fprintf (file, "%sinterposable", comma);
590 fprintf (file, ")");
591 }
592 }
593 }
594
595
596 /* Unified dump function for pt_solution. */
597
598 DEBUG_FUNCTION void
599 debug (pt_solution &ref)
600 {
601 dump_points_to_solution (stderr, &ref);
602 }
603
604 DEBUG_FUNCTION void
605 debug (pt_solution *ptr)
606 {
607 if (ptr)
608 debug (*ptr);
609 else
610 fprintf (stderr, "<nil>\n");
611 }
612
613
614 /* Dump points-to information for SSA_NAME PTR into FILE. */
615
616 void
617 dump_points_to_info_for (FILE *file, tree ptr)
618 {
619 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
620
621 print_generic_expr (file, ptr, dump_flags);
622
623 if (pi)
624 dump_points_to_solution (file, &pi->pt);
625 else
626 fprintf (file, ", points-to anything");
627
628 fprintf (file, "\n");
629 }
630
631
632 /* Dump points-to information for VAR into stderr. */
633
634 DEBUG_FUNCTION void
635 debug_points_to_info_for (tree var)
636 {
637 dump_points_to_info_for (stderr, var);
638 }
639
640
641 /* Initializes the alias-oracle reference representation *R from REF. */
642
643 void
644 ao_ref_init (ao_ref *r, tree ref)
645 {
646 r->ref = ref;
647 r->base = NULL_TREE;
648 r->offset = 0;
649 r->size = -1;
650 r->max_size = -1;
651 r->ref_alias_set = -1;
652 r->base_alias_set = -1;
653 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
654 }
655
656 /* Returns the base object of the memory reference *REF. */
657
658 tree
659 ao_ref_base (ao_ref *ref)
660 {
661 bool reverse;
662
663 if (ref->base)
664 return ref->base;
665 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
666 &ref->max_size, &reverse);
667 return ref->base;
668 }
669
670 /* Returns the base object alias set of the memory reference *REF. */
671
672 alias_set_type
673 ao_ref_base_alias_set (ao_ref *ref)
674 {
675 tree base_ref;
676 if (ref->base_alias_set != -1)
677 return ref->base_alias_set;
678 if (!ref->ref)
679 return 0;
680 base_ref = ref->ref;
681 while (handled_component_p (base_ref))
682 base_ref = TREE_OPERAND (base_ref, 0);
683 ref->base_alias_set = get_alias_set (base_ref);
684 return ref->base_alias_set;
685 }
686
687 /* Returns the reference alias set of the memory reference *REF. */
688
689 alias_set_type
690 ao_ref_alias_set (ao_ref *ref)
691 {
692 if (ref->ref_alias_set != -1)
693 return ref->ref_alias_set;
694 ref->ref_alias_set = get_alias_set (ref->ref);
695 return ref->ref_alias_set;
696 }
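
/* A minimal usage sketch (hypothetical client code): the fields of an
   ao_ref are computed lazily, so a client only pays for what it asks
   for:

     ao_ref r;
     ao_ref_init (&r, ref);
     tree base = ao_ref_base (&r);	   ... computes base/offset/sizes
     alias_set_type set = ao_ref_alias_set (&r);   ... computed once

   The -1 values stored by ao_ref_init act as "not yet computed"
   markers consumed by the accessors above.  */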
697
698 /* Init an alias-oracle reference representation from a gimple pointer
699 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
700 size is assumed to be unknown. The access is assumed to be only
701 to or after the pointer target, not before it. */
702
703 void
704 ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
705 {
706 poly_int64 t, size_hwi, extra_offset = 0;
707 ref->ref = NULL_TREE;
708 if (TREE_CODE (ptr) == SSA_NAME)
709 {
710 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
711 if (gimple_assign_single_p (stmt)
712 && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
713 ptr = gimple_assign_rhs1 (stmt);
714 else if (is_gimple_assign (stmt)
715 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
716 && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
717 {
718 ptr = gimple_assign_rhs1 (stmt);
719 extra_offset *= BITS_PER_UNIT;
720 }
721 }
722
723 if (TREE_CODE (ptr) == ADDR_EXPR)
724 {
725 ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
726 if (ref->base)
727 ref->offset = BITS_PER_UNIT * t;
728 else
729 {
730 size = NULL_TREE;
731 ref->offset = 0;
732 ref->base = get_base_address (TREE_OPERAND (ptr, 0));
733 }
734 }
735 else
736 {
737 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
738 ref->base = build2 (MEM_REF, char_type_node,
739 ptr, null_pointer_node);
740 ref->offset = 0;
741 }
742 ref->offset += extra_offset;
743 if (size
744 && poly_int_tree_p (size, &size_hwi)
745 && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
746 ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
747 else
748 ref->max_size = ref->size = -1;
749 ref->ref_alias_set = 0;
750 ref->base_alias_set = 0;
751 ref->volatile_p = false;
752 }
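
/* A typical use (a sketch; CALL is assumed to be a gcall * representing
   memcpy (dst, src, len)): build the ao_ref describing the destination
   access from the pointer and length arguments:

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, gimple_call_arg (call, 0),
				    gimple_call_arg (call, 2));

   If the length is not a known constant, size and max_size remain -1,
   i.e. unknown.  */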
753
754 /* S1 and S2 are TYPE_SIZE or DECL_SIZE. Compare them:
755 Return -1 if S1 < S2
756 Return 1 if S1 > S2
757 Return 0 if equal or incomparable. */
758
759 static int
760 compare_sizes (tree s1, tree s2)
761 {
762 if (!s1 || !s2)
763 return 0;
764
765 poly_uint64 size1;
766 poly_uint64 size2;
767
768 if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
769 return 0;
770 if (known_lt (size1, size2))
771 return -1;
772 if (known_lt (size2, size1))
773 return 1;
774 return 0;
775 }
776
777 /* Compare TYPE1 and TYPE2 by their sizes.
778 Return -1 if size of TYPE1 < size of TYPE2
779 Return 1 if size of TYPE1 > size of TYPE2
780 Return 0 if the types are of equal size or we cannot compare them. */
781
782 static int
783 compare_type_sizes (tree type1, tree type2)
784 {
785 /* Be conservative for arrays and vectors. We want to support partial
786 overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */
787 while (TREE_CODE (type1) == ARRAY_TYPE
788 || TREE_CODE (type1) == VECTOR_TYPE)
789 type1 = TREE_TYPE (type1);
790 while (TREE_CODE (type2) == ARRAY_TYPE
791 || TREE_CODE (type2) == VECTOR_TYPE)
792 type2 = TREE_TYPE (type2);
793 return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
794 }
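
/* For example, comparing int[3] against int[3] strips both array types
   and compares int against int, returning 0, i.e. possibly equal; this
   is what keeps partial overlap of the two arrays in play, as described
   in the comment above.  */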
795
796 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
797 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
798 decide. */
799
800 static inline int
801 same_type_for_tbaa (tree type1, tree type2)
802 {
803 type1 = TYPE_MAIN_VARIANT (type1);
804 type2 = TYPE_MAIN_VARIANT (type2);
805
806 /* Handle the most common case first. */
807 if (type1 == type2)
808 return 1;
809
810 /* If we would have to do structural comparison bail out. */
811 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
812 || TYPE_STRUCTURAL_EQUALITY_P (type2))
813 return -1;
814
815 /* Compare the canonical types. */
816 if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
817 return 1;
818
819 /* ??? Array types are not properly unified in all cases as we have
820 spurious changes in the index types for example. Removing this
821 causes all sorts of problems with the Fortran frontend. */
822 if (TREE_CODE (type1) == ARRAY_TYPE
823 && TREE_CODE (type2) == ARRAY_TYPE)
824 return -1;
825
826 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
827 object of one of its constrained subtypes, e.g. when a function with an
828 unconstrained parameter passed by reference is called on an object and
829 inlined. But, even in the case of a fixed size, type and subtypes are
830 not equivalent enough as to share the same TYPE_CANONICAL, since this
831 would mean that conversions between them are useless, whereas they are
832 not (e.g. type and subtypes can have different modes). So, in the end,
833 they are only guaranteed to have the same alias set. */
834 if (get_alias_set (type1) == get_alias_set (type2))
835 return -1;
836
837 /* The types are known to be not equal. */
838 return 0;
839 }
840
841 /* Return true if TYPE is a composite type (i.e. we may apply one of the
842 handled components to it). */
843
844 static bool
845 type_has_components_p (tree type)
846 {
847 return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
848 || TREE_CODE (type) == COMPLEX_TYPE;
849 }
850
851 /* Determine if the two component references REF1 and REF2 which are
852 based on access types TYPE1 and TYPE2 and of which at least one is based
853 on an indirect reference may alias.
854 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
855 are the respective alias sets. */
856
857 static bool
858 aliasing_component_refs_p (tree ref1,
859 alias_set_type ref1_alias_set,
860 alias_set_type base1_alias_set,
861 poly_int64 offset1, poly_int64 max_size1,
862 tree ref2,
863 alias_set_type ref2_alias_set,
864 alias_set_type base2_alias_set,
865 poly_int64 offset2, poly_int64 max_size2)
866 {
867 /* If one reference is a component reference through pointers, try to find
868 a common base and apply offset-based disambiguation. This handles
869 for example
870 struct A { int i; int j; } *q;
871 struct B { struct A a; int k; } *p;
872 disambiguating q->i and p->a.j. */
873 tree base1, base2;
874 tree type1, type2;
875 int same_p1 = 0, same_p2 = 0;
876 bool maybe_match = false;
877 tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
878
879 /* Choose bases and base types to search for. */
880 base1 = ref1;
881 while (handled_component_p (base1))
882 {
883 /* Generally access paths are monotonic in the size of the accessed
884 object. The exception is trailing arrays of structures, i.e.
885 struct a {int array[0];};
886 or
887 struct a {int array1[0]; int array[];};
888 Such a struct has size 0 but accesses to a.array may have non-zero size.
889 In this case the size of TREE_TYPE (base1) is smaller than the
890 size of TREE_TYPE (TREE_OPERAND (base1, 0)).
891
892 Because we compare the sizes of arrays just by the sizes of their
893 elements, we only need to care about zero-sized array fields here. */
894 if (TREE_CODE (base1) == COMPONENT_REF
895 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base1, 1))) == ARRAY_TYPE
896 && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base1, 1)))
897 || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base1, 1)))))
898 && array_at_struct_end_p (base1))
899 {
900 gcc_checking_assert (!end_struct_ref1);
901 end_struct_ref1 = base1;
902 }
903 if (TREE_CODE (base1) == VIEW_CONVERT_EXPR
904 || TREE_CODE (base1) == BIT_FIELD_REF)
905 ref1 = TREE_OPERAND (base1, 0);
906 base1 = TREE_OPERAND (base1, 0);
907 }
908 type1 = TREE_TYPE (base1);
909 base2 = ref2;
910 while (handled_component_p (base2))
911 {
912 if (TREE_CODE (base2) == COMPONENT_REF
913 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base2, 1))) == ARRAY_TYPE
914 && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base2, 1)))
915 || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base2, 1)))))
916 && array_at_struct_end_p (base2))
917 {
918 gcc_checking_assert (!end_struct_ref2);
919 end_struct_ref2 = base2;
920 }
921 if (TREE_CODE (base2) == VIEW_CONVERT_EXPR
922 || TREE_CODE (base2) == BIT_FIELD_REF)
923 ref2 = TREE_OPERAND (base2, 0);
924 base2 = TREE_OPERAND (base2, 0);
925 }
926 type2 = TREE_TYPE (base2);
927
928 /* Now search for type1 in the access path of ref2. This
929 would be a common base for doing offset-based disambiguation on.
930 This, however, only makes sense if type2 is big enough to hold type1. */
931 int cmp_outer = compare_type_sizes (type2, type1);
932
933 /* If type2 is big enough to contain type1, walk its access path.
934 We also need to take care of arrays at the end of structs that may
935 extend beyond the end of the structure. */
936 if (cmp_outer >= 0
937 || (end_struct_ref2
938 && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
939 {
940 tree ref = ref2;
941 while (true)
942 {
943 /* We walk from the inner type to the outer types. If the type we
944 see is already too large to be part of type1, terminate the search. */
945 int cmp = compare_type_sizes (type1, TREE_TYPE (ref));
946
947 if (cmp < 0
948 && (!end_struct_ref1
949 || compare_type_sizes (TREE_TYPE (end_struct_ref1),
950 TREE_TYPE (ref)) < 0))
951 break;
952 /* If the types may be of the same size, see if we can decide about
953 their equality. */
954 if (cmp == 0)
955 {
956 same_p2 = same_type_for_tbaa (TREE_TYPE (ref), type1);
957 if (same_p2 == 1)
958 break;
959 /* In case we can't decide whether the types are the same,
960 continue looking for an exact match.
961 Remember, however, that we possibly saw a match, so that we can
962 bypass the access-path continuation tests we do later. */
963 if (same_p2 == -1)
964 maybe_match = true;
965 }
966 if (!handled_component_p (ref))
967 break;
968 ref = TREE_OPERAND (ref, 0);
969 }
970 if (same_p2 == 1)
971 {
972 poly_int64 offadj, sztmp, msztmp;
973 bool reverse;
974
975 /* We assume that arrays can overlap by a multiple of their element
976 size, as tested in gcc.dg/torture/alias-2.c.
977 This partial overlap happens only when both arrays are bases of
978 the access and not contained within another component ref.
979 To be safe we also assume partial overlap for VLAs. */
980 if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
981 && (!TYPE_SIZE (TREE_TYPE (base1))
982 || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
983 || ref == base2))
984 {
985 ++alias_stats.aliasing_component_refs_p_may_alias;
986 return true;
987 }
988
989 get_ref_base_and_extent (ref, &offadj, &sztmp, &msztmp, &reverse);
990 offset2 -= offadj;
991 get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse);
992 offset1 -= offadj;
993 if (ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
994 {
995 ++alias_stats.aliasing_component_refs_p_may_alias;
996 return true;
997 }
998 else
999 {
1000 ++alias_stats.aliasing_component_refs_p_no_alias;
1001 return false;
1002 }
1003 }
1004 }
1005
1006 /* If we didn't find a common base, try the other way around. */
1007 if (cmp_outer <= 0
1008 || (end_struct_ref1
1009 && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0))
1010 {
1011 tree ref = ref1;
1012 while (true)
1013 {
1014 int cmp = compare_type_sizes (type2, TREE_TYPE (ref));
1015 if (cmp < 0
1016 && (!end_struct_ref2
1017 || compare_type_sizes (TREE_TYPE (end_struct_ref2),
1018 TREE_TYPE (ref)) < 0))
1019 break;
1020 /* If the types may be of the same size, see if we can decide about
1021 their equality. */
1022 if (cmp == 0)
1023 {
1024 same_p1 = same_type_for_tbaa (TREE_TYPE (ref), type2);
1025 if (same_p1 == 1)
1026 break;
1027 if (same_p1 == -1)
1028 maybe_match = true;
1029 }
1030 if (!handled_component_p (ref))
1031 break;
1032 ref = TREE_OPERAND (ref, 0);
1033 }
1034 if (same_p1 == 1)
1035 {
1036 poly_int64 offadj, sztmp, msztmp;
1037 bool reverse;
1038
1039 if (TREE_CODE (TREE_TYPE (base2)) == ARRAY_TYPE
1040 && (!TYPE_SIZE (TREE_TYPE (base2))
1041 || TREE_CODE (TYPE_SIZE (TREE_TYPE (base2))) != INTEGER_CST
1042 || ref == base1))
1043 {
1044 ++alias_stats.aliasing_component_refs_p_may_alias;
1045 return true;
1046 }
1047
1048 get_ref_base_and_extent (ref, &offadj, &sztmp, &msztmp, &reverse);
1049 offset1 -= offadj;
1050 get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse);
1051 offset2 -= offadj;
1052 if (ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1053 {
1054 ++alias_stats.aliasing_component_refs_p_may_alias;
1055 return true;
1056 }
1057 else
1058 {
1059 ++alias_stats.aliasing_component_refs_p_no_alias;
1060 return false;
1061 }
1062 }
1063 }
1064
1065 /* In the following code we make the assumption that the types in access
1066 paths do not overlap and thus accesses alias only if one path can be a
1067 continuation of another. If we were not able to decide about type
1068 equivalence, we have to give up. */
1069 if (maybe_match)
1070 return true;
1071
1072 /* If we have two type access paths B1.path1 and B2.path2 they may
1073 only alias if either B1 is in B2.path2 or B2 is in B1.path1.
1074 But we can still have a path that goes B1.path1...B2.path2 with
1075 a part that we do not see. So we can only disambiguate now
1076 if there is no B2 in the tail of path1 and no B1 in the
1077 tail of path2. */
1078 if (compare_type_sizes (TREE_TYPE (ref2), type1) >= 0
1079 && (!end_struct_ref1
1080 || compare_type_sizes (TREE_TYPE (ref2),
1081 TREE_TYPE (end_struct_ref1)) >= 0)
1082 && type_has_components_p (TREE_TYPE (ref2))
1083 && (base1_alias_set == ref2_alias_set
1084 || alias_set_subset_of (base1_alias_set, ref2_alias_set)))
1085 {
1086 ++alias_stats.aliasing_component_refs_p_may_alias;
1087 return true;
1088 }
1089 /* If this is ptr vs. decl then we know there is no ptr ... decl path. */
1090 if (compare_type_sizes (TREE_TYPE (ref1), type2) >= 0
1091 && (!end_struct_ref2
1092 || compare_type_sizes (TREE_TYPE (ref1),
1093 TREE_TYPE (end_struct_ref2)) >= 0)
1094 && type_has_components_p (TREE_TYPE (ref1))
1095 && (base2_alias_set == ref1_alias_set
1096 || alias_set_subset_of (base2_alias_set, ref1_alias_set)))
1097 {
1098 ++alias_stats.aliasing_component_refs_p_may_alias;
1099 return true;
1100 }
1101 ++alias_stats.aliasing_component_refs_p_no_alias;
1102 return false;
1103 }
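
/* A worked instance of the example in the comment above (a sketch,
   assuming 32-bit int): for q->i and p->a.j with

     struct A { int i; int j; } *q;
     struct B { struct A a; int k; } *p;

   walking the access path of p->a.j finds type A as the common base;
   relative to A, q->i occupies bits [0, 32) and p->a.j bits [32, 64),
   so the ranges cannot overlap and the references are disambiguated.  */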
1104
1105 /* Return true if we can determine that component references REF1 and REF2,
1106 that are within a common DECL, cannot overlap. */
1107
1108 static bool
1109 nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
1110 {
1111 auto_vec<tree, 16> component_refs1;
1112 auto_vec<tree, 16> component_refs2;
1113
1114 /* Create the stack of handled components for REF1. */
1115 while (handled_component_p (ref1))
1116 {
1117 component_refs1.safe_push (ref1);
1118 ref1 = TREE_OPERAND (ref1, 0);
1119 }
1120 if (TREE_CODE (ref1) == MEM_REF)
1121 {
1122 if (!integer_zerop (TREE_OPERAND (ref1, 1)))
1123 {
1124 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1125 return false;
1126 }
1127 ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
1128 }
1129
1130 /* Create the stack of handled components for REF2. */
1131 while (handled_component_p (ref2))
1132 {
1133 component_refs2.safe_push (ref2);
1134 ref2 = TREE_OPERAND (ref2, 0);
1135 }
1136 if (TREE_CODE (ref2) == MEM_REF)
1137 {
1138 if (!integer_zerop (TREE_OPERAND (ref2, 1)))
1139 {
1140 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1141 return false;
1142 }
1143 ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
1144 }
1145
1146 /* Bases must be either the same or uncomparable. */
1147 gcc_checking_assert (ref1 == ref2
1148 || (DECL_P (ref1) && DECL_P (ref2)
1149 && compare_base_decls (ref1, ref2) != 0));
1150
1151 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1152 rank. This is sufficient because we start from the same DECL and you
1153 cannot reference several fields at a time with COMPONENT_REFs (unlike
1154 with ARRAY_RANGE_REFs for arrays) so you always need the same number
1155 of them to access a sub-component, unless you're in a union, in which
1156 case the return value will precisely be false. */
1157 while (true)
1158 {
1159 do
1160 {
1161 if (component_refs1.is_empty ())
1162 {
1163 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1164 return false;
1165 }
1166 ref1 = component_refs1.pop ();
1167 }
1168 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
1169
1170 do
1171 {
1172 if (component_refs2.is_empty ())
1173 {
1174 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1175 return false;
1176 }
1177 ref2 = component_refs2.pop ();
1178 }
1179 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
1180
1181 /* Beware of BIT_FIELD_REF. */
1182 if (TREE_CODE (ref1) != COMPONENT_REF
1183 || TREE_CODE (ref2) != COMPONENT_REF)
1184 {
1185 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1186 return false;
1187 }
1188
1189 tree field1 = TREE_OPERAND (ref1, 1);
1190 tree field2 = TREE_OPERAND (ref2, 1);
1191
1192 /* ??? We cannot simply use the type of operand #0 of the refs here
1193 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1194 for common blocks instead of using unions like everyone else. */
1195 tree type1 = DECL_CONTEXT (field1);
1196 tree type2 = DECL_CONTEXT (field2);
1197
1198 /* We cannot disambiguate fields in a union or qualified union. */
1199 if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
1200 {
1201 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1202 return false;
1203 }
1204
1205 if (field1 != field2)
1206 {
1207 /* A field and its representative need to be considered the
1208 same. */
1209 if (DECL_BIT_FIELD_REPRESENTATIVE (field1) == field2
1210 || DECL_BIT_FIELD_REPRESENTATIVE (field2) == field1)
1211 {
1212 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1213 return false;
1214 }
1215 /* Different fields of the same record type cannot overlap.
1216 ??? Bitfields can overlap at RTL level so punt on them. */
1217 if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
1218 {
1219 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1220 return false;
1221 }
1222 ++alias_stats.nonoverlapping_component_refs_of_decl_p_no_alias;
1223 return true;
1224 }
1225 }
1226
1227 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1228 return false;
1229 }
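
/* For instance (a sketch): given

     struct S { int f; int g; } s;

   the accesses s.f and s.g pop COMPONENT_REFs of the same rank, find
   field1 != field2 within the same RECORD_TYPE with neither being a
   bit-field, and so return true.  With a union in place of struct S,
   the RECORD_TYPE check makes the function return false.  */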
1230
1231 /* qsort compare function to sort FIELD_DECLs by the TYPE_UID of
1232 their DECL_FIELD_CONTEXT. */
1233
1234 static inline int
1235 ncr_compar (const void *field1_, const void *field2_)
1236 {
1237 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1238 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1239 unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
1240 unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
1241 if (uid1 < uid2)
1242 return -1;
1243 else if (uid1 > uid2)
1244 return 1;
1245 return 0;
1246 }
1247
1248 /* Return true if we can determine that the fields referenced cannot
1249 overlap for any pair of objects. */
1250
1251 static bool
1252 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1253 {
1254 if (!flag_strict_aliasing
1255 || !x || !y
1256 || !handled_component_p (x)
1257 || !handled_component_p (y))
1258 {
1259 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1260 return false;
1261 }
1262
1263 auto_vec<const_tree, 16> fieldsx;
1264 while (handled_component_p (x))
1265 {
1266 if (TREE_CODE (x) == COMPONENT_REF)
1267 {
1268 tree field = TREE_OPERAND (x, 1);
1269 tree type = DECL_FIELD_CONTEXT (field);
1270 if (TREE_CODE (type) == RECORD_TYPE)
1271 fieldsx.safe_push (field);
1272 }
1273 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
1274 || TREE_CODE (x) == BIT_FIELD_REF)
1275 fieldsx.truncate (0);
1276 x = TREE_OPERAND (x, 0);
1277 }
1278 if (fieldsx.length () == 0)
1279 return false;
1280 auto_vec<const_tree, 16> fieldsy;
1281 while (handled_component_p (y))
1282 {
1283 if (TREE_CODE (y) == COMPONENT_REF)
1284 {
1285 tree field = TREE_OPERAND (y, 1);
1286 tree type = DECL_FIELD_CONTEXT (field);
1287 if (TREE_CODE (type) == RECORD_TYPE)
1288 fieldsy.safe_push (field);
1289 }
1290 else if (TREE_CODE (y) == VIEW_CONVERT_EXPR
1291 || TREE_CODE (y) == BIT_FIELD_REF)
1292 fieldsy.truncate (0);
1293 y = TREE_OPERAND (y, 0);
1294 }
1295 if (fieldsy.length () == 0)
1296 {
1297 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1298 return false;
1299 }
1300
1301 /* Most common case first. */
1302 if (fieldsx.length () == 1
1303 && fieldsy.length () == 1)
1304 {
1305 if ((DECL_FIELD_CONTEXT (fieldsx[0])
1306 == DECL_FIELD_CONTEXT (fieldsy[0]))
1307 && fieldsx[0] != fieldsy[0]
1308 && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])))
1309 {
1310 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1311 return true;
1312 }
1313 else
1314 {
1315 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1316 return false;
1317 }
1318 }
1319
1320 if (fieldsx.length () == 2)
1321 {
1322 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1323 std::swap (fieldsx[0], fieldsx[1]);
1324 }
1325 else
1326 fieldsx.qsort (ncr_compar);
1327
1328 if (fieldsy.length () == 2)
1329 {
1330 if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1331 std::swap (fieldsy[0], fieldsy[1]);
1332 }
1333 else
1334 fieldsy.qsort (ncr_compar);
1335
1336 unsigned i = 0, j = 0;
1337 do
1338 {
1339 const_tree fieldx = fieldsx[i];
1340 const_tree fieldy = fieldsy[j];
1341 tree typex = DECL_FIELD_CONTEXT (fieldx);
1342 tree typey = DECL_FIELD_CONTEXT (fieldy);
1343 if (typex == typey)
1344 {
1345 /* We're left with accessing different fields of a structure,
1346 no possible overlap. */
1347 if (fieldx != fieldy)
1348 {
1349 /* A field and its representative need to be considered the
1350 same. */
1351 if (DECL_BIT_FIELD_REPRESENTATIVE (fieldx) == fieldy
1352 || DECL_BIT_FIELD_REPRESENTATIVE (fieldy) == fieldx)
1353 ;
1354 /* Different fields of the same record type cannot overlap.
1355 ??? Bitfields can overlap at RTL level so punt on them. */
1356 else if (DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy))
1357 ;
1358 else
1359 {
1360 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1361 return true;
1362 }
1363 }
1364 }
1365 if (TYPE_UID (typex) < TYPE_UID (typey))
1366 {
1367 i++;
1368 if (i == fieldsx.length ())
1369 break;
1370 }
1371 else
1372 {
1373 j++;
1374 if (j == fieldsy.length ())
1375 break;
1376 }
1377 }
1378 while (1);
1379
1380 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1381 return false;
1382 }
1383
1384
1385 /* Return true if two memory references based on the variables BASE1
1386 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1387 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1388 if non-NULL are the complete memory reference trees. */
1389
1390 static bool
1391 decl_refs_may_alias_p (tree ref1, tree base1,
1392 poly_int64 offset1, poly_int64 max_size1,
1393 tree ref2, tree base2,
1394 poly_int64 offset2, poly_int64 max_size2)
1395 {
1396 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
1397
1398 /* If both references are based on different variables, they cannot alias. */
1399 if (compare_base_decls (base1, base2) == 0)
1400 return false;
1401
1402 /* If both references are based on the same variable, they cannot alias if
1403 the accesses do not overlap. */
1404 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1405 return false;
1406
1407 /* For components with variable position, the above test isn't sufficient,
1408 so we disambiguate component references manually. */
1409 if (ref1 && ref2
1410 && handled_component_p (ref1) && handled_component_p (ref2)
1411 && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
1412 return false;
1413
1414 return true;
1415 }
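
/* For example (a sketch): for two distinct variables

     int a, b;

   compare_base_decls (a, b) is 0 and the accesses are disambiguated
   immediately.  For two accesses to 'a' itself, the offset/size ranges
   and, for components at variable positions, the component-ref walk
   above decide.  */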
1416
1417 /* Return true if an indirect reference based on *PTR1 constrained
1418 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
1419 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
1420 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1421 in which case they are computed on-demand. REF1 and REF2
1422 if non-NULL are the complete memory reference trees. */
1423
1424 static bool
1425 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1426 poly_int64 offset1, poly_int64 max_size1,
1427 alias_set_type ref1_alias_set,
1428 alias_set_type base1_alias_set,
1429 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1430 poly_int64 offset2, poly_int64 max_size2,
1431 alias_set_type ref2_alias_set,
1432 alias_set_type base2_alias_set, bool tbaa_p)
1433 {
1434 tree ptr1;
1435 tree ptrtype1, dbase2;
1436
1437 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1438 || TREE_CODE (base1) == TARGET_MEM_REF)
1439 && DECL_P (base2));
1440
1441 ptr1 = TREE_OPERAND (base1, 0);
1442 poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
1443
1444 /* If only one reference is based on a variable, they cannot alias if
1445 the pointer access is beyond the extent of the variable access.
1446 (the pointer base cannot validly point to an offset less than zero
1447 within the variable).
1448 ??? IVOPTs creates bases that do not honor this restriction,
1449 so do not apply this optimization for TARGET_MEM_REFs. */
1450 if (TREE_CODE (base1) != TARGET_MEM_REF
1451 && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
1452 return false;
1453 /* They also cannot alias if the pointer may not point to the decl. */
1454 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
1455 return false;
1456
1457 /* Disambiguations that rely on strict aliasing rules follow. */
1458 if (!flag_strict_aliasing || !tbaa_p)
1459 return true;
1460
1461 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1462
1463 /* If the alias set for a pointer access is zero all bets are off. */
1464 if (base1_alias_set == 0)
1465 return true;
1466
1467 /* When we are trying to disambiguate an access with a pointer dereference
1468 as base versus one with a decl as base we can use both the size
1469 of the decl and its dynamic type for extra disambiguation.
1470 ??? We do not know anything about the dynamic type of the decl
1471 other than that its alias-set contains base2_alias_set as a subset
1472 which does not help us here. */
1473 /* As we know nothing useful about the dynamic type of the decl just
1474 use the usual conflict check rather than a subset test.
1475 ??? We could introduce -fvery-strict-aliasing when the language
1476 does not allow decls to have a dynamic type that differs from their
1477 static type. Then we can check
1478 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
1479 if (base1_alias_set != base2_alias_set
1480 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1481 return false;
1482 /* If the size of the access relevant for TBAA through the pointer
1483 is bigger than the size of the decl we can't possibly access the
1484 decl via that pointer. */
1485 if (/* ??? This in turn may run afoul when a decl of type T which is
1486 a member of union type U is accessed through a pointer to
1487 type U and sizeof T is smaller than sizeof U. */
1488 TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
1489 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
1490 && compare_sizes (DECL_SIZE (base2),
1491 TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
1492 return false;
1493
1494 if (!ref2)
1495 return true;
1496
1497 /* If the decl is accessed via a MEM_REF, reconstruct the base
1498 we can use for TBAA and an appropriately adjusted offset. */
1499 dbase2 = ref2;
1500 while (handled_component_p (dbase2))
1501 dbase2 = TREE_OPERAND (dbase2, 0);
1502 poly_int64 doffset1 = offset1;
1503 poly_offset_int doffset2 = offset2;
1504 if (TREE_CODE (dbase2) == MEM_REF
1505 || TREE_CODE (dbase2) == TARGET_MEM_REF)
1506 {
1507 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
1508 tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
1509 /* If second reference is view-converted, give up now. */
1510 if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
1511 return true;
1512 }
1513
1514 /* If first reference is view-converted, give up now. */
1515 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
1516 return true;
1517
1518 /* If both references are through the same type, they do not alias
1519 if the accesses do not overlap. This does extra disambiguation
1520 for mixed/pointer accesses but requires strict aliasing.
1521 For MEM_REFs we require that the component-ref offset we computed
1522 is relative to the start of the type which we ensure by
1523 comparing rvalue and access type and disregarding the constant
1524 pointer offset.
1525
1526 But avoid treating variable length arrays as "objects", instead assume they
1527 can overlap by an exact multiple of their element size.
1528 See gcc.dg/torture/alias-2.c. */
1529 if (((TREE_CODE (base1) != TARGET_MEM_REF
1530 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1531 && (TREE_CODE (dbase2) != TARGET_MEM_REF
1532 || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
1533 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1
1534 && (TREE_CODE (TREE_TYPE (base1)) != ARRAY_TYPE
1535 || (TYPE_SIZE (TREE_TYPE (base1))
1536 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) == INTEGER_CST)))
1537 return ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2);
1538
1539 if (ref1 && ref2
1540 && nonoverlapping_component_refs_p (ref1, ref2))
1541 return false;
1542
1543 /* Do access-path based disambiguation. */
1544 if (ref1 && ref2
1545 && (handled_component_p (ref1) || handled_component_p (ref2)))
1546 return aliasing_component_refs_p (ref1,
1547 ref1_alias_set, base1_alias_set,
1548 offset1, max_size1,
1549 ref2,
1550 ref2_alias_set, base2_alias_set,
1551 offset2, max_size2);
1552
1553 return true;
1554 }
1555
1556 /* Return true if two indirect references based on *PTR1
1557 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1558 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
1559 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1560 in which case they are computed on-demand. REF1 and REF2
1561 if non-NULL are the complete memory reference trees. */
1562
1563 static bool
1564 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1565 poly_int64 offset1, poly_int64 max_size1,
1566 alias_set_type ref1_alias_set,
1567 alias_set_type base1_alias_set,
1568 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1569 poly_int64 offset2, poly_int64 max_size2,
1570 alias_set_type ref2_alias_set,
1571 alias_set_type base2_alias_set, bool tbaa_p)
1572 {
1573 tree ptr1;
1574 tree ptr2;
1575 tree ptrtype1, ptrtype2;
1576
1577 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1578 || TREE_CODE (base1) == TARGET_MEM_REF)
1579 && (TREE_CODE (base2) == MEM_REF
1580 || TREE_CODE (base2) == TARGET_MEM_REF));
1581
1582 ptr1 = TREE_OPERAND (base1, 0);
1583 ptr2 = TREE_OPERAND (base2, 0);
1584
1585 /* If both bases are based on pointers they cannot alias if they may not
1586 point to the same memory object or if they point to the same object
1587 and the accesses do not overlap. */
1588 if ((!cfun || gimple_in_ssa_p (cfun))
1589 && operand_equal_p (ptr1, ptr2, 0)
1590 && (((TREE_CODE (base1) != TARGET_MEM_REF
1591 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1592 && (TREE_CODE (base2) != TARGET_MEM_REF
1593 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
1594 || (TREE_CODE (base1) == TARGET_MEM_REF
1595 && TREE_CODE (base2) == TARGET_MEM_REF
1596 && (TMR_STEP (base1) == TMR_STEP (base2)
1597 || (TMR_STEP (base1) && TMR_STEP (base2)
1598 && operand_equal_p (TMR_STEP (base1),
1599 TMR_STEP (base2), 0)))
1600 && (TMR_INDEX (base1) == TMR_INDEX (base2)
1601 || (TMR_INDEX (base1) && TMR_INDEX (base2)
1602 && operand_equal_p (TMR_INDEX (base1),
1603 TMR_INDEX (base2), 0)))
1604 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
1605 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
1606 && operand_equal_p (TMR_INDEX2 (base1),
1607 TMR_INDEX2 (base2), 0))))))
1608 {
1609 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
1610 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
1611 return ranges_maybe_overlap_p (offset1 + moff1, max_size1,
1612 offset2 + moff2, max_size2);
1613 }
1614 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
1615 return false;
1616
1617 /* Disambiguations that rely on strict aliasing rules follow. */
1618 if (!flag_strict_aliasing || !tbaa_p)
1619 return true;
1620
1621 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1622 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
1623
1624 /* If the alias set for a pointer access is zero all bets are off. */
1625 if (base1_alias_set == 0
1626 || base2_alias_set == 0)
1627 return true;
1628
1629 /* Do type-based disambiguation. */
1630 if (base1_alias_set != base2_alias_set
1631 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1632 return false;
1633
1634 /* If either reference is view-converted, give up now. */
1635 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
1636 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
1637 return true;
1638
1639 /* If both references are through the same type, they do not alias
1640 if the accesses do not overlap. This does extra disambiguation
1641 for mixed/pointer accesses but requires strict aliasing. */
1642 if ((TREE_CODE (base1) != TARGET_MEM_REF
1643 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1644 && (TREE_CODE (base2) != TARGET_MEM_REF
1645 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
1646 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
1647 TREE_TYPE (ptrtype2)) == 1
1648 /* But avoid treating arrays as "objects", instead assume they
1649 can overlap by an exact multiple of their element size.
1650 See gcc.dg/torture/alias-2.c. */
1651 && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
1652 return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);
1653
1654 if (ref1 && ref2
1655 && nonoverlapping_component_refs_p (ref1, ref2))
1656 return false;
1657
1658 /* Do access-path based disambiguation. */
1659 if (ref1 && ref2
1660 && (handled_component_p (ref1) || handled_component_p (ref2)))
1661 return aliasing_component_refs_p (ref1,
1662 ref1_alias_set, base1_alias_set,
1663 offset1, max_size1,
1664 ref2,
1665 ref2_alias_set, base2_alias_set,
1666 offset2, max_size2);
1667
1668 return true;
1669 }
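
/* For example (a sketch): for two four-byte accesses MEM_REF[p_1, 4]
   and MEM_REF[p_1, 8] based on the same SSA pointer p_1, the
   same-pointer branch above compares the bit ranges [32, 64) and
   [64, 96), finds them disjoint and returns false without consulting
   points-to information or TBAA.  */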
1670
1671 /* Return true if the two memory references REF1 and REF2 may alias. */
1672
1673 static bool
1674 refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1675 {
1676 tree base1, base2;
1677 poly_int64 offset1 = 0, offset2 = 0;
1678 poly_int64 max_size1 = -1, max_size2 = -1;
1679 bool var1_p, var2_p, ind1_p, ind2_p;
1680
1681 gcc_checking_assert ((!ref1->ref
1682 || TREE_CODE (ref1->ref) == SSA_NAME
1683 || DECL_P (ref1->ref)
1684 || TREE_CODE (ref1->ref) == STRING_CST
1685 || handled_component_p (ref1->ref)
1686 || TREE_CODE (ref1->ref) == MEM_REF
1687 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
1688 && (!ref2->ref
1689 || TREE_CODE (ref2->ref) == SSA_NAME
1690 || DECL_P (ref2->ref)
1691 || TREE_CODE (ref2->ref) == STRING_CST
1692 || handled_component_p (ref2->ref)
1693 || TREE_CODE (ref2->ref) == MEM_REF
1694 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
1695
1696 /* Decompose the references into their base objects and the access. */
1697 base1 = ao_ref_base (ref1);
1698 offset1 = ref1->offset;
1699 max_size1 = ref1->max_size;
1700 base2 = ao_ref_base (ref2);
1701 offset2 = ref2->offset;
1702 max_size2 = ref2->max_size;
1703
1704 /* We can end up with registers or constants as bases for example from
1705 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
1706 which is seen as a struct copy. */
1707 if (TREE_CODE (base1) == SSA_NAME
1708 || TREE_CODE (base1) == CONST_DECL
1709 || TREE_CODE (base1) == CONSTRUCTOR
1710 || TREE_CODE (base1) == ADDR_EXPR
1711 || CONSTANT_CLASS_P (base1)
1712 || TREE_CODE (base2) == SSA_NAME
1713 || TREE_CODE (base2) == CONST_DECL
1714 || TREE_CODE (base2) == CONSTRUCTOR
1715 || TREE_CODE (base2) == ADDR_EXPR
1716 || CONSTANT_CLASS_P (base2))
1717 return false;
1718
1719 /* We can end up referring to code via function and label decls.
1720 As we likely do not properly track code aliases, conservatively
1721 bail out. */
1722 if (TREE_CODE (base1) == FUNCTION_DECL
1723 || TREE_CODE (base1) == LABEL_DECL
1724 || TREE_CODE (base2) == FUNCTION_DECL
1725 || TREE_CODE (base2) == LABEL_DECL)
1726 return true;
1727
1728 /* Two volatile accesses always conflict. */
1729 if (ref1->volatile_p
1730 && ref2->volatile_p)
1731 return true;
1732
1733 /* Defer to simple offset-based disambiguation if we have
1734 references based on two decls.  Do this before deferring to
1735 TBAA to handle must-alias cases in conformance with the
1736 GCC extension of allowing type-punning through unions. */
1737 var1_p = DECL_P (base1);
1738 var2_p = DECL_P (base2);
1739 if (var1_p && var2_p)
1740 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
1741 ref2->ref, base2, offset2, max_size2);
1742
1743 /* Handle restrict-based accesses.
1744 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
1745 here. */
1746 tree rbase1 = base1;
1747 tree rbase2 = base2;
1748 if (var1_p)
1749 {
1750 rbase1 = ref1->ref;
1751 if (rbase1)
1752 while (handled_component_p (rbase1))
1753 rbase1 = TREE_OPERAND (rbase1, 0);
1754 }
1755 if (var2_p)
1756 {
1757 rbase2 = ref2->ref;
1758 if (rbase2)
1759 while (handled_component_p (rbase2))
1760 rbase2 = TREE_OPERAND (rbase2, 0);
1761 }
1762 if (rbase1 && rbase2
1763 && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
1764 && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
1765 /* If the accesses are in the same restrict clique... */
1766 && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
1767 /* But based on different pointers they do not alias. */
1768 && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
1769 return false;
1770
1771 ind1_p = (TREE_CODE (base1) == MEM_REF
1772 || TREE_CODE (base1) == TARGET_MEM_REF);
1773 ind2_p = (TREE_CODE (base2) == MEM_REF
1774 || TREE_CODE (base2) == TARGET_MEM_REF);
1775
1776 /* Canonicalize the pointer-vs-decl case. */
1777 if (ind1_p && var2_p)
1778 {
1779 std::swap (offset1, offset2);
1780 std::swap (max_size1, max_size2);
1781 std::swap (base1, base2);
1782 std::swap (ref1, ref2);
1783 var1_p = true;
1784 ind1_p = false;
1785 var2_p = false;
1786 ind2_p = true;
1787 }
1788
1789 /* First defer to TBAA if possible. */
1790 if (tbaa_p
1791 && flag_strict_aliasing
1792 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
1793 ao_ref_alias_set (ref2)))
1794 return false;
1795
1796 /* If the reference is based on a pointer that points to memory
1797 that may not be written to then the other reference cannot possibly
1798 clobber it. */
1799 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
1800 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
1801 || (ind1_p
1802 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
1803 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
1804 return false;
1805
1806 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
1807 if (var1_p && ind2_p)
1808 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
1809 offset2, max_size2,
1810 ao_ref_alias_set (ref2),
1811 ao_ref_base_alias_set (ref2),
1812 ref1->ref, base1,
1813 offset1, max_size1,
1814 ao_ref_alias_set (ref1),
1815 ao_ref_base_alias_set (ref1),
1816 tbaa_p);
1817 else if (ind1_p && ind2_p)
1818 return indirect_refs_may_alias_p (ref1->ref, base1,
1819 offset1, max_size1,
1820 ao_ref_alias_set (ref1),
1821 ao_ref_base_alias_set (ref1),
1822 ref2->ref, base2,
1823 offset2, max_size2,
1824 ao_ref_alias_set (ref2),
1825 ao_ref_base_alias_set (ref2),
1826 tbaa_p);
1827
1828 gcc_unreachable ();
1829 }
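
/* Note that the restrict handling above is what disambiguates, e.g.
   (illustrative C source):

     void f (int *restrict p, int *restrict q) { *p = 1; *q = 2; }

   Both dereferences end up as MEM_REFs in the same dependence
   clique but with different dependence bases, so the stores are
   reported as not aliasing.  */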
1830
1831 /* Return true, if the two memory references REF1 and REF2 may alias
1832 and update statistics. */
1833
1834 bool
1835 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1836 {
1837 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
1838 if (res)
1839 ++alias_stats.refs_may_alias_p_may_alias;
1840 else
1841 ++alias_stats.refs_may_alias_p_no_alias;
1842 return res;
1843 }
1844
1845 static bool
1846 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
1847 {
1848 ao_ref r1;
1849 ao_ref_init (&r1, ref1);
1850 return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
1851 }
1852
1853 bool
1854 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
1855 {
1856 ao_ref r1, r2;
1857 ao_ref_init (&r1, ref1);
1858 ao_ref_init (&r2, ref2);
1859 return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
1860 }
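
/* Illustrative use of the public entry point (REF_A and REF_B are
   hypothetical gimple memory reference trees):

     if (!refs_may_alias_p (ref_a, ref_b, true))
       ;  // proven disjoint, the accesses may be reordered

   Passing false for TBAA_P restricts the query to non-TBAA based
   disambiguation, as the dependence helpers below do.  */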
1861
1862 /* Returns true if there is an anti-dependence for the STORE that
1863 executes after the LOAD. */
1864
1865 bool
1866 refs_anti_dependent_p (tree load, tree store)
1867 {
1868 ao_ref r1, r2;
1869 ao_ref_init (&r1, load);
1870 ao_ref_init (&r2, store);
1871 return refs_may_alias_p_1 (&r1, &r2, false);
1872 }
1873
1874 /* Returns true if there is an output dependence for the stores
1875 STORE1 and STORE2. */
1876
1877 bool
1878 refs_output_dependent_p (tree store1, tree store2)
1879 {
1880 ao_ref r1, r2;
1881 ao_ref_init (&r1, store1);
1882 ao_ref_init (&r2, store2);
1883 return refs_may_alias_p_1 (&r1, &r2, false);
1884 }
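
/* Sketch (hypothetical trees): with a load from REF_L followed by a
   store to REF_S,

     if (!refs_anti_dependent_p (ref_l, ref_s))
       ;  // the store may be hoisted above the load

   Both dependence helpers deliberately pass false for TBAA, so type
   based disambiguation is not used for these ordering queries.  */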
1885
1886 /* If the call CALL may use the memory reference REF return true,
1887 otherwise return false. */
1888
1889 static bool
1890 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
1891 {
1892 tree base, callee;
1893 unsigned i;
1894 int flags = gimple_call_flags (call);
1895
1896 /* Const functions without a static chain do not implicitly use memory. */
1897 if (!gimple_call_chain (call)
1898 && (flags & (ECF_CONST|ECF_NOVOPS)))
1899 goto process_args;
1900
1901 base = ao_ref_base (ref);
1902 if (!base)
1903 return true;
1904
1905 /* A call that is not without side-effects might involve volatile
1906 accesses and thus conflicts with all other volatile accesses. */
1907 if (ref->volatile_p)
1908 return true;
1909
1910 /* If the reference is based on a decl that is not aliased the call
1911 cannot possibly use it. */
1912 if (DECL_P (base)
1913 && !may_be_aliased (base)
1914 /* But local statics can be used through recursion. */
1915 && !is_global_var (base))
1916 goto process_args;
1917
1918 callee = gimple_call_fndecl (call);
1919
1920 /* Handle those builtin functions explicitly that do not act as
1921 escape points. See tree-ssa-structalias.c:find_func_aliases
1922 for the list of builtins we might need to handle here. */
1923 if (callee != NULL_TREE
1924 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1925 switch (DECL_FUNCTION_CODE (callee))
1926 {
1927 /* All the following functions read memory pointed to by
1928 their second argument. strcat/strncat additionally
1929 reads memory pointed to by the first argument. */
1930 case BUILT_IN_STRCAT:
1931 case BUILT_IN_STRNCAT:
1932 {
1933 ao_ref dref;
1934 ao_ref_init_from_ptr_and_size (&dref,
1935 gimple_call_arg (call, 0),
1936 NULL_TREE);
1937 if (refs_may_alias_p_1 (&dref, ref, false))
1938 return true;
1939 }
1940 /* FALLTHRU */
1941 case BUILT_IN_STRCPY:
1942 case BUILT_IN_STRNCPY:
1943 case BUILT_IN_MEMCPY:
1944 case BUILT_IN_MEMMOVE:
1945 case BUILT_IN_MEMPCPY:
1946 case BUILT_IN_STPCPY:
1947 case BUILT_IN_STPNCPY:
1948 case BUILT_IN_TM_MEMCPY:
1949 case BUILT_IN_TM_MEMMOVE:
1950 {
1951 ao_ref dref;
1952 tree size = NULL_TREE;
1953 if (gimple_call_num_args (call) == 3)
1954 size = gimple_call_arg (call, 2);
1955 ao_ref_init_from_ptr_and_size (&dref,
1956 gimple_call_arg (call, 1),
1957 size);
1958 return refs_may_alias_p_1 (&dref, ref, false);
1959 }
1960 case BUILT_IN_STRCAT_CHK:
1961 case BUILT_IN_STRNCAT_CHK:
1962 {
1963 ao_ref dref;
1964 ao_ref_init_from_ptr_and_size (&dref,
1965 gimple_call_arg (call, 0),
1966 NULL_TREE);
1967 if (refs_may_alias_p_1 (&dref, ref, false))
1968 return true;
1969 }
1970 /* FALLTHRU */
1971 case BUILT_IN_STRCPY_CHK:
1972 case BUILT_IN_STRNCPY_CHK:
1973 case BUILT_IN_MEMCPY_CHK:
1974 case BUILT_IN_MEMMOVE_CHK:
1975 case BUILT_IN_MEMPCPY_CHK:
1976 case BUILT_IN_STPCPY_CHK:
1977 case BUILT_IN_STPNCPY_CHK:
1978 {
1979 ao_ref dref;
1980 tree size = NULL_TREE;
1981 if (gimple_call_num_args (call) == 4)
1982 size = gimple_call_arg (call, 2);
1983 ao_ref_init_from_ptr_and_size (&dref,
1984 gimple_call_arg (call, 1),
1985 size);
1986 return refs_may_alias_p_1 (&dref, ref, false);
1987 }
1988 case BUILT_IN_BCOPY:
1989 {
1990 ao_ref dref;
1991 tree size = gimple_call_arg (call, 2);
1992 ao_ref_init_from_ptr_and_size (&dref,
1993 gimple_call_arg (call, 0),
1994 size);
1995 return refs_may_alias_p_1 (&dref, ref, false);
1996 }
1997
1998 /* The following functions read memory pointed to by their
1999 first argument. */
2000 CASE_BUILT_IN_TM_LOAD (1):
2001 CASE_BUILT_IN_TM_LOAD (2):
2002 CASE_BUILT_IN_TM_LOAD (4):
2003 CASE_BUILT_IN_TM_LOAD (8):
2004 CASE_BUILT_IN_TM_LOAD (FLOAT):
2005 CASE_BUILT_IN_TM_LOAD (DOUBLE):
2006 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
2007 CASE_BUILT_IN_TM_LOAD (M64):
2008 CASE_BUILT_IN_TM_LOAD (M128):
2009 CASE_BUILT_IN_TM_LOAD (M256):
2010 case BUILT_IN_TM_LOG:
2011 case BUILT_IN_TM_LOG_1:
2012 case BUILT_IN_TM_LOG_2:
2013 case BUILT_IN_TM_LOG_4:
2014 case BUILT_IN_TM_LOG_8:
2015 case BUILT_IN_TM_LOG_FLOAT:
2016 case BUILT_IN_TM_LOG_DOUBLE:
2017 case BUILT_IN_TM_LOG_LDOUBLE:
2018 case BUILT_IN_TM_LOG_M64:
2019 case BUILT_IN_TM_LOG_M128:
2020 case BUILT_IN_TM_LOG_M256:
2021 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
2022
2023 /* These read memory pointed to by the first argument. */
2024 case BUILT_IN_STRDUP:
2025 case BUILT_IN_STRNDUP:
2026 case BUILT_IN_REALLOC:
2027 {
2028 ao_ref dref;
2029 tree size = NULL_TREE;
2030 if (gimple_call_num_args (call) == 2)
2031 size = gimple_call_arg (call, 1);
2032 ao_ref_init_from_ptr_and_size (&dref,
2033 gimple_call_arg (call, 0),
2034 size);
2035 return refs_may_alias_p_1 (&dref, ref, false);
2036 }
2037 /* These read memory pointed to by the first argument. */
2038 case BUILT_IN_INDEX:
2039 case BUILT_IN_STRCHR:
2040 case BUILT_IN_STRRCHR:
2041 {
2042 ao_ref dref;
2043 ao_ref_init_from_ptr_and_size (&dref,
2044 gimple_call_arg (call, 0),
2045 NULL_TREE);
2046 return refs_may_alias_p_1 (&dref, ref, false);
2047 }
2048 /* These read memory pointed to by the first argument with size
2049 in the third argument. */
2050 case BUILT_IN_MEMCHR:
2051 {
2052 ao_ref dref;
2053 ao_ref_init_from_ptr_and_size (&dref,
2054 gimple_call_arg (call, 0),
2055 gimple_call_arg (call, 2));
2056 return refs_may_alias_p_1 (&dref, ref, false);
2057 }
2058 /* These read memory pointed to by the first and second arguments. */
2059 case BUILT_IN_STRSTR:
2060 case BUILT_IN_STRPBRK:
2061 {
2062 ao_ref dref;
2063 ao_ref_init_from_ptr_and_size (&dref,
2064 gimple_call_arg (call, 0),
2065 NULL_TREE);
2066 if (refs_may_alias_p_1 (&dref, ref, false))
2067 return true;
2068 ao_ref_init_from_ptr_and_size (&dref,
2069 gimple_call_arg (call, 1),
2070 NULL_TREE);
2071 return refs_may_alias_p_1 (&dref, ref, false);
2072 }
2073
2074 /* The following builtins do not read from memory. */
2075 case BUILT_IN_FREE:
2076 case BUILT_IN_MALLOC:
2077 case BUILT_IN_POSIX_MEMALIGN:
2078 case BUILT_IN_ALIGNED_ALLOC:
2079 case BUILT_IN_CALLOC:
2080 CASE_BUILT_IN_ALLOCA:
2081 case BUILT_IN_STACK_SAVE:
2082 case BUILT_IN_STACK_RESTORE:
2083 case BUILT_IN_MEMSET:
2084 case BUILT_IN_TM_MEMSET:
2085 case BUILT_IN_MEMSET_CHK:
2086 case BUILT_IN_FREXP:
2087 case BUILT_IN_FREXPF:
2088 case BUILT_IN_FREXPL:
2089 case BUILT_IN_GAMMA_R:
2090 case BUILT_IN_GAMMAF_R:
2091 case BUILT_IN_GAMMAL_R:
2092 case BUILT_IN_LGAMMA_R:
2093 case BUILT_IN_LGAMMAF_R:
2094 case BUILT_IN_LGAMMAL_R:
2095 case BUILT_IN_MODF:
2096 case BUILT_IN_MODFF:
2097 case BUILT_IN_MODFL:
2098 case BUILT_IN_REMQUO:
2099 case BUILT_IN_REMQUOF:
2100 case BUILT_IN_REMQUOL:
2101 case BUILT_IN_SINCOS:
2102 case BUILT_IN_SINCOSF:
2103 case BUILT_IN_SINCOSL:
2104 case BUILT_IN_ASSUME_ALIGNED:
2105 case BUILT_IN_VA_END:
2106 return false;
2107 /* __sync_* builtins and some OpenMP builtins act as threading
2108 barriers. */
2109 #undef DEF_SYNC_BUILTIN
2110 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2111 #include "sync-builtins.def"
2112 #undef DEF_SYNC_BUILTIN
2113 case BUILT_IN_GOMP_ATOMIC_START:
2114 case BUILT_IN_GOMP_ATOMIC_END:
2115 case BUILT_IN_GOMP_BARRIER:
2116 case BUILT_IN_GOMP_BARRIER_CANCEL:
2117 case BUILT_IN_GOMP_TASKWAIT:
2118 case BUILT_IN_GOMP_TASKGROUP_END:
2119 case BUILT_IN_GOMP_CRITICAL_START:
2120 case BUILT_IN_GOMP_CRITICAL_END:
2121 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2122 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2123 case BUILT_IN_GOMP_LOOP_END:
2124 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2125 case BUILT_IN_GOMP_ORDERED_START:
2126 case BUILT_IN_GOMP_ORDERED_END:
2127 case BUILT_IN_GOMP_SECTIONS_END:
2128 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2129 case BUILT_IN_GOMP_SINGLE_COPY_START:
2130 case BUILT_IN_GOMP_SINGLE_COPY_END:
2131 return true;
2132
2133 default:
2134 /* Fallthru to general call handling. */;
2135 }
2136
2137 /* Check if base is a global static variable that is not read
2138 by the function. */
2139 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2140 {
2141 struct cgraph_node *node = cgraph_node::get (callee);
2142 bitmap not_read;
2143
2144 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2145 node yet. We should enforce that there are nodes for all decls in the
2146 IL and remove this check instead. */
2147 if (node
2148 && (not_read = ipa_reference_get_not_read_global (node))
2149 && bitmap_bit_p (not_read, ipa_reference_var_uid (base)))
2150 goto process_args;
2151 }
2152
2153 /* Check if the base variable is call-used. */
2154 if (DECL_P (base))
2155 {
2156 if (pt_solution_includes (gimple_call_use_set (call), base))
2157 return true;
2158 }
2159 else if ((TREE_CODE (base) == MEM_REF
2160 || TREE_CODE (base) == TARGET_MEM_REF)
2161 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2162 {
2163 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2164 if (!pi)
2165 return true;
2166
2167 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
2168 return true;
2169 }
2170 else
2171 return true;
2172
2173 /* Inspect call arguments for passed-by-value aliases. */
2174 process_args:
2175 for (i = 0; i < gimple_call_num_args (call); ++i)
2176 {
2177 tree op = gimple_call_arg (call, i);
2178 int flags = gimple_call_arg_flags (call, i);
2179
2180 if (flags & EAF_UNUSED)
2181 continue;
2182
2183 if (TREE_CODE (op) == WITH_SIZE_EXPR)
2184 op = TREE_OPERAND (op, 0);
2185
2186 if (TREE_CODE (op) != SSA_NAME
2187 && !is_gimple_min_invariant (op))
2188 {
2189 ao_ref r;
2190 ao_ref_init (&r, op);
2191 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
2192 return true;
2193 }
2194 }
2195
2196 return false;
2197 }
2198
2199 static bool
2200 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2201 {
2202 bool res;
2203 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2204 if (res)
2205 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2206 else
2207 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2208 return res;
2209 }
2210
2211
2212 /* If the statement STMT may use the memory reference REF return
2213 true, otherwise return false. */
2214
2215 bool
2216 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2217 {
2218 if (is_gimple_assign (stmt))
2219 {
2220 tree rhs;
2221
2222 /* All memory-accessing assignments are single (gimple_assign_single_p).  */
2223 if (!gimple_assign_single_p (stmt))
2224 return false;
2225
2226 rhs = gimple_assign_rhs1 (stmt);
2227 if (is_gimple_reg (rhs)
2228 || is_gimple_min_invariant (rhs)
2229 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2230 return false;
2231
2232 return refs_may_alias_p (rhs, ref, tbaa_p);
2233 }
2234 else if (is_gimple_call (stmt))
2235 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2236 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2237 {
2238 tree retval = gimple_return_retval (return_stmt);
2239 if (retval
2240 && TREE_CODE (retval) != SSA_NAME
2241 && !is_gimple_min_invariant (retval)
2242 && refs_may_alias_p (retval, ref, tbaa_p))
2243 return true;
2244 /* If ref escapes the function then the return acts as a use. */
2245 tree base = ao_ref_base (ref);
2246 if (!base)
2247 ;
2248 else if (DECL_P (base))
2249 return is_global_var (base);
2250 else if (TREE_CODE (base) == MEM_REF
2251 || TREE_CODE (base) == TARGET_MEM_REF)
2252 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
2253 return false;
2254 }
2255
2256 return true;
2257 }
2258
2259 bool
2260 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
2261 {
2262 ao_ref r;
2263 ao_ref_init (&r, ref);
2264 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
2265 }
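
/* Illustrative query (hypothetical statements): whether USE_STMT may
   read the memory stored to by STORE_STMT:

     tree lhs = gimple_assign_lhs (store_stmt);
     if (!ref_maybe_used_by_stmt_p (use_stmt, lhs, true))
       ;  // USE_STMT provably does not read *LHS
*/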
2266
2267 /* If the call in statement CALL may clobber the memory reference REF
2268 return true, otherwise return false. */
2269
2270 bool
2271 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
2272 {
2273 tree base;
2274 tree callee;
2275
2276 /* If the call is pure or const it cannot clobber anything. */
2277 if (gimple_call_flags (call)
2278 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
2279 return false;
2280 if (gimple_call_internal_p (call))
2281 switch (gimple_call_internal_fn (call))
2282 {
2283 /* Treat these internal calls like ECF_PURE for aliasing;
2284 they don't write to any memory the program should care about.
2285 They have important other side-effects, and read memory,
2286 so can't be ECF_NOVOPS. */
2287 case IFN_UBSAN_NULL:
2288 case IFN_UBSAN_BOUNDS:
2289 case IFN_UBSAN_VPTR:
2290 case IFN_UBSAN_OBJECT_SIZE:
2291 case IFN_UBSAN_PTR:
2292 case IFN_ASAN_CHECK:
2293 return false;
2294 default:
2295 break;
2296 }
2297
2298 base = ao_ref_base (ref);
2299 if (!base)
2300 return true;
2301
2302 if (TREE_CODE (base) == SSA_NAME
2303 || CONSTANT_CLASS_P (base))
2304 return false;
2305
2306 /* A call that is not without side-effects might involve volatile
2307 accesses and thus conflicts with all other volatile accesses. */
2308 if (ref->volatile_p)
2309 return true;
2310
2311 /* If the reference is based on a decl that is not aliased the call
2312 cannot possibly clobber it. */
2313 if (DECL_P (base)
2314 && !may_be_aliased (base)
2315 /* But local non-readonly statics can be modified through recursion
2316 or the call may implement a threading barrier, which we must
2317 treat as may-def. */
2318 && (TREE_READONLY (base)
2319 || !is_global_var (base)))
2320 return false;
2321
2322 /* If the reference is based on a pointer that points to memory
2323 that may not be written to then the call cannot possibly clobber it. */
2324 if ((TREE_CODE (base) == MEM_REF
2325 || TREE_CODE (base) == TARGET_MEM_REF)
2326 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
2327 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
2328 return false;
2329
2330 callee = gimple_call_fndecl (call);
2331
2332 /* Handle those builtin functions explicitly that do not act as
2333 escape points. See tree-ssa-structalias.c:find_func_aliases
2334 for the list of builtins we might need to handle here. */
2335 if (callee != NULL_TREE
2336 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2337 switch (DECL_FUNCTION_CODE (callee))
2338 {
2339 /* All the following functions clobber memory pointed to by
2340 their first argument. */
2341 case BUILT_IN_STRCPY:
2342 case BUILT_IN_STRNCPY:
2343 case BUILT_IN_MEMCPY:
2344 case BUILT_IN_MEMMOVE:
2345 case BUILT_IN_MEMPCPY:
2346 case BUILT_IN_STPCPY:
2347 case BUILT_IN_STPNCPY:
2348 case BUILT_IN_STRCAT:
2349 case BUILT_IN_STRNCAT:
2350 case BUILT_IN_MEMSET:
2351 case BUILT_IN_TM_MEMSET:
2352 CASE_BUILT_IN_TM_STORE (1):
2353 CASE_BUILT_IN_TM_STORE (2):
2354 CASE_BUILT_IN_TM_STORE (4):
2355 CASE_BUILT_IN_TM_STORE (8):
2356 CASE_BUILT_IN_TM_STORE (FLOAT):
2357 CASE_BUILT_IN_TM_STORE (DOUBLE):
2358 CASE_BUILT_IN_TM_STORE (LDOUBLE):
2359 CASE_BUILT_IN_TM_STORE (M64):
2360 CASE_BUILT_IN_TM_STORE (M128):
2361 CASE_BUILT_IN_TM_STORE (M256):
2362 case BUILT_IN_TM_MEMCPY:
2363 case BUILT_IN_TM_MEMMOVE:
2364 {
2365 ao_ref dref;
2366 tree size = NULL_TREE;
2367 /* Don't pass in size for strncat, as the maximum size
2368 is strlen (dest) + n + 1 rather than n; that is, n + 1
2369 bytes at dest + strlen (dest), but strlen (dest) isn't
2370 known.  */
2371 if (gimple_call_num_args (call) == 3
2372 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
2373 size = gimple_call_arg (call, 2);
2374 ao_ref_init_from_ptr_and_size (&dref,
2375 gimple_call_arg (call, 0),
2376 size);
2377 return refs_may_alias_p_1 (&dref, ref, false);
2378 }
2379 case BUILT_IN_STRCPY_CHK:
2380 case BUILT_IN_STRNCPY_CHK:
2381 case BUILT_IN_MEMCPY_CHK:
2382 case BUILT_IN_MEMMOVE_CHK:
2383 case BUILT_IN_MEMPCPY_CHK:
2384 case BUILT_IN_STPCPY_CHK:
2385 case BUILT_IN_STPNCPY_CHK:
2386 case BUILT_IN_STRCAT_CHK:
2387 case BUILT_IN_STRNCAT_CHK:
2388 case BUILT_IN_MEMSET_CHK:
2389 {
2390 ao_ref dref;
2391 tree size = NULL_TREE;
2392 /* Don't pass in size for __strncat_chk, as the maximum size
2393 is strlen (dest) + n + 1 rather than n; that is, n + 1
2394 bytes at dest + strlen (dest), but strlen (dest) isn't
2395 known.  */
2396 if (gimple_call_num_args (call) == 4
2397 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
2398 size = gimple_call_arg (call, 2);
2399 ao_ref_init_from_ptr_and_size (&dref,
2400 gimple_call_arg (call, 0),
2401 size);
2402 return refs_may_alias_p_1 (&dref, ref, false);
2403 }
2404 case BUILT_IN_BCOPY:
2405 {
2406 ao_ref dref;
2407 tree size = gimple_call_arg (call, 2);
2408 ao_ref_init_from_ptr_and_size (&dref,
2409 gimple_call_arg (call, 1),
2410 size);
2411 return refs_may_alias_p_1 (&dref, ref, false);
2412 }
2413 /* Allocating memory does not have any side-effects apart from
2414 being the definition point for the pointer. */
2415 case BUILT_IN_MALLOC:
2416 case BUILT_IN_ALIGNED_ALLOC:
2417 case BUILT_IN_CALLOC:
2418 case BUILT_IN_STRDUP:
2419 case BUILT_IN_STRNDUP:
2420 /* Unix98 specifies that errno is set on allocation failure. */
2421 if (flag_errno_math
2422 && targetm.ref_may_alias_errno (ref))
2423 return true;
2424 return false;
2425 case BUILT_IN_STACK_SAVE:
2426 CASE_BUILT_IN_ALLOCA:
2427 case BUILT_IN_ASSUME_ALIGNED:
2428 return false;
2429 /* But posix_memalign stores a pointer into the memory pointed to
2430 by its first argument. */
2431 case BUILT_IN_POSIX_MEMALIGN:
2432 {
2433 tree ptrptr = gimple_call_arg (call, 0);
2434 ao_ref dref;
2435 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2436 TYPE_SIZE_UNIT (ptr_type_node));
2437 return (refs_may_alias_p_1 (&dref, ref, false)
2438 || (flag_errno_math
2439 && targetm.ref_may_alias_errno (ref)));
2440 }
2441 /* Freeing memory kills the pointed-to memory.  More importantly,
2442 the call has to serve as a barrier for moving loads and stores
2443 across it. */
2444 case BUILT_IN_FREE:
2445 case BUILT_IN_VA_END:
2446 {
2447 tree ptr = gimple_call_arg (call, 0);
2448 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2449 }
2450 /* Realloc serves both as allocation point and deallocation point. */
2451 case BUILT_IN_REALLOC:
2452 {
2453 tree ptr = gimple_call_arg (call, 0);
2454 /* Unix98 specifies that errno is set on allocation failure. */
2455 return ((flag_errno_math
2456 && targetm.ref_may_alias_errno (ref))
2457 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2458 }
2459 case BUILT_IN_GAMMA_R:
2460 case BUILT_IN_GAMMAF_R:
2461 case BUILT_IN_GAMMAL_R:
2462 case BUILT_IN_LGAMMA_R:
2463 case BUILT_IN_LGAMMAF_R:
2464 case BUILT_IN_LGAMMAL_R:
2465 {
2466 tree out = gimple_call_arg (call, 1);
2467 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2468 return true;
2469 if (flag_errno_math)
2470 break;
2471 return false;
2472 }
2473 case BUILT_IN_FREXP:
2474 case BUILT_IN_FREXPF:
2475 case BUILT_IN_FREXPL:
2476 case BUILT_IN_MODF:
2477 case BUILT_IN_MODFF:
2478 case BUILT_IN_MODFL:
2479 {
2480 tree out = gimple_call_arg (call, 1);
2481 return ptr_deref_may_alias_ref_p_1 (out, ref);
2482 }
2483 case BUILT_IN_REMQUO:
2484 case BUILT_IN_REMQUOF:
2485 case BUILT_IN_REMQUOL:
2486 {
2487 tree out = gimple_call_arg (call, 2);
2488 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2489 return true;
2490 if (flag_errno_math)
2491 break;
2492 return false;
2493 }
2494 case BUILT_IN_SINCOS:
2495 case BUILT_IN_SINCOSF:
2496 case BUILT_IN_SINCOSL:
2497 {
2498 tree sin = gimple_call_arg (call, 1);
2499 tree cos = gimple_call_arg (call, 2);
2500 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2501 || ptr_deref_may_alias_ref_p_1 (cos, ref));
2502 }
2503 /* __sync_* builtins and some OpenMP builtins act as threading
2504 barriers. */
2505 #undef DEF_SYNC_BUILTIN
2506 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2507 #include "sync-builtins.def"
2508 #undef DEF_SYNC_BUILTIN
2509 case BUILT_IN_GOMP_ATOMIC_START:
2510 case BUILT_IN_GOMP_ATOMIC_END:
2511 case BUILT_IN_GOMP_BARRIER:
2512 case BUILT_IN_GOMP_BARRIER_CANCEL:
2513 case BUILT_IN_GOMP_TASKWAIT:
2514 case BUILT_IN_GOMP_TASKGROUP_END:
2515 case BUILT_IN_GOMP_CRITICAL_START:
2516 case BUILT_IN_GOMP_CRITICAL_END:
2517 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2518 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2519 case BUILT_IN_GOMP_LOOP_END:
2520 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2521 case BUILT_IN_GOMP_ORDERED_START:
2522 case BUILT_IN_GOMP_ORDERED_END:
2523 case BUILT_IN_GOMP_SECTIONS_END:
2524 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2525 case BUILT_IN_GOMP_SINGLE_COPY_START:
2526 case BUILT_IN_GOMP_SINGLE_COPY_END:
2527 return true;
2528 default:
2529 /* Fallthru to general call handling. */;
2530 }
2531
2532 /* Check if base is a global static variable that is not written
2533 by the function. */
2534 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2535 {
2536 struct cgraph_node *node = cgraph_node::get (callee);
2537 bitmap not_written;
2538
2539 if (node
2540 && (not_written = ipa_reference_get_not_written_global (node))
2541 && bitmap_bit_p (not_written, ipa_reference_var_uid (base)))
2542 return false;
2543 }
2544
2545 /* Check if the base variable is call-clobbered. */
2546 if (DECL_P (base))
2547 return pt_solution_includes (gimple_call_clobber_set (call), base);
2548 else if ((TREE_CODE (base) == MEM_REF
2549 || TREE_CODE (base) == TARGET_MEM_REF)
2550 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2551 {
2552 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2553 if (!pi)
2554 return true;
2555
2556 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2557 }
2558
2559 return true;
2560 }
2561
2562 /* If the call in statement CALL may clobber the memory reference REF
2563 return true, otherwise return false. */
2564
2565 bool
2566 call_may_clobber_ref_p (gcall *call, tree ref)
2567 {
2568 bool res;
2569 ao_ref r;
2570 ao_ref_init (&r, ref);
2571 res = call_may_clobber_ref_p_1 (call, &r);
2572 if (res)
2573 ++alias_stats.call_may_clobber_ref_p_may_alias;
2574 else
2575 ++alias_stats.call_may_clobber_ref_p_no_alias;
2576 return res;
2577 }
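
/* Illustrative query (hypothetical CALL and REF):

     ao_ref r;
     ao_ref_init (&r, ref);
     if (!call_may_clobber_ref_p_1 (call, &r))
       ;  // the call leaves *REF intact

   The tree overload above additionally updates the alias statistics
   counters.  */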
2578
2579
2580 /* If the statement STMT may clobber the memory reference REF return true,
2581 otherwise return false. */
2582
2583 bool
2584 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
2585 {
2586 if (is_gimple_call (stmt))
2587 {
2588 tree lhs = gimple_call_lhs (stmt);
2589 if (lhs
2590 && TREE_CODE (lhs) != SSA_NAME)
2591 {
2592 ao_ref r;
2593 ao_ref_init (&r, lhs);
2594 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
2595 return true;
2596 }
2597
2598 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
2599 }
2600 else if (gimple_assign_single_p (stmt))
2601 {
2602 tree lhs = gimple_assign_lhs (stmt);
2603 if (TREE_CODE (lhs) != SSA_NAME)
2604 {
2605 ao_ref r;
2606 ao_ref_init (&r, lhs);
2607 return refs_may_alias_p_1 (ref, &r, tbaa_p);
2608 }
2609 }
2610 else if (gimple_code (stmt) == GIMPLE_ASM)
2611 return true;
2612
2613 return false;
2614 }
2615
2616 bool
2617 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
2618 {
2619 ao_ref r;
2620 ao_ref_init (&r, ref);
2621 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
2622 }
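
/* Sketch of a backward walk step (hypothetical caller): starting
   from a statement's virtual use, check whether its defining
   statement can interfere with REF:

     gimple *def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
     if (!stmt_may_clobber_ref_p (def_stmt, ref, true))
       ;  // DEF_STMT is transparent for REF, keep walking

   This is essentially what the VUSE walkers at the end of this
   file automate.  */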
2623
2624 /* Return true if store1 and store2, described by corresponding tuples
2625 <BASE, OFFSET, SIZE, MAX_SIZE>, have the same size and store to the same
2626 address. */
2627
2628 static bool
2629 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
2630 poly_int64 max_size1,
2631 tree base2, poly_int64 offset2, poly_int64 size2,
2632 poly_int64 max_size2)
2633 {
2634 /* Offsets need to be 0. */
2635 if (maybe_ne (offset1, 0)
2636 || maybe_ne (offset2, 0))
2637 return false;
2638
2639 bool base1_obj_p = SSA_VAR_P (base1);
2640 bool base2_obj_p = SSA_VAR_P (base2);
2641
2642 /* We need one object. */
2643 if (base1_obj_p == base2_obj_p)
2644 return false;
2645 tree obj = base1_obj_p ? base1 : base2;
2646
2647 /* And we need one MEM_REF. */
2648 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
2649 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
2650 if (base1_memref_p == base2_memref_p)
2651 return false;
2652 tree memref = base1_memref_p ? base1 : base2;
2653
2654 /* Sizes need to be valid. */
2655 if (!known_size_p (max_size1)
2656 || !known_size_p (max_size2)
2657 || !known_size_p (size1)
2658 || !known_size_p (size2))
2659 return false;
2660
2661 /* Max_size needs to match size. */
2662 if (maybe_ne (max_size1, size1)
2663 || maybe_ne (max_size2, size2))
2664 return false;
2665
2666 /* Sizes need to match. */
2667 if (maybe_ne (size1, size2))
2668 return false;
2669
2670
2671 /* Check that memref is a store to a pointer with singleton points-to info.  */
2672 if (!integer_zerop (TREE_OPERAND (memref, 1)))
2673 return false;
2674 tree ptr = TREE_OPERAND (memref, 0);
2675 if (TREE_CODE (ptr) != SSA_NAME)
2676 return false;
2677 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2678 unsigned int pt_uid;
2679 if (pi == NULL
2680 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
2681 return false;
2682
2683 /* Be conservative with non-call exceptions when the address might
2684 be NULL. */
2685 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
2686 return false;
2687
2688 /* Check that ptr points relative to obj. */
2689 unsigned int obj_uid = DECL_PT_UID (obj);
2690 if (obj_uid != pt_uid)
2691 return false;
2692
2693 /* Check that the object size is the same as the store size.  That ensures
2694 that ptr points to the start of obj.  */
2695 return (DECL_SIZE (obj)
2696 && poly_int_tree_p (DECL_SIZE (obj))
2697 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
2698 }
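
/* For example (hypothetical IL): with

     int a;
     p_1 = &a;                // points-to of p_1 = { a }, singleton
     *p_1 = x;   vs.   a = y;

   both stores have offset 0 and size 32 bits, p_1's points-to
   solution is the singleton { a }, and DECL_SIZE (a) equals the
   store size, so the two stores provably write the same bytes.  */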
2699
2700 /* If STMT kills the memory reference REF return true, otherwise
2701 return false. */
2702
2703 bool
2704 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
2705 {
2706 if (!ao_ref_base (ref))
2707 return false;
2708
2709 if (gimple_has_lhs (stmt)
2710 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2711 /* The assignment is not necessarily carried out if it can throw
2712 and we can catch it in the current function where we could inspect
2713 the previous value.
2714 ??? We only need to care about the RHS throwing. For aggregate
2715 assignments or similar calls and non-call exceptions the LHS
2716 might throw as well. */
2717 && !stmt_can_throw_internal (cfun, stmt))
2718 {
2719 tree lhs = gimple_get_lhs (stmt);
2720 /* If LHS is literally a base of the access we are done. */
2721 if (ref->ref)
2722 {
2723 tree base = ref->ref;
2724 tree innermost_dropped_array_ref = NULL_TREE;
2725 if (handled_component_p (base))
2726 {
2727 tree saved_lhs0 = NULL_TREE;
2728 if (handled_component_p (lhs))
2729 {
2730 saved_lhs0 = TREE_OPERAND (lhs, 0);
2731 TREE_OPERAND (lhs, 0) = integer_zero_node;
2732 }
2733 do
2734 {
2735 /* Just compare the outermost handled components; if
2736 they are equal we have found a possible common
2737 base. */
2738 tree saved_base0 = TREE_OPERAND (base, 0);
2739 TREE_OPERAND (base, 0) = integer_zero_node;
2740 bool res = operand_equal_p (lhs, base, 0);
2741 TREE_OPERAND (base, 0) = saved_base0;
2742 if (res)
2743 break;
2744 /* Remember if we drop an array-ref, as we need to
2745 double-check later that it is not at struct end.  */
2746 if (TREE_CODE (base) == ARRAY_REF
2747 || TREE_CODE (base) == ARRAY_RANGE_REF)
2748 innermost_dropped_array_ref = base;
2749 /* Otherwise drop handled components of the access. */
2750 base = saved_base0;
2751 }
2752 while (handled_component_p (base));
2753 if (saved_lhs0)
2754 TREE_OPERAND (lhs, 0) = saved_lhs0;
2755 }
2756 /* Finally check if the lhs has the same address and size as the
2757 base candidate of the access. Watch out if we have dropped
2758 an array-ref that was at struct end; this means ref->ref may
2759 be outside of the TYPE_SIZE of its base. */
2760 if ((! innermost_dropped_array_ref
2761 || ! array_at_struct_end_p (innermost_dropped_array_ref))
2762 && (lhs == base
2763 || (((TYPE_SIZE (TREE_TYPE (lhs))
2764 == TYPE_SIZE (TREE_TYPE (base)))
2765 || (TYPE_SIZE (TREE_TYPE (lhs))
2766 && TYPE_SIZE (TREE_TYPE (base))
2767 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
2768 TYPE_SIZE (TREE_TYPE (base)),
2769 0)))
2770 && operand_equal_p (lhs, base,
2771 OEP_ADDRESS_OF
2772 | OEP_MATCH_SIDE_EFFECTS))))
2773 return true;
2774 }
2775
2776 /* Now look for non-literal equal bases with the restriction of
2777 handling constant offset and size. */
2778 /* For a must-alias check we need to be able to constrain
2779 the access properly. */
2780 if (!ref->max_size_known_p ())
2781 return false;
2782 poly_int64 size, offset, max_size, ref_offset = ref->offset;
2783 bool reverse;
2784 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
2785 &reverse);
2786 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2787 so base == ref->base does not always hold. */
2788 if (base != ref->base)
2789 {
2790 /* Try using points-to info. */
2791 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
2792 ref->offset, ref->size, ref->max_size))
2793 return true;
2794
2795 /* If both base and ref->base are MEM_REFs, only compare the
2796 first operand, and if the second operands aren't equal constants,
2797 try to add the offsets into offset and ref_offset. */
2798 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2799 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2800 {
2801 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2802 TREE_OPERAND (ref->base, 1)))
2803 {
2804 poly_offset_int off1 = mem_ref_offset (base);
2805 off1 <<= LOG2_BITS_PER_UNIT;
2806 off1 += offset;
2807 poly_offset_int off2 = mem_ref_offset (ref->base);
2808 off2 <<= LOG2_BITS_PER_UNIT;
2809 off2 += ref_offset;
2810 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
2811 size = -1;
2812 }
2813 }
2814 else
2815 size = -1;
2816 }
2817 /* For a must-alias check we need to be able to constrain
2818 the access properly. */
2819 if (known_eq (size, max_size)
2820 && known_subrange_p (ref_offset, ref->max_size, offset, size))
2821 return true;
2822 }
2823
2824 if (is_gimple_call (stmt))
2825 {
2826 tree callee = gimple_call_fndecl (stmt);
2827 if (callee != NULL_TREE
2828 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2829 switch (DECL_FUNCTION_CODE (callee))
2830 {
2831 case BUILT_IN_FREE:
2832 {
2833 tree ptr = gimple_call_arg (stmt, 0);
2834 tree base = ao_ref_base (ref);
2835 if (base && TREE_CODE (base) == MEM_REF
2836 && TREE_OPERAND (base, 0) == ptr)
2837 return true;
2838 break;
2839 }
2840
2841 case BUILT_IN_MEMCPY:
2842 case BUILT_IN_MEMPCPY:
2843 case BUILT_IN_MEMMOVE:
2844 case BUILT_IN_MEMSET:
2845 case BUILT_IN_MEMCPY_CHK:
2846 case BUILT_IN_MEMPCPY_CHK:
2847 case BUILT_IN_MEMMOVE_CHK:
2848 case BUILT_IN_MEMSET_CHK:
2849 case BUILT_IN_STRNCPY:
2850 case BUILT_IN_STPNCPY:
2851 {
2852 /* For a must-alias check we need to be able to constrain
2853 the access properly. */
2854 if (!ref->max_size_known_p ())
2855 return false;
2856 tree dest = gimple_call_arg (stmt, 0);
2857 tree len = gimple_call_arg (stmt, 2);
2858 if (!poly_int_tree_p (len))
2859 return false;
2860 tree rbase = ref->base;
2861 poly_offset_int roffset = ref->offset;
2862 ao_ref dref;
2863 ao_ref_init_from_ptr_and_size (&dref, dest, len);
2864 tree base = ao_ref_base (&dref);
2865 poly_offset_int offset = dref.offset;
2866 if (!base || !known_size_p (dref.size))
2867 return false;
2868 if (TREE_CODE (base) == MEM_REF)
2869 {
2870 if (TREE_CODE (rbase) != MEM_REF)
2871 return false;
2872 // Compare pointers.
2873 offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
2874 roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
2875 base = TREE_OPERAND (base, 0);
2876 rbase = TREE_OPERAND (rbase, 0);
2877 }
2878 if (base == rbase
2879 && known_subrange_p (roffset, ref->max_size, offset,
2880 wi::to_poly_offset (len)
2881 << LOG2_BITS_PER_UNIT))
2882 return true;
2883 break;
2884 }
2885
2886 case BUILT_IN_VA_END:
2887 {
2888 tree ptr = gimple_call_arg (stmt, 0);
2889 if (TREE_CODE (ptr) == ADDR_EXPR)
2890 {
2891 tree base = ao_ref_base (ref);
2892 if (TREE_OPERAND (ptr, 0) == base)
2893 return true;
2894 }
2895 break;
2896 }
2897
2898 default:;
2899 }
2900 }
2901 return false;
2902 }
2903
2904 bool
2905 stmt_kills_ref_p (gimple *stmt, tree ref)
2906 {
2907 ao_ref r;
2908 ao_ref_init (&r, ref);
2909 return stmt_kills_ref_p (stmt, &r);
2910 }
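
/* Illustrative use in a dead store elimination style check
   (hypothetical statements): if a later statement fully overwrites
   what an earlier store wrote, the earlier store is dead:

     if (stmt_kills_ref_p (later_stmt, gimple_assign_lhs (earlier_stmt)))
       ;  // EARLIER_STMT's effect is never observable
*/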
2911
2912
2913 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2914 TARGET or a statement clobbering the memory reference REF, in which
2915 case false is returned. The walk starts with VUSE, one argument of PHI. */
2916
2917 static bool
2918 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
2919 ao_ref *ref, tree vuse, unsigned int &limit, bitmap *visited,
2920 bool abort_on_visited,
2921 void *(*translate)(ao_ref *, tree, void *, bool *),
2922 void *data)
2923 {
2924 basic_block bb = gimple_bb (phi);
2925
2926 if (!*visited)
2927 *visited = BITMAP_ALLOC (NULL);
2928
2929 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2930
2931 /* Walk until we hit the target. */
2932 while (vuse != target)
2933 {
2934 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2935 /* If we are searching for the target VUSE by walking up to
2936 TARGET_BB dominating the original PHI we are finished once
2937 we reach a default def or a definition in a block dominating
2938 that block. Update TARGET and return. */
2939 if (!target
2940 && (gimple_nop_p (def_stmt)
2941 || dominated_by_p (CDI_DOMINATORS,
2942 target_bb, gimple_bb (def_stmt))))
2943 {
2944 target = vuse;
2945 return true;
2946 }
2947
2948 /* Recurse for PHI nodes. */
2949 if (gimple_code (def_stmt) == GIMPLE_PHI)
2950 {
2951 /* An already visited PHI node ends the walk successfully. */
2952 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2953 return !abort_on_visited;
2954 vuse = get_continuation_for_phi (def_stmt, ref, limit,
2955 visited, abort_on_visited,
2956 translate, data);
2957 if (!vuse)
2958 return false;
2959 continue;
2960 }
2961 else if (gimple_nop_p (def_stmt))
2962 return false;
2963 else
2964 {
2965 /* A clobbering statement or the end of the IL ends the walk unsuccessfully.  */
2966 if ((int)limit <= 0)
2967 return false;
2968 --limit;
2969 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2970 {
2971 bool disambiguate_only = true;
2972 if (translate
2973 && (*translate) (ref, vuse, data, &disambiguate_only) == NULL)
2974 ;
2975 else
2976 return false;
2977 }
2978 }
2979 /* If we reach a new basic-block, see if we already skipped it
2980 in a previous walk that ended successfully. */
2981 if (gimple_bb (def_stmt) != bb)
2982 {
2983 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2984 return !abort_on_visited;
2985 bb = gimple_bb (def_stmt);
2986 }
2987 vuse = gimple_vuse (def_stmt);
2988 }
2989 return true;
2990 }
2991
2992
2993 /* Starting from a PHI node for the virtual operand of the memory reference
2994 REF, find a continuation virtual operand that allows us to continue
2995 walking statements dominating PHI, skipping only statements that cannot possibly
2996 clobber REF. Decrements LIMIT for each alias disambiguation done
2997 and aborts the walk, returning NULL_TREE if it reaches zero.
2998 Returns NULL_TREE if no suitable virtual operand can be found. */
2999
3000 tree
3001 get_continuation_for_phi (gimple *phi, ao_ref *ref,
3002 unsigned int &limit, bitmap *visited,
3003 bool abort_on_visited,
3004 void *(*translate)(ao_ref *, tree, void *, bool *),
3005 void *data)
3006 {
3007 unsigned nargs = gimple_phi_num_args (phi);
3008
3009 /* We can simply look through a single-argument PHI.  */
3010 if (nargs == 1)
3011 return PHI_ARG_DEF (phi, 0);
3012
3013 /* For two or more arguments try to pairwise skip non-aliasing code
3014 until we hit the phi argument definition that dominates the others.  */
3015 basic_block phi_bb = gimple_bb (phi);
3016 tree arg0, arg1;
3017 unsigned i;
3018
3019 /* Find a candidate for the virtual operand whose definition
3020 dominates those of all others. */
3021 /* First look if any of the args themselves satisfy this. */
3022 for (i = 0; i < nargs; ++i)
3023 {
3024 arg0 = PHI_ARG_DEF (phi, i);
3025 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
3026 break;
3027 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
3028 if (def_bb != phi_bb
3029 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
3030 break;
3031 arg0 = NULL_TREE;
3032 }
3033 /* If not, see if we can reach such a candidate by walking defs
3034 until we hit the immediate dominator. maybe_skip_until will
3035 do that for us. */
3036 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
3037
3038 /* Then check against the (to be) found candidate. */
3039 for (i = 0; i < nargs; ++i)
3040 {
3041 arg1 = PHI_ARG_DEF (phi, i);
3042 if (arg1 == arg0)
3043 ;
3044 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, limit, visited,
3045 abort_on_visited,
3046 /* Do not translate when walking over
3047 backedges. */
3048 dominated_by_p
3049 (CDI_DOMINATORS,
3050 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3051 phi_bb)
3052 ? NULL : translate, data))
3053 return NULL_TREE;
3054 }
3055
3056 return arg0;
3057 }
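
/* For a diamond CFG (sketch), where neither arm clobbers REF:

          bb0:  .MEM_2 = ...
          /  \
        bb1    bb2
          \  /
          bb3:  .MEM_5 = PHI <.MEM_3(bb1), .MEM_4(bb2)>

   get_continuation_for_phi walks both arguments backwards and, since
   no statement in bb1 or bb2 may clobber REF, returns the virtual
   operand live at bb0 (.MEM_2) as the continuation.  */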
3058
3059 /* Based on the memory reference REF and its virtual use VUSE call
3060 WALKER for each virtual use that is equivalent to VUSE, including VUSE
3061 itself.  That is, for each virtual use whose defining statement
3062 does not clobber REF.
3063
3064 WALKER is called with REF, the current virtual use and DATA. If
3065 WALKER returns non-NULL the walk stops and its result is returned.
3066 At the end of a non-successful walk NULL is returned.
3067
3068 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
3069 use whose definition is a statement that may clobber REF, and DATA.
3070 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3071 If TRANSLATE returns non-NULL the walk stops and its result is returned.
3072 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3073 to adjust REF and *DATA to make that valid.
3074
3075 VALUEIZE if non-NULL is called with the next VUSE that is considered
3076 and its return value is substituted for it.  This can be used to
3077 implement optimistic value-numbering for example. Note that the
3078 VUSE argument is assumed to be valueized already.
3079
3080 LIMIT specifies the number of alias queries we are allowed to do,
3081 the walk stops when it reaches zero and NULL is returned. LIMIT
3082 is decremented by the number of alias queries (plus adjustments
3083 done by the callbacks) upon return.
3084
3085 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
3086
3087 void *
3088 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
3089 void *(*walker)(ao_ref *, tree, void *),
3090 void *(*translate)(ao_ref *, tree, void *, bool *),
3091 tree (*valueize)(tree),
3092 unsigned &limit, void *data)
3093 {
3094 bitmap visited = NULL;
3095 void *res;
3096 bool translated = false;
3097
3098 timevar_push (TV_ALIAS_STMT_WALK);
3099
3100 do
3101 {
3102 gimple *def_stmt;
3103
3104 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3105 res = (*walker) (ref, vuse, data);
3106 /* Abort walk. */
3107 if (res == (void *)-1)
3108 {
3109 res = NULL;
3110 break;
3111 }
3112 /* Lookup succeeded. */
3113 else if (res != NULL)
3114 break;
3115
3116 if (valueize)
3117 {
3118 vuse = valueize (vuse);
3119 if (!vuse)
3120 {
3121 res = NULL;
3122 break;
3123 }
3124 }
3125 def_stmt = SSA_NAME_DEF_STMT (vuse);
3126 if (gimple_nop_p (def_stmt))
3127 break;
3128 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3129 vuse = get_continuation_for_phi (def_stmt, ref, limit,
3130 &visited, translated, translate, data);
3131 else
3132 {
3133 if ((int)limit <= 0)
3134 {
3135 res = NULL;
3136 break;
3137 }
3138 --limit;
3139 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
3140 {
3141 if (!translate)
3142 break;
3143 bool disambiguate_only = false;
3144 res = (*translate) (ref, vuse, data, &disambiguate_only);
3145 /* Failed lookup and translation. */
3146 if (res == (void *)-1)
3147 {
3148 res = NULL;
3149 break;
3150 }
3151 /* Lookup succeeded. */
3152 else if (res != NULL)
3153 break;
3154 /* Translation succeeded, continue walking. */
3155 translated = translated || !disambiguate_only;
3156 }
3157 vuse = gimple_vuse (def_stmt);
3158 }
3159 }
3160 while (vuse);
3161
3162 if (visited)
3163 BITMAP_FREE (visited);
3164
3165 timevar_pop (TV_ALIAS_STMT_WALK);
3166
3167 return res;
3168 }
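
/* A minimal walker sketch (hypothetical; real users include value
   numbering).  Returning non-NULL stops the walk with that result,
   (void *)-1 aborts it, NULL continues:

     static void *
     first_full_def (ao_ref *ref, tree vuse, void *)
     {
       gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
       if (stmt_kills_ref_p (def_stmt, ref))
         return def_stmt;
       return NULL;   // keep walking
     }

     unsigned limit = 100;   // hypothetical alias-query budget
     void *res = walk_non_aliased_vuses (&r, vuse, first_full_def,
                                         NULL, NULL, limit, NULL);
*/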
3169
3170
3171 /* Based on the memory reference REF call WALKER for each vdef which
3172 defining statement may clobber REF, starting with VDEF. If REF
3173 is NULL_TREE, each defining statement is visited.
3174
3175 WALKER is called with REF, the current vdef and DATA. If WALKER
3176 returns true the walk is stopped, otherwise it continues.
3177
3178 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
3179 The pointer may be NULL, in which case this information is not tracked.
3180
3181 At PHI nodes walk_aliased_vdefs forks into one walk for each
3182 PHI argument (but only one walk continues on merge points); the
3183 return value is true if any of the walks was successful.
3184
3185 The function returns the number of statements walked or -1 if
3186 LIMIT stmts were walked and the walk was aborted at that point.
3187 If LIMIT is zero the walk is not aborted. */
3188
3189 static int
3190 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
3191 bool (*walker)(ao_ref *, tree, void *), void *data,
3192 bitmap *visited, unsigned int cnt,
3193 bool *function_entry_reached, unsigned limit)
3194 {
3195 do
3196 {
3197 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
3198
3199 if (*visited
3200 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
3201 return cnt;
3202
3203 if (gimple_nop_p (def_stmt))
3204 {
3205 if (function_entry_reached)
3206 *function_entry_reached = true;
3207 return cnt;
3208 }
3209 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3210 {
3211 unsigned i;
3212 if (!*visited)
3213 *visited = BITMAP_ALLOC (NULL);
3214 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3215 {
3216 int res = walk_aliased_vdefs_1 (ref,
3217 gimple_phi_arg_def (def_stmt, i),
3218 walker, data, visited, cnt,
3219 function_entry_reached, limit);
3220 if (res == -1)
3221 return -1;
3222 cnt = res;
3223 }
3224 return cnt;
3225 }
3226
3227 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3228 cnt++;
3229 if (cnt == limit)
3230 return -1;
3231 if ((!ref
3232 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
3233 && (*walker) (ref, vdef, data))
3234 return cnt;
3235
3236 vdef = gimple_vuse (def_stmt);
3237 }
3238 while (1);
3239 }
3240
3241 int
3242 walk_aliased_vdefs (ao_ref *ref, tree vdef,
3243 bool (*walker)(ao_ref *, tree, void *), void *data,
3244 bitmap *visited,
3245 bool *function_entry_reached, unsigned int limit)
3246 {
3247 bitmap local_visited = NULL;
3248 int ret;
3249
3250 timevar_push (TV_ALIAS_STMT_WALK);
3251
3252 if (function_entry_reached)
3253 *function_entry_reached = false;
3254
3255 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
3256 visited ? visited : &local_visited, 0,
3257 function_entry_reached, limit);
3258 if (local_visited)
3259 BITMAP_FREE (local_visited);
3260
3261 timevar_pop (TV_ALIAS_STMT_WALK);
3262
3263 return ret;
3264 }
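
/* Sketch of a collecting callback (hypothetical): gather every
   statement that may clobber REF, walking from STMT's virtual use:

     static bool
     collect_clobberer (ao_ref *ref, tree vdef, void *data)
     {
       vec<gimple *> *out = (vec<gimple *> *) data;
       out->safe_push (SSA_NAME_DEF_STMT (vdef));
       return false;   // returning false continues the walk
     }

     walk_aliased_vdefs (&r, gimple_vuse (stmt), collect_clobberer,
                         &clobberers, NULL, NULL, 100);

   A LIMIT of zero disables the early abort; passing a non-NULL
   VISITED bitmap pointer allows sharing it across queries.  */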
3265