New modref/ipa_modref optimization passes
[gcc.git] / gcc / tree-ssa-alias.c
1 /* Alias analysis for trees.
2 Copyright (C) 2004-2020 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "alias.h"
34 #include "fold-const.h"
35 #include "langhooks.h"
36 #include "dumpfile.h"
37 #include "tree-eh.h"
38 #include "tree-dfa.h"
39 #include "ipa-reference.h"
40 #include "varasm.h"
41 #include "ipa-modref-tree.h"
42 #include "ipa-modref.h"
43
44 /* Broad overview of how alias analysis on gimple works:
45
46 Statements clobbering or using memory are linked through the
47 virtual operand factored use-def chain. The virtual operand
48 is unique per function, its symbol is accessible via gimple_vop (cfun).
49 Virtual operands are used for efficiently walking memory statements
50 in the gimple IL and are useful for things like value-numbering as
51 a generation count for memory references.
52
53 SSA_NAME pointers may have associated points-to information
54 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
55 points-to information is (re-)computed by the TODO_rebuild_alias
56 pass manager todo. Points-to information is also used for more
57 precise tracking of call-clobbered and call-used variables and
58 related disambiguations.
59
60 This file contains functions for disambiguating memory references,
61 the so called alias-oracle and tools for walking of the gimple IL.
62
63 The main alias-oracle entry-points are
64
65 bool stmt_may_clobber_ref_p (gimple *, tree)
66
67 This function queries if a statement may invalidate (parts of)
68 the memory designated by the reference tree argument.
69
70 bool ref_maybe_used_by_stmt_p (gimple *, tree)
71
72 This function queries if a statement may need (parts of) the
73 memory designated by the reference tree argument.
74
75 There are variants of these functions that only handle the call
76 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
77 Note that these do not disambiguate against a possible call lhs.
78
79 bool refs_may_alias_p (tree, tree)
80
81 This function tries to disambiguate two reference trees.
82
83 bool ptr_deref_may_alias_global_p (tree)
84
85 This function queries if dereferencing a pointer variable may
86 alias global memory.
87
88 More low-level disambiguators are available and documented in
89 this file. Low-level disambiguators dealing with points-to
90 information are in tree-ssa-structalias.c. */
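
/* For illustration, a minimal sketch of how a pass might use the
   entry points above (STMT and REF are hypothetical placeholders for
   a statement and a memory reference tree):

     gimple *stmt = ...;
     tree ref = ...;
     if (!stmt_may_clobber_ref_p (stmt, ref)
         && !ref_maybe_used_by_stmt_p (stmt, ref))
       {
         // STMT provably neither writes nor reads (parts of) the
         // memory designated by REF.
       }

   Both queries are conservative: a "true" answer only means the
   accesses may conflict, not that they must.  */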
91
92 static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
93 static bool nonoverlapping_component_refs_p (const_tree, const_tree);
94
95 /* Query statistics for the different low-level disambiguators.
96 A high-level query may trigger multiple of them. */
97
98 static struct {
99 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
100 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
101 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
102 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
103 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
104 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
105 unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
106 unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
107 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
108 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
109 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
110 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
111 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
112 unsigned HOST_WIDE_INT modref_use_may_alias;
113 unsigned HOST_WIDE_INT modref_use_no_alias;
114 unsigned HOST_WIDE_INT modref_clobber_may_alias;
115 unsigned HOST_WIDE_INT modref_clobber_no_alias;
116 unsigned HOST_WIDE_INT modref_tests;
117 } alias_stats;
118
119 void
120 dump_alias_stats (FILE *s)
121 {
122 fprintf (s, "\nAlias oracle query stats:\n");
123 fprintf (s, " refs_may_alias_p: "
124 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
125 HOST_WIDE_INT_PRINT_DEC" queries\n",
126 alias_stats.refs_may_alias_p_no_alias,
127 alias_stats.refs_may_alias_p_no_alias
128 + alias_stats.refs_may_alias_p_may_alias);
129 fprintf (s, " ref_maybe_used_by_call_p: "
130 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
131 HOST_WIDE_INT_PRINT_DEC" queries\n",
132 alias_stats.ref_maybe_used_by_call_p_no_alias,
133 alias_stats.ref_maybe_used_by_call_p_no_alias
134 + alias_stats.ref_maybe_used_by_call_p_may_alias);
135 fprintf (s, " call_may_clobber_ref_p: "
136 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
137 HOST_WIDE_INT_PRINT_DEC" queries\n",
138 alias_stats.call_may_clobber_ref_p_no_alias,
139 alias_stats.call_may_clobber_ref_p_no_alias
140 + alias_stats.call_may_clobber_ref_p_may_alias);
141 fprintf (s, " nonoverlapping_component_refs_p: "
142 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
143 HOST_WIDE_INT_PRINT_DEC" queries\n",
144 alias_stats.nonoverlapping_component_refs_p_no_alias,
145 alias_stats.nonoverlapping_component_refs_p_no_alias
146 + alias_stats.nonoverlapping_component_refs_p_may_alias);
147 fprintf (s, " nonoverlapping_refs_since_match_p: "
148 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
149 HOST_WIDE_INT_PRINT_DEC" must overlaps, "
150 HOST_WIDE_INT_PRINT_DEC" queries\n",
151 alias_stats.nonoverlapping_refs_since_match_p_no_alias,
152 alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
153 alias_stats.nonoverlapping_refs_since_match_p_no_alias
154 + alias_stats.nonoverlapping_refs_since_match_p_may_alias
155 + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
156 fprintf (s, " aliasing_component_refs_p: "
157 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
158 HOST_WIDE_INT_PRINT_DEC" queries\n",
159 alias_stats.aliasing_component_refs_p_no_alias,
160 alias_stats.aliasing_component_refs_p_no_alias
161 + alias_stats.aliasing_component_refs_p_may_alias);
162 dump_alias_stats_in_alias_c (s);
163 fprintf (s, "\nModref stats:\n");
164 fprintf (s, " modref use: "
165 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
166 HOST_WIDE_INT_PRINT_DEC" queries\n",
167 alias_stats.modref_use_no_alias,
168 alias_stats.modref_use_no_alias
169 + alias_stats.modref_use_may_alias);
170 fprintf (s, " modref clobber: "
171 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
172 HOST_WIDE_INT_PRINT_DEC" queries\n "
173 HOST_WIDE_INT_PRINT_DEC" tbaa queries (%f per modref query)\n",
174 alias_stats.modref_clobber_no_alias,
175 alias_stats.modref_clobber_no_alias
176 + alias_stats.modref_clobber_may_alias,
177 alias_stats.modref_tests,
178 ((double)alias_stats.modref_tests)
179 / (alias_stats.modref_clobber_no_alias
180 + alias_stats.modref_clobber_may_alias));
181 }
182
183
184 /* Return true, if dereferencing PTR may alias with a global variable. */
185
186 bool
187 ptr_deref_may_alias_global_p (tree ptr)
188 {
189 struct ptr_info_def *pi;
190
191 /* If we end up with a pointer constant here, it may point
192 to global memory. */
193 if (TREE_CODE (ptr) != SSA_NAME)
194 return true;
195
196 pi = SSA_NAME_PTR_INFO (ptr);
197
198 /* If we do not have points-to information for this variable,
199 we have to punt. */
200 if (!pi)
201 return true;
202
203 /* ??? This does not use TBAA to prune globals ptr may not access. */
204 return pt_solution_includes_global (&pi->pt);
205 }
206
207 /* Return true if dereferencing PTR may alias DECL.
208 The caller is responsible for applying TBAA to see if PTR
209 may access DECL at all. */
210
211 static bool
212 ptr_deref_may_alias_decl_p (tree ptr, tree decl)
213 {
214 struct ptr_info_def *pi;
215
216 /* Conversions are irrelevant for points-to information and
217 data-dependence analysis can feed us those. */
218 STRIP_NOPS (ptr);
219
220 /* Anything we do not explicitly handle aliases. */
221 if ((TREE_CODE (ptr) != SSA_NAME
222 && TREE_CODE (ptr) != ADDR_EXPR
223 && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
224 || !POINTER_TYPE_P (TREE_TYPE (ptr))
225 || (!VAR_P (decl)
226 && TREE_CODE (decl) != PARM_DECL
227 && TREE_CODE (decl) != RESULT_DECL))
228 return true;
229
230 /* Disregard pointer offsetting. */
231 if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
232 {
233 do
234 {
235 ptr = TREE_OPERAND (ptr, 0);
236 }
237 while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
238 return ptr_deref_may_alias_decl_p (ptr, decl);
239 }
240
241 /* ADDR_EXPR pointers either just offset another pointer or directly
242 specify the pointed-to set. */
243 if (TREE_CODE (ptr) == ADDR_EXPR)
244 {
245 tree base = get_base_address (TREE_OPERAND (ptr, 0));
246 if (base
247 && (TREE_CODE (base) == MEM_REF
248 || TREE_CODE (base) == TARGET_MEM_REF))
249 ptr = TREE_OPERAND (base, 0);
250 else if (base
251 && DECL_P (base))
252 return compare_base_decls (base, decl) != 0;
253 else if (base
254 && CONSTANT_CLASS_P (base))
255 return false;
256 else
257 return true;
258 }
259
260 /* Non-aliased variables cannot be pointed to. */
261 if (!may_be_aliased (decl))
262 return false;
263
264 /* If we do not have useful points-to information for this pointer
265 we cannot disambiguate anything else. */
266 pi = SSA_NAME_PTR_INFO (ptr);
267 if (!pi)
268 return true;
269
270 return pt_solution_includes (&pi->pt, decl);
271 }
272
273 /* Return true if dereferenced PTR1 and PTR2 may alias.
274 The caller is responsible for applying TBAA to see if accesses
275 through PTR1 and PTR2 may conflict at all. */
276
277 bool
278 ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
279 {
280 struct ptr_info_def *pi1, *pi2;
281
282 /* Conversions are irrelevant for points-to information and
283 data-dependence analysis can feed us those. */
284 STRIP_NOPS (ptr1);
285 STRIP_NOPS (ptr2);
286
287 /* Disregard pointer offsetting. */
288 if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
289 {
290 do
291 {
292 ptr1 = TREE_OPERAND (ptr1, 0);
293 }
294 while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
295 return ptr_derefs_may_alias_p (ptr1, ptr2);
296 }
297 if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
298 {
299 do
300 {
301 ptr2 = TREE_OPERAND (ptr2, 0);
302 }
303 while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
304 return ptr_derefs_may_alias_p (ptr1, ptr2);
305 }
306
307 /* ADDR_EXPR pointers either just offset another pointer or directly
308 specify the pointed-to set. */
309 if (TREE_CODE (ptr1) == ADDR_EXPR)
310 {
311 tree base = get_base_address (TREE_OPERAND (ptr1, 0));
312 if (base
313 && (TREE_CODE (base) == MEM_REF
314 || TREE_CODE (base) == TARGET_MEM_REF))
315 return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
316 else if (base
317 && DECL_P (base))
318 return ptr_deref_may_alias_decl_p (ptr2, base);
319 else
320 return true;
321 }
322 if (TREE_CODE (ptr2) == ADDR_EXPR)
323 {
324 tree base = get_base_address (TREE_OPERAND (ptr2, 0));
325 if (base
326 && (TREE_CODE (base) == MEM_REF
327 || TREE_CODE (base) == TARGET_MEM_REF))
328 return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
329 else if (base
330 && DECL_P (base))
331 return ptr_deref_may_alias_decl_p (ptr1, base);
332 else
333 return true;
334 }
335
336 /* From here we require SSA name pointers. Anything else aliases. */
337 if (TREE_CODE (ptr1) != SSA_NAME
338 || TREE_CODE (ptr2) != SSA_NAME
339 || !POINTER_TYPE_P (TREE_TYPE (ptr1))
340 || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
341 return true;
342
343 /* We may end up with two empty points-to solutions for the same pointer.
344 In this case we still want to say both pointers alias, so shortcut
345 that here. */
346 if (ptr1 == ptr2)
347 return true;
348
349 /* If we do not have useful points-to information for either pointer
350 we cannot disambiguate anything else. */
351 pi1 = SSA_NAME_PTR_INFO (ptr1);
352 pi2 = SSA_NAME_PTR_INFO (ptr2);
353 if (!pi1 || !pi2)
354 return true;
355
356 /* ??? This does not use TBAA to prune decls from the intersection
357 that not both pointers may access. */
358 return pt_solutions_intersect (&pi1->pt, &pi2->pt);
359 }
360
361 /* Return true if dereferencing PTR may alias *REF.
362 The caller is responsible for applying TBAA to see if PTR
363 may access *REF at all. */
364
365 static bool
366 ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
367 {
368 tree base = ao_ref_base (ref);
369
370 if (TREE_CODE (base) == MEM_REF
371 || TREE_CODE (base) == TARGET_MEM_REF)
372 return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
373 else if (DECL_P (base))
374 return ptr_deref_may_alias_decl_p (ptr, base);
375
376 return true;
377 }
378
379 /* Returns true if PTR1 and PTR2 compare unequal because of points-to. */
380
381 bool
382 ptrs_compare_unequal (tree ptr1, tree ptr2)
383 {
384 /* First resolve the pointers down to a SSA name pointer base or
385 a VAR_DECL, PARM_DECL or RESULT_DECL. This explicitly does
386 not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
387 or STRING_CSTs which need points-to adjustments to track them
388 in the points-to sets. */
389 tree obj1 = NULL_TREE;
390 tree obj2 = NULL_TREE;
391 if (TREE_CODE (ptr1) == ADDR_EXPR)
392 {
393 tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
394 if (! tem)
395 return false;
396 if (VAR_P (tem)
397 || TREE_CODE (tem) == PARM_DECL
398 || TREE_CODE (tem) == RESULT_DECL)
399 obj1 = tem;
400 else if (TREE_CODE (tem) == MEM_REF)
401 ptr1 = TREE_OPERAND (tem, 0);
402 }
403 if (TREE_CODE (ptr2) == ADDR_EXPR)
404 {
405 tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
406 if (! tem)
407 return false;
408 if (VAR_P (tem)
409 || TREE_CODE (tem) == PARM_DECL
410 || TREE_CODE (tem) == RESULT_DECL)
411 obj2 = tem;
412 else if (TREE_CODE (tem) == MEM_REF)
413 ptr2 = TREE_OPERAND (tem, 0);
414 }
415
416 /* Canonicalize ptr vs. object. */
417 if (TREE_CODE (ptr1) == SSA_NAME && obj2)
418 {
419 std::swap (ptr1, ptr2);
420 std::swap (obj1, obj2);
421 }
422
423 if (obj1 && obj2)
424 /* Other code handles this correctly, no need to duplicate it here. */;
425 else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
426 {
427 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
428 /* We may not use restrict to optimize pointer comparisons.
429 See PR71062. So we have to assume that restrict-pointed-to
430 may in fact be obj1. */
431 if (!pi
432 || pi->pt.vars_contains_restrict
433 || pi->pt.vars_contains_interposable)
434 return false;
435 if (VAR_P (obj1)
436 && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
437 {
438 varpool_node *node = varpool_node::get (obj1);
439 /* If obj1 may bind to NULL give up (see below). */
440 if (! node
441 || ! node->nonzero_address ()
442 || ! decl_binds_to_current_def_p (obj1))
443 return false;
444 }
445 return !pt_solution_includes (&pi->pt, obj1);
446 }
447
448 /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
449 but those require pt.null to be conservatively correct. */
450
451 return false;
452 }
453
454 /* Returns whether reference REF to BASE may refer to global memory. */
455
456 static bool
457 ref_may_alias_global_p_1 (tree base)
458 {
459 if (DECL_P (base))
460 return is_global_var (base);
461 else if (TREE_CODE (base) == MEM_REF
462 || TREE_CODE (base) == TARGET_MEM_REF)
463 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
464 return true;
465 }
466
467 bool
468 ref_may_alias_global_p (ao_ref *ref)
469 {
470 tree base = ao_ref_base (ref);
471 return ref_may_alias_global_p_1 (base);
472 }
473
474 bool
475 ref_may_alias_global_p (tree ref)
476 {
477 tree base = get_base_address (ref);
478 return ref_may_alias_global_p_1 (base);
479 }
480
481 /* Return true if STMT may clobber global memory. */
482
483 bool
484 stmt_may_clobber_global_p (gimple *stmt)
485 {
486 tree lhs;
487
488 if (!gimple_vdef (stmt))
489 return false;
490
491 /* ??? We can ask the oracle whether an artificial pointer
492 dereference with a pointer with points-to information covering
493 all global memory (what about non-address taken memory?) may be
494 clobbered by this call. As there is at the moment no convenient
495 way of doing that without generating garbage, do some manual
496 checking instead.
497 ??? We could make a NULL ao_ref argument to the various
498 predicates special, meaning any global memory. */
499
500 switch (gimple_code (stmt))
501 {
502 case GIMPLE_ASSIGN:
503 lhs = gimple_assign_lhs (stmt);
504 return (TREE_CODE (lhs) != SSA_NAME
505 && ref_may_alias_global_p (lhs));
506 case GIMPLE_CALL:
507 return true;
508 default:
509 return true;
510 }
511 }
512
513
514 /* Dump alias information on FILE. */
515
516 void
517 dump_alias_info (FILE *file)
518 {
519 unsigned i;
520 tree ptr;
521 const char *funcname
522 = lang_hooks.decl_printable_name (current_function_decl, 2);
523 tree var;
524
525 fprintf (file, "\n\nAlias information for %s\n\n", funcname);
526
527 fprintf (file, "Aliased symbols\n\n");
528
529 FOR_EACH_LOCAL_DECL (cfun, i, var)
530 {
531 if (may_be_aliased (var))
532 dump_variable (file, var);
533 }
534
535 fprintf (file, "\nCall clobber information\n");
536
537 fprintf (file, "\nESCAPED");
538 dump_points_to_solution (file, &cfun->gimple_df->escaped);
539
540 fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
541
542 FOR_EACH_SSA_NAME (i, ptr, cfun)
543 {
544 struct ptr_info_def *pi;
545
546 if (!POINTER_TYPE_P (TREE_TYPE (ptr))
547 || SSA_NAME_IN_FREE_LIST (ptr))
548 continue;
549
550 pi = SSA_NAME_PTR_INFO (ptr);
551 if (pi)
552 dump_points_to_info_for (file, ptr);
553 }
554
555 fprintf (file, "\n");
556 }
557
558
559 /* Dump alias information on stderr. */
560
561 DEBUG_FUNCTION void
562 debug_alias_info (void)
563 {
564 dump_alias_info (stderr);
565 }
566
567
568 /* Dump the points-to set *PT into FILE. */
569
570 void
571 dump_points_to_solution (FILE *file, struct pt_solution *pt)
572 {
573 if (pt->anything)
574 fprintf (file, ", points-to anything");
575
576 if (pt->nonlocal)
577 fprintf (file, ", points-to non-local");
578
579 if (pt->escaped)
580 fprintf (file, ", points-to escaped");
581
582 if (pt->ipa_escaped)
583 fprintf (file, ", points-to unit escaped");
584
585 if (pt->null)
586 fprintf (file, ", points-to NULL");
587
588 if (pt->vars)
589 {
590 fprintf (file, ", points-to vars: ");
591 dump_decl_set (file, pt->vars);
592 if (pt->vars_contains_nonlocal
593 || pt->vars_contains_escaped
594 || pt->vars_contains_escaped_heap
595 || pt->vars_contains_restrict)
596 {
597 const char *comma = "";
598 fprintf (file, " (");
599 if (pt->vars_contains_nonlocal)
600 {
601 fprintf (file, "nonlocal");
602 comma = ", ";
603 }
604 if (pt->vars_contains_escaped)
605 {
606 fprintf (file, "%sescaped", comma);
607 comma = ", ";
608 }
609 if (pt->vars_contains_escaped_heap)
610 {
611 fprintf (file, "%sescaped heap", comma);
612 comma = ", ";
613 }
614 if (pt->vars_contains_restrict)
615 {
616 fprintf (file, "%srestrict", comma);
617 comma = ", ";
618 }
619 if (pt->vars_contains_interposable)
620 fprintf (file, "%sinterposable", comma);
621 fprintf (file, ")");
622 }
623 }
624 }
625
626
627 /* Unified dump function for pt_solution. */
628
629 DEBUG_FUNCTION void
630 debug (pt_solution &ref)
631 {
632 dump_points_to_solution (stderr, &ref);
633 }
634
635 DEBUG_FUNCTION void
636 debug (pt_solution *ptr)
637 {
638 if (ptr)
639 debug (*ptr);
640 else
641 fprintf (stderr, "<nil>\n");
642 }
643
644
645 /* Dump points-to information for SSA_NAME PTR into FILE. */
646
647 void
648 dump_points_to_info_for (FILE *file, tree ptr)
649 {
650 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
651
652 print_generic_expr (file, ptr, dump_flags);
653
654 if (pi)
655 dump_points_to_solution (file, &pi->pt);
656 else
657 fprintf (file, ", points-to anything");
658
659 fprintf (file, "\n");
660 }
661
662
663 /* Dump points-to information for VAR into stderr. */
664
665 DEBUG_FUNCTION void
666 debug_points_to_info_for (tree var)
667 {
668 dump_points_to_info_for (stderr, var);
669 }
670
671
672 /* Initializes the alias-oracle reference representation *R from REF. */
673
674 void
675 ao_ref_init (ao_ref *r, tree ref)
676 {
677 r->ref = ref;
678 r->base = NULL_TREE;
679 r->offset = 0;
680 r->size = -1;
681 r->max_size = -1;
682 r->ref_alias_set = -1;
683 r->base_alias_set = -1;
684 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
685 }
686
687 /* Returns the base object of the memory reference *REF. */
688
689 tree
690 ao_ref_base (ao_ref *ref)
691 {
692 bool reverse;
693
694 if (ref->base)
695 return ref->base;
696 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
697 &ref->max_size, &reverse);
698 return ref->base;
699 }
700
701 /* Returns the base object alias set of the memory reference *REF. */
702
703 alias_set_type
704 ao_ref_base_alias_set (ao_ref *ref)
705 {
706 tree base_ref;
707 if (ref->base_alias_set != -1)
708 return ref->base_alias_set;
709 if (!ref->ref)
710 return 0;
711 base_ref = ref->ref;
712 while (handled_component_p (base_ref))
713 base_ref = TREE_OPERAND (base_ref, 0);
714 ref->base_alias_set = get_alias_set (base_ref);
715 return ref->base_alias_set;
716 }
717
718 /* Returns the reference alias set of the memory reference *REF. */
719
720 alias_set_type
721 ao_ref_alias_set (ao_ref *ref)
722 {
723 if (ref->ref_alias_set != -1)
724 return ref->ref_alias_set;
725 if (!ref->ref)
726 return 0;
727 ref->ref_alias_set = get_alias_set (ref->ref);
728 return ref->ref_alias_set;
729 }
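
/* A minimal usage sketch of the ao_ref helpers above (REF stands for
   some memory reference tree and is a hypothetical placeholder):

     ao_ref r;
     ao_ref_init (&r, ref);
     tree base = ao_ref_base (&r);	    // also fills offset/size/max_size
     alias_set_type set = ao_ref_alias_set (&r);

   The base, extents and alias sets are computed lazily on first use and
   cached in the ao_ref, so repeated oracle queries against the same
   reference stay cheap.  */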
730
731 /* Init an alias-oracle reference representation from a gimple pointer
732 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
733 size is assumed to be unknown. The access is assumed to be only
734 to or after the pointer target, not before it. */
735
736 void
737 ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
738 {
739 poly_int64 t, size_hwi, extra_offset = 0;
740 ref->ref = NULL_TREE;
741 if (TREE_CODE (ptr) == SSA_NAME)
742 {
743 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
744 if (gimple_assign_single_p (stmt)
745 && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
746 ptr = gimple_assign_rhs1 (stmt);
747 else if (is_gimple_assign (stmt)
748 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
749 && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
750 {
751 ptr = gimple_assign_rhs1 (stmt);
752 extra_offset *= BITS_PER_UNIT;
753 }
754 }
755
756 if (TREE_CODE (ptr) == ADDR_EXPR)
757 {
758 ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
759 if (ref->base)
760 ref->offset = BITS_PER_UNIT * t;
761 else
762 {
763 size = NULL_TREE;
764 ref->offset = 0;
765 ref->base = get_base_address (TREE_OPERAND (ptr, 0));
766 }
767 }
768 else
769 {
770 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
771 ref->base = build2 (MEM_REF, char_type_node,
772 ptr, null_pointer_node);
773 ref->offset = 0;
774 }
775 ref->offset += extra_offset;
776 if (size
777 && poly_int_tree_p (size, &size_hwi)
778 && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
779 ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
780 else
781 ref->max_size = ref->size = -1;
782 ref->ref_alias_set = 0;
783 ref->base_alias_set = 0;
784 ref->volatile_p = false;
785 }
786
787 /* S1 and S2 are TYPE_SIZE or DECL_SIZE. Compare them:
788 Return -1 if S1 < S2
789 Return 1 if S1 > S2
790 Return 0 if equal or incomparable. */
791
792 static int
793 compare_sizes (tree s1, tree s2)
794 {
795 if (!s1 || !s2)
796 return 0;
797
798 poly_uint64 size1;
799 poly_uint64 size2;
800
801 if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
802 return 0;
803 if (known_lt (size1, size2))
804 return -1;
805 if (known_lt (size2, size1))
806 return 1;
807 return 0;
808 }
809
810 /* Compare TYPE1 and TYPE2 by its size.
811 Return -1 if size of TYPE1 < size of TYPE2
812 Return 1 if size of TYPE1 > size of TYPE2
813 Return 0 if types are of equal sizes or we can not compare them. */
814
815 static int
816 compare_type_sizes (tree type1, tree type2)
817 {
818 /* Be conservative for arrays and vectors. We want to support partial
819 overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */
820 while (TREE_CODE (type1) == ARRAY_TYPE
821 || TREE_CODE (type1) == VECTOR_TYPE)
822 type1 = TREE_TYPE (type1);
823 while (TREE_CODE (type2) == ARRAY_TYPE
824 || TREE_CODE (type2) == VECTOR_TYPE)
825 type2 = TREE_TYPE (type2);
826 return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
827 }
828
829 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
830 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
831 decide. */
832
833 static inline int
834 same_type_for_tbaa (tree type1, tree type2)
835 {
836 type1 = TYPE_MAIN_VARIANT (type1);
837 type2 = TYPE_MAIN_VARIANT (type2);
838
839 /* Handle the most common case first. */
840 if (type1 == type2)
841 return 1;
842
843 /* If we would have to do structural comparison bail out. */
844 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
845 || TYPE_STRUCTURAL_EQUALITY_P (type2))
846 return -1;
847
848 /* Compare the canonical types. */
849 if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
850 return 1;
851
852 /* ??? Array types are not properly unified in all cases as we have
853 spurious changes in the index types for example. Removing this
854 causes all sorts of problems with the Fortran frontend. */
855 if (TREE_CODE (type1) == ARRAY_TYPE
856 && TREE_CODE (type2) == ARRAY_TYPE)
857 return -1;
858
859 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
860 object of one of its constrained subtypes, e.g. when a function with an
861 unconstrained parameter passed by reference is called on an object and
862 inlined. But, even in the case of a fixed size, type and subtypes are
863 not equivalent enough as to share the same TYPE_CANONICAL, since this
864 would mean that conversions between them are useless, whereas they are
865 not (e.g. type and subtypes can have different modes). So, in the end,
866 they are only guaranteed to have the same alias set. */
867 alias_set_type set1 = get_alias_set (type1);
868 alias_set_type set2 = get_alias_set (type2);
869 if (set1 == set2)
870 return -1;
871
872 /* Pointers to void are considered compatible with all other pointers,
873 so for two pointers see what the alias set resolution thinks. */
874 if (POINTER_TYPE_P (type1)
875 && POINTER_TYPE_P (type2)
876 && alias_sets_conflict_p (set1, set2))
877 return -1;
878
879 /* The types are known to be not equal. */
880 return 0;
881 }
882
883 /* Return true if TYPE is a composite type (i.e. we may apply one of handled
884 components on it). */
885
886 static bool
887 type_has_components_p (tree type)
888 {
889 return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
890 || TREE_CODE (type) == COMPLEX_TYPE;
891 }
892
893 /* MATCH1 and MATCH2, which are part of the access paths of REF1 and REF2
894 respectively, either point to the same address or are completely
895 disjoint. If PARTIAL_OVERLAP is true, assume that outermost arrays may
896 just partly overlap.
897
898 Try to disambiguate using the access path starting from the match
899 and return false if there is no conflict.
900
901 Helper for aliasing_component_refs_p. */
902
903 static bool
904 aliasing_matching_component_refs_p (tree match1, tree ref1,
905 poly_int64 offset1, poly_int64 max_size1,
906 tree match2, tree ref2,
907 poly_int64 offset2, poly_int64 max_size2,
908 bool partial_overlap)
909 {
910 poly_int64 offadj, sztmp, msztmp;
911 bool reverse;
912
913 if (!partial_overlap)
914 {
915 get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
916 offset2 -= offadj;
917 get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
918 offset1 -= offadj;
919 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
920 {
921 ++alias_stats.aliasing_component_refs_p_no_alias;
922 return false;
923 }
924 }
925
926 int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
927 partial_overlap);
928 if (cmp == 1
929 || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
930 {
931 ++alias_stats.aliasing_component_refs_p_no_alias;
932 return false;
933 }
934 ++alias_stats.aliasing_component_refs_p_may_alias;
935 return true;
936 }
937
938 /* Return true if REF is a reference to a zero sized trailing array. I.e.
939 struct foo {int bar; int array[0];} *fooptr;
940 fooptr->array. */
941
942 static bool
943 component_ref_to_zero_sized_trailing_array_p (tree ref)
944 {
945 return (TREE_CODE (ref) == COMPONENT_REF
946 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
947 && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
948 || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
949 && array_at_struct_end_p (ref));
950 }
951
952 /* Worker for aliasing_component_refs_p. Most parameters match parameters of
953 aliasing_component_refs_p.
954
955 Walk access path REF2 and try to find type matching TYPE1
956 (which is a start of possibly aliasing access path REF1).
957 If match is found, try to disambiguate.
958
959 Return 0 for successful disambiguation.
960 Return 1 if a match was found but disambiguation failed.
961 Return -1 if there is no match.
962 In this case MAYBE_MATCH is left false if there is no type matching TYPE1
963 in the access path of REF2 and set to true if we are not sure. */
964
965 static int
966 aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
967 poly_int64 offset1, poly_int64 max_size1,
968 tree end_struct_ref1,
969 tree ref2, tree base2,
970 poly_int64 offset2, poly_int64 max_size2,
971 bool *maybe_match)
972 {
973 tree ref = ref2;
974 int same_p = 0;
975
976 while (true)
977 {
978 /* We walk from inner type to the outer types. If the type we see is
979 already too large to be part of type1, terminate the search. */
980 int cmp = compare_type_sizes (type1, TREE_TYPE (ref));
981
982 if (cmp < 0
983 && (!end_struct_ref1
984 || compare_type_sizes (TREE_TYPE (end_struct_ref1),
985 TREE_TYPE (ref)) < 0))
986 break;
987 /* If types may be of same size, see if we can decide about their
988 equality. */
989 if (cmp == 0)
990 {
991 same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
992 if (same_p == 1)
993 break;
994 /* In case we can't decide whether the types are the same, try to
995 continue looking for an exact match.
996 Remember however that we possibly saw a match
997 so we can bypass the access path continuation tests we do later. */
998 if (same_p == -1)
999 *maybe_match = true;
1000 }
1001 if (!handled_component_p (ref))
1002 break;
1003 ref = TREE_OPERAND (ref, 0);
1004 }
1005 if (same_p == 1)
1006 {
1007 bool partial_overlap = false;
1008
1009 /* We assume that arrays can overlap by a multiple of their element
1010 size as tested in gcc.dg/torture/alias-2.c.
1011 This partial overlap happens only when both arrays are bases of
1012 the access and not contained within another component ref.
1013 To be safe we also assume partial overlap for VLAs. */
1014 if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
1015 && (!TYPE_SIZE (TREE_TYPE (base1))
1016 || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
1017 || ref == base2))
1018 {
1019 /* Setting maybe_match to true triggers
1020 nonoverlapping_component_refs_p test later that still may do
1021 useful disambiguation. */
1022 *maybe_match = true;
1023 partial_overlap = true;
1024 }
1025 return aliasing_matching_component_refs_p (base1, ref1,
1026 offset1, max_size1,
1027 ref, ref2,
1028 offset2, max_size2,
1029 partial_overlap);
1030 }
1031 return -1;
1032 }
1033
1034 /* Consider access path1 base1....ref1 and access path2 base2...ref2.
1035 Return true if they can be composed into a single access path
1036 base1...ref1...base2...ref2.
1037
1038 REF_TYPE1 is the type of REF1. END_STRUCT_PAST_END1 is true if there is
1039 a trailing array access after REF1 in the non-TBAA part of the access.
1040 REF1_ALIAS_SET is the alias set of REF1.
1041
1042 BASE_TYPE2 is the type of base2. END_STRUCT_REF2 is non-NULL if there is
1043 a trailing array access in the TBAA part of access path2.
1044 BASE2_ALIAS_SET is the alias set of base2. */
1045
1046 bool
1047 access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
1048 alias_set_type ref1_alias_set,
1049 tree base_type2, tree end_struct_ref2,
1050 alias_set_type base2_alias_set)
1051 {
1052 /* Access path can not continue past types with no components. */
1053 if (!type_has_components_p (ref_type1))
1054 return false;
1055
1056 /* If the first access path ends with a type too small to hold the base
1057 of the second access path, typically the paths cannot continue.
1058
1059 Punt if end_struct_past_end1 is true. We want to support arbitrary
1060 type punning past the first COMPONENT_REF to a union because redundant
1061 store elimination depends on this, see PR92152. For this reason we
1062 cannot check the size of the reference because types may partially overlap. */
1063 if (!end_struct_past_end1)
1064 {
1065 if (compare_type_sizes (ref_type1, base_type2) < 0)
1066 return false;
1067 /* If path2 contains a trailing array access we can strengthen the check
1068 to verify that the size of the element of the trailing array fits too.
1069 In fact we could check for offset + type_size, but we do not track
1070 offsets and this is quite a corner case. */
1071 if (end_struct_ref2
1072 && compare_type_sizes (ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
1073 return false;
1074 }
1075 return (base2_alias_set == ref1_alias_set
1076 || alias_set_subset_of (base2_alias_set, ref1_alias_set));
1077 }
1078
1079 /* Determine if the two component references REF1 and REF2 which are
1080 based on access types TYPE1 and TYPE2 and of which at least one is based
1081 on an indirect reference may alias.
1082 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
1083 are the respective alias sets. */
1084
1085 static bool
1086 aliasing_component_refs_p (tree ref1,
1087 alias_set_type ref1_alias_set,
1088 alias_set_type base1_alias_set,
1089 poly_int64 offset1, poly_int64 max_size1,
1090 tree ref2,
1091 alias_set_type ref2_alias_set,
1092 alias_set_type base2_alias_set,
1093 poly_int64 offset2, poly_int64 max_size2)
1094 {
1095 /* If one reference is a component reference through pointers, try to find a
1096 common base and apply offset based disambiguation. This handles
1097 for example
1098 struct A { int i; int j; } *q;
1099 struct B { struct A a; int k; } *p;
1100 disambiguating q->i and p->a.j. */
1101 tree base1, base2;
1102 tree type1, type2;
1103 bool maybe_match = false;
1104 tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
1105 bool end_struct_past_end1 = false;
1106 bool end_struct_past_end2 = false;
1107
1108 /* Choose bases and base types to search for.
1109 The access path is as follows:
1110 base....end_of_tbaa_ref...actual_ref
1111 At one place in the access path there may be a reference to a zero sized
1112 or trailing array.
1113
1114 We generally discard the segment after end_of_tbaa_ref, however
1115 we need to be careful in case it contains a zero sized or trailing array.
1116 These may happen after a reference to a union and in this case we must
1117 not disambiguate type punning scenarios.
1118
1119 We set:
1120 base1 to point to base
1121
1122 ref1 to point to end_of_tbaa_ref
1123
1124 end_struct_ref1 to point to the trailing reference (if it exists
1125 in the range base....end_of_tbaa_ref)
1126
1127 end_struct_past_end1 is true if this trailing reference occurs in
1128 end_of_tbaa_ref...actual_ref. */
1129 base1 = ref1;
1130 while (handled_component_p (base1))
1131 {
1132 /* Generally access paths are monotone in the size of the object. The
1133 exception is trailing arrays of structures. I.e.
1134 struct a {int array[0];};
1135 or
1136 struct a {int array1[0]; int array[];};
1137 Such struct has size 0 but accesses to a.array may have non-zero size.
1138 In this case the size of TREE_TYPE (base1) is smaller than
1139 size of TREE_TYPE (TREE_OPERNAD (base1, 0)).
1140
1141 Because we compare sizes of arrays just by sizes of their elements,
1142 we only need to care about zero sized array fields here. */
1143 if (component_ref_to_zero_sized_trailing_array_p (base1))
1144 {
1145 gcc_checking_assert (!end_struct_ref1);
1146 end_struct_ref1 = base1;
1147 }
1148 if (ends_tbaa_access_path_p (base1))
1149 {
1150 ref1 = TREE_OPERAND (base1, 0);
1151 if (end_struct_ref1)
1152 {
1153 end_struct_past_end1 = true;
1154 end_struct_ref1 = NULL;
1155 }
1156 }
1157 base1 = TREE_OPERAND (base1, 0);
1158 }
1159 type1 = TREE_TYPE (base1);
1160 base2 = ref2;
1161 while (handled_component_p (base2))
1162 {
1163 if (component_ref_to_zero_sized_trailing_array_p (base2))
1164 {
1165 gcc_checking_assert (!end_struct_ref2);
1166 end_struct_ref2 = base2;
1167 }
1168 if (ends_tbaa_access_path_p (base2))
1169 {
1170 ref2 = TREE_OPERAND (base2, 0);
1171 if (end_struct_ref2)
1172 {
1173 end_struct_past_end2 = true;
1174 end_struct_ref2 = NULL;
1175 }
1176 }
1177 base2 = TREE_OPERAND (base2, 0);
1178 }
1179 type2 = TREE_TYPE (base2);
1180
1181 /* Now search for the type1 in the access path of ref2. This
1182 would be a common base for doing offset based disambiguation on.
1183 This however only makes sense if type2 is big enough to hold type1. */
1184 int cmp_outer = compare_type_sizes (type2, type1);
1185
1186 /* If type2 is big enough to contain type1 walk its access path.
1187 We also need to take care of arrays at the end of structs that may extend
1188 beyond the end of the structure. If this occurs in the TBAA part of the
1189 access path, we need to consider the increased type as well. */
1190 if (cmp_outer >= 0
1191 || (end_struct_ref2
1192 && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
1193 {
1194 int res = aliasing_component_refs_walk (ref1, type1, base1,
1195 offset1, max_size1,
1196 end_struct_ref1,
1197 ref2, base2, offset2, max_size2,
1198 &maybe_match);
1199 if (res != -1)
1200 return res;
1201 }
1202
1203 /* If we didn't find a common base, try the other way around. */
1204 if (cmp_outer <= 0
1205 || (end_struct_ref1
1206 && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0))
1207 {
1208 int res = aliasing_component_refs_walk (ref2, type2, base2,
1209 offset2, max_size2,
1210 end_struct_ref2,
1211 ref1, base1, offset1, max_size1,
1212 &maybe_match);
1213 if (res != -1)
1214 return res;
1215 }
1216
1217 /* In the following code we make an assumption that the types in access
1218 paths do not overlap and thus accesses alias only if one path can be a
1219 continuation of another. If we were not able to decide about equivalence,
1220 we need to give up. */
1221 if (maybe_match)
1222 {
1223 if (!nonoverlapping_component_refs_p (ref1, ref2))
1224 {
1225 ++alias_stats.aliasing_component_refs_p_may_alias;
1226 return true;
1227 }
1228 ++alias_stats.aliasing_component_refs_p_no_alias;
1229 return false;
1230 }
1231
1232 if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
1233 ref1_alias_set,
1234 type2, end_struct_ref2,
1235 base2_alias_set)
1236 || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end2,
1237 ref2_alias_set,
1238 type1, end_struct_ref1,
1239 base1_alias_set))
1240 {
1241 ++alias_stats.aliasing_component_refs_p_may_alias;
1242 return true;
1243 }
1244 ++alias_stats.aliasing_component_refs_p_no_alias;
1245 return false;
1246 }
1247
1248 /* FIELD1 and FIELD2 are two fields of component refs. We assume
1249 that bases of both component refs are either equivalent or nonoverlapping.
1250 We do not assume that the containers of FIELD1 and FIELD2 are of the
1251 same type or size.
1252
1253 Return 0 if, assuming the base addresses of the component refs are the
1254 same, FIELD1 and FIELD2 have the same address. Note that FIELD1 and
1255 FIELD2 may not be of the same type or size.
1256
1257 Return 1 if FIELD1 and FIELD2 are non-overlapping.
1258
1259 Return -1 otherwise.
1260
1261 The main difference between 0 and -1 is to let
1262 nonoverlapping_refs_since_match_p discover the semantically
1263 equivalent part of the access path.
1264
1265 Note that this function is used even with -fno-strict-aliasing
1266 and makes use of no TBAA assumptions. */
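
/* As an illustrative sketch of the return values (assuming a hypothetical
   struct S { int a; int b; } and an object s of that type):

     s.a vs. s.b -> 1 (same RECORD_TYPE, different FIELD_DECLs, disjoint)
     s.a vs. s.a -> 0 (same field, so same address whenever the bases match)

   while fields of unrelated containers fall back to the offset and size
   comparison below and may yield -1.  */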
1267
1268 static int
1269 nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
1270 {
1271 /* If both fields are of the same type, we could save hard work of
1272 comparing offsets. */
1273 tree type1 = DECL_CONTEXT (field1);
1274 tree type2 = DECL_CONTEXT (field2);
1275
1276 if (TREE_CODE (type1) == RECORD_TYPE
1277 && DECL_BIT_FIELD_REPRESENTATIVE (field1))
1278 field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
1279 if (TREE_CODE (type2) == RECORD_TYPE
1280 && DECL_BIT_FIELD_REPRESENTATIVE (field2))
1281 field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);
1282
1283 /* ??? Bitfields can overlap at RTL level so punt on them.
1284 FIXME: RTL expansion should be fixed by adjusting the access path
1285 when producing MEM_ATTRs for MEMs which are wider than
1286 the bitfields similarly as done in set_mem_attrs_minus_bitpos. */
1287 if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
1288 return -1;
1289
1290 /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE. */
1291 if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
1292 return field1 != field2;
1293
1294 /* In the common case the offsets and bit offsets will be the same.
1295 However if frontends do not agree on the alignment, they may be
1296 different even if they actually represent the same address.
1297 Try the common case first and if that fails calculate the
1298 actual bit offset. */
1299 if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
1300 DECL_FIELD_OFFSET (field2))
1301 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
1302 DECL_FIELD_BIT_OFFSET (field2)))
1303 return 0;
1304
1305 /* Note that it may be possible to use component_ref_field_offset
1306 which would provide offsets as trees. However constructing and folding
1307 trees is expensive and does not seem to be worth the compile time
1308 cost. */
1309
1310 poly_uint64 offset1, offset2;
1311 poly_uint64 bit_offset1, bit_offset2;
1312
1313 if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
1314 && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
1315 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
1316 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
1317 {
1318 offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
1319 offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;
1320
1321 if (known_eq (offset1, offset2))
1322 return 0;
1323
1324 poly_uint64 size1, size2;
1325
1326 if (poly_int_tree_p (DECL_SIZE (field1), &size1)
1327 && poly_int_tree_p (DECL_SIZE (field2), &size2)
1328 && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
1329 return 1;
1330 }
1331 /* Resort to slower overlap checking by looking for matching types in
1332 the middle of the access path. */
1333 return -1;
1334 }
1335
1336 /* Return the low bound of an array reference. Do not produce new trees
1337 and thus do not care about the particular type of integer constant
1338 and placeholder exprs. */
1339
1340 static tree
1341 cheap_array_ref_low_bound (tree ref)
1342 {
1343 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
1344
1345 /* Avoid expensive array_ref_low_bound.
1346 The low bound is either stored in operand 2, or it is TYPE_MIN_VALUE of the
1347 domain type, or it is zero. */
1348 if (TREE_OPERAND (ref, 2))
1349 return TREE_OPERAND (ref, 2);
1350 else if (domain_type && TYPE_MIN_VALUE (domain_type))
1351 return TYPE_MIN_VALUE (domain_type);
1352 else
1353 return integer_zero_node;
1354 }
1355
1356 /* REF1 and REF2 are ARRAY_REFs with either same base address or which are
1357 completely disjoint.
1358
1359 Return 1 if the refs are non-overlapping.
1360 Return 0 if they are possibly overlapping but if so the overlap again
1361 starts on the same address.
1362 Return -1 otherwise. */
1363
1364 int
1365 nonoverlapping_array_refs_p (tree ref1, tree ref2)
1366 {
1367 tree index1 = TREE_OPERAND (ref1, 1);
1368 tree index2 = TREE_OPERAND (ref2, 1);
1369 tree low_bound1 = cheap_array_ref_low_bound (ref1);
1370 tree low_bound2 = cheap_array_ref_low_bound (ref2);
1371
1372 /* Handle zero offsets first: we do not need to match type size in this
1373 case. */
1374 if (operand_equal_p (index1, low_bound1, 0)
1375 && operand_equal_p (index2, low_bound2, 0))
1376 return 0;
1377
1378 /* If type sizes are different, give up.
1379
1380 Avoid expensive array_ref_element_size.
1381 If operand 3 is present it denotes the size in alignment units.
1382 Otherwise size is TYPE_SIZE of the element type.
1383 Handle only common cases where types are of the same "kind". */
1384 if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
1385 return -1;
1386
1387 tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
1388 tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));
1389
1390 if (TREE_OPERAND (ref1, 3))
1391 {
1392 if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
1393 || !operand_equal_p (TREE_OPERAND (ref1, 3),
1394 TREE_OPERAND (ref2, 3), 0))
1395 return -1;
1396 }
1397 else
1398 {
1399 if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
1400 TYPE_SIZE_UNIT (elmt_type2), 0))
1401 return -1;
1402 }
1403
1404 /* Since we know that type sizes are the same, there is no need to return
1405 -1 after this point. Partial overlap cannot be introduced. */
1406
1407 /* We may need to fold trees in this case.
1408 TODO: Handle integer constant case at least. */
1409 if (!operand_equal_p (low_bound1, low_bound2, 0))
1410 return 0;
1411
1412 if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
1413 {
1414 if (tree_int_cst_equal (index1, index2))
1415 return 0;
1416 return 1;
1417 }
1418 /* TODO: We can use VRP to further disambiguate here. */
1419 return 0;
1420 }
1421
1422 /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
1423 MATCH2 either point to the same address or are disjoint.
1424 MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
1425 respectively or NULL in the case we established equivalence of bases.
1426 If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
1427 overlap by an exact multiple of their element size.
1428
1429 This test works by matching the initial segment of the access path
1430 and does not rely on TBAA thus is safe for !flag_strict_aliasing if
1431 match was determined without use of TBAA oracle.
1432
1433 Return 1 if we can determine that component references REF1 and REF2,
1434 that are within a common DECL, cannot overlap.
1435
1436 Return 0 if paths are same and thus there is nothing to disambiguate more
1437 (i.e. there is must alias assuming there is must alias between MATCH1 and
1438 MATCH2)
1439
1440 Return -1 if we cannot determine 0 or 1 - this happens when a
1441 non-matching type was met in the path.
1442 In this case it may make sense to continue with other disambiguation
1443 oracles. */
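
/* For instance (an illustrative sketch, not an exhaustive specification):
   given a DECL a of a hypothetical type struct { struct inner b;
   struct inner c; } and MATCH1 == MATCH2 == a,

     a.b.x vs. a.c.x -> 1 (fields b and c cannot overlap)
     a.b.x vs. a.b.x -> 0 (the paths match, so the refs must overlap)

   while mismatching or unanalyzable components in the paths yield -1.  */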
1444
1445 static int
1446 nonoverlapping_refs_since_match_p (tree match1, tree ref1,
1447 tree match2, tree ref2,
1448 bool partial_overlap)
1449 {
1450 int ntbaa1 = 0, ntbaa2 = 0;
1451 /* Early return if there are no references to match, we do not need
1452 to walk the access paths.
1453
1454 Do not consider this as may-alias for stats - it is more useful
1455 to have information about how many disambiguations happened provided that
1456 the query was meaningful. */
1457
1458 if (match1 == ref1 || !handled_component_p (ref1)
1459 || match2 == ref2 || !handled_component_p (ref2))
1460 return -1;
1461
1462 auto_vec<tree, 16> component_refs1;
1463 auto_vec<tree, 16> component_refs2;
1464
1465 /* Create the stack of handled components for REF1. */
1466 while (handled_component_p (ref1) && ref1 != match1)
1467 {
1468 /* We use TBAA only to re-synchronize after mismatched refs. So we
1469 do not need to truncate access path after TBAA part ends. */
1470 if (ends_tbaa_access_path_p (ref1))
1471 ntbaa1 = 0;
1472 else
1473 ntbaa1++;
1474 component_refs1.safe_push (ref1);
1475 ref1 = TREE_OPERAND (ref1, 0);
1476 }
1477
1478 /* Create the stack of handled components for REF2. */
1479 while (handled_component_p (ref2) && ref2 != match2)
1480 {
1481 if (ends_tbaa_access_path_p (ref2))
1482 ntbaa2 = 0;
1483 else
1484 ntbaa2++;
1485 component_refs2.safe_push (ref2);
1486 ref2 = TREE_OPERAND (ref2, 0);
1487 }
1488
1489 if (!flag_strict_aliasing)
1490 {
1491 ntbaa1 = 0;
1492 ntbaa2 = 0;
1493 }
1494
1495 bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
1496 bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;
1497
1498 /* If only one of the access paths starts with a MEM_REF, check that the
1499 offset is 0 so the address stays the same after stripping it.
1500 TODO: In this case we may walk the other access path until we get the
1501 same offset.
1502
1503 If both start with a MEM_REF, the offsets have to be the same. */
1504 if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
1505 || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
1506 || (mem_ref1 && mem_ref2
1507 && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
1508 TREE_OPERAND (ref2, 1))))
1509 {
1510 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1511 return -1;
1512 }
1513
1514 /* TARGET_MEM_REFs are never wrapped in handled components, so we do not need
1515 to handle them here at all. */
1516 gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
1517 && TREE_CODE (ref2) != TARGET_MEM_REF);
1518
1519 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1520 rank. This is sufficient because we start from the same DECL and you
1521 cannot reference several fields at a time with COMPONENT_REFs (unlike
1522 with ARRAY_RANGE_REFs for arrays) so you always need the same number
1523 of them to access a sub-component, unless you're in a union, in which
1524 case the return value will precisely be false. */
1525 while (true)
1526 {
1527 /* Track whether we have seen an unmatched ref with a non-zero offset.
1528 In this case we must look for partial overlaps. */
1529 bool seen_unmatched_ref_p = false;
1530
1531 /* First match ARRAY_REFs and try to disambiguate. */
1532 if (!component_refs1.is_empty ()
1533 && !component_refs2.is_empty ())
1534 {
1535 unsigned int narray_refs1=0, narray_refs2=0;
1536
1537 /* We generally assume that both access paths start with the same sequence
1538 of refs. However if the number of array refs is not in sync, try
1539 to recover and pop elts until the numbers match. This helps the case
1540 where one access path starts with an array and the other with an element. */
1541 for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
1542 narray_refs1++)
1543 if (TREE_CODE (component_refs1 [component_refs1.length()
1544 - 1 - narray_refs1]) != ARRAY_REF)
1545 break;
1546
1547 for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
1548 narray_refs2++)
1549 if (TREE_CODE (component_refs2 [component_refs2.length()
1550 - 1 - narray_refs2]) != ARRAY_REF)
1551 break;
1552 for (; narray_refs1 > narray_refs2; narray_refs1--)
1553 {
1554 ref1 = component_refs1.pop ();
1555 ntbaa1--;
1556
1557 /* If the index is non-zero we need to check that the reference
1558 does not break the main invariant that bases are either
1559 disjoint or equal. Consider the example:
1560
1561 unsigned char out[][1];
1562 out[1]="a";
1563 out[i][0];
1564
1565 Here both bases are out and thus the same, but after removing the
1566 [i] index, this invariant no longer holds, because
1567 out[i] points to the middle of the array out.
1568
1569 TODO: If the size of the type of the skipped reference is an integer
1570 multiple of the size of the type of the other reference this
1571 invariant can be verified, but even then it is not completely
1572 safe with !flag_strict_aliasing if the other reference contains
1573 unbounded array accesses.
1574 See */
1575
1576 if (!operand_equal_p (TREE_OPERAND (ref1, 1),
1577 cheap_array_ref_low_bound (ref1), 0))
1578 return 0;
1579 }
1580 for (; narray_refs2 > narray_refs1; narray_refs2--)
1581 {
1582 ref2 = component_refs2.pop ();
1583 ntbaa2--;
1584 if (!operand_equal_p (TREE_OPERAND (ref2, 1),
1585 cheap_array_ref_low_bound (ref2), 0))
1586 return 0;
1587 }
1588 /* Try to disambiguate matched arrays. */
1589 for (unsigned int i = 0; i < narray_refs1; i++)
1590 {
1591 int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
1592 component_refs2.pop ());
1593 ntbaa1--;
1594 ntbaa2--;
1595 if (cmp == 1 && !partial_overlap)
1596 {
1597 ++alias_stats
1598 .nonoverlapping_refs_since_match_p_no_alias;
1599 return 1;
1600 }
1601 if (cmp == -1)
1602 {
1603 seen_unmatched_ref_p = true;
1604 /* We cannot maintain the invariant that bases are either the
1605 same or completely disjoint. However we can still recover
1606 from type based alias analysis if we reach references of the
1607 same sizes. We do not attempt to match array sizes, so
1608 just finish array walking and look for component refs. */
1609 if (ntbaa1 < 0 || ntbaa2 < 0)
1610 {
1611 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1612 return -1;
1613 }
1614 for (i++; i < narray_refs1; i++)
1615 {
1616 component_refs1.pop ();
1617 component_refs2.pop ();
1618 ntbaa1--;
1619 ntbaa2--;
1620 }
1621 break;
1622 }
1623 partial_overlap = false;
1624 }
1625 }
1626
1627 /* Next look for component_refs. */
1628 do
1629 {
1630 if (component_refs1.is_empty ())
1631 {
1632 ++alias_stats
1633 .nonoverlapping_refs_since_match_p_must_overlap;
1634 return 0;
1635 }
1636 ref1 = component_refs1.pop ();
1637 ntbaa1--;
1638 if (TREE_CODE (ref1) != COMPONENT_REF)
1639 {
1640 seen_unmatched_ref_p = true;
1641 if (ntbaa1 < 0 || ntbaa2 < 0)
1642 {
1643 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1644 return -1;
1645 }
1646 }
1647 }
1648 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
1649
1650 do
1651 {
1652 if (component_refs2.is_empty ())
1653 {
1654 ++alias_stats
1655 .nonoverlapping_refs_since_match_p_must_overlap;
1656 return 0;
1657 }
1658 ref2 = component_refs2.pop ();
1659 ntbaa2--;
1660 if (TREE_CODE (ref2) != COMPONENT_REF)
1661 {
1662 if (ntbaa1 < 0 || ntbaa2 < 0)
1663 {
1664 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1665 return -1;
1666 }
1667 seen_unmatched_ref_p = true;
1668 }
1669 }
1670 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
1671
1672 /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
1673 earlier. */
1674 gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
1675 && TREE_CODE (ref2) == COMPONENT_REF);
1676
1677 tree field1 = TREE_OPERAND (ref1, 1);
1678 tree field2 = TREE_OPERAND (ref2, 1);
1679
1680 /* ??? We cannot simply use the type of operand #0 of the refs here
1681 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1682 for common blocks instead of using unions like everyone else. */
1683 tree type1 = DECL_CONTEXT (field1);
1684 tree type2 = DECL_CONTEXT (field2);
1685
1686 partial_overlap = false;
1687
1688 /* If we skipped array refs on types of different sizes, we can
1689 no longer be sure that there are no partial overlaps. */
1690 if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
1691 && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
1692 {
1693 ++alias_stats
1694 .nonoverlapping_refs_since_match_p_may_alias;
1695 return -1;
1696 }
1697
1698 int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
1699 if (cmp == -1)
1700 {
1701 ++alias_stats
1702 .nonoverlapping_refs_since_match_p_may_alias;
1703 return -1;
1704 }
1705 else if (cmp == 1)
1706 {
1707 ++alias_stats
1708 .nonoverlapping_refs_since_match_p_no_alias;
1709 return 1;
1710 }
1711 }
1712
1713 ++alias_stats.nonoverlapping_refs_since_match_p_must_overlap;
1714 return 0;
1715 }
1716
1717 /* Return TYPE_UID which can be used to match record types we consider
1718 the same for TBAA purposes. */
1719
1720 static inline int
1721 ncr_type_uid (const_tree field)
1722 {
1723 /* ??? We cannot simply use the type of operand #0 of the refs here
1724 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1725 for common blocks instead of using unions like everyone else. */
1726 tree type = DECL_FIELD_CONTEXT (field);
1727 /* With LTO, types from different translation units that are considered
1728 the same by same_type_for_tbaa_p may not have the same
1729 main variant. They do, however, have the same TYPE_CANONICAL. */
1730 if (TYPE_CANONICAL (type))
1731 return TYPE_UID (TYPE_CANONICAL (type));
1732 return TYPE_UID (type);
1733 }
1734
1735 /* qsort compare function to sort FIELD_DECLs by their
1736    DECL_FIELD_CONTEXT TYPE_UID.  */
1737
1738 static inline int
1739 ncr_compar (const void *field1_, const void *field2_)
1740 {
1741 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1742 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1743 unsigned int uid1 = ncr_type_uid (field1);
1744 unsigned int uid2 = ncr_type_uid (field2);
1745
1746 if (uid1 < uid2)
1747 return -1;
1748 else if (uid1 > uid2)
1749 return 1;
1750 return 0;
1751 }
1752
1753 /* Return true if we can determine that the fields referenced cannot
1754 overlap for any pair of objects. This relies on TBAA. */
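/* For instance, assuming a record type

     struct S { int f; double g; };

   an access path ending in ...x.f and another ending in ...y.g can never
   overlap, because TBAA guarantees both paths access an S object and the
   fields F and G occupy disjoint positions within S.  The record and
   field names here are purely illustrative.  */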
1755
1756 static bool
1757 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1758 {
1759 /* Early return if we have nothing to do.
1760
1761 Do not consider this as may-alias for stats - it is more useful
1762      to have information on how many disambiguations happened provided that
1763 the query was meaningful. */
1764 if (!flag_strict_aliasing
1765 || !x || !y
1766 || !handled_component_p (x)
1767 || !handled_component_p (y))
1768 return false;
1769
1770 auto_vec<const_tree, 16> fieldsx;
1771 while (handled_component_p (x))
1772 {
1773 if (TREE_CODE (x) == COMPONENT_REF)
1774 {
1775 tree field = TREE_OPERAND (x, 1);
1776 tree type = DECL_FIELD_CONTEXT (field);
1777 if (TREE_CODE (type) == RECORD_TYPE)
1778 fieldsx.safe_push (field);
1779 }
1780 else if (ends_tbaa_access_path_p (x))
1781 fieldsx.truncate (0);
1782 x = TREE_OPERAND (x, 0);
1783 }
1784 if (fieldsx.length () == 0)
1785 return false;
1786 auto_vec<const_tree, 16> fieldsy;
1787 while (handled_component_p (y))
1788 {
1789 if (TREE_CODE (y) == COMPONENT_REF)
1790 {
1791 tree field = TREE_OPERAND (y, 1);
1792 tree type = DECL_FIELD_CONTEXT (field);
1793 if (TREE_CODE (type) == RECORD_TYPE)
1794 	    fieldsy.safe_push (field);
1795 }
1796 else if (ends_tbaa_access_path_p (y))
1797 fieldsy.truncate (0);
1798 y = TREE_OPERAND (y, 0);
1799 }
1800 if (fieldsy.length () == 0)
1801 {
1802 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1803 return false;
1804 }
1805
1806 /* Most common case first. */
1807 if (fieldsx.length () == 1
1808 && fieldsy.length () == 1)
1809 {
1810 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
1811 DECL_FIELD_CONTEXT (fieldsy[0])) == 1
1812 && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1)
1813 {
1814 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1815 return true;
1816 }
1817 else
1818 {
1819 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1820 return false;
1821 }
1822 }
1823
1824 if (fieldsx.length () == 2)
1825 {
1826 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1827 std::swap (fieldsx[0], fieldsx[1]);
1828 }
1829 else
1830 fieldsx.qsort (ncr_compar);
1831
1832 if (fieldsy.length () == 2)
1833 {
1834 if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1835 std::swap (fieldsy[0], fieldsy[1]);
1836 }
1837 else
1838 fieldsy.qsort (ncr_compar);
1839
1840 unsigned i = 0, j = 0;
1841 do
1842 {
1843 const_tree fieldx = fieldsx[i];
1844 const_tree fieldy = fieldsy[j];
1845
1846 /* We're left with accessing different fields of a structure,
1847 no possible overlap. */
1848 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
1849 DECL_FIELD_CONTEXT (fieldy)) == 1
1850 && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1)
1851 {
1852 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1853 return true;
1854 }
1855
1856 if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy))
1857 {
1858 i++;
1859 if (i == fieldsx.length ())
1860 break;
1861 }
1862 else
1863 {
1864 j++;
1865 if (j == fieldsy.length ())
1866 break;
1867 }
1868 }
1869 while (1);
1870
1871 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1872 return false;
1873 }
1874
1875
1876 /* Return true if two memory references based on the variables BASE1
1877 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1878 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1879 if non-NULL are the complete memory reference trees. */
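/* For instance, assuming declarations

     int a[8], b;

   an access to A cannot alias an access to B because the base decls
   differ, and A[1] cannot alias A[3] because the offset ranges within A
   are disjoint.  The declarations are purely illustrative.  */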
1880
1881 static bool
1882 decl_refs_may_alias_p (tree ref1, tree base1,
1883 poly_int64 offset1, poly_int64 max_size1,
1884 poly_int64 size1,
1885 tree ref2, tree base2,
1886 poly_int64 offset2, poly_int64 max_size2,
1887 poly_int64 size2)
1888 {
1889 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
1890
1891   /* If the references are based on different variables, they cannot alias.  */
1892 if (compare_base_decls (base1, base2) == 0)
1893 return false;
1894
1895 /* If both references are based on the same variable, they cannot alias if
1896 the accesses do not overlap. */
1897 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1898 return false;
1899
1900   /* If there is a must-alias, there is no use disambiguating further.  */
1901 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
1902 return true;
1903
1904 /* For components with variable position, the above test isn't sufficient,
1905 so we disambiguate component references manually. */
1906 if (ref1 && ref2
1907 && handled_component_p (ref1) && handled_component_p (ref2)
1908 && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
1909 return false;
1910
1911 return true;
1912 }
1913
1914 /* Return true if an indirect reference based on *PTR1 constrained
1915 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
1916 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
1917 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1918 in which case they are computed on-demand. REF1 and REF2
1919 if non-NULL are the complete memory reference trees. */
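/* For instance, assuming

     int d;
     float *p;

   a store to *P cannot alias D if points-to analysis shows P cannot point
   to D; and even when the points-to test is inconclusive, under
   -fstrict-aliasing the alias set of a float access does not conflict
   with that of an int object.  The declarations are purely
   illustrative.  */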
1920
1921 static bool
1922 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1923 poly_int64 offset1, poly_int64 max_size1,
1924 poly_int64 size1,
1925 alias_set_type ref1_alias_set,
1926 alias_set_type base1_alias_set,
1927 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1928 poly_int64 offset2, poly_int64 max_size2,
1929 poly_int64 size2,
1930 alias_set_type ref2_alias_set,
1931 alias_set_type base2_alias_set, bool tbaa_p)
1932 {
1933 tree ptr1;
1934 tree ptrtype1, dbase2;
1935
1936 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1937 || TREE_CODE (base1) == TARGET_MEM_REF)
1938 && DECL_P (base2));
1939
1940 ptr1 = TREE_OPERAND (base1, 0);
1941 poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
1942
1943 /* If only one reference is based on a variable, they cannot alias if
1944 the pointer access is beyond the extent of the variable access.
1945 (the pointer base cannot validly point to an offset less than zero
1946 of the variable).
1947 ??? IVOPTs creates bases that do not honor this restriction,
1948 so do not apply this optimization for TARGET_MEM_REFs. */
1949 if (TREE_CODE (base1) != TARGET_MEM_REF
1950 && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
1951 return false;
1952 /* They also cannot alias if the pointer may not point to the decl. */
1953 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
1954 return false;
1955
1956 /* Disambiguations that rely on strict aliasing rules follow. */
1957 if (!flag_strict_aliasing || !tbaa_p)
1958 return true;
1959
1960 /* If the alias set for a pointer access is zero all bets are off. */
1961 if (base1_alias_set == 0 || base2_alias_set == 0)
1962 return true;
1963
1964 /* When we are trying to disambiguate an access with a pointer dereference
1965 as base versus one with a decl as base we can use both the size
1966 of the decl and its dynamic type for extra disambiguation.
1967 ??? We do not know anything about the dynamic type of the decl
1968 other than that its alias-set contains base2_alias_set as a subset
1969 which does not help us here. */
1970 /* As we know nothing useful about the dynamic type of the decl just
1971 use the usual conflict check rather than a subset test.
1972 ??? We could introduce -fvery-strict-aliasing when the language
1973 does not allow decls to have a dynamic type that differs from their
1974 static type. Then we can check
1975 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
1976 if (base1_alias_set != base2_alias_set
1977 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1978 return false;
1979
1980 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1981
1982 /* If the size of the access relevant for TBAA through the pointer
1983 is bigger than the size of the decl we can't possibly access the
1984 decl via that pointer. */
1985 if (/* ??? This in turn may run afoul when a decl of type T which is
1986 a member of union type U is accessed through a pointer to
1987 type U and sizeof T is smaller than sizeof U. */
1988 TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
1989 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
1990 && compare_sizes (DECL_SIZE (base2),
1991 TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
1992 return false;
1993
1994 if (!ref2)
1995 return true;
1996
1997 /* If the decl is accessed via a MEM_REF, reconstruct the base
1998 we can use for TBAA and an appropriately adjusted offset. */
1999 dbase2 = ref2;
2000 while (handled_component_p (dbase2))
2001 dbase2 = TREE_OPERAND (dbase2, 0);
2002 poly_int64 doffset1 = offset1;
2003 poly_offset_int doffset2 = offset2;
2004 if (TREE_CODE (dbase2) == MEM_REF
2005 || TREE_CODE (dbase2) == TARGET_MEM_REF)
2006 {
2007 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
2008 tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
2009 /* If second reference is view-converted, give up now. */
2010 if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
2011 return true;
2012 }
2013
2014 /* If first reference is view-converted, give up now. */
2015 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
2016 return true;
2017
2018 /* If both references are through the same type, they do not alias
2019 if the accesses do not overlap. This does extra disambiguation
2020 for mixed/pointer accesses but requires strict aliasing.
2021 For MEM_REFs we require that the component-ref offset we computed
2022 is relative to the start of the type which we ensure by
2023 comparing rvalue and access type and disregarding the constant
2024 pointer offset.
2025
2026 But avoid treating variable length arrays as "objects", instead assume they
2027 can overlap by an exact multiple of their element size.
2028 See gcc.dg/torture/alias-2.c. */
2029 if (((TREE_CODE (base1) != TARGET_MEM_REF
2030 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2031 && (TREE_CODE (dbase2) != TARGET_MEM_REF
2032 || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
2033 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
2034 {
2035 bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
2036 && (TYPE_SIZE (TREE_TYPE (base1))
2037 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
2038 != INTEGER_CST));
2039 if (!partial_overlap
2040 && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
2041 return false;
2042 if (!ref1 || !ref2
2043 	  /* If there is a must-alias, there is no use disambiguating further.  */
2044 || (!partial_overlap
2045 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2046 return true;
2047 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2048 partial_overlap);
2049 if (res == -1)
2050 return !nonoverlapping_component_refs_p (ref1, ref2);
2051 return !res;
2052 }
2053
2054 /* Do access-path based disambiguation. */
2055 if (ref1 && ref2
2056 && (handled_component_p (ref1) || handled_component_p (ref2)))
2057 return aliasing_component_refs_p (ref1,
2058 ref1_alias_set, base1_alias_set,
2059 offset1, max_size1,
2060 ref2,
2061 ref2_alias_set, base2_alias_set,
2062 offset2, max_size2);
2063
2064 return true;
2065 }
2066
2067 /* Return true if two indirect references based on *PTR1
2068 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2069 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
2070 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2071 in which case they are computed on-demand. REF1 and REF2
2072 if non-NULL are the complete memory reference trees. */
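/* For instance, assuming

     int *p;
     long *q;

   *P and *Q cannot alias under -fstrict-aliasing because the alias sets
   of int and long do not conflict; and two accesses based on the same
   pointer, such as *P and *(P + 1), cannot alias because their offset
   ranges are disjoint.  The declarations are purely illustrative.  */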
2073
2074 static bool
2075 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2076 poly_int64 offset1, poly_int64 max_size1,
2077 poly_int64 size1,
2078 alias_set_type ref1_alias_set,
2079 alias_set_type base1_alias_set,
2080 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2081 poly_int64 offset2, poly_int64 max_size2,
2082 poly_int64 size2,
2083 alias_set_type ref2_alias_set,
2084 alias_set_type base2_alias_set, bool tbaa_p)
2085 {
2086 tree ptr1;
2087 tree ptr2;
2088 tree ptrtype1, ptrtype2;
2089
2090 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2091 || TREE_CODE (base1) == TARGET_MEM_REF)
2092 && (TREE_CODE (base2) == MEM_REF
2093 || TREE_CODE (base2) == TARGET_MEM_REF));
2094
2095 ptr1 = TREE_OPERAND (base1, 0);
2096 ptr2 = TREE_OPERAND (base2, 0);
2097
2098 /* If both bases are based on pointers they cannot alias if they may not
2099 point to the same memory object or if they point to the same object
2100 and the accesses do not overlap. */
2101 if ((!cfun || gimple_in_ssa_p (cfun))
2102 && operand_equal_p (ptr1, ptr2, 0)
2103 && (((TREE_CODE (base1) != TARGET_MEM_REF
2104 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2105 && (TREE_CODE (base2) != TARGET_MEM_REF
2106 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
2107 || (TREE_CODE (base1) == TARGET_MEM_REF
2108 && TREE_CODE (base2) == TARGET_MEM_REF
2109 && (TMR_STEP (base1) == TMR_STEP (base2)
2110 || (TMR_STEP (base1) && TMR_STEP (base2)
2111 && operand_equal_p (TMR_STEP (base1),
2112 TMR_STEP (base2), 0)))
2113 && (TMR_INDEX (base1) == TMR_INDEX (base2)
2114 || (TMR_INDEX (base1) && TMR_INDEX (base2)
2115 && operand_equal_p (TMR_INDEX (base1),
2116 TMR_INDEX (base2), 0)))
2117 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
2118 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
2119 && operand_equal_p (TMR_INDEX2 (base1),
2120 TMR_INDEX2 (base2), 0))))))
2121 {
2122 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2123 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
2124 if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
2125 offset2 + moff2, max_size2))
2126 return false;
2127       /* If there is a must-alias, there is no use disambiguating further.  */
2128 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2129 return true;
2130 if (ref1 && ref2)
2131 {
2132 int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
2133 false);
2134 if (res != -1)
2135 return !res;
2136 }
2137 }
2138 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
2139 return false;
2140
2141 /* Disambiguations that rely on strict aliasing rules follow. */
2142 if (!flag_strict_aliasing || !tbaa_p)
2143 return true;
2144
2145 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2146 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
2147
2148 /* If the alias set for a pointer access is zero all bets are off. */
2149 if (base1_alias_set == 0
2150 || base2_alias_set == 0)
2151 return true;
2152
2153 /* Do type-based disambiguation. */
2154 if (base1_alias_set != base2_alias_set
2155 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2156 return false;
2157
2158 /* If either reference is view-converted, give up now. */
2159 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
2160 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
2161 return true;
2162
2163 /* If both references are through the same type, they do not alias
2164 if the accesses do not overlap. This does extra disambiguation
2165 for mixed/pointer accesses but requires strict aliasing. */
2166 if ((TREE_CODE (base1) != TARGET_MEM_REF
2167 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2168 && (TREE_CODE (base2) != TARGET_MEM_REF
2169 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
2170 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
2171 TREE_TYPE (ptrtype2)) == 1)
2172 {
2173 /* But avoid treating arrays as "objects", instead assume they
2174 can overlap by an exact multiple of their element size.
2175 See gcc.dg/torture/alias-2.c. */
2176 bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
2177
2178 if (!partial_overlap
2179 && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
2180 return false;
2181 if (!ref1 || !ref2
2182 || (!partial_overlap
2183 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2184 return true;
2185 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2186 partial_overlap);
2187 if (res == -1)
2188 return !nonoverlapping_component_refs_p (ref1, ref2);
2189 return !res;
2190 }
2191
2192 /* Do access-path based disambiguation. */
2193 if (ref1 && ref2
2194 && (handled_component_p (ref1) || handled_component_p (ref2)))
2195 return aliasing_component_refs_p (ref1,
2196 ref1_alias_set, base1_alias_set,
2197 offset1, max_size1,
2198 ref2,
2199 ref2_alias_set, base2_alias_set,
2200 offset2, max_size2);
2201
2202 return true;
2203 }
2204
2205 /* Return true if the two memory references REF1 and REF2 may alias.  */
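/* The references are decomposed into base, offset and size first.  Pairs
   of decl bases are dispatched to decl_refs_may_alias_p right away; the
   remaining cases go through restrict- and TBAA-based early outs and then
   to indirect_ref_may_alias_decl_p for mixed pointer/decl pairs or
   indirect_refs_may_alias_p for pairs of pointer bases.  */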
2206
2207 static bool
2208 refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2209 {
2210 tree base1, base2;
2211 poly_int64 offset1 = 0, offset2 = 0;
2212 poly_int64 max_size1 = -1, max_size2 = -1;
2213 bool var1_p, var2_p, ind1_p, ind2_p;
2214
2215 gcc_checking_assert ((!ref1->ref
2216 || TREE_CODE (ref1->ref) == SSA_NAME
2217 || DECL_P (ref1->ref)
2218 || TREE_CODE (ref1->ref) == STRING_CST
2219 || handled_component_p (ref1->ref)
2220 || TREE_CODE (ref1->ref) == MEM_REF
2221 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
2222 && (!ref2->ref
2223 || TREE_CODE (ref2->ref) == SSA_NAME
2224 || DECL_P (ref2->ref)
2225 || TREE_CODE (ref2->ref) == STRING_CST
2226 || handled_component_p (ref2->ref)
2227 || TREE_CODE (ref2->ref) == MEM_REF
2228 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
2229
2230 /* Decompose the references into their base objects and the access. */
2231 base1 = ao_ref_base (ref1);
2232 offset1 = ref1->offset;
2233 max_size1 = ref1->max_size;
2234 base2 = ao_ref_base (ref2);
2235 offset2 = ref2->offset;
2236 max_size2 = ref2->max_size;
2237
2238 /* We can end up with registers or constants as bases for example from
2239 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
2240 which is seen as a struct copy. */
2241 if (TREE_CODE (base1) == SSA_NAME
2242 || TREE_CODE (base1) == CONST_DECL
2243 || TREE_CODE (base1) == CONSTRUCTOR
2244 || TREE_CODE (base1) == ADDR_EXPR
2245 || CONSTANT_CLASS_P (base1)
2246 || TREE_CODE (base2) == SSA_NAME
2247 || TREE_CODE (base2) == CONST_DECL
2248 || TREE_CODE (base2) == CONSTRUCTOR
2249 || TREE_CODE (base2) == ADDR_EXPR
2250 || CONSTANT_CLASS_P (base2))
2251 return false;
2252
2253 /* We can end up referring to code via function and label decls.
2254      As we likely do not properly track code aliases, conservatively
2255 bail out. */
2256 if (TREE_CODE (base1) == FUNCTION_DECL
2257 || TREE_CODE (base1) == LABEL_DECL
2258 || TREE_CODE (base2) == FUNCTION_DECL
2259 || TREE_CODE (base2) == LABEL_DECL)
2260 return true;
2261
2262 /* Two volatile accesses always conflict. */
2263 if (ref1->volatile_p
2264 && ref2->volatile_p)
2265 return true;
2266
2267 /* Defer to simple offset based disambiguation if we have
2268      references based on two decls.  Do this before deferring to
2269 TBAA to handle must-alias cases in conformance with the
2270 GCC extension of allowing type-punning through unions. */
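  /* For instance, for an object

       union U { int i; float f; } u;

     U.I and U.F must be treated as aliasing even though their TBAA alias
     sets do not conflict, which is why the offset-based decl test runs
     before the TBAA check below.  The union is purely illustrative.  */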
2271 var1_p = DECL_P (base1);
2272 var2_p = DECL_P (base2);
2273 if (var1_p && var2_p)
2274 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
2275 ref1->size,
2276 ref2->ref, base2, offset2, max_size2,
2277 ref2->size);
2278
2279 /* Handle restrict based accesses.
2280 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
2281 here. */
2282 tree rbase1 = base1;
2283 tree rbase2 = base2;
2284 if (var1_p)
2285 {
2286 rbase1 = ref1->ref;
2287 if (rbase1)
2288 while (handled_component_p (rbase1))
2289 rbase1 = TREE_OPERAND (rbase1, 0);
2290 }
2291 if (var2_p)
2292 {
2293 rbase2 = ref2->ref;
2294 if (rbase2)
2295 while (handled_component_p (rbase2))
2296 rbase2 = TREE_OPERAND (rbase2, 0);
2297 }
2298 if (rbase1 && rbase2
2299 && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
2300 && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
2301 /* If the accesses are in the same restrict clique... */
2302 && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
2303 /* But based on different pointers they do not alias. */
2304 && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
2305 return false;
2306
2307 ind1_p = (TREE_CODE (base1) == MEM_REF
2308 || TREE_CODE (base1) == TARGET_MEM_REF);
2309 ind2_p = (TREE_CODE (base2) == MEM_REF
2310 || TREE_CODE (base2) == TARGET_MEM_REF);
2311
2312 /* Canonicalize the pointer-vs-decl case. */
2313 if (ind1_p && var2_p)
2314 {
2315 std::swap (offset1, offset2);
2316 std::swap (max_size1, max_size2);
2317 std::swap (base1, base2);
2318 std::swap (ref1, ref2);
2319 var1_p = true;
2320 ind1_p = false;
2321 var2_p = false;
2322 ind2_p = true;
2323 }
2324
2325 /* First defer to TBAA if possible. */
2326 if (tbaa_p
2327 && flag_strict_aliasing
2328 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
2329 ao_ref_alias_set (ref2)))
2330 return false;
2331
2332 /* If the reference is based on a pointer that points to memory
2333 that may not be written to then the other reference cannot possibly
2334 clobber it. */
2335 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
2336 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
2337 || (ind1_p
2338 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
2339 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
2340 return false;
2341
2342 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
2343 if (var1_p && ind2_p)
2344 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
2345 offset2, max_size2, ref2->size,
2346 ao_ref_alias_set (ref2),
2347 ao_ref_base_alias_set (ref2),
2348 ref1->ref, base1,
2349 offset1, max_size1, ref1->size,
2350 ao_ref_alias_set (ref1),
2351 ao_ref_base_alias_set (ref1),
2352 tbaa_p);
2353 else if (ind1_p && ind2_p)
2354 return indirect_refs_may_alias_p (ref1->ref, base1,
2355 offset1, max_size1, ref1->size,
2356 ao_ref_alias_set (ref1),
2357 ao_ref_base_alias_set (ref1),
2358 ref2->ref, base2,
2359 offset2, max_size2, ref2->size,
2360 ao_ref_alias_set (ref2),
2361 ao_ref_base_alias_set (ref2),
2362 tbaa_p);
2363
2364 gcc_unreachable ();
2365 }
2366
2367 /* Return true if the two memory references REF1 and REF2 may alias,
2368    and update statistics.  */
2369
2370 bool
2371 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2372 {
2373 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
2374 if (res)
2375 ++alias_stats.refs_may_alias_p_may_alias;
2376 else
2377 ++alias_stats.refs_may_alias_p_no_alias;
2378 return res;
2379 }
2380
2381 static bool
2382 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
2383 {
2384 ao_ref r1;
2385 ao_ref_init (&r1, ref1);
2386 return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
2387 }
2388
2389 bool
2390 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
2391 {
2392 ao_ref r1, r2;
2393 ao_ref_init (&r1, ref1);
2394 ao_ref_init (&r2, ref2);
2395 return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
2396 }
2397
2398 /* Returns true if there is an anti-dependence for the STORE that
2399 executes after the LOAD. */
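/* For instance, a load from *P followed by a store to *Q is an
   anti-dependence whenever *P and *Q may overlap.  Note that the query
   deliberately passes tbaa_p as false.  */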
2400
2401 bool
2402 refs_anti_dependent_p (tree load, tree store)
2403 {
2404 ao_ref r1, r2;
2405 ao_ref_init (&r1, load);
2406 ao_ref_init (&r2, store);
2407 return refs_may_alias_p_1 (&r1, &r2, false);
2408 }
2409
2410 /* Returns true if there is an output dependence for the stores
2411 STORE1 and STORE2. */
2412
2413 bool
2414 refs_output_dependent_p (tree store1, tree store2)
2415 {
2416 ao_ref r1, r2;
2417 ao_ref_init (&r1, store1);
2418 ao_ref_init (&r2, store2);
2419 return refs_may_alias_p_1 (&r1, &r2, false);
2420 }
2421
2422 /* Returns true if and only if REF may alias any access stored in TT.
2423    If TBAA_P is true, use the TBAA oracle.  */
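/* The summary TT is a two-level tree: a vector of base alias sets, each
   carrying a vector of ref alias sets seen for accesses through that
   base.  REF is reported as conflicting unless every recorded
   <base, ref> pair can be ruled out with alias_sets_conflict_p, and at
   most param_modref_max_tests oracle queries are spent before giving up
   conservatively.  */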
2424
2425 static bool
2426 modref_may_conflict (modref_tree <alias_set_type> *tt, ao_ref *ref, bool tbaa_p)
2427 {
2428 alias_set_type base_set, ref_set;
2429 modref_base_node <alias_set_type> *base_node;
2430 modref_ref_node <alias_set_type> *ref_node;
2431 size_t i, j;
2432
2433 if (tt->every_base)
2434 return true;
2435
2436 base_set = ao_ref_base_alias_set (ref);
2437
2438 ref_set = ao_ref_alias_set (ref);
2439
2440 int num_tests = 0, max_tests = param_modref_max_tests;
2441 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, base_node)
2442 {
2443 if (base_node->every_ref)
2444 return true;
2445
2446 if (!base_node->base)
2447 return true;
2448
2449 if (tbaa_p && flag_strict_aliasing)
2450 {
2451 if (!alias_sets_conflict_p (base_set, base_node->base))
2452 continue;
2453 alias_stats.modref_tests++;
2454 num_tests++;
2455 }
2456 else
2457 return true;
2458 if (num_tests >= max_tests)
2459 return true;
2460
2461 FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
2462 {
2463 	  /* Do not repeat the same test as before.  */
2464 if (ref_set == base_set && base_node->base == ref_node->ref)
2465 return true;
2466 if (!flag_strict_aliasing)
2467 return true;
2468 if (alias_sets_conflict_p (ref_set, ref_node->ref))
2469 return true;
2470 alias_stats.modref_tests++;
2471 num_tests++;
2472 if (num_tests >= max_tests)
2473 return true;
2474 }
2475 }
2476 return false;
2477 }
2478
2479 /* If the call CALL may use the memory reference REF return true,
2480 otherwise return false. */
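/* For instance, a call such as

     memcpy (d, s, n);

   uses the N bytes at S (handled by the builtin table below), while a
   const call without a static chain uses no memory at all; other calls
   are checked against per-function summaries and the points-to "use"
   set.  The call is purely illustrative.  */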
2481
2482 static bool
2483 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2484 {
2485 tree base, callee;
2486 unsigned i;
2487 int flags = gimple_call_flags (call);
2488
2489 /* Const functions without a static chain do not implicitly use memory. */
2490 if (!gimple_call_chain (call)
2491 && (flags & (ECF_CONST|ECF_NOVOPS)))
2492 goto process_args;
2493
2494 /* A call that is not without side-effects might involve volatile
2495 accesses and thus conflicts with all other volatile accesses. */
2496 if (ref->volatile_p)
2497 return true;
2498
2499 callee = gimple_call_fndecl (call);
2500
2501 if (!gimple_call_chain (call) && callee != NULL_TREE)
2502 {
2503 struct cgraph_node *node = cgraph_node::get (callee);
2504       /* We cannot safely optimize based on the summary of the callee if it does
2505 	 not always bind to the current def: it is possible that a memory load
2506 	 was optimized out earlier and the interposed variant may not be
2507 	 optimized this way.  */
2508 if (node && node->binds_to_current_def_p ())
2509 {
2510 modref_summary *summary = get_modref_function_summary (node);
2511 if (summary)
2512 {
2513 if (!modref_may_conflict (summary->loads, ref, tbaa_p))
2514 {
2515 alias_stats.modref_use_no_alias++;
2516 if (dump_file && (dump_flags & TDF_DETAILS))
2517 {
2518 fprintf (dump_file, "ipa-modref: in %s,"
2519 " call to %s does not use ",
2520 cgraph_node::get
2521 (current_function_decl)->dump_name (),
2522 node->dump_name ());
2523 print_generic_expr (dump_file, ref->ref);
2524 fprintf (dump_file, " %i->%i\n",
2525 ao_ref_base_alias_set (ref),
2526 ao_ref_alias_set (ref));
2527 }
2528 goto process_args;
2529 }
2530 alias_stats.modref_use_may_alias++;
2531 }
2532 }
2533 }
2534
2535 base = ao_ref_base (ref);
2536 if (!base)
2537 return true;
2538
2539 /* If the reference is based on a decl that is not aliased the call
2540 cannot possibly use it. */
2541 if (DECL_P (base)
2542 && !may_be_aliased (base)
2543 /* But local statics can be used through recursion. */
2544 && !is_global_var (base))
2545 goto process_args;
2546
2547 /* Handle those builtin functions explicitly that do not act as
2548 escape points. See tree-ssa-structalias.c:find_func_aliases
2549 for the list of builtins we might need to handle here. */
2550 if (callee != NULL_TREE
2551 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2552 switch (DECL_FUNCTION_CODE (callee))
2553 {
2554 /* All the following functions read memory pointed to by
2555 their second argument. strcat/strncat additionally
2556 reads memory pointed to by the first argument. */
2557 case BUILT_IN_STRCAT:
2558 case BUILT_IN_STRNCAT:
2559 {
2560 ao_ref dref;
2561 ao_ref_init_from_ptr_and_size (&dref,
2562 gimple_call_arg (call, 0),
2563 NULL_TREE);
2564 if (refs_may_alias_p_1 (&dref, ref, false))
2565 return true;
2566 }
2567 /* FALLTHRU */
2568 case BUILT_IN_STRCPY:
2569 case BUILT_IN_STRNCPY:
2570 case BUILT_IN_MEMCPY:
2571 case BUILT_IN_MEMMOVE:
2572 case BUILT_IN_MEMPCPY:
2573 case BUILT_IN_STPCPY:
2574 case BUILT_IN_STPNCPY:
2575 case BUILT_IN_TM_MEMCPY:
2576 case BUILT_IN_TM_MEMMOVE:
2577 {
2578 ao_ref dref;
2579 tree size = NULL_TREE;
2580 if (gimple_call_num_args (call) == 3)
2581 size = gimple_call_arg (call, 2);
2582 ao_ref_init_from_ptr_and_size (&dref,
2583 gimple_call_arg (call, 1),
2584 size);
2585 return refs_may_alias_p_1 (&dref, ref, false);
2586 }
2587 case BUILT_IN_STRCAT_CHK:
2588 case BUILT_IN_STRNCAT_CHK:
2589 {
2590 ao_ref dref;
2591 ao_ref_init_from_ptr_and_size (&dref,
2592 gimple_call_arg (call, 0),
2593 NULL_TREE);
2594 if (refs_may_alias_p_1 (&dref, ref, false))
2595 return true;
2596 }
2597 /* FALLTHRU */
2598 case BUILT_IN_STRCPY_CHK:
2599 case BUILT_IN_STRNCPY_CHK:
2600 case BUILT_IN_MEMCPY_CHK:
2601 case BUILT_IN_MEMMOVE_CHK:
2602 case BUILT_IN_MEMPCPY_CHK:
2603 case BUILT_IN_STPCPY_CHK:
2604 case BUILT_IN_STPNCPY_CHK:
2605 {
2606 ao_ref dref;
2607 tree size = NULL_TREE;
2608 if (gimple_call_num_args (call) == 4)
2609 size = gimple_call_arg (call, 2);
2610 ao_ref_init_from_ptr_and_size (&dref,
2611 gimple_call_arg (call, 1),
2612 size);
2613 return refs_may_alias_p_1 (&dref, ref, false);
2614 }
2615 case BUILT_IN_BCOPY:
2616 {
2617 ao_ref dref;
2618 tree size = gimple_call_arg (call, 2);
2619 ao_ref_init_from_ptr_and_size (&dref,
2620 gimple_call_arg (call, 0),
2621 size);
2622 return refs_may_alias_p_1 (&dref, ref, false);
2623 }
2624
2625 /* The following functions read memory pointed to by their
2626 first argument. */
2627 CASE_BUILT_IN_TM_LOAD (1):
2628 CASE_BUILT_IN_TM_LOAD (2):
2629 CASE_BUILT_IN_TM_LOAD (4):
2630 CASE_BUILT_IN_TM_LOAD (8):
2631 CASE_BUILT_IN_TM_LOAD (FLOAT):
2632 CASE_BUILT_IN_TM_LOAD (DOUBLE):
2633 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
2634 CASE_BUILT_IN_TM_LOAD (M64):
2635 CASE_BUILT_IN_TM_LOAD (M128):
2636 CASE_BUILT_IN_TM_LOAD (M256):
2637 case BUILT_IN_TM_LOG:
2638 case BUILT_IN_TM_LOG_1:
2639 case BUILT_IN_TM_LOG_2:
2640 case BUILT_IN_TM_LOG_4:
2641 case BUILT_IN_TM_LOG_8:
2642 case BUILT_IN_TM_LOG_FLOAT:
2643 case BUILT_IN_TM_LOG_DOUBLE:
2644 case BUILT_IN_TM_LOG_LDOUBLE:
2645 case BUILT_IN_TM_LOG_M64:
2646 case BUILT_IN_TM_LOG_M128:
2647 case BUILT_IN_TM_LOG_M256:
2648 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
2649
2650 /* These read memory pointed to by the first argument. */
2651 case BUILT_IN_STRDUP:
2652 case BUILT_IN_STRNDUP:
2653 case BUILT_IN_REALLOC:
2654 {
2655 ao_ref dref;
2656 tree size = NULL_TREE;
2657 if (gimple_call_num_args (call) == 2)
2658 size = gimple_call_arg (call, 1);
2659 ao_ref_init_from_ptr_and_size (&dref,
2660 gimple_call_arg (call, 0),
2661 size);
2662 return refs_may_alias_p_1 (&dref, ref, false);
2663 }
2664 /* These read memory pointed to by the first argument. */
2665 case BUILT_IN_INDEX:
2666 case BUILT_IN_STRCHR:
2667 case BUILT_IN_STRRCHR:
2668 {
2669 ao_ref dref;
2670 ao_ref_init_from_ptr_and_size (&dref,
2671 gimple_call_arg (call, 0),
2672 NULL_TREE);
2673 return refs_may_alias_p_1 (&dref, ref, false);
2674 }
2675 /* These read memory pointed to by the first argument with size
2676 in the third argument. */
2677 case BUILT_IN_MEMCHR:
2678 {
2679 ao_ref dref;
2680 ao_ref_init_from_ptr_and_size (&dref,
2681 gimple_call_arg (call, 0),
2682 gimple_call_arg (call, 2));
2683 return refs_may_alias_p_1 (&dref, ref, false);
2684 }
2685 /* These read memory pointed to by the first and second arguments. */
2686 case BUILT_IN_STRSTR:
2687 case BUILT_IN_STRPBRK:
2688 {
2689 ao_ref dref;
2690 ao_ref_init_from_ptr_and_size (&dref,
2691 gimple_call_arg (call, 0),
2692 NULL_TREE);
2693 if (refs_may_alias_p_1 (&dref, ref, false))
2694 return true;
2695 ao_ref_init_from_ptr_and_size (&dref,
2696 gimple_call_arg (call, 1),
2697 NULL_TREE);
2698 return refs_may_alias_p_1 (&dref, ref, false);
2699 }
2700
2701 /* The following builtins do not read from memory. */
2702 case BUILT_IN_FREE:
2703 case BUILT_IN_MALLOC:
2704 case BUILT_IN_POSIX_MEMALIGN:
2705 case BUILT_IN_ALIGNED_ALLOC:
2706 case BUILT_IN_CALLOC:
2707 CASE_BUILT_IN_ALLOCA:
2708 case BUILT_IN_STACK_SAVE:
2709 case BUILT_IN_STACK_RESTORE:
2710 case BUILT_IN_MEMSET:
2711 case BUILT_IN_TM_MEMSET:
2712 case BUILT_IN_MEMSET_CHK:
2713 case BUILT_IN_FREXP:
2714 case BUILT_IN_FREXPF:
2715 case BUILT_IN_FREXPL:
2716 case BUILT_IN_GAMMA_R:
2717 case BUILT_IN_GAMMAF_R:
2718 case BUILT_IN_GAMMAL_R:
2719 case BUILT_IN_LGAMMA_R:
2720 case BUILT_IN_LGAMMAF_R:
2721 case BUILT_IN_LGAMMAL_R:
2722 case BUILT_IN_MODF:
2723 case BUILT_IN_MODFF:
2724 case BUILT_IN_MODFL:
2725 case BUILT_IN_REMQUO:
2726 case BUILT_IN_REMQUOF:
2727 case BUILT_IN_REMQUOL:
2728 case BUILT_IN_SINCOS:
2729 case BUILT_IN_SINCOSF:
2730 case BUILT_IN_SINCOSL:
2731 case BUILT_IN_ASSUME_ALIGNED:
2732 case BUILT_IN_VA_END:
2733 return false;
2734 /* __sync_* builtins and some OpenMP builtins act as threading
2735 barriers. */
2736 #undef DEF_SYNC_BUILTIN
2737 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2738 #include "sync-builtins.def"
2739 #undef DEF_SYNC_BUILTIN
2740 case BUILT_IN_GOMP_ATOMIC_START:
2741 case BUILT_IN_GOMP_ATOMIC_END:
2742 case BUILT_IN_GOMP_BARRIER:
2743 case BUILT_IN_GOMP_BARRIER_CANCEL:
2744 case BUILT_IN_GOMP_TASKWAIT:
2745 case BUILT_IN_GOMP_TASKGROUP_END:
2746 case BUILT_IN_GOMP_CRITICAL_START:
2747 case BUILT_IN_GOMP_CRITICAL_END:
2748 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2749 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2750 case BUILT_IN_GOMP_LOOP_END:
2751 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2752 case BUILT_IN_GOMP_ORDERED_START:
2753 case BUILT_IN_GOMP_ORDERED_END:
2754 case BUILT_IN_GOMP_SECTIONS_END:
2755 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2756 case BUILT_IN_GOMP_SINGLE_COPY_START:
2757 case BUILT_IN_GOMP_SINGLE_COPY_END:
2758 return true;
2759
2760 default:
2761 /* Fallthru to general call handling. */;
2762 }
2763
2764 /* Check if base is a global static variable that is not read
2765 by the function. */
2766 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2767 {
2768 struct cgraph_node *node = cgraph_node::get (callee);
2769 bitmap read;
2770 int id;
2771
2772 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2773 node yet. We should enforce that there are nodes for all decls in the
2774 IL and remove this check instead. */
2775 if (node
2776 && (id = ipa_reference_var_uid (base)) != -1
2777 && (read = ipa_reference_get_read_global (node))
2778 && !bitmap_bit_p (read, id))
2779 goto process_args;
2780 }
2781
2782 /* Check if the base variable is call-used. */
2783 if (DECL_P (base))
2784 {
2785 if (pt_solution_includes (gimple_call_use_set (call), base))
2786 return true;
2787 }
2788 else if ((TREE_CODE (base) == MEM_REF
2789 || TREE_CODE (base) == TARGET_MEM_REF)
2790 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2791 {
2792 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2793 if (!pi)
2794 return true;
2795
2796 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
2797 return true;
2798 }
2799 else
2800 return true;
2801
2802 /* Inspect call arguments for passed-by-value aliases. */
2803 process_args:
2804 for (i = 0; i < gimple_call_num_args (call); ++i)
2805 {
2806 tree op = gimple_call_arg (call, i);
2807 int flags = gimple_call_arg_flags (call, i);
2808
2809 if (flags & EAF_UNUSED)
2810 continue;
2811
2812 if (TREE_CODE (op) == WITH_SIZE_EXPR)
2813 op = TREE_OPERAND (op, 0);
2814
2815 if (TREE_CODE (op) != SSA_NAME
2816 && !is_gimple_min_invariant (op))
2817 {
2818 ao_ref r;
2819 ao_ref_init (&r, op);
2820 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
2821 return true;
2822 }
2823 }
2824
2825 return false;
2826 }
2827
2828 static bool
2829 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2830 {
2831 bool res;
2832 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2833 if (res)
2834 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2835 else
2836 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2837 return res;
2838 }
2839
2840
2841 /* If the statement STMT may use the memory reference REF return
2842 true, otherwise return false. */
2843
2844 bool
2845 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2846 {
2847 if (is_gimple_assign (stmt))
2848 {
2849 tree rhs;
2850
2851       /* All assignments that reference memory are single assignments.  */
2852 if (!gimple_assign_single_p (stmt))
2853 return false;
2854
2855 rhs = gimple_assign_rhs1 (stmt);
2856 if (is_gimple_reg (rhs)
2857 || is_gimple_min_invariant (rhs)
2858 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2859 return false;
2860
2861 return refs_may_alias_p (rhs, ref, tbaa_p);
2862 }
2863 else if (is_gimple_call (stmt))
2864 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2865 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2866 {
2867 tree retval = gimple_return_retval (return_stmt);
2868 if (retval
2869 && TREE_CODE (retval) != SSA_NAME
2870 && !is_gimple_min_invariant (retval)
2871 && refs_may_alias_p (retval, ref, tbaa_p))
2872 return true;
2873 /* If ref escapes the function then the return acts as a use. */
2874 tree base = ao_ref_base (ref);
2875 if (!base)
2876 ;
2877 else if (DECL_P (base))
2878 return is_global_var (base);
2879 else if (TREE_CODE (base) == MEM_REF
2880 || TREE_CODE (base) == TARGET_MEM_REF)
2881 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
2882 return false;
2883 }
2884
2885 return true;
2886 }
2887
2888 bool
2889 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
2890 {
2891 ao_ref r;
2892 ao_ref_init (&r, ref);
2893 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
2894 }
2895
2896 /* If the call in statement CALL may clobber the memory reference REF
2897 return true, otherwise return false. */
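/* For instance, a call such as

     memset (p, 0, n);

   clobbers only the N bytes at P, while a pure or const call clobbers
   nothing; other calls are checked against per-function summaries and
   the points-to "clobber" set.  The call is purely illustrative.  */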
2898
2899 bool
2900 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2901 {
2902 tree base;
2903 tree callee;
2904
2905 /* If the call is pure or const it cannot clobber anything. */
2906 if (gimple_call_flags (call)
2907 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
2908 return false;
2909 if (gimple_call_internal_p (call))
2910 switch (gimple_call_internal_fn (call))
2911 {
2912 /* Treat these internal calls like ECF_PURE for aliasing,
2913 they don't write to any memory the program should care about.
2914 They have important other side-effects, and read memory,
2915 so can't be ECF_NOVOPS. */
2916 case IFN_UBSAN_NULL:
2917 case IFN_UBSAN_BOUNDS:
2918 case IFN_UBSAN_VPTR:
2919 case IFN_UBSAN_OBJECT_SIZE:
2920 case IFN_UBSAN_PTR:
2921 case IFN_ASAN_CHECK:
2922 return false;
2923 default:
2924 break;
2925 }
2926
2927 callee = gimple_call_fndecl (call);
2928
2929 if (callee != NULL_TREE && !ref->volatile_p)
2930 {
2931 struct cgraph_node *node = cgraph_node::get (callee);
2932 if (node)
2933 {
2934 modref_summary *summary = get_modref_function_summary (node);
2935 if (summary)
2936 {
2937 if (!modref_may_conflict (summary->stores, ref, tbaa_p))
2938 {
2939 alias_stats.modref_clobber_no_alias++;
2940 if (dump_file && (dump_flags & TDF_DETAILS))
2941 {
2942 fprintf (dump_file,
2943 "ipa-modref: in %s, "
2944 "call to %s does not clobber ",
2945 cgraph_node::get
2946 (current_function_decl)->dump_name (),
2947 node->dump_name ());
2948 print_generic_expr (dump_file, ref->ref);
2949 fprintf (dump_file, " %i->%i\n",
2950 ao_ref_base_alias_set (ref),
2951 ao_ref_alias_set (ref));
2952 }
2953 return false;
2954 }
2955 alias_stats.modref_clobber_may_alias++;
2956 }
2957 }
2958 }
2959
2960 base = ao_ref_base (ref);
2961 if (!base)
2962 return true;
2963
2964 if (TREE_CODE (base) == SSA_NAME
2965 || CONSTANT_CLASS_P (base))
2966 return false;
2967
2968 /* A call that is not without side-effects might involve volatile
2969 accesses and thus conflicts with all other volatile accesses. */
2970 if (ref->volatile_p)
2971 return true;
2972
2973 /* If the reference is based on a decl that is not aliased the call
2974 cannot possibly clobber it. */
2975 if (DECL_P (base)
2976 && !may_be_aliased (base)
2977 /* But local non-readonly statics can be modified through recursion
2978 or the call may implement a threading barrier which we must
2979 treat as may-def. */
2980 && (TREE_READONLY (base)
2981 || !is_global_var (base)))
2982 return false;
2983
2984 /* If the reference is based on a pointer that points to memory
2985 that may not be written to then the call cannot possibly clobber it. */
2986 if ((TREE_CODE (base) == MEM_REF
2987 || TREE_CODE (base) == TARGET_MEM_REF)
2988 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
2989 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
2990 return false;
2991
2992 /* Handle those builtin functions explicitly that do not act as
2993 escape points. See tree-ssa-structalias.c:find_func_aliases
2994 for the list of builtins we might need to handle here. */
2995 if (callee != NULL_TREE
2996 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2997 switch (DECL_FUNCTION_CODE (callee))
2998 {
2999 /* All the following functions clobber memory pointed to by
3000 their first argument. */
3001 case BUILT_IN_STRCPY:
3002 case BUILT_IN_STRNCPY:
3003 case BUILT_IN_MEMCPY:
3004 case BUILT_IN_MEMMOVE:
3005 case BUILT_IN_MEMPCPY:
3006 case BUILT_IN_STPCPY:
3007 case BUILT_IN_STPNCPY:
3008 case BUILT_IN_STRCAT:
3009 case BUILT_IN_STRNCAT:
3010 case BUILT_IN_MEMSET:
3011 case BUILT_IN_TM_MEMSET:
3012 CASE_BUILT_IN_TM_STORE (1):
3013 CASE_BUILT_IN_TM_STORE (2):
3014 CASE_BUILT_IN_TM_STORE (4):
3015 CASE_BUILT_IN_TM_STORE (8):
3016 CASE_BUILT_IN_TM_STORE (FLOAT):
3017 CASE_BUILT_IN_TM_STORE (DOUBLE):
3018 CASE_BUILT_IN_TM_STORE (LDOUBLE):
3019 CASE_BUILT_IN_TM_STORE (M64):
3020 CASE_BUILT_IN_TM_STORE (M128):
3021 CASE_BUILT_IN_TM_STORE (M256):
3022 case BUILT_IN_TM_MEMCPY:
3023 case BUILT_IN_TM_MEMMOVE:
3024 {
3025 ao_ref dref;
3026 tree size = NULL_TREE;
3027 	       /* Don't pass in a size for strncat: it writes at most n + 1 bytes
3028 		  starting at dest + strlen (dest), i.e. up to strlen (dest) + n + 1
3029 		  bytes of dest rather than n, and strlen (dest) isn't
3030 		  known.  */
3031 if (gimple_call_num_args (call) == 3
3032 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
3033 size = gimple_call_arg (call, 2);
3034 ao_ref_init_from_ptr_and_size (&dref,
3035 gimple_call_arg (call, 0),
3036 size);
3037 return refs_may_alias_p_1 (&dref, ref, false);
3038 }
3039 case BUILT_IN_STRCPY_CHK:
3040 case BUILT_IN_STRNCPY_CHK:
3041 case BUILT_IN_MEMCPY_CHK:
3042 case BUILT_IN_MEMMOVE_CHK:
3043 case BUILT_IN_MEMPCPY_CHK:
3044 case BUILT_IN_STPCPY_CHK:
3045 case BUILT_IN_STPNCPY_CHK:
3046 case BUILT_IN_STRCAT_CHK:
3047 case BUILT_IN_STRNCAT_CHK:
3048 case BUILT_IN_MEMSET_CHK:
3049 {
3050 ao_ref dref;
3051 tree size = NULL_TREE;
3052 	       /* Don't pass in a size for __strncat_chk: it writes at most n + 1 bytes
3053 		  starting at dest + strlen (dest), i.e. up to strlen (dest) + n + 1
3054 		  bytes of dest rather than n, and strlen (dest) isn't
3055 		  known.  */
3056 if (gimple_call_num_args (call) == 4
3057 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
3058 size = gimple_call_arg (call, 2);
3059 ao_ref_init_from_ptr_and_size (&dref,
3060 gimple_call_arg (call, 0),
3061 size);
3062 return refs_may_alias_p_1 (&dref, ref, false);
3063 }
3064 case BUILT_IN_BCOPY:
3065 {
3066 ao_ref dref;
3067 tree size = gimple_call_arg (call, 2);
3068 ao_ref_init_from_ptr_and_size (&dref,
3069 gimple_call_arg (call, 1),
3070 size);
3071 return refs_may_alias_p_1 (&dref, ref, false);
3072 }
3073 /* Allocating memory does not have any side-effects apart from
3074 being the definition point for the pointer. */
3075 case BUILT_IN_MALLOC:
3076 case BUILT_IN_ALIGNED_ALLOC:
3077 case BUILT_IN_CALLOC:
3078 case BUILT_IN_STRDUP:
3079 case BUILT_IN_STRNDUP:
3080 /* Unix98 specifies that errno is set on allocation failure. */
3081 if (flag_errno_math
3082 && targetm.ref_may_alias_errno (ref))
3083 return true;
3084 return false;
3085 case BUILT_IN_STACK_SAVE:
3086 CASE_BUILT_IN_ALLOCA:
3087 case BUILT_IN_ASSUME_ALIGNED:
3088 return false;
3089 /* But posix_memalign stores a pointer into the memory pointed to
3090 by its first argument. */
3091 case BUILT_IN_POSIX_MEMALIGN:
3092 {
3093 tree ptrptr = gimple_call_arg (call, 0);
3094 ao_ref dref;
3095 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
3096 TYPE_SIZE_UNIT (ptr_type_node));
3097 return (refs_may_alias_p_1 (&dref, ref, false)
3098 || (flag_errno_math
3099 && targetm.ref_may_alias_errno (ref)));
3100 }
3101 /* Freeing memory kills the pointed-to memory. More importantly
3102 the call has to serve as a barrier for moving loads and stores
3103 across it. */
3104 case BUILT_IN_FREE:
3105 case BUILT_IN_VA_END:
3106 {
3107 tree ptr = gimple_call_arg (call, 0);
3108 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
3109 }
3110 /* Realloc serves both as allocation point and deallocation point. */
3111 case BUILT_IN_REALLOC:
3112 {
3113 tree ptr = gimple_call_arg (call, 0);
3114 /* Unix98 specifies that errno is set on allocation failure. */
3115 return ((flag_errno_math
3116 && targetm.ref_may_alias_errno (ref))
3117 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
3118 }
3119 case BUILT_IN_GAMMA_R:
3120 case BUILT_IN_GAMMAF_R:
3121 case BUILT_IN_GAMMAL_R:
3122 case BUILT_IN_LGAMMA_R:
3123 case BUILT_IN_LGAMMAF_R:
3124 case BUILT_IN_LGAMMAL_R:
3125 {
3126 tree out = gimple_call_arg (call, 1);
3127 if (ptr_deref_may_alias_ref_p_1 (out, ref))
3128 return true;
3129 if (flag_errno_math)
3130 break;
3131 return false;
3132 }
3133 case BUILT_IN_FREXP:
3134 case BUILT_IN_FREXPF:
3135 case BUILT_IN_FREXPL:
3136 case BUILT_IN_MODF:
3137 case BUILT_IN_MODFF:
3138 case BUILT_IN_MODFL:
3139 {
3140 tree out = gimple_call_arg (call, 1);
3141 return ptr_deref_may_alias_ref_p_1 (out, ref);
3142 }
3143 case BUILT_IN_REMQUO:
3144 case BUILT_IN_REMQUOF:
3145 case BUILT_IN_REMQUOL:
3146 {
3147 tree out = gimple_call_arg (call, 2);
3148 if (ptr_deref_may_alias_ref_p_1 (out, ref))
3149 return true;
3150 if (flag_errno_math)
3151 break;
3152 return false;
3153 }
3154 case BUILT_IN_SINCOS:
3155 case BUILT_IN_SINCOSF:
3156 case BUILT_IN_SINCOSL:
3157 {
3158 tree sin = gimple_call_arg (call, 1);
3159 tree cos = gimple_call_arg (call, 2);
3160 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
3161 || ptr_deref_may_alias_ref_p_1 (cos, ref));
3162 }
3163 /* __sync_* builtins and some OpenMP builtins act as threading
3164 barriers. */
3165 #undef DEF_SYNC_BUILTIN
3166 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
3167 #include "sync-builtins.def"
3168 #undef DEF_SYNC_BUILTIN
3169 case BUILT_IN_GOMP_ATOMIC_START:
3170 case BUILT_IN_GOMP_ATOMIC_END:
3171 case BUILT_IN_GOMP_BARRIER:
3172 case BUILT_IN_GOMP_BARRIER_CANCEL:
3173 case BUILT_IN_GOMP_TASKWAIT:
3174 case BUILT_IN_GOMP_TASKGROUP_END:
3175 case BUILT_IN_GOMP_CRITICAL_START:
3176 case BUILT_IN_GOMP_CRITICAL_END:
3177 case BUILT_IN_GOMP_CRITICAL_NAME_START:
3178 case BUILT_IN_GOMP_CRITICAL_NAME_END:
3179 case BUILT_IN_GOMP_LOOP_END:
3180 case BUILT_IN_GOMP_LOOP_END_CANCEL:
3181 case BUILT_IN_GOMP_ORDERED_START:
3182 case BUILT_IN_GOMP_ORDERED_END:
3183 case BUILT_IN_GOMP_SECTIONS_END:
3184 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
3185 case BUILT_IN_GOMP_SINGLE_COPY_START:
3186 case BUILT_IN_GOMP_SINGLE_COPY_END:
3187 return true;
3188 default:
3189 /* Fallthru to general call handling. */;
3190 }
3191
3192 /* Check if base is a global static variable that is not written
3193 by the function. */
3194 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
3195 {
3196 struct cgraph_node *node = cgraph_node::get (callee);
3197 bitmap written;
3198 int id;
3199
3200 if (node
3201 && (id = ipa_reference_var_uid (base)) != -1
3202 && (written = ipa_reference_get_written_global (node))
3203 && !bitmap_bit_p (written, id))
3204 return false;
3205 }
3206
3207 /* Check if the base variable is call-clobbered. */
3208 if (DECL_P (base))
3209 return pt_solution_includes (gimple_call_clobber_set (call), base);
3210 else if ((TREE_CODE (base) == MEM_REF
3211 || TREE_CODE (base) == TARGET_MEM_REF)
3212 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3213 {
3214 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
3215 if (!pi)
3216 return true;
3217
3218 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
3219 }
3220
3221 return true;
3222 }
3223
3224 /* If the call in statement CALL may clobber the memory reference REF
3225 return true, otherwise return false. */
3226
3227 bool
3228 call_may_clobber_ref_p (gcall *call, tree ref)
3229 {
3230 bool res;
3231 ao_ref r;
3232 ao_ref_init (&r, ref);
3233 res = call_may_clobber_ref_p_1 (call, &r, true);
3234 if (res)
3235 ++alias_stats.call_may_clobber_ref_p_may_alias;
3236 else
3237 ++alias_stats.call_may_clobber_ref_p_no_alias;
3238 return res;
3239 }
3240
3241
3242 /* If the statement STMT may clobber the memory reference REF return true,
3243 otherwise return false. */
3244
3245 bool
3246 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
3247 {
3248 if (is_gimple_call (stmt))
3249 {
3250 tree lhs = gimple_call_lhs (stmt);
3251 if (lhs
3252 && TREE_CODE (lhs) != SSA_NAME)
3253 {
3254 ao_ref r;
3255 ao_ref_init (&r, lhs);
3256 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
3257 return true;
3258 }
3259
3260 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref, tbaa_p);
3261 }
3262 else if (gimple_assign_single_p (stmt))
3263 {
3264 tree lhs = gimple_assign_lhs (stmt);
3265 if (TREE_CODE (lhs) != SSA_NAME)
3266 {
3267 ao_ref r;
3268 ao_ref_init (&r, lhs);
3269 return refs_may_alias_p_1 (ref, &r, tbaa_p);
3270 }
3271 }
3272 else if (gimple_code (stmt) == GIMPLE_ASM)
3273 return true;
3274
3275 return false;
3276 }
3277
3278 bool
3279 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
3280 {
3281 ao_ref r;
3282 ao_ref_init (&r, ref);
3283 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
3284 }
3285
3286 /* Return true if store1 and store2 described by corresponding tuples
3287 <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
3288 address. */
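/* For instance, a store to a local D and a store through a pointer P,

     d = ...;
     *p = ...;

   are stores to the same address of the same size when P provably points
   only to D (a singleton points-to solution), both offsets are zero and
   both stores cover exactly DECL_SIZE (D).  The names are purely
   illustrative.  */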
3289
3290 static bool
3291 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
3292 poly_int64 max_size1,
3293 tree base2, poly_int64 offset2, poly_int64 size2,
3294 poly_int64 max_size2)
3295 {
3296 /* Offsets need to be 0. */
3297 if (maybe_ne (offset1, 0)
3298 || maybe_ne (offset2, 0))
3299 return false;
3300
3301 bool base1_obj_p = SSA_VAR_P (base1);
3302 bool base2_obj_p = SSA_VAR_P (base2);
3303
3304 /* We need one object. */
3305 if (base1_obj_p == base2_obj_p)
3306 return false;
3307 tree obj = base1_obj_p ? base1 : base2;
3308
3309 /* And we need one MEM_REF. */
3310 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
3311 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
3312 if (base1_memref_p == base2_memref_p)
3313 return false;
3314 tree memref = base1_memref_p ? base1 : base2;
3315
3316 /* Sizes need to be valid. */
3317 if (!known_size_p (max_size1)
3318 || !known_size_p (max_size2)
3319 || !known_size_p (size1)
3320 || !known_size_p (size2))
3321 return false;
3322
3323 /* Max_size needs to match size. */
3324 if (maybe_ne (max_size1, size1)
3325 || maybe_ne (max_size2, size2))
3326 return false;
3327
3328 /* Sizes need to match. */
3329 if (maybe_ne (size1, size2))
3330 return false;
3331
3332
3333 /* Check that memref is a store to pointer with singleton points-to info. */
3334 if (!integer_zerop (TREE_OPERAND (memref, 1)))
3335 return false;
3336 tree ptr = TREE_OPERAND (memref, 0);
3337 if (TREE_CODE (ptr) != SSA_NAME)
3338 return false;
3339 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3340 unsigned int pt_uid;
3341 if (pi == NULL
3342 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
3343 return false;
3344
3345 /* Be conservative with non-call exceptions when the address might
3346 be NULL. */
3347 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
3348 return false;
3349
3350 /* Check that ptr points relative to obj. */
3351 unsigned int obj_uid = DECL_PT_UID (obj);
3352 if (obj_uid != pt_uid)
3353 return false;
3354
3355   /* Check that the object size is the same as the store size.  That ensures
3356      that ptr points to the start of obj.  */
3357 return (DECL_SIZE (obj)
3358 && poly_int_tree_p (DECL_SIZE (obj))
3359 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
3360 }
3361
3362 /* If STMT kills the memory reference REF return true, otherwise
3363 return false. */
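/* For instance, the statement

     memset (p, 0, 16);

   kills a reference REF whose base is *P and whose extent provably lies
   within the first 16 bytes, because the prior contents can no longer be
   observed.  The call is purely illustrative.  */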
3364
3365 bool
3366 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
3367 {
3368 if (!ao_ref_base (ref))
3369 return false;
3370
3371 if (gimple_has_lhs (stmt)
3372 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
3373 /* The assignment is not necessarily carried out if it can throw
3374 and we can catch it in the current function where we could inspect
3375 the previous value.
3376 ??? We only need to care about the RHS throwing. For aggregate
3377 assignments or similar calls and non-call exceptions the LHS
3378 might throw as well. */
3379 && !stmt_can_throw_internal (cfun, stmt))
3380 {
3381 tree lhs = gimple_get_lhs (stmt);
3382 /* If LHS is literally a base of the access we are done. */
3383 if (ref->ref)
3384 {
3385 tree base = ref->ref;
3386 tree innermost_dropped_array_ref = NULL_TREE;
3387 if (handled_component_p (base))
3388 {
3389 tree saved_lhs0 = NULL_TREE;
3390 if (handled_component_p (lhs))
3391 {
3392 saved_lhs0 = TREE_OPERAND (lhs, 0);
3393 TREE_OPERAND (lhs, 0) = integer_zero_node;
3394 }
3395 do
3396 {
3397 /* Just compare the outermost handled component, if
3398 they are equal we have found a possible common
3399 base. */
3400 tree saved_base0 = TREE_OPERAND (base, 0);
3401 TREE_OPERAND (base, 0) = integer_zero_node;
3402 bool res = operand_equal_p (lhs, base, 0);
3403 TREE_OPERAND (base, 0) = saved_base0;
3404 if (res)
3405 break;
3406 /* Remember if we drop an array-ref that we need to
3407 double-check not being at struct end. */
3408 if (TREE_CODE (base) == ARRAY_REF
3409 || TREE_CODE (base) == ARRAY_RANGE_REF)
3410 innermost_dropped_array_ref = base;
3411 /* Otherwise drop handled components of the access. */
3412 base = saved_base0;
3413 }
3414 while (handled_component_p (base));
3415 if (saved_lhs0)
3416 TREE_OPERAND (lhs, 0) = saved_lhs0;
3417 }
3418 /* Finally check if the lhs has the same address and size as the
3419 base candidate of the access. Watch out if we have dropped
3420 	 an array-ref that was at struct end; this means ref->ref may
3421 be outside of the TYPE_SIZE of its base. */
3422 if ((! innermost_dropped_array_ref
3423 || ! array_at_struct_end_p (innermost_dropped_array_ref))
3424 && (lhs == base
3425 || (((TYPE_SIZE (TREE_TYPE (lhs))
3426 == TYPE_SIZE (TREE_TYPE (base)))
3427 || (TYPE_SIZE (TREE_TYPE (lhs))
3428 && TYPE_SIZE (TREE_TYPE (base))
3429 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
3430 TYPE_SIZE (TREE_TYPE (base)),
3431 0)))
3432 && operand_equal_p (lhs, base,
3433 OEP_ADDRESS_OF
3434 | OEP_MATCH_SIDE_EFFECTS))))
3435 return true;
3436 }
3437
3438 /* Now look for non-literal equal bases with the restriction of
3439 handling constant offset and size. */
3440 /* For a must-alias check we need to be able to constrain
3441 the access properly. */
3442 if (!ref->max_size_known_p ())
3443 return false;
3444 poly_int64 size, offset, max_size, ref_offset = ref->offset;
3445 bool reverse;
3446 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
3447 &reverse);
3448 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
3449 so base == ref->base does not always hold. */
3450 if (base != ref->base)
3451 {
3452 /* Try using points-to info. */
3453 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
3454 ref->offset, ref->size, ref->max_size))
3455 return true;
3456
3457 /* If both base and ref->base are MEM_REFs, only compare the
3458 first operand, and if the second operand isn't equal constant,
3459 try to add the offsets into offset and ref_offset. */
3460 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
3461 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
3462 {
3463 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
3464 TREE_OPERAND (ref->base, 1)))
3465 {
3466 poly_offset_int off1 = mem_ref_offset (base);
3467 off1 <<= LOG2_BITS_PER_UNIT;
3468 off1 += offset;
3469 poly_offset_int off2 = mem_ref_offset (ref->base);
3470 off2 <<= LOG2_BITS_PER_UNIT;
3471 off2 += ref_offset;
3472 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
3473 size = -1;
3474 }
3475 }
3476 else
3477 size = -1;
3478 }
3479 /* For a must-alias check we need to be able to constrain
3480 the access properly. */
3481 if (known_eq (size, max_size)
3482 && known_subrange_p (ref_offset, ref->max_size, offset, size))
3483 return true;
3484 }
3485
3486 if (is_gimple_call (stmt))
3487 {
3488 tree callee = gimple_call_fndecl (stmt);
3489 if (callee != NULL_TREE
3490 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3491 switch (DECL_FUNCTION_CODE (callee))
3492 {
3493 case BUILT_IN_FREE:
3494 {
3495 tree ptr = gimple_call_arg (stmt, 0);
3496 tree base = ao_ref_base (ref);
3497 if (base && TREE_CODE (base) == MEM_REF
3498 && TREE_OPERAND (base, 0) == ptr)
3499 return true;
3500 break;
3501 }
3502
3503 case BUILT_IN_MEMCPY:
3504 case BUILT_IN_MEMPCPY:
3505 case BUILT_IN_MEMMOVE:
3506 case BUILT_IN_MEMSET:
3507 case BUILT_IN_MEMCPY_CHK:
3508 case BUILT_IN_MEMPCPY_CHK:
3509 case BUILT_IN_MEMMOVE_CHK:
3510 case BUILT_IN_MEMSET_CHK:
3511 case BUILT_IN_STRNCPY:
3512 case BUILT_IN_STPNCPY:
3513 case BUILT_IN_CALLOC:
3514 {
3515 /* For a must-alias check we need to be able to constrain
3516 the access properly. */
3517 if (!ref->max_size_known_p ())
3518 return false;
3519 tree dest;
3520 tree len;
3521
3522 /* In execution order a calloc call will never kill
3523 anything. However, DSE will (ab)use this interface
3524 to ask if a calloc call writes the same memory locations
3525 as a later assignment, memset, etc. So handle calloc
3526 in the expected way. */
3527 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
3528 {
3529 tree arg0 = gimple_call_arg (stmt, 0);
3530 tree arg1 = gimple_call_arg (stmt, 1);
3531 if (TREE_CODE (arg0) != INTEGER_CST
3532 || TREE_CODE (arg1) != INTEGER_CST)
3533 return false;
3534
3535 dest = gimple_call_lhs (stmt);
3536 if (!dest)
3537 return false;
3538 len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
3539 }
3540 else
3541 {
3542 dest = gimple_call_arg (stmt, 0);
3543 len = gimple_call_arg (stmt, 2);
3544 }
3545 if (!poly_int_tree_p (len))
3546 return false;
3547 tree rbase = ref->base;
3548 poly_offset_int roffset = ref->offset;
3549 ao_ref dref;
3550 ao_ref_init_from_ptr_and_size (&dref, dest, len);
3551 tree base = ao_ref_base (&dref);
3552 poly_offset_int offset = dref.offset;
3553 if (!base || !known_size_p (dref.size))
3554 return false;
3555 if (TREE_CODE (base) == MEM_REF)
3556 {
3557 if (TREE_CODE (rbase) != MEM_REF)
3558 return false;
3559 /* Compare pointers.  */
3560 offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
3561 roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
3562 base = TREE_OPERAND (base, 0);
3563 rbase = TREE_OPERAND (rbase, 0);
3564 }
3565 if (base == rbase
3566 && known_subrange_p (roffset, ref->max_size, offset,
3567 wi::to_poly_offset (len)
3568 << LOG2_BITS_PER_UNIT))
3569 return true;
3570 break;
3571 }
3572
3573 case BUILT_IN_VA_END:
3574 {
3575 tree ptr = gimple_call_arg (stmt, 0);
3576 if (TREE_CODE (ptr) == ADDR_EXPR)
3577 {
3578 tree base = ao_ref_base (ref);
3579 if (TREE_OPERAND (ptr, 0) == base)
3580 return true;
3581 }
3582 break;
3583 }
3584
3585 default:;
3586 }
3587 }
3588 return false;
3589 }
3590
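/* Convenience overload of the above working on a bare reference tree.  */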
3591 bool
3592 stmt_kills_ref_p (gimple *stmt, tree ref)
3593 {
3594 ao_ref r;
3595 ao_ref_init (&r, ref);
3596 return stmt_kills_ref_p (stmt, &r);
3597 }
3598
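/* Illustrative sketch, not part of the original source: how a DSE-style
   client might use the kill oracle above.  The helper name
   sketch_store_fully_overwritten_p is made up for the example; a real
   client must also verify that nothing reads the location between the
   two statements.  */

static bool
sketch_store_fully_overwritten_p (tree earlier_lhs, gimple *later_store)
{
  ao_ref r;
  ao_ref_init (&r, earlier_lhs);
  /* True iff LATER_STORE must write every byte covered by EARLIER_LHS,
     which makes the earlier store a candidate for removal.  */
  return stmt_kills_ref_p (later_store, &r);
}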
3599
3600 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
3601 TARGET or a statement clobbering the memory reference REF, in which
3602 case false is returned. The walk starts with VUSE, one argument of PHI. */
3603
3604 static bool
3605 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
3606 ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
3607 bitmap *visited, bool abort_on_visited,
3608 void *(*translate)(ao_ref *, tree, void *, translate_flags *),
3609 translate_flags disambiguate_only,
3610 void *data)
3611 {
3612 basic_block bb = gimple_bb (phi);
3613
3614 if (!*visited)
3615 *visited = BITMAP_ALLOC (NULL);
3616
3617 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
3618
3619 /* Walk until we hit the target. */
3620 while (vuse != target)
3621 {
3622 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
3623 /* If we are searching for the target VUSE by walking up to
3624 TARGET_BB dominating the original PHI we are finished once
3625 we reach a default def or a definition in a block dominating
3626 that block. Update TARGET and return. */
3627 if (!target
3628 && (gimple_nop_p (def_stmt)
3629 || dominated_by_p (CDI_DOMINATORS,
3630 target_bb, gimple_bb (def_stmt))))
3631 {
3632 target = vuse;
3633 return true;
3634 }
3635
3636 /* Recurse for PHI nodes. */
3637 if (gimple_code (def_stmt) == GIMPLE_PHI)
3638 {
3639 /* An already visited PHI node ends the walk successfully. */
3640 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
3641 return !abort_on_visited;
3642 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3643 visited, abort_on_visited,
3644 translate, data, disambiguate_only);
3645 if (!vuse)
3646 return false;
3647 continue;
3648 }
3649 else if (gimple_nop_p (def_stmt))
3650 return false;
3651 else
3652 {
3653 /* A clobbering statement or the end of the IL ends the walk with failure. */
3654 if ((int)limit <= 0)
3655 return false;
3656 --limit;
3657 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3658 {
3659 translate_flags tf = disambiguate_only;
3660 if (translate
3661 && (*translate) (ref, vuse, data, &tf) == NULL)
3662 ;
3663 else
3664 return false;
3665 }
3666 }
3667 /* If we reach a new basic-block, see if we already skipped it
3668 in a previous walk that ended successfully. */
3669 if (gimple_bb (def_stmt) != bb)
3670 {
3671 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
3672 return !abort_on_visited;
3673 bb = gimple_bb (def_stmt);
3674 }
3675 vuse = gimple_vuse (def_stmt);
3676 }
3677 return true;
3678 }
3679
3680
3681 /* Starting from a PHI node for the virtual operand of the memory reference
3682 REF, find a continuation virtual operand that allows us to continue walking
3683 statements dominating PHI, skipping only statements that cannot possibly
3684 clobber REF. Decrements LIMIT for each alias disambiguation done
3685 and aborts the walk, returning NULL_TREE if it reaches zero.
3686 Returns NULL_TREE if no suitable virtual operand can be found. */
3687
3688 tree
3689 get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
3690 unsigned int &limit, bitmap *visited,
3691 bool abort_on_visited,
3692 void *(*translate)(ao_ref *, tree, void *,
3693 translate_flags *),
3694 void *data,
3695 translate_flags disambiguate_only)
3696 {
3697 unsigned nargs = gimple_phi_num_args (phi);
3698
3699 /* A single-argument PHI we can simply look through. */
3700 if (nargs == 1)
3701 return PHI_ARG_DEF (phi, 0);
3702
3703 /* For two or more arguments try to pairwise skip non-aliasing code
3704 until we hit the phi argument definition that dominates all the others. */
3705 basic_block phi_bb = gimple_bb (phi);
3706 tree arg0, arg1;
3707 unsigned i;
3708
3709 /* Find a candidate for the virtual operand whose definition
3710 dominates those of all others. */
3711 /* First look if any of the args themselves satisfy this. */
3712 for (i = 0; i < nargs; ++i)
3713 {
3714 arg0 = PHI_ARG_DEF (phi, i);
3715 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
3716 break;
3717 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
3718 if (def_bb != phi_bb
3719 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
3720 break;
3721 arg0 = NULL_TREE;
3722 }
3723 /* If not, see if we can reach such a candidate by walking defs
3724 until we hit the immediate dominator. maybe_skip_until will
3725 do that for us. */
3726 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
3727
3728 /* Then check against the (to be) found candidate. */
3729 for (i = 0; i < nargs; ++i)
3730 {
3731 arg1 = PHI_ARG_DEF (phi, i);
3732 if (arg1 == arg0)
3733 ;
3734 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
3735 limit, visited,
3736 abort_on_visited,
3737 translate,
3738 /* Do not valueize when walking over
3739 backedges. */
3740 dominated_by_p
3741 (CDI_DOMINATORS,
3742 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3743 phi_bb)
3744 ? TR_DISAMBIGUATE
3745 : disambiguate_only, data))
3746 return NULL_TREE;
3747 }
3748
3749 return arg0;
3750 }
3751
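/* Illustrative sketch, not part of the original source: resuming a manual
   virtual-operand walk once it reaches PHI.  The sketch_ name, the LIMIT
   budget of 32 and the absence of a TRANSLATE callback are made up for the
   example; the caller owns and frees the VISITED bitmap.  */

static tree
sketch_skip_phi (gimple *phi, ao_ref *ref)
{
  bitmap visited = NULL;
  unsigned int limit = 32;	/* Arbitrary alias-query budget.  */
  tree vuse = get_continuation_for_phi (phi, ref, true, limit, &visited,
					false, NULL, NULL);
  if (visited)
    BITMAP_FREE (visited);
  /* NULL_TREE means a possible clobber was found or LIMIT was exhausted;
     otherwise VUSE is a virtual operand dominating PHI from which the
     walk can continue.  */
  return vuse;
}
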
3752 /* Based on the memory reference REF and its virtual use VUSE call
3753 WALKER for each virtual use that is equivalent to VUSE, including VUSE
3754 itself. That is, for each virtual use whose defining statement
3755 does not clobber REF.
3756
3757 WALKER is called with REF, the current virtual use and DATA. If
3758 WALKER returns non-NULL the walk stops and its result is returned.
3759 At the end of a non-successful walk NULL is returned.
3760
3761 TRANSLATE, if non-NULL, is called with a pointer to REF, the virtual
3762 use whose definition is a statement that may clobber REF, and DATA.
3763 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3764 If TRANSLATE returns non-NULL the walk stops and its result is returned.
3765 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3766 to adjust REF and *DATA to make that valid.
3767
3768 VALUEIZE, if non-NULL, is called with the next VUSE that is considered
3769 and its return value is substituted for that VUSE. This can be used to
3770 implement optimistic value-numbering, for example. Note that the
3771 VUSE argument is assumed to be valueized already.
3772
3773 LIMIT specifies the number of alias queries we are allowed to do,
3774 the walk stops when it reaches zero and NULL is returned. LIMIT
3775 is decremented by the number of alias queries (plus adjustments
3776 done by the callbacks) upon return.
3777
3778 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
3779
3780 void *
3781 walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
3782 void *(*walker)(ao_ref *, tree, void *),
3783 void *(*translate)(ao_ref *, tree, void *,
3784 translate_flags *),
3785 tree (*valueize)(tree),
3786 unsigned &limit, void *data)
3787 {
3788 bitmap visited = NULL;
3789 void *res;
3790 bool translated = false;
3791
3792 timevar_push (TV_ALIAS_STMT_WALK);
3793
3794 do
3795 {
3796 gimple *def_stmt;
3797
3798 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3799 res = (*walker) (ref, vuse, data);
3800 /* Abort walk. */
3801 if (res == (void *)-1)
3802 {
3803 res = NULL;
3804 break;
3805 }
3806 /* Lookup succeeded. */
3807 else if (res != NULL)
3808 break;
3809
3810 if (valueize)
3811 {
3812 vuse = valueize (vuse);
3813 if (!vuse)
3814 {
3815 res = NULL;
3816 break;
3817 }
3818 }
3819 def_stmt = SSA_NAME_DEF_STMT (vuse);
3820 if (gimple_nop_p (def_stmt))
3821 break;
3822 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3823 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3824 &visited, translated, translate, data);
3825 else
3826 {
3827 if ((int)limit <= 0)
3828 {
3829 res = NULL;
3830 break;
3831 }
3832 --limit;
3833 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3834 {
3835 if (!translate)
3836 break;
3837 translate_flags disambiguate_only = TR_TRANSLATE;
3838 res = (*translate) (ref, vuse, data, &disambiguate_only);
3839 /* Failed lookup and translation. */
3840 if (res == (void *)-1)
3841 {
3842 res = NULL;
3843 break;
3844 }
3845 /* Lookup succeeded. */
3846 else if (res != NULL)
3847 break;
3848 /* Translation succeeded, continue walking. */
3849 translated = translated || disambiguate_only == TR_TRANSLATE;
3850 }
3851 vuse = gimple_vuse (def_stmt);
3852 }
3853 }
3854 while (vuse);
3855
3856 if (visited)
3857 BITMAP_FREE (visited);
3858
3859 timevar_pop (TV_ALIAS_STMT_WALK);
3860
3861 return res;
3862 }
3863
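/* Illustrative sketch, not part of the original source: a minimal WALKER
   callback and invocation of walk_non_aliased_vuses.  It records the
   oldest virtual use on the walk whose intervening definitions cannot
   clobber REF.  The sketch_ names and the LIMIT value of 32 are made up;
   real clients such as value numbering pass a TRANSLATE hook and richer
   DATA.  */

static void *
sketch_record_vuse_walker (ao_ref *, tree vuse, void *data)
{
  /* Returning NULL continues the walk; non-NULL would stop it and be
     handed back to the caller, and (void *)-1 would abort it.  */
  *(tree *) data = vuse;
  return NULL;
}

static tree
sketch_oldest_equivalent_vuse (ao_ref *ref, tree vuse)
{
  tree oldest = vuse;
  unsigned limit = 32;	/* Arbitrary alias-query budget.  */
  walk_non_aliased_vuses (ref, vuse, true, sketch_record_vuse_walker,
			  NULL, NULL, limit, &oldest);
  return oldest;
}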
3864
3865 /* Based on the memory reference REF call WALKER for each vdef whose
3866 defining statement may clobber REF, starting with VDEF. If REF
3867 is NULL_TREE, each defining statement is visited.
3868
3869 WALKER is called with REF, the current vdef and DATA. If WALKER
3870 returns true the walk is stopped, otherwise it continues.
3871
3872 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
3873 The pointer may be NULL and then we do not track this information.
3874
3875 At PHI nodes walk_aliased_vdefs forks into one walk for each
3876 PHI argument (but only one walk continues at merge points); the
3877 walk is considered successful if any of the forked walks was.
3878
3879 The function returns the number of statements walked or -1 if
3880 LIMIT stmts were walked and the walk was aborted at this point.
3881 If LIMIT is zero the walk is not aborted. */
3882
3883 static int
3884 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
3885 bool (*walker)(ao_ref *, tree, void *), void *data,
3886 bitmap *visited, unsigned int cnt,
3887 bool *function_entry_reached, unsigned limit)
3888 {
3889 do
3890 {
3891 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
3892
3893 if (*visited
3894 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
3895 return cnt;
3896
3897 if (gimple_nop_p (def_stmt))
3898 {
3899 if (function_entry_reached)
3900 *function_entry_reached = true;
3901 return cnt;
3902 }
3903 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3904 {
3905 unsigned i;
3906 if (!*visited)
3907 *visited = BITMAP_ALLOC (NULL);
3908 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3909 {
3910 int res = walk_aliased_vdefs_1 (ref,
3911 gimple_phi_arg_def (def_stmt, i),
3912 walker, data, visited, cnt,
3913 function_entry_reached, limit);
3914 if (res == -1)
3915 return -1;
3916 cnt = res;
3917 }
3918 return cnt;
3919 }
3920
3921 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3922 cnt++;
3923 if (cnt == limit)
3924 return -1;
3925 if ((!ref
3926 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
3927 && (*walker) (ref, vdef, data))
3928 return cnt;
3929
3930 vdef = gimple_vuse (def_stmt);
3931 }
3932 while (1);
3933 }
3934
3935 int
3936 walk_aliased_vdefs (ao_ref *ref, tree vdef,
3937 bool (*walker)(ao_ref *, tree, void *), void *data,
3938 bitmap *visited,
3939 bool *function_entry_reached, unsigned int limit)
3940 {
3941 bitmap local_visited = NULL;
3942 int ret;
3943
3944 timevar_push (TV_ALIAS_STMT_WALK);
3945
3946 if (function_entry_reached)
3947 *function_entry_reached = false;
3948
3949 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
3950 visited ? visited : &local_visited, 0,
3951 function_entry_reached, limit);
3952 if (local_visited)
3953 BITMAP_FREE (local_visited);
3954
3955 timevar_pop (TV_ALIAS_STMT_WALK);
3956
3957 return ret;
3958 }
3959
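/* Illustrative sketch, not part of the original source: a WALKER callback
   for walk_aliased_vdefs that merely counts the definitions that may
   clobber REF.  The sketch_ names and the LIMIT of 128 are made up.
   Returning false from the callback keeps walking; a -1 result from
   walk_aliased_vdefs means the limit was hit before the walk finished.  */

static bool
sketch_count_clobber_cb (ao_ref *, tree, void *data)
{
  ++*(unsigned *) data;
  return false;	/* Keep walking; true would stop this walk.  */
}

static int
sketch_count_clobbers (ao_ref *ref, tree vdef)
{
  unsigned count = 0;
  bool entry_reached;
  int walked = walk_aliased_vdefs (ref, vdef, sketch_count_clobber_cb,
				   &count, NULL, &entry_reached, 128);
  return walked == -1 ? -1 : (int) count;
}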