tree-ssa-alias.c (same_type_for_tbaa): Return true if main variants are pointer equiv...
1 /* Alias analysis for trees.
2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "alias.h"
34 #include "fold-const.h"
35 #include "langhooks.h"
36 #include "dumpfile.h"
37 #include "tree-eh.h"
38 #include "tree-dfa.h"
39 #include "ipa-reference.h"
40 #include "varasm.h"
41
42 /* Broad overview of how alias analysis on gimple works:
43
44 Statements clobbering or using memory are linked through the
45 virtual operand factored use-def chain. The virtual operand
46 is unique per function, its symbol is accessible via gimple_vop (cfun).
47 Virtual operands are used for efficiently walking memory statements
48 in the gimple IL and are useful for things like value-numbering as
49 a generation count for memory references.
50
51 SSA_NAME pointers may have associated points-to information
52 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
53 points-to information is (re-)computed by the TODO_rebuild_alias
54 pass manager todo. Points-to information is also used for more
55 precise tracking of call-clobbered and call-used variables and
56 related disambiguations.
57
 58 This file contains functions for disambiguating memory references,
 59 the so-called alias-oracle, and tools for walking the gimple IL.
60
61 The main alias-oracle entry-points are
62
63 bool stmt_may_clobber_ref_p (gimple *, tree)
64
65 This function queries if a statement may invalidate (parts of)
66 the memory designated by the reference tree argument.
67
68 bool ref_maybe_used_by_stmt_p (gimple *, tree)
69
70 This function queries if a statement may need (parts of) the
71 memory designated by the reference tree argument.
72
73 There are variants of these functions that only handle the call
74 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
75 Note that these do not disambiguate against a possible call lhs.
76
77 bool refs_may_alias_p (tree, tree)
78
79 This function tries to disambiguate two reference trees.
80
81 bool ptr_deref_may_alias_global_p (tree)
82
83 This function queries if dereferencing a pointer variable may
84 alias global memory.
85
86 More low-level disambiguators are available and documented in
87 this file. Low-level disambiguators dealing with points-to
88 information are in tree-ssa-structalias.c. */
89
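/* Example (an illustrative sketch, not part of the original sources):
   a client pass that wants to know whether statement STMT touches the
   memory designated by a reference tree REF can combine the two main
   entry points:

     bool touched = ref_maybe_used_by_stmt_p (stmt, ref)
		    || stmt_may_clobber_ref_p (stmt, ref);

   where stmt (a gimple *) and ref (a tree) are hypothetical locals.  */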
90
91 /* Query statistics for the different low-level disambiguators.
92 A high-level query may trigger multiple of them. */
93
94 static struct {
95 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
96 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
97 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
98 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
99 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
100 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
101 unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
102 unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
103 } alias_stats;
104
105 void
106 dump_alias_stats (FILE *s)
107 {
108 fprintf (s, "\nAlias oracle query stats:\n");
109 fprintf (s, " refs_may_alias_p: "
110 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
111 HOST_WIDE_INT_PRINT_DEC" queries\n",
112 alias_stats.refs_may_alias_p_no_alias,
113 alias_stats.refs_may_alias_p_no_alias
114 + alias_stats.refs_may_alias_p_may_alias);
115 fprintf (s, " ref_maybe_used_by_call_p: "
116 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
117 HOST_WIDE_INT_PRINT_DEC" queries\n",
118 alias_stats.ref_maybe_used_by_call_p_no_alias,
 119 alias_stats.ref_maybe_used_by_call_p_no_alias
120 + alias_stats.ref_maybe_used_by_call_p_may_alias);
121 fprintf (s, " call_may_clobber_ref_p: "
122 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
123 HOST_WIDE_INT_PRINT_DEC" queries\n",
124 alias_stats.call_may_clobber_ref_p_no_alias,
125 alias_stats.call_may_clobber_ref_p_no_alias
126 + alias_stats.call_may_clobber_ref_p_may_alias);
 127 fprintf (s, " aliasing_component_refs_p: "
128 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
129 HOST_WIDE_INT_PRINT_DEC" queries\n",
130 alias_stats.aliasing_component_refs_p_no_alias,
131 alias_stats.aliasing_component_refs_p_no_alias
132 + alias_stats.aliasing_component_refs_p_may_alias);
133 dump_alias_stats_in_alias_c (s);
134 }
135
136
 137 /* Return true if dereferencing PTR may alias a global variable. */
138
139 bool
140 ptr_deref_may_alias_global_p (tree ptr)
141 {
142 struct ptr_info_def *pi;
143
 144 /* If we end up with a pointer constant here, it may point
 145 to global memory. */
146 if (TREE_CODE (ptr) != SSA_NAME)
147 return true;
148
149 pi = SSA_NAME_PTR_INFO (ptr);
150
151 /* If we do not have points-to information for this variable,
152 we have to punt. */
153 if (!pi)
154 return true;
155
156 /* ??? This does not use TBAA to prune globals ptr may not access. */
157 return pt_solution_includes_global (&pi->pt);
158 }
159
160 /* Return true if dereferencing PTR may alias DECL.
161 The caller is responsible for applying TBAA to see if PTR
162 may access DECL at all. */
163
164 static bool
165 ptr_deref_may_alias_decl_p (tree ptr, tree decl)
166 {
167 struct ptr_info_def *pi;
168
 169 /* Conversions are irrelevant for points-to information, and
 170 data-dependence analysis can feed us those. */
171 STRIP_NOPS (ptr);
172
 173 /* Anything we do not explicitly handle aliases. */
174 if ((TREE_CODE (ptr) != SSA_NAME
175 && TREE_CODE (ptr) != ADDR_EXPR
176 && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
177 || !POINTER_TYPE_P (TREE_TYPE (ptr))
178 || (!VAR_P (decl)
179 && TREE_CODE (decl) != PARM_DECL
180 && TREE_CODE (decl) != RESULT_DECL))
181 return true;
182
183 /* Disregard pointer offsetting. */
184 if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
185 {
186 do
187 {
188 ptr = TREE_OPERAND (ptr, 0);
189 }
190 while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
191 return ptr_deref_may_alias_decl_p (ptr, decl);
192 }
193
194 /* ADDR_EXPR pointers either just offset another pointer or directly
195 specify the pointed-to set. */
196 if (TREE_CODE (ptr) == ADDR_EXPR)
197 {
198 tree base = get_base_address (TREE_OPERAND (ptr, 0));
199 if (base
200 && (TREE_CODE (base) == MEM_REF
201 || TREE_CODE (base) == TARGET_MEM_REF))
202 ptr = TREE_OPERAND (base, 0);
203 else if (base
204 && DECL_P (base))
205 return compare_base_decls (base, decl) != 0;
206 else if (base
207 && CONSTANT_CLASS_P (base))
208 return false;
209 else
210 return true;
211 }
212
213 /* Non-aliased variables cannot be pointed to. */
214 if (!may_be_aliased (decl))
215 return false;
216
217 /* If we do not have useful points-to information for this pointer
218 we cannot disambiguate anything else. */
219 pi = SSA_NAME_PTR_INFO (ptr);
220 if (!pi)
221 return true;
222
223 return pt_solution_includes (&pi->pt, decl);
224 }
225
226 /* Return true if dereferenced PTR1 and PTR2 may alias.
227 The caller is responsible for applying TBAA to see if accesses
228 through PTR1 and PTR2 may conflict at all. */
229
230 bool
231 ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
232 {
233 struct ptr_info_def *pi1, *pi2;
234
 235 /* Conversions are irrelevant for points-to information, and
 236 data-dependence analysis can feed us those. */
237 STRIP_NOPS (ptr1);
238 STRIP_NOPS (ptr2);
239
240 /* Disregard pointer offsetting. */
241 if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
242 {
243 do
244 {
245 ptr1 = TREE_OPERAND (ptr1, 0);
246 }
247 while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
248 return ptr_derefs_may_alias_p (ptr1, ptr2);
249 }
250 if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
251 {
252 do
253 {
254 ptr2 = TREE_OPERAND (ptr2, 0);
255 }
256 while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
257 return ptr_derefs_may_alias_p (ptr1, ptr2);
258 }
259
260 /* ADDR_EXPR pointers either just offset another pointer or directly
261 specify the pointed-to set. */
262 if (TREE_CODE (ptr1) == ADDR_EXPR)
263 {
264 tree base = get_base_address (TREE_OPERAND (ptr1, 0));
265 if (base
266 && (TREE_CODE (base) == MEM_REF
267 || TREE_CODE (base) == TARGET_MEM_REF))
268 return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
269 else if (base
270 && DECL_P (base))
271 return ptr_deref_may_alias_decl_p (ptr2, base);
272 else
273 return true;
274 }
275 if (TREE_CODE (ptr2) == ADDR_EXPR)
276 {
277 tree base = get_base_address (TREE_OPERAND (ptr2, 0));
278 if (base
279 && (TREE_CODE (base) == MEM_REF
280 || TREE_CODE (base) == TARGET_MEM_REF))
281 return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
282 else if (base
283 && DECL_P (base))
284 return ptr_deref_may_alias_decl_p (ptr1, base);
285 else
286 return true;
287 }
288
289 /* From here we require SSA name pointers. Anything else aliases. */
290 if (TREE_CODE (ptr1) != SSA_NAME
291 || TREE_CODE (ptr2) != SSA_NAME
292 || !POINTER_TYPE_P (TREE_TYPE (ptr1))
293 || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
294 return true;
295
 296 /* We may end up with two empty points-to solutions for two identical
 297 pointers. In this case we still want to say both pointers alias, so
 298 shortcut that here. */
299 if (ptr1 == ptr2)
300 return true;
301
302 /* If we do not have useful points-to information for either pointer
303 we cannot disambiguate anything else. */
304 pi1 = SSA_NAME_PTR_INFO (ptr1);
305 pi2 = SSA_NAME_PTR_INFO (ptr2);
306 if (!pi1 || !pi2)
307 return true;
308
309 /* ??? This does not use TBAA to prune decls from the intersection
310 that not both pointers may access. */
311 return pt_solutions_intersect (&pi1->pt, &pi2->pt);
312 }
313
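/* Example (an illustrative sketch, not part of the original sources):
   given

     int x, y;
     int *p = &x;
     int *q = &y;

   the points-to sets of p and q are {x} and {y}; they do not
   intersect, so once both SSA names carry SSA_NAME_PTR_INFO the
   function above can return false for p and q.  */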
314 /* Return true if dereferencing PTR may alias *REF.
315 The caller is responsible for applying TBAA to see if PTR
316 may access *REF at all. */
317
318 static bool
319 ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
320 {
321 tree base = ao_ref_base (ref);
322
323 if (TREE_CODE (base) == MEM_REF
324 || TREE_CODE (base) == TARGET_MEM_REF)
325 return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
326 else if (DECL_P (base))
327 return ptr_deref_may_alias_decl_p (ptr, base);
328
329 return true;
330 }
331
332 /* Returns true if PTR1 and PTR2 compare unequal because of points-to. */
333
334 bool
335 ptrs_compare_unequal (tree ptr1, tree ptr2)
336 {
337 /* First resolve the pointers down to a SSA name pointer base or
 338 a VAR_DECL, PARM_DECL or RESULT_DECL. This explicitly does
 339 not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
 340 or STRING_CSTs, which would need points-to adjustments to track
 341 them in the points-to sets. */
342 tree obj1 = NULL_TREE;
343 tree obj2 = NULL_TREE;
344 if (TREE_CODE (ptr1) == ADDR_EXPR)
345 {
346 tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
347 if (! tem)
348 return false;
349 if (VAR_P (tem)
350 || TREE_CODE (tem) == PARM_DECL
351 || TREE_CODE (tem) == RESULT_DECL)
352 obj1 = tem;
353 else if (TREE_CODE (tem) == MEM_REF)
354 ptr1 = TREE_OPERAND (tem, 0);
355 }
356 if (TREE_CODE (ptr2) == ADDR_EXPR)
357 {
358 tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
359 if (! tem)
360 return false;
361 if (VAR_P (tem)
362 || TREE_CODE (tem) == PARM_DECL
363 || TREE_CODE (tem) == RESULT_DECL)
364 obj2 = tem;
365 else if (TREE_CODE (tem) == MEM_REF)
366 ptr2 = TREE_OPERAND (tem, 0);
367 }
368
369 /* Canonicalize ptr vs. object. */
370 if (TREE_CODE (ptr1) == SSA_NAME && obj2)
371 {
372 std::swap (ptr1, ptr2);
373 std::swap (obj1, obj2);
374 }
375
376 if (obj1 && obj2)
377 /* Other code handles this correctly, no need to duplicate it here. */;
378 else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
379 {
380 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
 381 /* We may not use restrict to optimize pointer comparisons.
 382 See PR71062. So we have to assume that the restrict-pointed-to
 383 object may in fact be obj1. */
384 if (!pi
385 || pi->pt.vars_contains_restrict
386 || pi->pt.vars_contains_interposable)
387 return false;
388 if (VAR_P (obj1)
389 && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
390 {
391 varpool_node *node = varpool_node::get (obj1);
392 /* If obj1 may bind to NULL give up (see below). */
393 if (! node
394 || ! node->nonzero_address ()
395 || ! decl_binds_to_current_def_p (obj1))
396 return false;
397 }
398 return !pt_solution_includes (&pi->pt, obj1);
399 }
400
401 /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
402 but those require pt.null to be conservatively correct. */
403
404 return false;
405 }
406
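/* Example (an illustrative sketch, not part of the original sources):
   for

     int a;
     int *p = ...;    where the points-to set of p excludes a

   a comparison like p == &a can be folded to false, because obj1 == a
   is not in the points-to solution of the remaining SSA pointer.  */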
 407 /* Returns whether a reference based on BASE may refer to global memory. */
408
409 static bool
410 ref_may_alias_global_p_1 (tree base)
411 {
412 if (DECL_P (base))
413 return is_global_var (base);
414 else if (TREE_CODE (base) == MEM_REF
415 || TREE_CODE (base) == TARGET_MEM_REF)
416 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
417 return true;
418 }
419
420 bool
421 ref_may_alias_global_p (ao_ref *ref)
422 {
423 tree base = ao_ref_base (ref);
424 return ref_may_alias_global_p_1 (base);
425 }
426
427 bool
428 ref_may_alias_global_p (tree ref)
429 {
430 tree base = get_base_address (ref);
431 return ref_may_alias_global_p_1 (base);
432 }
433
 434 /* Return true if STMT may clobber global memory. */
435
436 bool
437 stmt_may_clobber_global_p (gimple *stmt)
438 {
439 tree lhs;
440
441 if (!gimple_vdef (stmt))
442 return false;
443
 444 /* ??? We can ask the oracle whether an artificial pointer
 445 dereference with a pointer with points-to information covering
 446 all global memory (what about non-address-taken memory?) may be
 447 clobbered by this call. As there is at the moment no convenient
 448 way of doing that without generating garbage, do some manual
 449 checking instead.
450 ??? We could make a NULL ao_ref argument to the various
451 predicates special, meaning any global memory. */
452
453 switch (gimple_code (stmt))
454 {
455 case GIMPLE_ASSIGN:
456 lhs = gimple_assign_lhs (stmt);
457 return (TREE_CODE (lhs) != SSA_NAME
458 && ref_may_alias_global_p (lhs));
459 case GIMPLE_CALL:
460 return true;
461 default:
462 return true;
463 }
464 }
465
466
467 /* Dump alias information on FILE. */
468
469 void
470 dump_alias_info (FILE *file)
471 {
472 unsigned i;
473 tree ptr;
474 const char *funcname
475 = lang_hooks.decl_printable_name (current_function_decl, 2);
476 tree var;
477
478 fprintf (file, "\n\nAlias information for %s\n\n", funcname);
479
480 fprintf (file, "Aliased symbols\n\n");
481
482 FOR_EACH_LOCAL_DECL (cfun, i, var)
483 {
484 if (may_be_aliased (var))
485 dump_variable (file, var);
486 }
487
488 fprintf (file, "\nCall clobber information\n");
489
490 fprintf (file, "\nESCAPED");
491 dump_points_to_solution (file, &cfun->gimple_df->escaped);
492
493 fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
494
495 FOR_EACH_SSA_NAME (i, ptr, cfun)
496 {
497 struct ptr_info_def *pi;
498
499 if (!POINTER_TYPE_P (TREE_TYPE (ptr))
500 || SSA_NAME_IN_FREE_LIST (ptr))
501 continue;
502
503 pi = SSA_NAME_PTR_INFO (ptr);
504 if (pi)
505 dump_points_to_info_for (file, ptr);
506 }
507
508 fprintf (file, "\n");
509 }
510
511
512 /* Dump alias information on stderr. */
513
514 DEBUG_FUNCTION void
515 debug_alias_info (void)
516 {
517 dump_alias_info (stderr);
518 }
519
520
521 /* Dump the points-to set *PT into FILE. */
522
523 void
524 dump_points_to_solution (FILE *file, struct pt_solution *pt)
525 {
526 if (pt->anything)
527 fprintf (file, ", points-to anything");
528
529 if (pt->nonlocal)
530 fprintf (file, ", points-to non-local");
531
532 if (pt->escaped)
533 fprintf (file, ", points-to escaped");
534
535 if (pt->ipa_escaped)
536 fprintf (file, ", points-to unit escaped");
537
538 if (pt->null)
539 fprintf (file, ", points-to NULL");
540
541 if (pt->vars)
542 {
543 fprintf (file, ", points-to vars: ");
544 dump_decl_set (file, pt->vars);
545 if (pt->vars_contains_nonlocal
546 || pt->vars_contains_escaped
547 || pt->vars_contains_escaped_heap
548 || pt->vars_contains_restrict)
549 {
550 const char *comma = "";
551 fprintf (file, " (");
552 if (pt->vars_contains_nonlocal)
553 {
554 fprintf (file, "nonlocal");
555 comma = ", ";
556 }
557 if (pt->vars_contains_escaped)
558 {
559 fprintf (file, "%sescaped", comma);
560 comma = ", ";
561 }
562 if (pt->vars_contains_escaped_heap)
563 {
564 fprintf (file, "%sescaped heap", comma);
565 comma = ", ";
566 }
567 if (pt->vars_contains_restrict)
568 {
569 fprintf (file, "%srestrict", comma);
570 comma = ", ";
571 }
572 if (pt->vars_contains_interposable)
573 fprintf (file, "%sinterposable", comma);
574 fprintf (file, ")");
575 }
576 }
577 }
578
579
580 /* Unified dump function for pt_solution. */
581
582 DEBUG_FUNCTION void
583 debug (pt_solution &ref)
584 {
585 dump_points_to_solution (stderr, &ref);
586 }
587
588 DEBUG_FUNCTION void
589 debug (pt_solution *ptr)
590 {
591 if (ptr)
592 debug (*ptr);
593 else
594 fprintf (stderr, "<nil>\n");
595 }
596
597
598 /* Dump points-to information for SSA_NAME PTR into FILE. */
599
600 void
601 dump_points_to_info_for (FILE *file, tree ptr)
602 {
603 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
604
605 print_generic_expr (file, ptr, dump_flags);
606
607 if (pi)
608 dump_points_to_solution (file, &pi->pt);
609 else
610 fprintf (file, ", points-to anything");
611
612 fprintf (file, "\n");
613 }
614
615
616 /* Dump points-to information for VAR into stderr. */
617
618 DEBUG_FUNCTION void
619 debug_points_to_info_for (tree var)
620 {
621 dump_points_to_info_for (stderr, var);
622 }
623
624
625 /* Initializes the alias-oracle reference representation *R from REF. */
626
627 void
628 ao_ref_init (ao_ref *r, tree ref)
629 {
630 r->ref = ref;
631 r->base = NULL_TREE;
632 r->offset = 0;
633 r->size = -1;
634 r->max_size = -1;
635 r->ref_alias_set = -1;
636 r->base_alias_set = -1;
637 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
638 }
639
640 /* Returns the base object of the memory reference *REF. */
641
642 tree
643 ao_ref_base (ao_ref *ref)
644 {
645 bool reverse;
646
647 if (ref->base)
648 return ref->base;
649 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
650 &ref->max_size, &reverse);
651 return ref->base;
652 }
653
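/* Example (an illustrative sketch, not part of the original sources):
   decomposing a hypothetical reference tree EXPR into the lazily
   computed fields:

     ao_ref r;
     ao_ref_init (&r, expr);
     tree base = ao_ref_base (&r);

   After the ao_ref_base call, r.offset, r.size and r.max_size hold
   the extent of the access in bits relative to BASE.  */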
654 /* Returns the base object alias set of the memory reference *REF. */
655
656 alias_set_type
657 ao_ref_base_alias_set (ao_ref *ref)
658 {
659 tree base_ref;
660 if (ref->base_alias_set != -1)
661 return ref->base_alias_set;
662 if (!ref->ref)
663 return 0;
664 base_ref = ref->ref;
665 while (handled_component_p (base_ref))
666 base_ref = TREE_OPERAND (base_ref, 0);
667 ref->base_alias_set = get_alias_set (base_ref);
668 return ref->base_alias_set;
669 }
670
671 /* Returns the reference alias set of the memory reference *REF. */
672
673 alias_set_type
674 ao_ref_alias_set (ao_ref *ref)
675 {
676 if (ref->ref_alias_set != -1)
677 return ref->ref_alias_set;
678 ref->ref_alias_set = get_alias_set (ref->ref);
679 return ref->ref_alias_set;
680 }
681
682 /* Init an alias-oracle reference representation from a gimple pointer
683 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
 684 size is assumed to be unknown. The access is assumed to be only
 685 to or after the pointer target, not before it. */
686
687 void
688 ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
689 {
690 poly_int64 t, size_hwi, extra_offset = 0;
691 ref->ref = NULL_TREE;
692 if (TREE_CODE (ptr) == SSA_NAME)
693 {
694 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
695 if (gimple_assign_single_p (stmt)
696 && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
697 ptr = gimple_assign_rhs1 (stmt);
698 else if (is_gimple_assign (stmt)
699 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
700 && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
701 {
702 ptr = gimple_assign_rhs1 (stmt);
703 extra_offset *= BITS_PER_UNIT;
704 }
705 }
706
707 if (TREE_CODE (ptr) == ADDR_EXPR)
708 {
709 ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
710 if (ref->base)
711 ref->offset = BITS_PER_UNIT * t;
712 else
713 {
714 size = NULL_TREE;
715 ref->offset = 0;
716 ref->base = get_base_address (TREE_OPERAND (ptr, 0));
717 }
718 }
719 else
720 {
721 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
722 ref->base = build2 (MEM_REF, char_type_node,
723 ptr, null_pointer_node);
724 ref->offset = 0;
725 }
726 ref->offset += extra_offset;
727 if (size
728 && poly_int_tree_p (size, &size_hwi)
729 && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
730 ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
731 else
732 ref->max_size = ref->size = -1;
733 ref->ref_alias_set = 0;
734 ref->base_alias_set = 0;
735 ref->volatile_p = false;
736 }
737
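/* Example (an illustrative sketch, not part of the original sources):
   the call handling later in this file uses this initializer to model
   builtin arguments; e.g. for memcpy (dest, src, len) the source
   access can be described as

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref,
				    gimple_call_arg (call, 1),
				    gimple_call_arg (call, 2));

   where call is the gcall * for the memcpy call.  */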
738 /* S1 and S2 are TYPE_SIZE or DECL_SIZE. Compare them:
739 Return -1 if S1 < S2
740 Return 1 if S1 > S2
741 Return 0 if equal or incomparable. */
742
743 static int
744 compare_sizes (tree s1, tree s2)
745 {
746 if (!s1 || !s2)
747 return 0;
748
749 poly_uint64 size1;
750 poly_uint64 size2;
751
752 if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
753 return 0;
754 if (known_lt (size1, size2))
755 return -1;
756 if (known_lt (size2, size1))
757 return 1;
758 return 0;
759 }
760
 761 /* Compare TYPE1 and TYPE2 by their size.
 762 Return -1 if size of TYPE1 < size of TYPE2
 763 Return 1 if size of TYPE1 > size of TYPE2
 764 Return 0 if types are of equal sizes or we cannot compare them. */
765
766 static int
767 compare_type_sizes (tree type1, tree type2)
768 {
769 /* Be conservative for arrays and vectors. We want to support partial
770 overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */
771 while (TREE_CODE (type1) == ARRAY_TYPE
772 || TREE_CODE (type1) == VECTOR_TYPE)
773 type1 = TREE_TYPE (type1);
774 while (TREE_CODE (type2) == ARRAY_TYPE
775 || TREE_CODE (type2) == VECTOR_TYPE)
776 type2 = TREE_TYPE (type2);
777 return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
778 }
779
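/* For example (illustrative only): comparing int[3] against int strips
   the array layer from the first type, so the sizes of int and int are
   compared and 0 is returned, keeping partial overlap of the arrays on
   the table.  */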
780 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
781 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
782 decide. */
783
784 static inline int
785 same_type_for_tbaa (tree type1, tree type2)
786 {
787 type1 = TYPE_MAIN_VARIANT (type1);
788 type2 = TYPE_MAIN_VARIANT (type2);
789
790 /* Handle the most common case first. */
791 if (type1 == type2)
792 return 1;
793
794 /* If we would have to do structural comparison bail out. */
795 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
796 || TYPE_STRUCTURAL_EQUALITY_P (type2))
797 return -1;
798
799 /* Compare the canonical types. */
800 if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
801 return 1;
802
 803 /* ??? Array types are not properly unified in all cases as we have
 804 spurious changes in the index types, for example. Removing this
805 causes all sorts of problems with the Fortran frontend. */
806 if (TREE_CODE (type1) == ARRAY_TYPE
807 && TREE_CODE (type2) == ARRAY_TYPE)
808 return -1;
809
810 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
811 object of one of its constrained subtypes, e.g. when a function with an
812 unconstrained parameter passed by reference is called on an object and
813 inlined. But, even in the case of a fixed size, type and subtypes are
814 not equivalent enough as to share the same TYPE_CANONICAL, since this
815 would mean that conversions between them are useless, whereas they are
816 not (e.g. type and subtypes can have different modes). So, in the end,
817 they are only guaranteed to have the same alias set. */
818 if (get_alias_set (type1) == get_alias_set (type2))
819 return -1;
820
821 /* The types are known to be not equal. */
822 return 0;
823 }
824
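/* Because the result above is tri-state, callers must test against 1
   (known equivalent) or 0 (known distinct) explicitly; treating the
   return value as a boolean would lump "undecidable" (-1) together
   with "known equivalent" (1). An illustrative pattern:

     if (same_type_for_tbaa (t1, t2) == 1)
       ...   use equality-based disambiguation
     else if (same_type_for_tbaa (t1, t2) == -1)
       ...   be conservative

   with hypothetical type trees t1 and t2.  */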
 825 /* Determine if the two component references REF1 and REF2, of which
 826 at least one is based on an indirect reference, may alias.
 827 REF2 is the only one that can be a decl, in which case REF2_IS_DECL
 828 is true.
829 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
830 are the respective alias sets. */
831
832 static bool
833 aliasing_component_refs_p (tree ref1,
834 alias_set_type ref1_alias_set,
835 alias_set_type base1_alias_set,
836 poly_int64 offset1, poly_int64 max_size1,
837 tree ref2,
838 alias_set_type ref2_alias_set,
839 alias_set_type base2_alias_set,
840 poly_int64 offset2, poly_int64 max_size2,
841 bool ref2_is_decl)
842 {
 843 /* If one reference is a component reference through pointers, try to find a
844 common base and apply offset based disambiguation. This handles
845 for example
846 struct A { int i; int j; } *q;
847 struct B { struct A a; int k; } *p;
848 disambiguating q->i and p->a.j. */
849 tree base1, base2;
850 tree type1, type2;
851 tree *refp;
852 int same_p1 = 0, same_p2 = 0;
853
854 /* Choose bases and base types to search for. */
855 base1 = ref1;
856 while (handled_component_p (base1))
857 base1 = TREE_OPERAND (base1, 0);
858 type1 = TREE_TYPE (base1);
859 base2 = ref2;
860 while (handled_component_p (base2))
861 base2 = TREE_OPERAND (base2, 0);
862 type2 = TREE_TYPE (base2);
863
 864 /* Now search for type1 in the access path of ref2. This
865 would be a common base for doing offset based disambiguation on.
866 This however only makes sense if type2 is big enough to hold type1. */
867 int cmp_outer = compare_type_sizes (type2, type1);
868 if (cmp_outer >= 0)
869 {
870 refp = &ref2;
871 while (true)
872 {
 873 /* We walk from the inner type to the outer types. If the type we see
 874 is already too large to be part of type1, terminate the search. */
875 int cmp = compare_type_sizes (type1, TREE_TYPE (*refp));
876 if (cmp < 0)
877 break;
 878 /* If the types may be of the same size, see if we can decide about
 879 their equality. */
880 if (cmp == 0)
881 {
882 same_p2 = same_type_for_tbaa (TREE_TYPE (*refp), type1);
883 if (same_p2 != 0)
884 break;
885 }
886 if (!handled_component_p (*refp))
887 break;
888 refp = &TREE_OPERAND (*refp, 0);
889 }
890 if (same_p2 == 1)
891 {
892 poly_int64 offadj, sztmp, msztmp;
893 bool reverse;
894 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
895 offset2 -= offadj;
896 get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse);
897 offset1 -= offadj;
898 if (ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
899 {
900 ++alias_stats.aliasing_component_refs_p_may_alias;
901 return true;
902 }
903 else
904 {
905 ++alias_stats.aliasing_component_refs_p_no_alias;
906 return false;
907 }
908 }
909 }
910
911 /* If we didn't find a common base, try the other way around. */
912 if (cmp_outer <= 0)
913 {
914 refp = &ref1;
915 while (true)
916 {
917 int cmp = compare_type_sizes (type2, TREE_TYPE (*refp));
918 if (cmp < 0)
919 break;
 920 /* If the types may be of the same size, see if we can decide about
 921 their equality. */
922 if (cmp == 0)
923 {
924 same_p1 = same_type_for_tbaa (TREE_TYPE (*refp), type2);
925 if (same_p1 != 0)
926 break;
927 }
928 if (!handled_component_p (*refp))
929 break;
930 refp = &TREE_OPERAND (*refp, 0);
931 }
932 if (same_p1 == 1)
933 {
934 poly_int64 offadj, sztmp, msztmp;
935 bool reverse;
936
937 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
938 offset1 -= offadj;
939 get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse);
940 offset2 -= offadj;
941 if (ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
942 {
943 ++alias_stats.aliasing_component_refs_p_may_alias;
944 return true;
945 }
946 else
947 {
948 ++alias_stats.aliasing_component_refs_p_no_alias;
949 return false;
950 }
951 }
952 }
953
 954 /* In the following code we make the assumption that the types in access
 955 paths do not overlap and thus accesses alias only if one path can be a
 956 continuation of another. If we were not able to decide on equivalence,
 957 we need to give up. */
958 if (same_p1 == -1 || same_p2 == -1)
959 return true;
960
961 /* If we have two type access paths B1.path1 and B2.path2 they may
962 only alias if either B1 is in B2.path2 or B2 is in B1.path1.
963 But we can still have a path that goes B1.path1...B2.path2 with
964 a part that we do not see. So we can only disambiguate now
 965 if there is no B2 in the tail of path1 and no B1 in the
 966 tail of path2. */
967 if (compare_type_sizes (TREE_TYPE (ref2), type1) >= 0
968 && (base1_alias_set == ref2_alias_set
969 || alias_set_subset_of (base1_alias_set, ref2_alias_set)))
970 {
971 ++alias_stats.aliasing_component_refs_p_may_alias;
972 return true;
973 }
974 /* If this is ptr vs. decl then we know there is no ptr ... decl path. */
975 if (!ref2_is_decl
976 && compare_type_sizes (TREE_TYPE (ref1), type2) >= 0
977 && (base2_alias_set == ref1_alias_set
978 || alias_set_subset_of (base2_alias_set, ref1_alias_set)))
979 {
980 ++alias_stats.aliasing_component_refs_p_may_alias;
981 return true;
982 }
983 ++alias_stats.aliasing_component_refs_p_no_alias;
984 return false;
985 }
986
987 /* Return true if we can determine that component references REF1 and REF2,
988 that are within a common DECL, cannot overlap. */
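/* For instance (an illustrative source-level sketch), given

     struct S { struct { int x; int y; } a, b; } s;

   the accesses s.a.x and s.b.y pop COMPONENT_REFs of equal rank and
   find the distinct fields a and b in the same RECORD_TYPE, so the
   function below returns true; inside a union the answer is false.  */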
989
990 static bool
991 nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
992 {
993 auto_vec<tree, 16> component_refs1;
994 auto_vec<tree, 16> component_refs2;
995
996 /* Create the stack of handled components for REF1. */
997 while (handled_component_p (ref1))
998 {
999 component_refs1.safe_push (ref1);
1000 ref1 = TREE_OPERAND (ref1, 0);
1001 }
1002 if (TREE_CODE (ref1) == MEM_REF)
1003 {
1004 if (!integer_zerop (TREE_OPERAND (ref1, 1)))
1005 return false;
1006 ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
1007 }
1008
1009 /* Create the stack of handled components for REF2. */
1010 while (handled_component_p (ref2))
1011 {
1012 component_refs2.safe_push (ref2);
1013 ref2 = TREE_OPERAND (ref2, 0);
1014 }
1015 if (TREE_CODE (ref2) == MEM_REF)
1016 {
1017 if (!integer_zerop (TREE_OPERAND (ref2, 1)))
1018 return false;
1019 ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
1020 }
1021
 1022 /* Bases must be either the same or uncomparable. */
1023 gcc_checking_assert (ref1 == ref2
1024 || (DECL_P (ref1) && DECL_P (ref2)
1025 && compare_base_decls (ref1, ref2) != 0));
1026
1027 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1028 rank. This is sufficient because we start from the same DECL and you
1029 cannot reference several fields at a time with COMPONENT_REFs (unlike
1030 with ARRAY_RANGE_REFs for arrays) so you always need the same number
1031 of them to access a sub-component, unless you're in a union, in which
1032 case the return value will precisely be false. */
1033 while (true)
1034 {
1035 do
1036 {
1037 if (component_refs1.is_empty ())
1038 return false;
1039 ref1 = component_refs1.pop ();
1040 }
1041 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
1042
1043 do
1044 {
1045 if (component_refs2.is_empty ())
1046 return false;
1047 ref2 = component_refs2.pop ();
1048 }
1049 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
1050
1051 /* Beware of BIT_FIELD_REF. */
1052 if (TREE_CODE (ref1) != COMPONENT_REF
1053 || TREE_CODE (ref2) != COMPONENT_REF)
1054 return false;
1055
1056 tree field1 = TREE_OPERAND (ref1, 1);
1057 tree field2 = TREE_OPERAND (ref2, 1);
1058
1059 /* ??? We cannot simply use the type of operand #0 of the refs here
1060 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1061 for common blocks instead of using unions like everyone else. */
1062 tree type1 = DECL_CONTEXT (field1);
1063 tree type2 = DECL_CONTEXT (field2);
1064
1065 /* We cannot disambiguate fields in a union or qualified union. */
1066 if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
1067 return false;
1068
1069 if (field1 != field2)
1070 {
1071 /* A field and its representative need to be considered the
1072 same. */
1073 if (DECL_BIT_FIELD_REPRESENTATIVE (field1) == field2
1074 || DECL_BIT_FIELD_REPRESENTATIVE (field2) == field1)
1075 return false;
1076 /* Different fields of the same record type cannot overlap.
1077 ??? Bitfields can overlap at RTL level so punt on them. */
1078 if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
1079 return false;
1080 return true;
1081 }
1082 }
1083
1084 return false;
1085 }
1086
 1087 /* qsort compare function to sort FIELD_DECLs by their
 1088 DECL_FIELD_CONTEXT TYPE_UID. */
1089
1090 static inline int
1091 ncr_compar (const void *field1_, const void *field2_)
1092 {
1093 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1094 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1095 unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
1096 unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
1097 if (uid1 < uid2)
1098 return -1;
1099 else if (uid1 > uid2)
1100 return 1;
1101 return 0;
1102 }
1103
1104 /* Return true if we can determine that the fields referenced cannot
1105 overlap for any pair of objects. */
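/* For instance (an illustrative source-level sketch), given

     struct S { int f; int g; } *p, *q;

   the accesses p->f and q->g cannot overlap for any pair of objects:
   both access paths go through the same RECORD_TYPE but end in
   different non-bitfield FIELD_DECLs.  */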
1106
1107 static bool
1108 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1109 {
1110 if (!flag_strict_aliasing
1111 || !x || !y
1112 || TREE_CODE (x) != COMPONENT_REF
1113 || TREE_CODE (y) != COMPONENT_REF)
1114 return false;
1115
1116 auto_vec<const_tree, 16> fieldsx;
1117 while (TREE_CODE (x) == COMPONENT_REF)
1118 {
1119 tree field = TREE_OPERAND (x, 1);
1120 tree type = DECL_FIELD_CONTEXT (field);
1121 if (TREE_CODE (type) == RECORD_TYPE)
1122 fieldsx.safe_push (field);
1123 x = TREE_OPERAND (x, 0);
1124 }
1125 if (fieldsx.length () == 0)
1126 return false;
1127 auto_vec<const_tree, 16> fieldsy;
1128 while (TREE_CODE (y) == COMPONENT_REF)
1129 {
1130 tree field = TREE_OPERAND (y, 1);
1131 tree type = DECL_FIELD_CONTEXT (field);
1132 if (TREE_CODE (type) == RECORD_TYPE)
 1133 fieldsy.safe_push (field);
1134 y = TREE_OPERAND (y, 0);
1135 }
1136 if (fieldsy.length () == 0)
1137 return false;
1138
1139 /* Most common case first. */
1140 if (fieldsx.length () == 1
1141 && fieldsy.length () == 1)
1142 return ((DECL_FIELD_CONTEXT (fieldsx[0])
1143 == DECL_FIELD_CONTEXT (fieldsy[0]))
1144 && fieldsx[0] != fieldsy[0]
1145 && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));
1146
1147 if (fieldsx.length () == 2)
1148 {
1149 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1150 std::swap (fieldsx[0], fieldsx[1]);
1151 }
1152 else
1153 fieldsx.qsort (ncr_compar);
1154
1155 if (fieldsy.length () == 2)
1156 {
1157 if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1158 std::swap (fieldsy[0], fieldsy[1]);
1159 }
1160 else
1161 fieldsy.qsort (ncr_compar);
1162
1163 unsigned i = 0, j = 0;
1164 do
1165 {
1166 const_tree fieldx = fieldsx[i];
1167 const_tree fieldy = fieldsy[j];
1168 tree typex = DECL_FIELD_CONTEXT (fieldx);
1169 tree typey = DECL_FIELD_CONTEXT (fieldy);
1170 if (typex == typey)
1171 {
1172 /* We're left with accessing different fields of a structure,
1173 no possible overlap. */
1174 if (fieldx != fieldy)
1175 {
1176 /* A field and its representative need to be considered the
1177 same. */
1178 if (DECL_BIT_FIELD_REPRESENTATIVE (fieldx) == fieldy
1179 || DECL_BIT_FIELD_REPRESENTATIVE (fieldy) == fieldx)
1180 return false;
1181 /* Different fields of the same record type cannot overlap.
1182 ??? Bitfields can overlap at RTL level so punt on them. */
1183 if (DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy))
1184 return false;
1185 return true;
1186 }
1187 }
1188 if (TYPE_UID (typex) < TYPE_UID (typey))
1189 {
1190 i++;
1191 if (i == fieldsx.length ())
1192 break;
1193 }
1194 else
1195 {
1196 j++;
1197 if (j == fieldsy.length ())
1198 break;
1199 }
1200 }
1201 while (1);
1202
1203 return false;
1204 }
1205
1206
1207 /* Return true if two memory references based on the variables BASE1
1208 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1209 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1210 if non-NULL are the complete memory reference trees. */
1211
1212 static bool
1213 decl_refs_may_alias_p (tree ref1, tree base1,
1214 poly_int64 offset1, poly_int64 max_size1,
1215 tree ref2, tree base2,
1216 poly_int64 offset2, poly_int64 max_size2)
1217 {
1218 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
1219
1220 /* If both references are based on different variables, they cannot alias. */
1221 if (compare_base_decls (base1, base2) == 0)
1222 return false;
1223
1224 /* If both references are based on the same variable, they cannot alias if
1225 the accesses do not overlap. */
1226 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1227 return false;
1228
1229 /* For components with variable position, the above test isn't sufficient,
1230 so we disambiguate component references manually. */
1231 if (ref1 && ref2
1232 && handled_component_p (ref1) && handled_component_p (ref2)
1233 && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
1234 return false;
1235
1236 return true;
1237 }
1238
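/* A worked example (illustrative only): for int a[10] with 32-bit int,
   the access a[1] has OFFSET1 == 32 and MAX_SIZE1 == 32 while a[2]
   has OFFSET2 == 64, so the ranges [32, 64) and [64, 96) do not
   overlap and the function above returns false.  */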
1239 /* Return true if an indirect reference based on *PTR1 constrained
1240 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
1241 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
1242 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1243 in which case they are computed on-demand. REF1 and REF2
1244 if non-NULL are the complete memory reference trees. */
1245
1246 static bool
1247 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1248 poly_int64 offset1, poly_int64 max_size1,
1249 alias_set_type ref1_alias_set,
1250 alias_set_type base1_alias_set,
1251 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1252 poly_int64 offset2, poly_int64 max_size2,
1253 alias_set_type ref2_alias_set,
1254 alias_set_type base2_alias_set, bool tbaa_p)
1255 {
1256 tree ptr1;
1257 tree ptrtype1, dbase2;
1258
1259 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1260 || TREE_CODE (base1) == TARGET_MEM_REF)
1261 && DECL_P (base2));
1262
1263 ptr1 = TREE_OPERAND (base1, 0);
1264 poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
1265
1266 /* If only one reference is based on a variable, they cannot alias if
1267 the pointer access is beyond the extent of the variable access.
1268 (the pointer base cannot validly point to an offset less than zero
1269 of the variable).
1270 ??? IVOPTs creates bases that do not honor this restriction,
1271 so do not apply this optimization for TARGET_MEM_REFs. */
1272 if (TREE_CODE (base1) != TARGET_MEM_REF
1273 && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
1274 return false;
1275 /* They also cannot alias if the pointer may not point to the decl. */
1276 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
1277 return false;
1278
1279 /* Disambiguations that rely on strict aliasing rules follow. */
1280 if (!flag_strict_aliasing || !tbaa_p)
1281 return true;
1282
1283 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1284
1285 /* If the alias set for a pointer access is zero all bets are off. */
1286 if (base1_alias_set == 0)
1287 return true;
1288
1289 /* When we are trying to disambiguate an access with a pointer dereference
1290 as base versus one with a decl as base we can use both the size
1291 of the decl and its dynamic type for extra disambiguation.
1292 ??? We do not know anything about the dynamic type of the decl
1293 other than that its alias-set contains base2_alias_set as a subset
1294 which does not help us here. */
1295 /* As we know nothing useful about the dynamic type of the decl just
1296 use the usual conflict check rather than a subset test.
1297 ??? We could introduce -fvery-strict-aliasing when the language
1298 does not allow decls to have a dynamic type that differs from their
1299 static type. Then we can check
1300 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
1301 if (base1_alias_set != base2_alias_set
1302 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1303 return false;
1304 /* If the size of the access relevant for TBAA through the pointer
1305 is bigger than the size of the decl we can't possibly access the
1306 decl via that pointer. */
1307 if (/* ??? This in turn may run afoul when a decl of type T which is
1308 a member of union type U is accessed through a pointer to
1309 type U and sizeof T is smaller than sizeof U. */
1310 TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
1311 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
1312 && compare_sizes (DECL_SIZE (base2),
1313 TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
1314 return false;
1315
1316 if (!ref2)
1317 return true;
1318
1319 /* If the decl is accessed via a MEM_REF, reconstruct the base
1320 we can use for TBAA and an appropriately adjusted offset. */
1321 dbase2 = ref2;
1322 while (handled_component_p (dbase2))
1323 dbase2 = TREE_OPERAND (dbase2, 0);
1324 poly_int64 doffset1 = offset1;
1325 poly_offset_int doffset2 = offset2;
1326 if (TREE_CODE (dbase2) == MEM_REF
1327 || TREE_CODE (dbase2) == TARGET_MEM_REF)
1328 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
1329
1330 /* If either reference is view-converted, give up now. */
1331 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
1332 || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
1333 return true;
1334
1335 /* If both references are through the same type, they do not alias
1336 if the accesses do not overlap. This does extra disambiguation
1337 for mixed/pointer accesses but requires strict aliasing.
1338 For MEM_REFs we require that the component-ref offset we computed
1339 is relative to the start of the type which we ensure by
1340 comparing rvalue and access type and disregarding the constant
1341 pointer offset. */
1342 if ((TREE_CODE (base1) != TARGET_MEM_REF
1343 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1344 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
1345 return ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2);
1346
1347 if (ref1 && ref2
1348 && nonoverlapping_component_refs_p (ref1, ref2))
1349 return false;
1350
1351 /* Do access-path based disambiguation. */
1352 if (ref1 && ref2
1353 && (handled_component_p (ref1) || handled_component_p (ref2)))
1354 return aliasing_component_refs_p (ref1,
1355 ref1_alias_set, base1_alias_set,
1356 offset1, max_size1,
1357 ref2,
1358 ref2_alias_set, base2_alias_set,
1359 offset2, max_size2, true);
1360
1361 return true;
1362 }
1363
1364 /* Return true if two indirect references based on *PTR1
1365 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1366 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
1367 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1368 in which case they are computed on-demand. REF1 and REF2
1369 if non-NULL are the complete memory reference trees. */
1370
1371 static bool
1372 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1373 poly_int64 offset1, poly_int64 max_size1,
1374 alias_set_type ref1_alias_set,
1375 alias_set_type base1_alias_set,
1376 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1377 poly_int64 offset2, poly_int64 max_size2,
1378 alias_set_type ref2_alias_set,
1379 alias_set_type base2_alias_set, bool tbaa_p)
1380 {
1381 tree ptr1;
1382 tree ptr2;
1383 tree ptrtype1, ptrtype2;
1384
1385 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1386 || TREE_CODE (base1) == TARGET_MEM_REF)
1387 && (TREE_CODE (base2) == MEM_REF
1388 || TREE_CODE (base2) == TARGET_MEM_REF));
1389
1390 ptr1 = TREE_OPERAND (base1, 0);
1391 ptr2 = TREE_OPERAND (base2, 0);
1392
1393 /* If both bases are based on pointers they cannot alias if they may not
1394 point to the same memory object or if they point to the same object
1395 and the accesses do not overlap. */
1396 if ((!cfun || gimple_in_ssa_p (cfun))
1397 && operand_equal_p (ptr1, ptr2, 0)
1398 && (((TREE_CODE (base1) != TARGET_MEM_REF
1399 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1400 && (TREE_CODE (base2) != TARGET_MEM_REF
1401 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
1402 || (TREE_CODE (base1) == TARGET_MEM_REF
1403 && TREE_CODE (base2) == TARGET_MEM_REF
1404 && (TMR_STEP (base1) == TMR_STEP (base2)
1405 || (TMR_STEP (base1) && TMR_STEP (base2)
1406 && operand_equal_p (TMR_STEP (base1),
1407 TMR_STEP (base2), 0)))
1408 && (TMR_INDEX (base1) == TMR_INDEX (base2)
1409 || (TMR_INDEX (base1) && TMR_INDEX (base2)
1410 && operand_equal_p (TMR_INDEX (base1),
1411 TMR_INDEX (base2), 0)))
1412 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
1413 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
1414 && operand_equal_p (TMR_INDEX2 (base1),
1415 TMR_INDEX2 (base2), 0))))))
1416 {
1417 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
1418 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
1419 return ranges_maybe_overlap_p (offset1 + moff1, max_size1,
1420 offset2 + moff2, max_size2);
1421 }
1422 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
1423 return false;
1424
1425 /* Disambiguations that rely on strict aliasing rules follow. */
1426 if (!flag_strict_aliasing || !tbaa_p)
1427 return true;
1428
1429 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1430 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
1431
1432 /* If the alias set for a pointer access is zero all bets are off. */
1433 if (base1_alias_set == 0
1434 || base2_alias_set == 0)
1435 return true;
1436
1437 /* If both references are through the same type, they do not alias
1438 if the accesses do not overlap. This does extra disambiguation
1439 for mixed/pointer accesses but requires strict aliasing. */
1440 if ((TREE_CODE (base1) != TARGET_MEM_REF
1441 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1442 && (TREE_CODE (base2) != TARGET_MEM_REF
1443 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
1444 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
1445 && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
1446 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
1447 TREE_TYPE (ptrtype2)) == 1
 1448 /* But avoid treating arrays as "objects"; instead assume they
 1449 can overlap by an exact multiple of their element size. */
1450 && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
1451 return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);
1452
1453 /* Do type-based disambiguation. */
1454 if (base1_alias_set != base2_alias_set
1455 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1456 return false;
1457
1458 /* If either reference is view-converted, give up now. */
1459 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
1460 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
1461 return true;
1462
1463 if (ref1 && ref2
1464 && nonoverlapping_component_refs_p (ref1, ref2))
1465 return false;
1466
1467 /* Do access-path based disambiguation. */
1468 if (ref1 && ref2
1469 && (handled_component_p (ref1) || handled_component_p (ref2)))
1470 return aliasing_component_refs_p (ref1,
1471 ref1_alias_set, base1_alias_set,
1472 offset1, max_size1,
1473 ref2,
1474 ref2_alias_set, base2_alias_set,
1475 offset2, max_size2, false);
1476
1477 return true;
1478 }
1479
 1480 /* Return true if the two memory references REF1 and REF2 may alias. */
1481
1482 static bool
1483 refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1484 {
1485 tree base1, base2;
1486 poly_int64 offset1 = 0, offset2 = 0;
1487 poly_int64 max_size1 = -1, max_size2 = -1;
1488 bool var1_p, var2_p, ind1_p, ind2_p;
1489
1490 gcc_checking_assert ((!ref1->ref
1491 || TREE_CODE (ref1->ref) == SSA_NAME
1492 || DECL_P (ref1->ref)
1493 || TREE_CODE (ref1->ref) == STRING_CST
1494 || handled_component_p (ref1->ref)
1495 || TREE_CODE (ref1->ref) == MEM_REF
1496 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
1497 && (!ref2->ref
1498 || TREE_CODE (ref2->ref) == SSA_NAME
1499 || DECL_P (ref2->ref)
1500 || TREE_CODE (ref2->ref) == STRING_CST
1501 || handled_component_p (ref2->ref)
1502 || TREE_CODE (ref2->ref) == MEM_REF
1503 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
1504
1505 /* Decompose the references into their base objects and the access. */
1506 base1 = ao_ref_base (ref1);
1507 offset1 = ref1->offset;
1508 max_size1 = ref1->max_size;
1509 base2 = ao_ref_base (ref2);
1510 offset2 = ref2->offset;
1511 max_size2 = ref2->max_size;
1512
1513 /* We can end up with registers or constants as bases for example from
1514 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
1515 which is seen as a struct copy. */
1516 if (TREE_CODE (base1) == SSA_NAME
1517 || TREE_CODE (base1) == CONST_DECL
1518 || TREE_CODE (base1) == CONSTRUCTOR
1519 || TREE_CODE (base1) == ADDR_EXPR
1520 || CONSTANT_CLASS_P (base1)
1521 || TREE_CODE (base2) == SSA_NAME
1522 || TREE_CODE (base2) == CONST_DECL
1523 || TREE_CODE (base2) == CONSTRUCTOR
1524 || TREE_CODE (base2) == ADDR_EXPR
1525 || CONSTANT_CLASS_P (base2))
1526 return false;
1527
1528 /* We can end up referring to code via function and label decls.
 1529 As we likely do not properly track code aliases, conservatively
 1530 bail out. */
1531 if (TREE_CODE (base1) == FUNCTION_DECL
1532 || TREE_CODE (base1) == LABEL_DECL
1533 || TREE_CODE (base2) == FUNCTION_DECL
1534 || TREE_CODE (base2) == LABEL_DECL)
1535 return true;
1536
1537 /* Two volatile accesses always conflict. */
1538 if (ref1->volatile_p
1539 && ref2->volatile_p)
1540 return true;
1541
1542 /* Defer to simple offset based disambiguation if we have
 1543 references based on two decls. Do this before deferring to
1544 TBAA to handle must-alias cases in conformance with the
1545 GCC extension of allowing type-punning through unions. */
1546 var1_p = DECL_P (base1);
1547 var2_p = DECL_P (base2);
1548 if (var1_p && var2_p)
1549 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
1550 ref2->ref, base2, offset2, max_size2);
1551
1552 /* Handle restrict based accesses.
1553 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
1554 here. */
1555 tree rbase1 = base1;
1556 tree rbase2 = base2;
1557 if (var1_p)
1558 {
1559 rbase1 = ref1->ref;
1560 if (rbase1)
1561 while (handled_component_p (rbase1))
1562 rbase1 = TREE_OPERAND (rbase1, 0);
1563 }
1564 if (var2_p)
1565 {
1566 rbase2 = ref2->ref;
1567 if (rbase2)
1568 while (handled_component_p (rbase2))
1569 rbase2 = TREE_OPERAND (rbase2, 0);
1570 }
1571 if (rbase1 && rbase2
1572 && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
1573 && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
1574 /* If the accesses are in the same restrict clique... */
1575 && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
1576 /* But based on different pointers they do not alias. */
1577 && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
1578 return false;
1579
1580 ind1_p = (TREE_CODE (base1) == MEM_REF
1581 || TREE_CODE (base1) == TARGET_MEM_REF);
1582 ind2_p = (TREE_CODE (base2) == MEM_REF
1583 || TREE_CODE (base2) == TARGET_MEM_REF);
1584
1585 /* Canonicalize the pointer-vs-decl case. */
1586 if (ind1_p && var2_p)
1587 {
1588 std::swap (offset1, offset2);
1589 std::swap (max_size1, max_size2);
1590 std::swap (base1, base2);
1591 std::swap (ref1, ref2);
1592 var1_p = true;
1593 ind1_p = false;
1594 var2_p = false;
1595 ind2_p = true;
1596 }
1597
1598 /* First defer to TBAA if possible. */
1599 if (tbaa_p
1600 && flag_strict_aliasing
1601 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
1602 ao_ref_alias_set (ref2)))
1603 return false;
1604
 1605 /* If the reference is based on a pointer that points to memory
 1606 that may not be written to, then the other reference cannot possibly
 1607 clobber it. */
1608 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
1609 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
1610 || (ind1_p
1611 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
1612 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
1613 return false;
1614
1615 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
1616 if (var1_p && ind2_p)
1617 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
1618 offset2, max_size2,
1619 ao_ref_alias_set (ref2),
1620 ao_ref_base_alias_set (ref2),
1621 ref1->ref, base1,
1622 offset1, max_size1,
1623 ao_ref_alias_set (ref1),
1624 ao_ref_base_alias_set (ref1),
1625 tbaa_p);
1626 else if (ind1_p && ind2_p)
1627 return indirect_refs_may_alias_p (ref1->ref, base1,
1628 offset1, max_size1,
1629 ao_ref_alias_set (ref1),
1630 ao_ref_base_alias_set (ref1),
1631 ref2->ref, base2,
1632 offset2, max_size2,
1633 ao_ref_alias_set (ref2),
1634 ao_ref_base_alias_set (ref2),
1635 tbaa_p);
1636
1637 gcc_unreachable ();
1638 }
1639
 1640 /* Return true if the two memory references REF1 and REF2 may alias
1641 and update statistics. */
1642
1643 bool
1644 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1645 {
1646 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
1647 if (res)
1648 ++alias_stats.refs_may_alias_p_may_alias;
1649 else
1650 ++alias_stats.refs_may_alias_p_no_alias;
1651 return res;
1652 }
1653
1654 static bool
1655 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
1656 {
1657 ao_ref r1;
1658 ao_ref_init (&r1, ref1);
1659 return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
1660 }
1661
1662 bool
1663 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
1664 {
1665 ao_ref r1, r2;
1666 ao_ref_init (&r1, ref1);
1667 ao_ref_init (&r2, ref2);
1668 return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
1669 }
1670
 1671 /* Returns true if there is an anti-dependence for the STORE that
1672 executes after the LOAD. */
1673
1674 bool
1675 refs_anti_dependent_p (tree load, tree store)
1676 {
1677 ao_ref r1, r2;
1678 ao_ref_init (&r1, load);
1679 ao_ref_init (&r2, store);
1680 return refs_may_alias_p_1 (&r1, &r2, false);
1681 }
1682
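/* Note the tbaa_p == false argument above and below: anti- and output
   dependences constrain the reordering of a load and a store, or of
   two stores, and a store may legally change the dynamic type of the
   memory, so type-based disambiguation may not be used for these
   queries.  */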
 1683 /* Returns true if there is an output dependence for the stores
1684 STORE1 and STORE2. */
1685
1686 bool
1687 refs_output_dependent_p (tree store1, tree store2)
1688 {
1689 ao_ref r1, r2;
1690 ao_ref_init (&r1, store1);
1691 ao_ref_init (&r2, store2);
1692 return refs_may_alias_p_1 (&r1, &r2, false);
1693 }
1694
 1695 /* If the call CALL may use the memory reference REF, return true;
 1696 otherwise return false. */
1697
1698 static bool
1699 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
1700 {
1701 tree base, callee;
1702 unsigned i;
1703 int flags = gimple_call_flags (call);
1704
1705 /* Const functions without a static chain do not implicitly use memory. */
1706 if (!gimple_call_chain (call)
1707 && (flags & (ECF_CONST|ECF_NOVOPS)))
1708 goto process_args;
1709
1710 base = ao_ref_base (ref);
1711 if (!base)
1712 return true;
1713
 1714 /* A call that is not free of side-effects might involve volatile
1715 accesses and thus conflicts with all other volatile accesses. */
1716 if (ref->volatile_p)
1717 return true;
1718
1719 /* If the reference is based on a decl that is not aliased the call
1720 cannot possibly use it. */
1721 if (DECL_P (base)
1722 && !may_be_aliased (base)
1723 /* But local statics can be used through recursion. */
1724 && !is_global_var (base))
1725 goto process_args;
1726
1727 callee = gimple_call_fndecl (call);
1728
1729 /* Handle those builtin functions explicitly that do not act as
1730 escape points. See tree-ssa-structalias.c:find_func_aliases
1731 for the list of builtins we might need to handle here. */
1732 if (callee != NULL_TREE
1733 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1734 switch (DECL_FUNCTION_CODE (callee))
1735 {
1736 /* All the following functions read memory pointed to by
 1737 their second argument. strcat/strncat additionally
 1738 read memory pointed to by the first argument. */
1739 case BUILT_IN_STRCAT:
1740 case BUILT_IN_STRNCAT:
1741 {
1742 ao_ref dref;
1743 ao_ref_init_from_ptr_and_size (&dref,
1744 gimple_call_arg (call, 0),
1745 NULL_TREE);
1746 if (refs_may_alias_p_1 (&dref, ref, false))
1747 return true;
1748 }
1749 /* FALLTHRU */
1750 case BUILT_IN_STRCPY:
1751 case BUILT_IN_STRNCPY:
1752 case BUILT_IN_MEMCPY:
1753 case BUILT_IN_MEMMOVE:
1754 case BUILT_IN_MEMPCPY:
1755 case BUILT_IN_STPCPY:
1756 case BUILT_IN_STPNCPY:
1757 case BUILT_IN_TM_MEMCPY:
1758 case BUILT_IN_TM_MEMMOVE:
1759 {
1760 ao_ref dref;
1761 tree size = NULL_TREE;
1762 if (gimple_call_num_args (call) == 3)
1763 size = gimple_call_arg (call, 2);
1764 ao_ref_init_from_ptr_and_size (&dref,
1765 gimple_call_arg (call, 1),
1766 size);
1767 return refs_may_alias_p_1 (&dref, ref, false);
1768 }
1769 case BUILT_IN_STRCAT_CHK:
1770 case BUILT_IN_STRNCAT_CHK:
1771 {
1772 ao_ref dref;
1773 ao_ref_init_from_ptr_and_size (&dref,
1774 gimple_call_arg (call, 0),
1775 NULL_TREE);
1776 if (refs_may_alias_p_1 (&dref, ref, false))
1777 return true;
1778 }
1779 /* FALLTHRU */
1780 case BUILT_IN_STRCPY_CHK:
1781 case BUILT_IN_STRNCPY_CHK:
1782 case BUILT_IN_MEMCPY_CHK:
1783 case BUILT_IN_MEMMOVE_CHK:
1784 case BUILT_IN_MEMPCPY_CHK:
1785 case BUILT_IN_STPCPY_CHK:
1786 case BUILT_IN_STPNCPY_CHK:
1787 {
1788 ao_ref dref;
1789 tree size = NULL_TREE;
1790 if (gimple_call_num_args (call) == 4)
1791 size = gimple_call_arg (call, 2);
1792 ao_ref_init_from_ptr_and_size (&dref,
1793 gimple_call_arg (call, 1),
1794 size);
1795 return refs_may_alias_p_1 (&dref, ref, false);
1796 }
1797 case BUILT_IN_BCOPY:
1798 {
1799 ao_ref dref;
1800 tree size = gimple_call_arg (call, 2);
1801 ao_ref_init_from_ptr_and_size (&dref,
1802 gimple_call_arg (call, 0),
1803 size);
1804 return refs_may_alias_p_1 (&dref, ref, false);
1805 }
1806
1807 /* The following functions read memory pointed to by their
1808 first argument. */
1809 CASE_BUILT_IN_TM_LOAD (1):
1810 CASE_BUILT_IN_TM_LOAD (2):
1811 CASE_BUILT_IN_TM_LOAD (4):
1812 CASE_BUILT_IN_TM_LOAD (8):
1813 CASE_BUILT_IN_TM_LOAD (FLOAT):
1814 CASE_BUILT_IN_TM_LOAD (DOUBLE):
1815 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
1816 CASE_BUILT_IN_TM_LOAD (M64):
1817 CASE_BUILT_IN_TM_LOAD (M128):
1818 CASE_BUILT_IN_TM_LOAD (M256):
1819 case BUILT_IN_TM_LOG:
1820 case BUILT_IN_TM_LOG_1:
1821 case BUILT_IN_TM_LOG_2:
1822 case BUILT_IN_TM_LOG_4:
1823 case BUILT_IN_TM_LOG_8:
1824 case BUILT_IN_TM_LOG_FLOAT:
1825 case BUILT_IN_TM_LOG_DOUBLE:
1826 case BUILT_IN_TM_LOG_LDOUBLE:
1827 case BUILT_IN_TM_LOG_M64:
1828 case BUILT_IN_TM_LOG_M128:
1829 case BUILT_IN_TM_LOG_M256:
1830 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
1831
1832 /* These read memory pointed to by the first argument. */
1833 case BUILT_IN_STRDUP:
1834 case BUILT_IN_STRNDUP:
1835 case BUILT_IN_REALLOC:
1836 {
1837 ao_ref dref;
1838 tree size = NULL_TREE;
1839 if (gimple_call_num_args (call) == 2)
1840 size = gimple_call_arg (call, 1);
1841 ao_ref_init_from_ptr_and_size (&dref,
1842 gimple_call_arg (call, 0),
1843 size);
1844 return refs_may_alias_p_1 (&dref, ref, false);
1845 }
1846 /* These read memory pointed to by the first argument. */
1847 case BUILT_IN_INDEX:
1848 case BUILT_IN_STRCHR:
1849 case BUILT_IN_STRRCHR:
1850 {
1851 ao_ref dref;
1852 ao_ref_init_from_ptr_and_size (&dref,
1853 gimple_call_arg (call, 0),
1854 NULL_TREE);
1855 return refs_may_alias_p_1 (&dref, ref, false);
1856 }
1857 /* These read memory pointed to by the first argument with size
1858 in the third argument. */
1859 case BUILT_IN_MEMCHR:
1860 {
1861 ao_ref dref;
1862 ao_ref_init_from_ptr_and_size (&dref,
1863 gimple_call_arg (call, 0),
1864 gimple_call_arg (call, 2));
1865 return refs_may_alias_p_1 (&dref, ref, false);
1866 }
1867 /* These read memory pointed to by the first and second arguments. */
1868 case BUILT_IN_STRSTR:
1869 case BUILT_IN_STRPBRK:
1870 {
1871 ao_ref dref;
1872 ao_ref_init_from_ptr_and_size (&dref,
1873 gimple_call_arg (call, 0),
1874 NULL_TREE);
1875 if (refs_may_alias_p_1 (&dref, ref, false))
1876 return true;
1877 ao_ref_init_from_ptr_and_size (&dref,
1878 gimple_call_arg (call, 1),
1879 NULL_TREE);
1880 return refs_may_alias_p_1 (&dref, ref, false);
1881 }
1882
1883 /* The following builtins do not read from memory. */
1884 case BUILT_IN_FREE:
1885 case BUILT_IN_MALLOC:
1886 case BUILT_IN_POSIX_MEMALIGN:
1887 case BUILT_IN_ALIGNED_ALLOC:
1888 case BUILT_IN_CALLOC:
1889 CASE_BUILT_IN_ALLOCA:
1890 case BUILT_IN_STACK_SAVE:
1891 case BUILT_IN_STACK_RESTORE:
1892 case BUILT_IN_MEMSET:
1893 case BUILT_IN_TM_MEMSET:
1894 case BUILT_IN_MEMSET_CHK:
1895 case BUILT_IN_FREXP:
1896 case BUILT_IN_FREXPF:
1897 case BUILT_IN_FREXPL:
1898 case BUILT_IN_GAMMA_R:
1899 case BUILT_IN_GAMMAF_R:
1900 case BUILT_IN_GAMMAL_R:
1901 case BUILT_IN_LGAMMA_R:
1902 case BUILT_IN_LGAMMAF_R:
1903 case BUILT_IN_LGAMMAL_R:
1904 case BUILT_IN_MODF:
1905 case BUILT_IN_MODFF:
1906 case BUILT_IN_MODFL:
1907 case BUILT_IN_REMQUO:
1908 case BUILT_IN_REMQUOF:
1909 case BUILT_IN_REMQUOL:
1910 case BUILT_IN_SINCOS:
1911 case BUILT_IN_SINCOSF:
1912 case BUILT_IN_SINCOSL:
1913 case BUILT_IN_ASSUME_ALIGNED:
1914 case BUILT_IN_VA_END:
1915 return false;
1916 /* __sync_* builtins and some OpenMP builtins act as threading
1917 barriers. */
1918 #undef DEF_SYNC_BUILTIN
1919 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1920 #include "sync-builtins.def"
1921 #undef DEF_SYNC_BUILTIN
1922 case BUILT_IN_GOMP_ATOMIC_START:
1923 case BUILT_IN_GOMP_ATOMIC_END:
1924 case BUILT_IN_GOMP_BARRIER:
1925 case BUILT_IN_GOMP_BARRIER_CANCEL:
1926 case BUILT_IN_GOMP_TASKWAIT:
1927 case BUILT_IN_GOMP_TASKGROUP_END:
1928 case BUILT_IN_GOMP_CRITICAL_START:
1929 case BUILT_IN_GOMP_CRITICAL_END:
1930 case BUILT_IN_GOMP_CRITICAL_NAME_START:
1931 case BUILT_IN_GOMP_CRITICAL_NAME_END:
1932 case BUILT_IN_GOMP_LOOP_END:
1933 case BUILT_IN_GOMP_LOOP_END_CANCEL:
1934 case BUILT_IN_GOMP_ORDERED_START:
1935 case BUILT_IN_GOMP_ORDERED_END:
1936 case BUILT_IN_GOMP_SECTIONS_END:
1937 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
1938 case BUILT_IN_GOMP_SINGLE_COPY_START:
1939 case BUILT_IN_GOMP_SINGLE_COPY_END:
1940 return true;
1941
1942 default:
1943 /* Fallthru to general call handling. */;
1944 }
1945
1946 /* Check if base is a global static variable that is not read
1947 by the function. */
1948 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
1949 {
1950 struct cgraph_node *node = cgraph_node::get (callee);
1951 bitmap not_read;
1952
1953 /* FIXME: Callee can be an OMP builtin that does not have a call graph
1954 node yet. We should enforce that there are nodes for all decls in the
1955 IL and remove this check instead. */
1956 if (node
1957 && (not_read = ipa_reference_get_not_read_global (node))
1958 && bitmap_bit_p (not_read, ipa_reference_var_uid (base)))
1959 goto process_args;
1960 }
1961
1962 /* Check if the base variable is call-used. */
1963 if (DECL_P (base))
1964 {
1965 if (pt_solution_includes (gimple_call_use_set (call), base))
1966 return true;
1967 }
1968 else if ((TREE_CODE (base) == MEM_REF
1969 || TREE_CODE (base) == TARGET_MEM_REF)
1970 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1971 {
1972 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1973 if (!pi)
1974 return true;
1975
1976 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
1977 return true;
1978 }
1979 else
1980 return true;
1981
1982 /* Inspect call arguments for passed-by-value aliases. */
1983 process_args:
1984 for (i = 0; i < gimple_call_num_args (call); ++i)
1985 {
1986 tree op = gimple_call_arg (call, i);
1987 int flags = gimple_call_arg_flags (call, i);
1988
1989 if (flags & EAF_UNUSED)
1990 continue;
1991
1992 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1993 op = TREE_OPERAND (op, 0);
1994
1995 if (TREE_CODE (op) != SSA_NAME
1996 && !is_gimple_min_invariant (op))
1997 {
1998 ao_ref r;
1999 ao_ref_init (&r, op);
2000 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
2001 return true;
2002 }
2003 }
2004
2005 return false;
2006 }
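
/* Example (for exposition only): for the call

     memcpy (d, s, n);

   the BUILT_IN_MEMCPY case above builds an ao_ref covering the n bytes
   at *s and queries refs_may_alias_p_1, so the call is considered to
   use REF only if REF may overlap the source buffer. The destination
   side is modeled by the clobber oracle later in this file. */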
2007
2008 static bool
2009 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2010 {
2011 bool res;
2012 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2013 if (res)
2014 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2015 else
2016 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2017 return res;
2018 }
2019
2020
2021 /* If the statement STMT may use the memory reference REF return
2022 true, otherwise return false. */
2023
2024 bool
2025 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2026 {
2027 if (is_gimple_assign (stmt))
2028 {
2029 tree rhs;
2030
2031 /* Assignments that reference memory are always single assignments. */
2032 if (!gimple_assign_single_p (stmt))
2033 return false;
2034
2035 rhs = gimple_assign_rhs1 (stmt);
2036 if (is_gimple_reg (rhs)
2037 || is_gimple_min_invariant (rhs)
2038 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2039 return false;
2040
2041 return refs_may_alias_p (rhs, ref, tbaa_p);
2042 }
2043 else if (is_gimple_call (stmt))
2044 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2045 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2046 {
2047 tree retval = gimple_return_retval (return_stmt);
2048 if (retval
2049 && TREE_CODE (retval) != SSA_NAME
2050 && !is_gimple_min_invariant (retval)
2051 && refs_may_alias_p (retval, ref, tbaa_p))
2052 return true;
2053 /* If ref escapes the function then the return acts as a use. */
2054 tree base = ao_ref_base (ref);
2055 if (!base)
2056 ;
2057 else if (DECL_P (base))
2058 return is_global_var (base);
2059 else if (TREE_CODE (base) == MEM_REF
2060 || TREE_CODE (base) == TARGET_MEM_REF)
2061 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
2062 return false;
2063 }
2064
2065 return true;
2066 }
2067
2068 bool
2069 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
2070 {
2071 ao_ref r;
2072 ao_ref_init (&r, ref);
2073 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
2074 }
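
/* Example (for exposition only): for the assignment

     x = *p;

   ref_maybe_used_by_stmt_p boils down to refs_may_alias_p (*p, REF),
   whereas for

     return *q;

   the returned reference is checked and, in addition, a REF that
   escapes the function (for example one based on a global) is treated
   as used by the return itself. */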
2075
2076 /* If the call in statement CALL may clobber the memory reference REF
2077 return true, otherwise return false. */
2078
2079 bool
2080 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
2081 {
2082 tree base;
2083 tree callee;
2084
2085 /* If the call is pure or const it cannot clobber anything. */
2086 if (gimple_call_flags (call)
2087 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
2088 return false;
2089 if (gimple_call_internal_p (call))
2090 switch (gimple_call_internal_fn (call))
2091 {
2092 /* Treat these internal calls like ECF_PURE for aliasing;
2093 they don't write to any memory the program should care about.
2094 They have important other side-effects, and read memory,
2095 so they can't be ECF_NOVOPS. */
2096 case IFN_UBSAN_NULL:
2097 case IFN_UBSAN_BOUNDS:
2098 case IFN_UBSAN_VPTR:
2099 case IFN_UBSAN_OBJECT_SIZE:
2100 case IFN_UBSAN_PTR:
2101 case IFN_ASAN_CHECK:
2102 return false;
2103 default:
2104 break;
2105 }
2106
2107 base = ao_ref_base (ref);
2108 if (!base)
2109 return true;
2110
2111 if (TREE_CODE (base) == SSA_NAME
2112 || CONSTANT_CLASS_P (base))
2113 return false;
2114
2115 /* A call that is not free of side-effects might involve volatile
2116 accesses and thus conflicts with all other volatile accesses. */
2117 if (ref->volatile_p)
2118 return true;
2119
2120 /* If the reference is based on a decl that is not aliased the call
2121 cannot possibly clobber it. */
2122 if (DECL_P (base)
2123 && !may_be_aliased (base)
2124 /* But local non-readonly statics can be modified through recursion,
2125 or the call may implement a threading barrier which we must
2126 treat as a may-def. */
2127 && (TREE_READONLY (base)
2128 || !is_global_var (base)))
2129 return false;
2130
2131 /* If the reference is based on a pointer that points to memory
2132 that may not be written to then the call cannot possibly clobber it. */
2133 if ((TREE_CODE (base) == MEM_REF
2134 || TREE_CODE (base) == TARGET_MEM_REF)
2135 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
2136 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
2137 return false;
2138
2139 callee = gimple_call_fndecl (call);
2140
2141 /* Explicitly handle those builtin functions that do not act as
2142 escape points. See tree-ssa-structalias.c:find_func_aliases
2143 for the list of builtins we might need to handle here. */
2144 if (callee != NULL_TREE
2145 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2146 switch (DECL_FUNCTION_CODE (callee))
2147 {
2148 /* All the following functions clobber memory pointed to by
2149 their first argument. */
2150 case BUILT_IN_STRCPY:
2151 case BUILT_IN_STRNCPY:
2152 case BUILT_IN_MEMCPY:
2153 case BUILT_IN_MEMMOVE:
2154 case BUILT_IN_MEMPCPY:
2155 case BUILT_IN_STPCPY:
2156 case BUILT_IN_STPNCPY:
2157 case BUILT_IN_STRCAT:
2158 case BUILT_IN_STRNCAT:
2159 case BUILT_IN_MEMSET:
2160 case BUILT_IN_TM_MEMSET:
2161 CASE_BUILT_IN_TM_STORE (1):
2162 CASE_BUILT_IN_TM_STORE (2):
2163 CASE_BUILT_IN_TM_STORE (4):
2164 CASE_BUILT_IN_TM_STORE (8):
2165 CASE_BUILT_IN_TM_STORE (FLOAT):
2166 CASE_BUILT_IN_TM_STORE (DOUBLE):
2167 CASE_BUILT_IN_TM_STORE (LDOUBLE):
2168 CASE_BUILT_IN_TM_STORE (M64):
2169 CASE_BUILT_IN_TM_STORE (M128):
2170 CASE_BUILT_IN_TM_STORE (M256):
2171 case BUILT_IN_TM_MEMCPY:
2172 case BUILT_IN_TM_MEMMOVE:
2173 {
2174 ao_ref dref;
2175 tree size = NULL_TREE;
2176 /* Don't pass in a size for strncat: it writes up to n + 1
2177 bytes starting at dest + strlen (dest), i.e. up to
2178 strlen (dest) + n + 1 bytes in total, and strlen (dest)
2179 isn't known here. */
2180 if (gimple_call_num_args (call) == 3
2181 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
2182 size = gimple_call_arg (call, 2);
2183 ao_ref_init_from_ptr_and_size (&dref,
2184 gimple_call_arg (call, 0),
2185 size);
2186 return refs_may_alias_p_1 (&dref, ref, false);
2187 }
2188 case BUILT_IN_STRCPY_CHK:
2189 case BUILT_IN_STRNCPY_CHK:
2190 case BUILT_IN_MEMCPY_CHK:
2191 case BUILT_IN_MEMMOVE_CHK:
2192 case BUILT_IN_MEMPCPY_CHK:
2193 case BUILT_IN_STPCPY_CHK:
2194 case BUILT_IN_STPNCPY_CHK:
2195 case BUILT_IN_STRCAT_CHK:
2196 case BUILT_IN_STRNCAT_CHK:
2197 case BUILT_IN_MEMSET_CHK:
2198 {
2199 ao_ref dref;
2200 tree size = NULL_TREE;
2201 /* Don't pass in a size for __strncat_chk: it writes up to
2202 n + 1 bytes starting at dest + strlen (dest), i.e. up to
2203 strlen (dest) + n + 1 bytes in total, and strlen (dest)
2204 isn't known here. */
2205 if (gimple_call_num_args (call) == 4
2206 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
2207 size = gimple_call_arg (call, 2);
2208 ao_ref_init_from_ptr_and_size (&dref,
2209 gimple_call_arg (call, 0),
2210 size);
2211 return refs_may_alias_p_1 (&dref, ref, false);
2212 }
2213 case BUILT_IN_BCOPY:
2214 {
2215 ao_ref dref;
2216 tree size = gimple_call_arg (call, 2);
2217 ao_ref_init_from_ptr_and_size (&dref,
2218 gimple_call_arg (call, 1),
2219 size);
2220 return refs_may_alias_p_1 (&dref, ref, false);
2221 }
2222 /* Allocating memory does not have any side-effects apart from
2223 being the definition point for the pointer. */
2224 case BUILT_IN_MALLOC:
2225 case BUILT_IN_ALIGNED_ALLOC:
2226 case BUILT_IN_CALLOC:
2227 case BUILT_IN_STRDUP:
2228 case BUILT_IN_STRNDUP:
2229 /* Unix98 specifies that errno is set on allocation failure. */
2230 if (flag_errno_math
2231 && targetm.ref_may_alias_errno (ref))
2232 return true;
2233 return false;
2234 case BUILT_IN_STACK_SAVE:
2235 CASE_BUILT_IN_ALLOCA:
2236 case BUILT_IN_ASSUME_ALIGNED:
2237 return false;
2238 /* But posix_memalign stores a pointer into the memory pointed to
2239 by its first argument. */
2240 case BUILT_IN_POSIX_MEMALIGN:
2241 {
2242 tree ptrptr = gimple_call_arg (call, 0);
2243 ao_ref dref;
2244 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2245 TYPE_SIZE_UNIT (ptr_type_node));
2246 return (refs_may_alias_p_1 (&dref, ref, false)
2247 || (flag_errno_math
2248 && targetm.ref_may_alias_errno (ref)));
2249 }
2250 /* Freeing memory kills the pointed-to memory. More importantly,
2251 the call has to serve as a barrier for moving loads and stores
2252 across it. */
2253 case BUILT_IN_FREE:
2254 case BUILT_IN_VA_END:
2255 {
2256 tree ptr = gimple_call_arg (call, 0);
2257 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2258 }
2259 /* Realloc serves both as allocation point and deallocation point. */
2260 case BUILT_IN_REALLOC:
2261 {
2262 tree ptr = gimple_call_arg (call, 0);
2263 /* Unix98 specifies that errno is set on allocation failure. */
2264 return ((flag_errno_math
2265 && targetm.ref_may_alias_errno (ref))
2266 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2267 }
2268 case BUILT_IN_GAMMA_R:
2269 case BUILT_IN_GAMMAF_R:
2270 case BUILT_IN_GAMMAL_R:
2271 case BUILT_IN_LGAMMA_R:
2272 case BUILT_IN_LGAMMAF_R:
2273 case BUILT_IN_LGAMMAL_R:
2274 {
2275 tree out = gimple_call_arg (call, 1);
2276 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2277 return true;
2278 if (flag_errno_math)
2279 break;
2280 return false;
2281 }
2282 case BUILT_IN_FREXP:
2283 case BUILT_IN_FREXPF:
2284 case BUILT_IN_FREXPL:
2285 case BUILT_IN_MODF:
2286 case BUILT_IN_MODFF:
2287 case BUILT_IN_MODFL:
2288 {
2289 tree out = gimple_call_arg (call, 1);
2290 return ptr_deref_may_alias_ref_p_1 (out, ref);
2291 }
2292 case BUILT_IN_REMQUO:
2293 case BUILT_IN_REMQUOF:
2294 case BUILT_IN_REMQUOL:
2295 {
2296 tree out = gimple_call_arg (call, 2);
2297 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2298 return true;
2299 if (flag_errno_math)
2300 break;
2301 return false;
2302 }
2303 case BUILT_IN_SINCOS:
2304 case BUILT_IN_SINCOSF:
2305 case BUILT_IN_SINCOSL:
2306 {
2307 tree sin = gimple_call_arg (call, 1);
2308 tree cos = gimple_call_arg (call, 2);
2309 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2310 || ptr_deref_may_alias_ref_p_1 (cos, ref));
2311 }
2312 /* __sync_* builtins and some OpenMP builtins act as threading
2313 barriers. */
2314 #undef DEF_SYNC_BUILTIN
2315 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2316 #include "sync-builtins.def"
2317 #undef DEF_SYNC_BUILTIN
2318 case BUILT_IN_GOMP_ATOMIC_START:
2319 case BUILT_IN_GOMP_ATOMIC_END:
2320 case BUILT_IN_GOMP_BARRIER:
2321 case BUILT_IN_GOMP_BARRIER_CANCEL:
2322 case BUILT_IN_GOMP_TASKWAIT:
2323 case BUILT_IN_GOMP_TASKGROUP_END:
2324 case BUILT_IN_GOMP_CRITICAL_START:
2325 case BUILT_IN_GOMP_CRITICAL_END:
2326 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2327 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2328 case BUILT_IN_GOMP_LOOP_END:
2329 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2330 case BUILT_IN_GOMP_ORDERED_START:
2331 case BUILT_IN_GOMP_ORDERED_END:
2332 case BUILT_IN_GOMP_SECTIONS_END:
2333 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2334 case BUILT_IN_GOMP_SINGLE_COPY_START:
2335 case BUILT_IN_GOMP_SINGLE_COPY_END:
2336 return true;
2337 default:
2338 /* Fallthru to general call handling. */;
2339 }
2340
2341 /* Check if base is a global static variable that is not written
2342 by the function. */
2343 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2344 {
2345 struct cgraph_node *node = cgraph_node::get (callee);
2346 bitmap not_written;
2347
2348 if (node
2349 && (not_written = ipa_reference_get_not_written_global (node))
2350 && bitmap_bit_p (not_written, ipa_reference_var_uid (base)))
2351 return false;
2352 }
2353
2354 /* Check if the base variable is call-clobbered. */
2355 if (DECL_P (base))
2356 return pt_solution_includes (gimple_call_clobber_set (call), base);
2357 else if ((TREE_CODE (base) == MEM_REF
2358 || TREE_CODE (base) == TARGET_MEM_REF)
2359 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2360 {
2361 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2362 if (!pi)
2363 return true;
2364
2365 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2366 }
2367
2368 return true;
2369 }
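
/* Example (for exposition only): the call

     memset (d, 0, n);

   is handled by the BUILT_IN_MEMSET case above; it clobbers exactly
   the n bytes at *d, so a reference that provably does not overlap
   that range survives the call. An unknown external call instead
   falls through to the points-to based call-clobber check at the end
   of the function. */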
2370
2371 /* If the call in statement CALL may clobber the memory reference REF
2372 return true, otherwise return false. */
2373
2374 bool
2375 call_may_clobber_ref_p (gcall *call, tree ref)
2376 {
2377 bool res;
2378 ao_ref r;
2379 ao_ref_init (&r, ref);
2380 res = call_may_clobber_ref_p_1 (call, &r);
2381 if (res)
2382 ++alias_stats.call_may_clobber_ref_p_may_alias;
2383 else
2384 ++alias_stats.call_may_clobber_ref_p_no_alias;
2385 return res;
2386 }
2387
2388
2389 /* If the statement STMT may clobber the memory reference REF return true,
2390 otherwise return false. */
2391
2392 bool
2393 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
2394 {
2395 if (is_gimple_call (stmt))
2396 {
2397 tree lhs = gimple_call_lhs (stmt);
2398 if (lhs
2399 && TREE_CODE (lhs) != SSA_NAME)
2400 {
2401 ao_ref r;
2402 ao_ref_init (&r, lhs);
2403 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
2404 return true;
2405 }
2406
2407 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
2408 }
2409 else if (gimple_assign_single_p (stmt))
2410 {
2411 tree lhs = gimple_assign_lhs (stmt);
2412 if (TREE_CODE (lhs) != SSA_NAME)
2413 {
2414 ao_ref r;
2415 ao_ref_init (&r, lhs);
2416 return refs_may_alias_p_1 (ref, &r, tbaa_p);
2417 }
2418 }
2419 else if (gimple_code (stmt) == GIMPLE_ASM)
2420 return true;
2421
2422 return false;
2423 }
2424
2425 bool
2426 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
2427 {
2428 ao_ref r;
2429 ao_ref_init (&r, ref);
2430 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
2431 }
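
/* Example (for exposition only): a call with an aggregate lhs, as in

     s = foo ();

   may clobber REF in two ways, both checked above: through the store
   to s (refs_may_alias_p_1 against the lhs) and through side-effects
   of foo itself (call_may_clobber_ref_p_1). */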
2432
2433 /* Return true if the two stores described by the corresponding tuples
2434 <BASE1, OFFSET1, SIZE1, MAX_SIZE1> and <BASE2, OFFSET2, SIZE2, MAX_SIZE2>
2435 have the same size and store to the same address. */
2436
2437 static bool
2438 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
2439 poly_int64 max_size1,
2440 tree base2, poly_int64 offset2, poly_int64 size2,
2441 poly_int64 max_size2)
2442 {
2443 /* Offsets need to be 0. */
2444 if (maybe_ne (offset1, 0)
2445 || maybe_ne (offset2, 0))
2446 return false;
2447
2448 bool base1_obj_p = SSA_VAR_P (base1);
2449 bool base2_obj_p = SSA_VAR_P (base2);
2450
2451 /* Exactly one of the two bases needs to be an object. */
2452 if (base1_obj_p == base2_obj_p)
2453 return false;
2454 tree obj = base1_obj_p ? base1 : base2;
2455
2456 /* And exactly one needs to be a MEM_REF. */
2457 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
2458 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
2459 if (base1_memref_p == base2_memref_p)
2460 return false;
2461 tree memref = base1_memref_p ? base1 : base2;
2462
2463 /* Sizes need to be valid. */
2464 if (!known_size_p (max_size1)
2465 || !known_size_p (max_size2)
2466 || !known_size_p (size1)
2467 || !known_size_p (size2))
2468 return false;
2469
2470 /* Max_size needs to match size. */
2471 if (maybe_ne (max_size1, size1)
2472 || maybe_ne (max_size2, size2))
2473 return false;
2474
2475 /* Sizes need to match. */
2476 if (maybe_ne (size1, size2))
2477 return false;
2478
2479
2480 /* Check that memref is a store through a pointer with singleton points-to info. */
2481 if (!integer_zerop (TREE_OPERAND (memref, 1)))
2482 return false;
2483 tree ptr = TREE_OPERAND (memref, 0);
2484 if (TREE_CODE (ptr) != SSA_NAME)
2485 return false;
2486 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2487 unsigned int pt_uid;
2488 if (pi == NULL
2489 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
2490 return false;
2491
2492 /* Be conservative with non-call exceptions when the address might
2493 be NULL. */
2494 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
2495 return false;
2496
2497 /* Check that ptr points relative to obj. */
2498 unsigned int obj_uid = DECL_PT_UID (obj);
2499 if (obj_uid != pt_uid)
2500 return false;
2501
2502 /* Check that the object size is the same as the store size. That
2503 ensures that ptr points to the start of obj. */
2504 return (DECL_SIZE (obj)
2505 && poly_int_tree_p (DECL_SIZE (obj))
2506 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
2507 }
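
/* Example (for exposition only) of the pattern recognized above:

     int obj;
     int *p = &obj;    points-to set of p is the singleton { obj }
     ...
     obj = 1;          store with base obj, offset 0
     *p = 2;           store with base MEM_REF (p), offset 0

   Both are int-sized stores at offset zero, the store size equals the
   size of obj, and p can only point to obj, so the two stores must
   write exactly the same bytes. */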
2508
2509 /* If STMT kills the memory reference REF return true, otherwise
2510 return false. */
2511
2512 bool
2513 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
2514 {
2515 if (!ao_ref_base (ref))
2516 return false;
2517
2518 if (gimple_has_lhs (stmt)
2519 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2520 /* The assignment is not necessarily carried out if it can throw
2521 and we can catch it in the current function where we could inspect
2522 the previous value.
2523 ??? We only need to care about the RHS throwing. For aggregate
2524 assignments or similar calls and non-call exceptions the LHS
2525 might throw as well. */
2526 && !stmt_can_throw_internal (cfun, stmt))
2527 {
2528 tree lhs = gimple_get_lhs (stmt);
2529 /* If LHS is literally a base of the access we are done. */
2530 if (ref->ref)
2531 {
2532 tree base = ref->ref;
2533 tree innermost_dropped_array_ref = NULL_TREE;
2534 if (handled_component_p (base))
2535 {
2536 tree saved_lhs0 = NULL_TREE;
2537 if (handled_component_p (lhs))
2538 {
2539 saved_lhs0 = TREE_OPERAND (lhs, 0);
2540 TREE_OPERAND (lhs, 0) = integer_zero_node;
2541 }
2542 do
2543 {
2544 /* Just compare the outermost handled component; if
2545 they are equal we have found a possible common
2546 base. */
2547 tree saved_base0 = TREE_OPERAND (base, 0);
2548 TREE_OPERAND (base, 0) = integer_zero_node;
2549 bool res = operand_equal_p (lhs, base, 0);
2550 TREE_OPERAND (base, 0) = saved_base0;
2551 if (res)
2552 break;
2553 /* Remember if we drop an array-ref so that we can
2554 double-check that it is not at the end of a struct. */
2555 if (TREE_CODE (base) == ARRAY_REF
2556 || TREE_CODE (base) == ARRAY_RANGE_REF)
2557 innermost_dropped_array_ref = base;
2558 /* Otherwise drop handled components of the access. */
2559 base = saved_base0;
2560 }
2561 while (handled_component_p (base));
2562 if (saved_lhs0)
2563 TREE_OPERAND (lhs, 0) = saved_lhs0;
2564 }
2565 /* Finally check if the lhs has the same address and size as the
2566 base candidate of the access. Watch out if we have dropped
2567 an array-ref that was at struct end; this means ref->ref may
2568 be outside of the TYPE_SIZE of its base. */
2569 if ((! innermost_dropped_array_ref
2570 || ! array_at_struct_end_p (innermost_dropped_array_ref))
2571 && (lhs == base
2572 || (((TYPE_SIZE (TREE_TYPE (lhs))
2573 == TYPE_SIZE (TREE_TYPE (base)))
2574 || (TYPE_SIZE (TREE_TYPE (lhs))
2575 && TYPE_SIZE (TREE_TYPE (base))
2576 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
2577 TYPE_SIZE (TREE_TYPE (base)),
2578 0)))
2579 && operand_equal_p (lhs, base,
2580 OEP_ADDRESS_OF
2581 | OEP_MATCH_SIDE_EFFECTS))))
2582 return true;
2583 }
2584
2585 /* Now look for non-literal equal bases, restricting ourselves
2586 to constant offsets and sizes. */
2587 /* For a must-alias check we need to be able to constrain
2588 the access properly. */
2589 if (!ref->max_size_known_p ())
2590 return false;
2591 poly_int64 size, offset, max_size, ref_offset = ref->offset;
2592 bool reverse;
2593 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
2594 &reverse);
2595 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2596 so base == ref->base does not always hold. */
2597 if (base != ref->base)
2598 {
2599 /* Try using points-to info. */
2600 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
2601 ref->offset, ref->size, ref->max_size))
2602 return true;
2603
2604 /* If both base and ref->base are MEM_REFs, only compare the
2605 first operands, and if the second operands aren't equal constants,
2606 try to add the offsets into offset and ref_offset. */
2607 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2608 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2609 {
2610 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2611 TREE_OPERAND (ref->base, 1)))
2612 {
2613 poly_offset_int off1 = mem_ref_offset (base);
2614 off1 <<= LOG2_BITS_PER_UNIT;
2615 off1 += offset;
2616 poly_offset_int off2 = mem_ref_offset (ref->base);
2617 off2 <<= LOG2_BITS_PER_UNIT;
2618 off2 += ref_offset;
2619 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
2620 size = -1;
2621 }
2622 }
2623 else
2624 size = -1;
2625 }
2626 /* For a must-alias check we need to be able to constrain
2627 the access properly. */
2628 if (known_eq (size, max_size)
2629 && known_subrange_p (ref_offset, ref->max_size, offset, size))
2630 return true;
2631 }
2632
2633 if (is_gimple_call (stmt))
2634 {
2635 tree callee = gimple_call_fndecl (stmt);
2636 if (callee != NULL_TREE
2637 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2638 switch (DECL_FUNCTION_CODE (callee))
2639 {
2640 case BUILT_IN_FREE:
2641 {
2642 tree ptr = gimple_call_arg (stmt, 0);
2643 tree base = ao_ref_base (ref);
2644 if (base && TREE_CODE (base) == MEM_REF
2645 && TREE_OPERAND (base, 0) == ptr)
2646 return true;
2647 break;
2648 }
2649
2650 case BUILT_IN_MEMCPY:
2651 case BUILT_IN_MEMPCPY:
2652 case BUILT_IN_MEMMOVE:
2653 case BUILT_IN_MEMSET:
2654 case BUILT_IN_MEMCPY_CHK:
2655 case BUILT_IN_MEMPCPY_CHK:
2656 case BUILT_IN_MEMMOVE_CHK:
2657 case BUILT_IN_MEMSET_CHK:
2658 case BUILT_IN_STRNCPY:
2659 case BUILT_IN_STPNCPY:
2660 {
2661 /* For a must-alias check we need to be able to constrain
2662 the access properly. */
2663 if (!ref->max_size_known_p ())
2664 return false;
2665 tree dest = gimple_call_arg (stmt, 0);
2666 tree len = gimple_call_arg (stmt, 2);
2667 if (!poly_int_tree_p (len))
2668 return false;
2669 tree rbase = ref->base;
2670 poly_offset_int roffset = ref->offset;
2671 ao_ref dref;
2672 ao_ref_init_from_ptr_and_size (&dref, dest, len);
2673 tree base = ao_ref_base (&dref);
2674 poly_offset_int offset = dref.offset;
2675 if (!base || !known_size_p (dref.size))
2676 return false;
2677 if (TREE_CODE (base) == MEM_REF)
2678 {
2679 if (TREE_CODE (rbase) != MEM_REF)
2680 return false;
2681 /* Compare pointers. */
2682 offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
2683 roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
2684 base = TREE_OPERAND (base, 0);
2685 rbase = TREE_OPERAND (rbase, 0);
2686 }
2687 if (base == rbase
2688 && known_subrange_p (roffset, ref->max_size, offset,
2689 wi::to_poly_offset (len)
2690 << LOG2_BITS_PER_UNIT))
2691 return true;
2692 break;
2693 }
2694
2695 case BUILT_IN_VA_END:
2696 {
2697 tree ptr = gimple_call_arg (stmt, 0);
2698 if (TREE_CODE (ptr) == ADDR_EXPR)
2699 {
2700 tree base = ao_ref_base (ref);
2701 if (TREE_OPERAND (ptr, 0) == base)
2702 return true;
2703 }
2704 break;
2705 }
2706
2707 default:;
2708 }
2709 }
2710 return false;
2711 }
2712
2713 bool
2714 stmt_kills_ref_p (gimple *stmt, tree ref)
2715 {
2716 ao_ref r;
2717 ao_ref_init (&r, ref);
2718 return stmt_kills_ref_p (stmt, &r);
2719 }
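
/* Example (for exposition only): given

     struct S s;
     ...
     memset (&s, 0, sizeof (struct S));

   stmt_kills_ref_p returns true for a reference to s.f because the
   written range covers the whole access; dead store elimination can
   use this to remove an earlier store to s.f. */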
2720
2721
2722 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2723 TARGET or a statement clobbering the memory reference REF, in which
2724 case false is returned. The walk starts with VUSE, one argument of PHI. */
2725
2726 static bool
2727 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
2728 ao_ref *ref, tree vuse, unsigned int &limit, bitmap *visited,
2729 bool abort_on_visited,
2730 void *(*translate)(ao_ref *, tree, void *, bool *),
2731 void *data)
2732 {
2733 basic_block bb = gimple_bb (phi);
2734
2735 if (!*visited)
2736 *visited = BITMAP_ALLOC (NULL);
2737
2738 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2739
2740 /* Walk until we hit the target. */
2741 while (vuse != target)
2742 {
2743 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2744 /* If we are searching for the target VUSE by walking up to
2745 TARGET_BB dominating the original PHI we are finished once
2746 we reach a default def or a definition in a block dominating
2747 that block. Update TARGET and return. */
2748 if (!target
2749 && (gimple_nop_p (def_stmt)
2750 || dominated_by_p (CDI_DOMINATORS,
2751 target_bb, gimple_bb (def_stmt))))
2752 {
2753 target = vuse;
2754 return true;
2755 }
2756
2757 /* Recurse for PHI nodes. */
2758 if (gimple_code (def_stmt) == GIMPLE_PHI)
2759 {
2760 /* An already visited PHI node ends the walk successfully. */
2761 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2762 return !abort_on_visited;
2763 vuse = get_continuation_for_phi (def_stmt, ref, limit,
2764 visited, abort_on_visited,
2765 translate, data);
2766 if (!vuse)
2767 return false;
2768 continue;
2769 }
2770 else if (gimple_nop_p (def_stmt))
2771 return false;
2772 else
2773 {
2774 /* A clobbering statement or the end of the IL ends the walk with failure. */
2775 if ((int)limit <= 0)
2776 return false;
2777 --limit;
2778 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2779 {
2780 bool disambiguate_only = true;
2781 if (translate
2782 && (*translate) (ref, vuse, data, &disambiguate_only) == NULL)
2783 ;
2784 else
2785 return false;
2786 }
2787 }
2788 /* If we reach a new basic-block, see if we already skipped it
2789 in a previous walk that ended successfully. */
2790 if (gimple_bb (def_stmt) != bb)
2791 {
2792 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2793 return !abort_on_visited;
2794 bb = gimple_bb (def_stmt);
2795 }
2796 vuse = gimple_vuse (def_stmt);
2797 }
2798 return true;
2799 }
2800
2801
2802 /* Starting from a PHI node for the virtual operand of the memory reference
2803 REF, find a continuation virtual operand that allows the walk over the
2804 statements dominating PHI to continue, skipping only statements that
2805 cannot possibly clobber REF. Decrements LIMIT for each alias
2806 disambiguation done and aborts the walk, returning NULL_TREE, when
2807 LIMIT reaches zero. Returns NULL_TREE if no suitable virtual operand
2808 can be found. */
2808
2809 tree
2810 get_continuation_for_phi (gimple *phi, ao_ref *ref,
2811 unsigned int &limit, bitmap *visited,
2812 bool abort_on_visited,
2813 void *(*translate)(ao_ref *, tree, void *, bool *),
2814 void *data)
2815 {
2816 unsigned nargs = gimple_phi_num_args (phi);
2817
2818 /* We can simply look through a single-argument PHI. */
2819 if (nargs == 1)
2820 return PHI_ARG_DEF (phi, 0);
2821
2822 /* For two or more arguments try to pairwise skip non-aliasing code
2823 until we hit the phi argument definition that dominates the others. */
2824 basic_block phi_bb = gimple_bb (phi);
2825 tree arg0, arg1;
2826 unsigned i;
2827
2828 /* Find a candidate for the virtual operand whose definition
2829 dominates those of all others. */
2830 /* First look if any of the args themselves satisfy this. */
2831 for (i = 0; i < nargs; ++i)
2832 {
2833 arg0 = PHI_ARG_DEF (phi, i);
2834 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
2835 break;
2836 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
2837 if (def_bb != phi_bb
2838 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
2839 break;
2840 arg0 = NULL_TREE;
2841 }
2842 /* If not, see if we can reach such a candidate by walking defs
2843 until we hit the immediate dominator. maybe_skip_until will
2844 do that for us. */
2845 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
2846
2847 /* Then check against the (to be) found candidate. */
2848 for (i = 0; i < nargs; ++i)
2849 {
2850 arg1 = PHI_ARG_DEF (phi, i);
2851 if (arg1 == arg0)
2852 ;
2853 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, limit, visited,
2854 abort_on_visited,
2855 /* Do not translate when walking over
2856 backedges. */
2857 dominated_by_p
2858 (CDI_DOMINATORS,
2859 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
2860 phi_bb)
2861 ? NULL : translate, data))
2862 return NULL_TREE;
2863 }
2864
2865 return arg0;
2866 }
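
/* Example (for exposition only): for a virtual PHI merging two paths,

     # .MEM_3 = PHI <.MEM_1(4), .MEM_2(5)>

   if no statement between the common dominator and the PHI may
   clobber REF (or TRANSLATE disambiguates every such statement), the
   result is the virtual operand whose definition dominates the
   definitions of all other arguments, and the walk continues from
   there; otherwise NULL_TREE is returned. */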
2867
2868 /* Based on the memory reference REF and its virtual use VUSE, call
2869 WALKER for each virtual use that is equivalent to VUSE, including VUSE
2870 itself. That is, for each virtual use whose defining statement
2871 does not clobber REF.
2872
2873 WALKER is called with REF, the current virtual use and DATA. If
2874 WALKER returns non-NULL the walk stops and its result is returned.
2875 At the end of a non-successful walk NULL is returned.
2876
2877 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
2878 use whose definition is a statement that may clobber REF, and DATA.
2879 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2880 If TRANSLATE returns non-NULL the walk stops and its result is returned.
2881 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
2882 to adjust REF and *DATA to make that valid.
2883
2884 VALUEIZE if non-NULL is called with the next VUSE that is considered,
2885 and its return value is substituted for it. This can be used to
2886 implement optimistic value-numbering for example. Note that the
2887 VUSE argument is assumed to be valueized already.
2888
2889 LIMIT specifies the number of alias queries we are allowed to do;
2890 the walk stops when it reaches zero and NULL is returned. LIMIT
2891 is decremented by the number of alias queries (plus adjustments
2892 done by the callbacks) upon return.
2893
2894 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
2895
2896 void *
2897 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
2898 void *(*walker)(ao_ref *, tree, void *),
2899 void *(*translate)(ao_ref *, tree, void *, bool *),
2900 tree (*valueize)(tree),
2901 unsigned &limit, void *data)
2902 {
2903 bitmap visited = NULL;
2904 void *res;
2905 bool translated = false;
2906
2907 timevar_push (TV_ALIAS_STMT_WALK);
2908
2909 do
2910 {
2911 gimple *def_stmt;
2912
2913 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2914 res = (*walker) (ref, vuse, data);
2915 /* Abort walk. */
2916 if (res == (void *)-1)
2917 {
2918 res = NULL;
2919 break;
2920 }
2921 /* Lookup succeeded. */
2922 else if (res != NULL)
2923 break;
2924
2925 if (valueize)
2926 {
2927 vuse = valueize (vuse);
2928 if (!vuse)
2929 {
2930 res = NULL;
2931 break;
2932 }
2933 }
2934 def_stmt = SSA_NAME_DEF_STMT (vuse);
2935 if (gimple_nop_p (def_stmt))
2936 break;
2937 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2938 vuse = get_continuation_for_phi (def_stmt, ref, limit,
2939 &visited, translated, translate, data);
2940 else
2941 {
2942 if ((int)limit <= 0)
2943 {
2944 res = NULL;
2945 break;
2946 }
2947 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2948 {
2949 if (!translate)
2950 break;
2951 bool disambiguate_only = false;
2952 res = (*translate) (ref, vuse, data, &disambiguate_only);
2953 /* Failed lookup and translation. */
2954 if (res == (void *)-1)
2955 {
2956 res = NULL;
2957 break;
2958 }
2959 /* Lookup succeeded. */
2960 else if (res != NULL)
2961 break;
2962 /* Translation succeeded, continue walking. */
2963 translated = translated || !disambiguate_only;
2964 }
2965 vuse = gimple_vuse (def_stmt);
2966 }
2967 }
2968 while (vuse);
2969
2970 if (visited)
2971 BITMAP_FREE (visited);
2972
2973 timevar_pop (TV_ALIAS_STMT_WALK);
2974
2975 return res;
2976 }
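
/* Usage sketch (added for exposition; the callback name and the
   budget of 1000 are made up for this example). A degenerate walker
   that stops immediately at the first virtual use it is offered,
   which is VUSE itself:

     static void *
     first_vuse_walker (ao_ref *ref ATTRIBUTE_UNUSED, tree vuse,
                        void *data ATTRIBUTE_UNUSED)
     {
       return vuse;     any non-NULL result stops the walk
     }

     ao_ref r;
     ao_ref_init (&r, ref_tree);
     unsigned limit = 1000;
     void *res = walk_non_aliased_vuses (&r, vuse, first_vuse_walker,
                                         NULL, NULL, limit, NULL);

   Returning (void *)-1 from the callback would instead abort the walk
   with an overall NULL result. */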
2977
2978
2979 /* Based on the memory reference REF call WALKER for each vdef whose
2980 defining statement may clobber REF, starting with VDEF. If REF
2981 is NULL_TREE, each defining statement is visited.
2982
2983 WALKER is called with REF, the current vdef and DATA. If WALKER
2984 returns true the walk is stopped, otherwise it continues.
2985
2986 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
2987 The pointer may be NULL, in which case this information is not tracked.
2988
2989 At PHI nodes walk_aliased_vdefs forks into one walk for each
2990 PHI argument (but only one walk continues on merge points); the
2991 walk is successful if any of the forked walks was successful.
2992
2993 The function returns the number of statements walked or -1 if
2994 LIMIT stmts were walked and the walk was aborted at this point.
2995 If LIMIT is zero the walk is not aborted. */
2996
2997 static int
2998 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
2999 bool (*walker)(ao_ref *, tree, void *), void *data,
3000 bitmap *visited, unsigned int cnt,
3001 bool *function_entry_reached, unsigned limit)
3002 {
3003 do
3004 {
3005 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
3006
3007 if (*visited
3008 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
3009 return cnt;
3010
3011 if (gimple_nop_p (def_stmt))
3012 {
3013 if (function_entry_reached)
3014 *function_entry_reached = true;
3015 return cnt;
3016 }
3017 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3018 {
3019 unsigned i;
3020 if (!*visited)
3021 *visited = BITMAP_ALLOC (NULL);
3022 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3023 {
3024 int res = walk_aliased_vdefs_1 (ref,
3025 gimple_phi_arg_def (def_stmt, i),
3026 walker, data, visited, cnt,
3027 function_entry_reached, limit);
3028 if (res == -1)
3029 return -1;
3030 cnt = res;
3031 }
3032 return cnt;
3033 }
3034
3035 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3036 cnt++;
3037 if (cnt == limit)
3038 return -1;
3039 if ((!ref
3040 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
3041 && (*walker) (ref, vdef, data))
3042 return cnt;
3043
3044 vdef = gimple_vuse (def_stmt);
3045 }
3046 while (1);
3047 }
3048
3049 int
3050 walk_aliased_vdefs (ao_ref *ref, tree vdef,
3051 bool (*walker)(ao_ref *, tree, void *), void *data,
3052 bitmap *visited,
3053 bool *function_entry_reached, unsigned int limit)
3054 {
3055 bitmap local_visited = NULL;
3056 int ret;
3057
3058 timevar_push (TV_ALIAS_STMT_WALK);
3059
3060 if (function_entry_reached)
3061 *function_entry_reached = false;
3062
3063 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
3064 visited ? visited : &local_visited, 0,
3065 function_entry_reached, limit);
3066 if (local_visited)
3067 BITMAP_FREE (local_visited);
3068
3069 timevar_pop (TV_ALIAS_STMT_WALK);
3070
3071 return ret;
3072 }
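
/* Usage sketch (added for exposition; record_clobberer and the limit
   of 100 are made up for this example). Visiting every definition
   that may clobber R, starting from the virtual use of USE_STMT:

     static bool
     record_clobberer (ao_ref *ref ATTRIBUTE_UNUSED, tree vdef,
                       void *data ATTRIBUTE_UNUSED)
     {
       gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
       ... record or inspect def_stmt ...
       return false;    false continues the walk, true stops it
     }

     int n = walk_aliased_vdefs (&r, gimple_vuse (use_stmt),
                                 record_clobberer, NULL, NULL,
                                 NULL, 100);

   A result of -1 means 100 statements were walked and the walk was
   aborted, so the set of visited definitions is incomplete. */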
3073