1 /* Alias analysis for trees.
2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "alias.h"
34 #include "fold-const.h"
35 #include "langhooks.h"
36 #include "dumpfile.h"
37 #include "tree-eh.h"
38 #include "tree-dfa.h"
39 #include "ipa-reference.h"
40 #include "varasm.h"
41
42 /* Broad overview of how alias analysis on gimple works:
43
44 Statements clobbering or using memory are linked through the
45 virtual operand factored use-def chain. The virtual operand
46 is unique per function, its symbol is accessible via gimple_vop (cfun).
47 Virtual operands are used for efficiently walking memory statements
48 in the gimple IL and are useful for things like value-numbering as
49 a generation count for memory references.
50
51 SSA_NAME pointers may have associated points-to information
52 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
53 points-to information is (re-)computed by the TODO_rebuild_alias
54 pass manager todo. Points-to information is also used for more
55 precise tracking of call-clobbered and call-used variables and
56 related disambiguations.
57
58 This file contains functions for disambiguating memory references,
59 the so-called alias-oracle, and tools for walking the gimple IL.
60
61 The main alias-oracle entry-points are
62
63 bool stmt_may_clobber_ref_p (gimple *, tree)
64
65 This function queries if a statement may invalidate (parts of)
66 the memory designated by the reference tree argument.
67
68 bool ref_maybe_used_by_stmt_p (gimple *, tree)
69
70 This function queries if a statement may need (parts of) the
71 memory designated by the reference tree argument.
72
73 There are variants of these functions that only handle the call
74 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
75 Note that these do not disambiguate against a possible call lhs.
76
77 bool refs_may_alias_p (tree, tree)
78
79 This function tries to disambiguate two reference trees.
80
81 bool ptr_deref_may_alias_global_p (tree)
82
83 This function queries if dereferencing a pointer variable may
84 alias global memory.
85
86 More low-level disambiguators are available and documented in
87 this file. Low-level disambiguators dealing with points-to
88 information are in tree-ssa-structalias.c. */
89
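/* Illustrative usage sketch (not part of this file; STMT, REF, REF1 and
   REF2 are placeholder variables). A pass that wants to know whether a
   statement interferes with the memory accessed by a reference tree
   would typically combine the entry points like so:

     bool independent
       = (!stmt_may_clobber_ref_p (stmt, ref)
          && !ref_maybe_used_by_stmt_p (stmt, ref));

   Two plain reference trees are compared with

     bool may_overlap = refs_may_alias_p (ref1, ref2, true);

   where passing false instead of true disables the TBAA-based
   disambiguations, as done by refs_anti_dependent_p and
   refs_output_dependent_p further down in this file. */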
90
91 /* Query statistics for the different low-level disambiguators.
92 A high-level query may trigger multiple of them. */
93
94 static struct {
95 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
96 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
97 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
98 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
99 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
100 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
101 unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
102 unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
103 } alias_stats;
104
105 void
106 dump_alias_stats (FILE *s)
107 {
108 fprintf (s, "\nAlias oracle query stats:\n");
109 fprintf (s, " refs_may_alias_p: "
110 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
111 HOST_WIDE_INT_PRINT_DEC" queries\n",
112 alias_stats.refs_may_alias_p_no_alias,
113 alias_stats.refs_may_alias_p_no_alias
114 + alias_stats.refs_may_alias_p_may_alias);
115 fprintf (s, " ref_maybe_used_by_call_p: "
116 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
117 HOST_WIDE_INT_PRINT_DEC" queries\n",
118 alias_stats.ref_maybe_used_by_call_p_no_alias,
119 alias_stats.ref_maybe_used_by_call_p_no_alias
120 + alias_stats.ref_maybe_used_by_call_p_may_alias);
121 fprintf (s, " call_may_clobber_ref_p: "
122 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
123 HOST_WIDE_INT_PRINT_DEC" queries\n",
124 alias_stats.call_may_clobber_ref_p_no_alias,
125 alias_stats.call_may_clobber_ref_p_no_alias
126 + alias_stats.call_may_clobber_ref_p_may_alias);
127 fprintf (s, " aliasing_component_ref_p: "
128 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
129 HOST_WIDE_INT_PRINT_DEC" queries\n",
130 alias_stats.aliasing_component_refs_p_no_alias,
131 alias_stats.aliasing_component_refs_p_no_alias
132 + alias_stats.aliasing_component_refs_p_may_alias);
133 dump_alias_stats_in_alias_c (s);
134 }
135
136
137 /* Return true if dereferencing PTR may alias a global variable. */
138
139 bool
140 ptr_deref_may_alias_global_p (tree ptr)
141 {
142 struct ptr_info_def *pi;
143
144 /* If we end up with a pointer constant here, it may point
145 to global memory. */
146 if (TREE_CODE (ptr) != SSA_NAME)
147 return true;
148
149 pi = SSA_NAME_PTR_INFO (ptr);
150
151 /* If we do not have points-to information for this variable,
152 we have to punt. */
153 if (!pi)
154 return true;
155
156 /* ??? This does not use TBAA to prune globals ptr may not access. */
157 return pt_solution_includes_global (&pi->pt);
158 }
159
160 /* Return true if dereferencing PTR may alias DECL.
161 The caller is responsible for applying TBAA to see if PTR
162 may access DECL at all. */
163
164 static bool
165 ptr_deref_may_alias_decl_p (tree ptr, tree decl)
166 {
167 struct ptr_info_def *pi;
168
169 /* Conversions are irrelevant for points-to information and
170 data-dependence analysis can feed us those. */
171 STRIP_NOPS (ptr);
172
173 /* Anything we do not explicitly handle aliases. */
174 if ((TREE_CODE (ptr) != SSA_NAME
175 && TREE_CODE (ptr) != ADDR_EXPR
176 && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
177 || !POINTER_TYPE_P (TREE_TYPE (ptr))
178 || (!VAR_P (decl)
179 && TREE_CODE (decl) != PARM_DECL
180 && TREE_CODE (decl) != RESULT_DECL))
181 return true;
182
183 /* Disregard pointer offsetting. */
184 if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
185 {
186 do
187 {
188 ptr = TREE_OPERAND (ptr, 0);
189 }
190 while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
191 return ptr_deref_may_alias_decl_p (ptr, decl);
192 }
193
194 /* ADDR_EXPR pointers either just offset another pointer or directly
195 specify the pointed-to set. */
196 if (TREE_CODE (ptr) == ADDR_EXPR)
197 {
198 tree base = get_base_address (TREE_OPERAND (ptr, 0));
199 if (base
200 && (TREE_CODE (base) == MEM_REF
201 || TREE_CODE (base) == TARGET_MEM_REF))
202 ptr = TREE_OPERAND (base, 0);
203 else if (base
204 && DECL_P (base))
205 return compare_base_decls (base, decl) != 0;
206 else if (base
207 && CONSTANT_CLASS_P (base))
208 return false;
209 else
210 return true;
211 }
212
213 /* Non-aliased variables cannot be pointed to. */
214 if (!may_be_aliased (decl))
215 return false;
216
217 /* If we do not have useful points-to information for this pointer
218 we cannot disambiguate anything else. */
219 pi = SSA_NAME_PTR_INFO (ptr);
220 if (!pi)
221 return true;
222
223 return pt_solution_includes (&pi->pt, decl);
224 }
225
226 /* Return true if dereferenced PTR1 and PTR2 may alias.
227 The caller is responsible for applying TBAA to see if accesses
228 through PTR1 and PTR2 may conflict at all. */
229
230 bool
231 ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
232 {
233 struct ptr_info_def *pi1, *pi2;
234
235 /* Conversions are irrelevant for points-to information and
236 data-dependence analysis can feed us those. */
237 STRIP_NOPS (ptr1);
238 STRIP_NOPS (ptr2);
239
240 /* Disregard pointer offsetting. */
241 if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
242 {
243 do
244 {
245 ptr1 = TREE_OPERAND (ptr1, 0);
246 }
247 while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
248 return ptr_derefs_may_alias_p (ptr1, ptr2);
249 }
250 if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
251 {
252 do
253 {
254 ptr2 = TREE_OPERAND (ptr2, 0);
255 }
256 while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
257 return ptr_derefs_may_alias_p (ptr1, ptr2);
258 }
259
260 /* ADDR_EXPR pointers either just offset another pointer or directly
261 specify the pointed-to set. */
262 if (TREE_CODE (ptr1) == ADDR_EXPR)
263 {
264 tree base = get_base_address (TREE_OPERAND (ptr1, 0));
265 if (base
266 && (TREE_CODE (base) == MEM_REF
267 || TREE_CODE (base) == TARGET_MEM_REF))
268 return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
269 else if (base
270 && DECL_P (base))
271 return ptr_deref_may_alias_decl_p (ptr2, base);
272 else
273 return true;
274 }
275 if (TREE_CODE (ptr2) == ADDR_EXPR)
276 {
277 tree base = get_base_address (TREE_OPERAND (ptr2, 0));
278 if (base
279 && (TREE_CODE (base) == MEM_REF
280 || TREE_CODE (base) == TARGET_MEM_REF))
281 return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
282 else if (base
283 && DECL_P (base))
284 return ptr_deref_may_alias_decl_p (ptr1, base);
285 else
286 return true;
287 }
288
289 /* From here we require SSA name pointers. Anything else aliases. */
290 if (TREE_CODE (ptr1) != SSA_NAME
291 || TREE_CODE (ptr2) != SSA_NAME
292 || !POINTER_TYPE_P (TREE_TYPE (ptr1))
293 || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
294 return true;
295
296 /* We may end up with two empty points-to solutions when both arguments
297 are the same pointer. In this case we still want to say they alias,
298 so shortcut that here. */
299 if (ptr1 == ptr2)
300 return true;
301
302 /* If we do not have useful points-to information for either pointer
303 we cannot disambiguate anything else. */
304 pi1 = SSA_NAME_PTR_INFO (ptr1);
305 pi2 = SSA_NAME_PTR_INFO (ptr2);
306 if (!pi1 || !pi2)
307 return true;
308
309 /* ??? This does not use TBAA to prune decls from the intersection
310 that not both pointers may access. */
311 return pt_solutions_intersect (&pi1->pt, &pi2->pt);
312 }
313
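/* Illustrative sketch (not part of this file). Because the functions
   above deliberately ignore TBAA, a caller that also has the reference
   trees available would typically combine both oracles, e.g.

     bool may_conflict
       = (ptr_derefs_may_alias_p (ptr1, ptr2)
          && alias_sets_conflict_p (get_alias_set (ref1),
                                    get_alias_set (ref2)));

   where ptr1/ptr2 are the dereferenced pointers and ref1/ref2 the
   corresponding reference trees (all placeholders here). */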
314 /* Return true if dereferencing PTR may alias *REF.
315 The caller is responsible for applying TBAA to see if PTR
316 may access *REF at all. */
317
318 static bool
319 ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
320 {
321 tree base = ao_ref_base (ref);
322
323 if (TREE_CODE (base) == MEM_REF
324 || TREE_CODE (base) == TARGET_MEM_REF)
325 return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
326 else if (DECL_P (base))
327 return ptr_deref_may_alias_decl_p (ptr, base);
328
329 return true;
330 }
331
332 /* Returns true if PTR1 and PTR2 compare unequal because of points-to. */
333
334 bool
335 ptrs_compare_unequal (tree ptr1, tree ptr2)
336 {
337 /* First resolve the pointers down to a SSA name pointer base or
338 a VAR_DECL, PARM_DECL or RESULT_DECL. This explicitly does
339 not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
340 or STRING_CSTs, which need points-to adjustments to track them
341 in the points-to sets. */
342 tree obj1 = NULL_TREE;
343 tree obj2 = NULL_TREE;
344 if (TREE_CODE (ptr1) == ADDR_EXPR)
345 {
346 tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
347 if (! tem)
348 return false;
349 if (VAR_P (tem)
350 || TREE_CODE (tem) == PARM_DECL
351 || TREE_CODE (tem) == RESULT_DECL)
352 obj1 = tem;
353 else if (TREE_CODE (tem) == MEM_REF)
354 ptr1 = TREE_OPERAND (tem, 0);
355 }
356 if (TREE_CODE (ptr2) == ADDR_EXPR)
357 {
358 tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
359 if (! tem)
360 return false;
361 if (VAR_P (tem)
362 || TREE_CODE (tem) == PARM_DECL
363 || TREE_CODE (tem) == RESULT_DECL)
364 obj2 = tem;
365 else if (TREE_CODE (tem) == MEM_REF)
366 ptr2 = TREE_OPERAND (tem, 0);
367 }
368
369 /* Canonicalize ptr vs. object. */
370 if (TREE_CODE (ptr1) == SSA_NAME && obj2)
371 {
372 std::swap (ptr1, ptr2);
373 std::swap (obj1, obj2);
374 }
375
376 if (obj1 && obj2)
377 /* Other code handles this correctly, no need to duplicate it here. */;
378 else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
379 {
380 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
381 /* We may not use restrict to optimize pointer comparisons.
382 See PR71062. So we have to assume that the restrict-pointed-to
383 object may in fact be obj1. */
384 if (!pi
385 || pi->pt.vars_contains_restrict
386 || pi->pt.vars_contains_interposable)
387 return false;
388 if (VAR_P (obj1)
389 && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
390 {
391 varpool_node *node = varpool_node::get (obj1);
392 /* If obj1 may bind to NULL give up (see below). */
393 if (! node
394 || ! node->nonzero_address ()
395 || ! decl_binds_to_current_def_p (obj1))
396 return false;
397 }
398 return !pt_solution_includes (&pi->pt, obj1);
399 }
400
401 /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
402 but those require pt.null to be conservatively correct. */
403
404 return false;
405 }
406
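/* Illustrative example (a_decl and p are placeholder trees). The
   function above is meant for folding pointer equality tests, e.g. by
   the match.pd patterns: given

     int a;
     int *p;

   where the points-to solution of p provably excludes a, a comparison
   like "&a == p" can be folded to false because
   ptrs_compare_unequal (build_fold_addr_expr (a_decl), p) returns
   true. */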
407 /* Returns whether a reference with base BASE may refer to global memory. */
408
409 static bool
410 ref_may_alias_global_p_1 (tree base)
411 {
412 if (DECL_P (base))
413 return is_global_var (base);
414 else if (TREE_CODE (base) == MEM_REF
415 || TREE_CODE (base) == TARGET_MEM_REF)
416 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
417 return true;
418 }
419
420 bool
421 ref_may_alias_global_p (ao_ref *ref)
422 {
423 tree base = ao_ref_base (ref);
424 return ref_may_alias_global_p_1 (base);
425 }
426
427 bool
428 ref_may_alias_global_p (tree ref)
429 {
430 tree base = get_base_address (ref);
431 return ref_may_alias_global_p_1 (base);
432 }
433
434 /* Return true if STMT may clobber global memory. */
435
436 bool
437 stmt_may_clobber_global_p (gimple *stmt)
438 {
439 tree lhs;
440
441 if (!gimple_vdef (stmt))
442 return false;
443
444 /* ??? We can ask the oracle whether an artificial pointer
445 dereference, using a pointer whose points-to information covers
446 all global memory (what about non-address-taken memory?), may be
447 clobbered by this call. As there is at the moment no convenient
448 way of doing that without generating garbage, do some manual
449 checking instead.
450 ??? We could make a NULL ao_ref argument to the various
451 predicates special, meaning any global memory. */
452
453 switch (gimple_code (stmt))
454 {
455 case GIMPLE_ASSIGN:
456 lhs = gimple_assign_lhs (stmt);
457 return (TREE_CODE (lhs) != SSA_NAME
458 && ref_may_alias_global_p (lhs));
459 case GIMPLE_CALL:
460 return true;
461 default:
462 return true;
463 }
464 }
465
466
467 /* Dump alias information on FILE. */
468
469 void
470 dump_alias_info (FILE *file)
471 {
472 unsigned i;
473 tree ptr;
474 const char *funcname
475 = lang_hooks.decl_printable_name (current_function_decl, 2);
476 tree var;
477
478 fprintf (file, "\n\nAlias information for %s\n\n", funcname);
479
480 fprintf (file, "Aliased symbols\n\n");
481
482 FOR_EACH_LOCAL_DECL (cfun, i, var)
483 {
484 if (may_be_aliased (var))
485 dump_variable (file, var);
486 }
487
488 fprintf (file, "\nCall clobber information\n");
489
490 fprintf (file, "\nESCAPED");
491 dump_points_to_solution (file, &cfun->gimple_df->escaped);
492
493 fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
494
495 FOR_EACH_SSA_NAME (i, ptr, cfun)
496 {
497 struct ptr_info_def *pi;
498
499 if (!POINTER_TYPE_P (TREE_TYPE (ptr))
500 || SSA_NAME_IN_FREE_LIST (ptr))
501 continue;
502
503 pi = SSA_NAME_PTR_INFO (ptr);
504 if (pi)
505 dump_points_to_info_for (file, ptr);
506 }
507
508 fprintf (file, "\n");
509 }
510
511
512 /* Dump alias information on stderr. */
513
514 DEBUG_FUNCTION void
515 debug_alias_info (void)
516 {
517 dump_alias_info (stderr);
518 }
519
520
521 /* Dump the points-to set *PT into FILE. */
522
523 void
524 dump_points_to_solution (FILE *file, struct pt_solution *pt)
525 {
526 if (pt->anything)
527 fprintf (file, ", points-to anything");
528
529 if (pt->nonlocal)
530 fprintf (file, ", points-to non-local");
531
532 if (pt->escaped)
533 fprintf (file, ", points-to escaped");
534
535 if (pt->ipa_escaped)
536 fprintf (file, ", points-to unit escaped");
537
538 if (pt->null)
539 fprintf (file, ", points-to NULL");
540
541 if (pt->vars)
542 {
543 fprintf (file, ", points-to vars: ");
544 dump_decl_set (file, pt->vars);
545 if (pt->vars_contains_nonlocal
546 || pt->vars_contains_escaped
547 || pt->vars_contains_escaped_heap
548 || pt->vars_contains_restrict)
549 {
550 const char *comma = "";
551 fprintf (file, " (");
552 if (pt->vars_contains_nonlocal)
553 {
554 fprintf (file, "nonlocal");
555 comma = ", ";
556 }
557 if (pt->vars_contains_escaped)
558 {
559 fprintf (file, "%sescaped", comma);
560 comma = ", ";
561 }
562 if (pt->vars_contains_escaped_heap)
563 {
564 fprintf (file, "%sescaped heap", comma);
565 comma = ", ";
566 }
567 if (pt->vars_contains_restrict)
568 {
569 fprintf (file, "%srestrict", comma);
570 comma = ", ";
571 }
572 if (pt->vars_contains_interposable)
573 fprintf (file, "%sinterposable", comma);
574 fprintf (file, ")");
575 }
576 }
577 }
578
579
580 /* Unified dump function for pt_solution. */
581
582 DEBUG_FUNCTION void
583 debug (pt_solution &ref)
584 {
585 dump_points_to_solution (stderr, &ref);
586 }
587
588 DEBUG_FUNCTION void
589 debug (pt_solution *ptr)
590 {
591 if (ptr)
592 debug (*ptr);
593 else
594 fprintf (stderr, "<nil>\n");
595 }
596
597
598 /* Dump points-to information for SSA_NAME PTR into FILE. */
599
600 void
601 dump_points_to_info_for (FILE *file, tree ptr)
602 {
603 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
604
605 print_generic_expr (file, ptr, dump_flags);
606
607 if (pi)
608 dump_points_to_solution (file, &pi->pt);
609 else
610 fprintf (file, ", points-to anything");
611
612 fprintf (file, "\n");
613 }
614
615
616 /* Dump points-to information for VAR into stderr. */
617
618 DEBUG_FUNCTION void
619 debug_points_to_info_for (tree var)
620 {
621 dump_points_to_info_for (stderr, var);
622 }
623
624
625 /* Initializes the alias-oracle reference representation *R from REF. */
626
627 void
628 ao_ref_init (ao_ref *r, tree ref)
629 {
630 r->ref = ref;
631 r->base = NULL_TREE;
632 r->offset = 0;
633 r->size = -1;
634 r->max_size = -1;
635 r->ref_alias_set = -1;
636 r->base_alias_set = -1;
637 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
638 }
639
640 /* Returns the base object of the memory reference *REF. */
641
642 tree
643 ao_ref_base (ao_ref *ref)
644 {
645 bool reverse;
646
647 if (ref->base)
648 return ref->base;
649 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
650 &ref->max_size, &reverse);
651 return ref->base;
652 }
653
654 /* Returns the base object alias set of the memory reference *REF. */
655
656 alias_set_type
657 ao_ref_base_alias_set (ao_ref *ref)
658 {
659 tree base_ref;
660 if (ref->base_alias_set != -1)
661 return ref->base_alias_set;
662 if (!ref->ref)
663 return 0;
664 base_ref = ref->ref;
665 while (handled_component_p (base_ref))
666 base_ref = TREE_OPERAND (base_ref, 0);
667 ref->base_alias_set = get_alias_set (base_ref);
668 return ref->base_alias_set;
669 }
670
671 /* Returns the reference alias set of the memory reference *REF. */
672
673 alias_set_type
674 ao_ref_alias_set (ao_ref *ref)
675 {
676 if (ref->ref_alias_set != -1)
677 return ref->ref_alias_set;
678 ref->ref_alias_set = get_alias_set (ref->ref);
679 return ref->ref_alias_set;
680 }
681
682 /* Init an alias-oracle reference representation from a gimple pointer
683 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
684 size is assumed to be unknown. The access is assumed to be only
685 to or after the pointer target, not before it. */
686
687 void
688 ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
689 {
690 poly_int64 t, size_hwi, extra_offset = 0;
691 ref->ref = NULL_TREE;
692 if (TREE_CODE (ptr) == SSA_NAME)
693 {
694 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
695 if (gimple_assign_single_p (stmt)
696 && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
697 ptr = gimple_assign_rhs1 (stmt);
698 else if (is_gimple_assign (stmt)
699 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
700 && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
701 {
702 ptr = gimple_assign_rhs1 (stmt);
703 extra_offset *= BITS_PER_UNIT;
704 }
705 }
706
707 if (TREE_CODE (ptr) == ADDR_EXPR)
708 {
709 ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
710 if (ref->base)
711 ref->offset = BITS_PER_UNIT * t;
712 else
713 {
714 size = NULL_TREE;
715 ref->offset = 0;
716 ref->base = get_base_address (TREE_OPERAND (ptr, 0));
717 }
718 }
719 else
720 {
721 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
722 ref->base = build2 (MEM_REF, char_type_node,
723 ptr, null_pointer_node);
724 ref->offset = 0;
725 }
726 ref->offset += extra_offset;
727 if (size
728 && poly_int_tree_p (size, &size_hwi)
729 && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
730 ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
731 else
732 ref->max_size = ref->size = -1;
733 ref->ref_alias_set = 0;
734 ref->base_alias_set = 0;
735 ref->volatile_p = false;
736 }
737
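/* Illustrative sketch (CALL and REF are placeholders; not part of this
   file). This is the usual way of building a reference for a memory
   builtin argument, for example the source of a memcpy-like call whose
   size is the third argument:

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref,
                                    gimple_call_arg (call, 1),
                                    gimple_call_arg (call, 2));
     if (refs_may_alias_p_1 (&dref, ref, false))
       return true;

   The same pattern appears many times in ref_maybe_used_by_call_p_1
   further down in this file. */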
738 /* S1 and S2 are TYPE_SIZE or DECL_SIZE. Compare them:
739 Return -1 if S1 < S2
740 Return 1 if S1 > S2
741 Return 0 if equal or incomparable. */
742
743 static int
744 compare_sizes (tree s1, tree s2)
745 {
746 if (!s1 || !s2)
747 return 0;
748
749 poly_uint64 size1;
750 poly_uint64 size2;
751
752 if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
753 return 0;
754 if (known_lt (size1, size2))
755 return -1;
756 if (known_lt (size2, size1))
757 return 1;
758 return 0;
759 }
760
761 /* Compare TYPE1 and TYPE2 by their size.
762 Return -1 if size of TYPE1 < size of TYPE2
763 Return 1 if size of TYPE1 > size of TYPE2
764 Return 0 if types are of equal sizes or we cannot compare them. */
765
766 static int
767 compare_type_sizes (tree type1, tree type2)
768 {
769 /* Be conservative for arrays and vectors. We want to support partial
770 overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */
771 while (TREE_CODE (type1) == ARRAY_TYPE
772 || TREE_CODE (type1) == VECTOR_TYPE)
773 type1 = TREE_TYPE (type1);
774 while (TREE_CODE (type2) == ARRAY_TYPE
775 || TREE_CODE (type2) == VECTOR_TYPE)
776 type2 = TREE_TYPE (type2);
777 return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
778 }
779
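/* Worked example (illustrative only, assuming the usual 32-bit int and
   16-bit short): compare_type_sizes (int, short) returns 1, while
   compare_type_sizes (char[16], int) returns -1 because the array is
   first stripped down to its element type char. A type whose TYPE_SIZE
   is not a compile-time constant (a variable-length array type, say)
   makes compare_sizes return 0, i.e. the sizes are incomparable. */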
780 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
781 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
782 decide. */
783
784 static inline int
785 same_type_for_tbaa (tree type1, tree type2)
786 {
787 type1 = TYPE_MAIN_VARIANT (type1);
788 type2 = TYPE_MAIN_VARIANT (type2);
789
790 /* If we would have to do structural comparison bail out. */
791 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
792 || TYPE_STRUCTURAL_EQUALITY_P (type2))
793 return -1;
794
795 /* Compare the canonical types. */
796 if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
797 return 1;
798
799 /* ??? Array types are not properly unified in all cases as we have
800 spurious changes in the index types for example. Removing this
801 causes all sorts of problems with the Fortran frontend. */
802 if (TREE_CODE (type1) == ARRAY_TYPE
803 && TREE_CODE (type2) == ARRAY_TYPE)
804 return -1;
805
806 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
807 object of one of its constrained subtypes, e.g. when a function with an
808 unconstrained parameter passed by reference is called on an object and
809 inlined. But, even in the case of a fixed size, type and subtypes are
810 not equivalent enough as to share the same TYPE_CANONICAL, since this
811 would mean that conversions between them are useless, whereas they are
812 not (e.g. type and subtypes can have different modes). So, in the end,
813 they are only guaranteed to have the same alias set. */
814 if (get_alias_set (type1) == get_alias_set (type2))
815 return -1;
816
817 /* The types are known to be not equal. */
818 return 0;
819 }
820
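/* Illustrative examples (not exhaustive): same_type_for_tbaa returns 1
   for "int" vs. "const int" because both share the main variant "int";
   it returns -1 for two distinct array types such as int[2] vs. int[4]
   (see the ??? comment above); and it returns 0 for "int" vs. "float",
   whose canonical types and alias sets both differ. */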
821 /* Determine if the two component references REF1 and REF2, of which
822 at least one is based on an indirect reference, may alias. REF2 is
823 the only one that can be a decl in which case REF2_IS_DECL is true.
824 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
825 are the respective alias sets. */
827
828 static bool
829 aliasing_component_refs_p (tree ref1,
830 alias_set_type ref1_alias_set,
831 alias_set_type base1_alias_set,
832 poly_int64 offset1, poly_int64 max_size1,
833 tree ref2,
834 alias_set_type ref2_alias_set,
835 alias_set_type base2_alias_set,
836 poly_int64 offset2, poly_int64 max_size2,
837 bool ref2_is_decl)
838 {
839 /* If one reference is a component reference through pointers try to find a
840 common base and apply offset based disambiguation. This handles
841 for example
842 struct A { int i; int j; } *q;
843 struct B { struct A a; int k; } *p;
844 disambiguating q->i and p->a.j. */
845 tree base1, base2;
846 tree type1, type2;
847 tree *refp;
848 int same_p1 = 0, same_p2 = 0;
849
850 /* Choose bases and base types to search for. */
851 base1 = ref1;
852 while (handled_component_p (base1))
853 base1 = TREE_OPERAND (base1, 0);
854 type1 = TREE_TYPE (base1);
855 base2 = ref2;
856 while (handled_component_p (base2))
857 base2 = TREE_OPERAND (base2, 0);
858 type2 = TREE_TYPE (base2);
859
860 /* Now search for the type1 in the access path of ref2. This
861 would be a common base for doing offset based disambiguation on.
862 This however only makes sense if type2 is big enough to hold type1. */
863 int cmp_outer = compare_type_sizes (type2, type1);
864 if (cmp_outer >= 0)
865 {
866 refp = &ref2;
867 while (true)
868 {
869 /* We walk from the inner type to the outer types. If the type we see is
870 already too large to be part of type1, terminate the search. */
871 int cmp = compare_type_sizes (type1, TREE_TYPE (*refp));
872 if (cmp < 0)
873 break;
874 /* If types may be of same size, see if we can decide about their
875 equality. */
876 if (cmp == 0)
877 {
878 same_p2 = same_type_for_tbaa (TREE_TYPE (*refp), type1);
879 if (same_p2 != 0)
880 break;
881 }
882 if (!handled_component_p (*refp))
883 break;
884 refp = &TREE_OPERAND (*refp, 0);
885 }
886 if (same_p2 == 1)
887 {
888 poly_int64 offadj, sztmp, msztmp;
889 bool reverse;
890 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
891 offset2 -= offadj;
892 get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse);
893 offset1 -= offadj;
894 if (ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
895 {
896 ++alias_stats.aliasing_component_refs_p_may_alias;
897 return true;
898 }
899 else
900 {
901 ++alias_stats.aliasing_component_refs_p_no_alias;
902 return false;
903 }
904 }
905 }
906
907 /* If we didn't find a common base, try the other way around. */
908 if (cmp_outer <= 0)
909 {
910 refp = &ref1;
911 while (true)
912 {
913 int cmp = compare_type_sizes (type2, TREE_TYPE (*refp));
914 if (cmp < 0)
915 break;
916 /* If types may be of same size, see if we can decide about their
917 equality. */
918 if (cmp == 0)
919 {
920 same_p1 = same_type_for_tbaa (TREE_TYPE (*refp), type2);
921 if (same_p1 != 0)
922 break;
923 }
924 if (!handled_component_p (*refp))
925 break;
926 refp = &TREE_OPERAND (*refp, 0);
927 }
928 if (same_p1 == 1)
929 {
930 poly_int64 offadj, sztmp, msztmp;
931 bool reverse;
932
933 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
934 offset1 -= offadj;
935 get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse);
936 offset2 -= offadj;
937 if (ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
938 {
939 ++alias_stats.aliasing_component_refs_p_may_alias;
940 return true;
941 }
942 else
943 {
944 ++alias_stats.aliasing_component_refs_p_no_alias;
945 return false;
946 }
947 }
948 }
949
950 /* If we have two type access paths B1.path1 and B2.path2 they may
951 only alias if either B1 is in B2.path2 or B2 is in B1.path1.
952 But we can still have a path that goes B1.path1...B2.path2 with
953 a part that we do not see. So we can only disambiguate now
954 if there is no B2 in the tail of path1 and no B1 on the
955 tail of path2. */
956 if (compare_type_sizes (TREE_TYPE (ref2), type1) >= 0
957 && (same_p2 == -1
958 || base1_alias_set == ref2_alias_set
959 || alias_set_subset_of (base1_alias_set, ref2_alias_set)))
960 {
961 ++alias_stats.aliasing_component_refs_p_may_alias;
962 return true;
963 }
964 /* If this is ptr vs. decl then we know there is no ptr ... decl path. */
965 if (!ref2_is_decl
966 && compare_type_sizes (TREE_TYPE (ref1), type2) >= 0
967 && (same_p1 == -1
968 || base2_alias_set == ref1_alias_set
969 || alias_set_subset_of (base2_alias_set, ref1_alias_set)))
970 {
971 ++alias_stats.aliasing_component_refs_p_may_alias;
972 return true;
973 }
974 ++alias_stats.aliasing_component_refs_p_no_alias;
975 return false;
976 }
977
978 /* Return true if we can determine that component references REF1 and REF2,
979 that are within a common DECL, cannot overlap. */
980
981 static bool
982 nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
983 {
984 auto_vec<tree, 16> component_refs1;
985 auto_vec<tree, 16> component_refs2;
986
987 /* Create the stack of handled components for REF1. */
988 while (handled_component_p (ref1))
989 {
990 component_refs1.safe_push (ref1);
991 ref1 = TREE_OPERAND (ref1, 0);
992 }
993 if (TREE_CODE (ref1) == MEM_REF)
994 {
995 if (!integer_zerop (TREE_OPERAND (ref1, 1)))
996 return false;
997 ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
998 }
999
1000 /* Create the stack of handled components for REF2. */
1001 while (handled_component_p (ref2))
1002 {
1003 component_refs2.safe_push (ref2);
1004 ref2 = TREE_OPERAND (ref2, 0);
1005 }
1006 if (TREE_CODE (ref2) == MEM_REF)
1007 {
1008 if (!integer_zerop (TREE_OPERAND (ref2, 1)))
1009 return false;
1010 ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
1011 }
1012
1013 /* Bases must be either the same or uncomparable. */
1014 gcc_checking_assert (ref1 == ref2
1015 || (DECL_P (ref1) && DECL_P (ref2)
1016 && compare_base_decls (ref1, ref2) != 0));
1017
1018 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1019 rank. This is sufficient because we start from the same DECL and you
1020 cannot reference several fields at a time with COMPONENT_REFs (unlike
1021 with ARRAY_RANGE_REFs for arrays) so you always need the same number
1022 of them to access a sub-component, unless you're in a union, in which
1023 case the return value will precisely be false. */
1024 while (true)
1025 {
1026 do
1027 {
1028 if (component_refs1.is_empty ())
1029 return false;
1030 ref1 = component_refs1.pop ();
1031 }
1032 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
1033
1034 do
1035 {
1036 if (component_refs2.is_empty ())
1037 return false;
1038 ref2 = component_refs2.pop ();
1039 }
1040 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
1041
1042 /* Beware of BIT_FIELD_REF. */
1043 if (TREE_CODE (ref1) != COMPONENT_REF
1044 || TREE_CODE (ref2) != COMPONENT_REF)
1045 return false;
1046
1047 tree field1 = TREE_OPERAND (ref1, 1);
1048 tree field2 = TREE_OPERAND (ref2, 1);
1049
1050 /* ??? We cannot simply use the type of operand #0 of the refs here
1051 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1052 for common blocks instead of using unions like everyone else. */
1053 tree type1 = DECL_CONTEXT (field1);
1054 tree type2 = DECL_CONTEXT (field2);
1055
1056 /* We cannot disambiguate fields in a union or qualified union. */
1057 if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
1058 return false;
1059
1060 if (field1 != field2)
1061 {
1062 /* A field and its representative need to be considered the
1063 same. */
1064 if (DECL_BIT_FIELD_REPRESENTATIVE (field1) == field2
1065 || DECL_BIT_FIELD_REPRESENTATIVE (field2) == field1)
1066 return false;
1067 /* Different fields of the same record type cannot overlap.
1068 ??? Bitfields can overlap at RTL level so punt on them. */
1069 if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
1070 return false;
1071 return true;
1072 }
1073 }
1074
1075 return false;
1076 }
1077
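/* Illustrative example (S and s are placeholders): for

     struct S { struct { int x; int y; } a, b; } s;

   the accesses s.a.x and s.b.y are disambiguated by the function above;
   popping the two stacks in parallel reaches the distinct FIELD_DECLs
   a and b, which live in the same RECORD_TYPE and are not bit-fields.
   Had S been a union, the RECORD_TYPE check would have made the
   function return false conservatively. */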
1078 /* qsort compare function to sort FIELD_DECLs by their
1079 DECL_FIELD_CONTEXT TYPE_UID. */
1080
1081 static inline int
1082 ncr_compar (const void *field1_, const void *field2_)
1083 {
1084 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1085 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1086 unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
1087 unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
1088 if (uid1 < uid2)
1089 return -1;
1090 else if (uid1 > uid2)
1091 return 1;
1092 return 0;
1093 }
1094
1095 /* Return true if we can determine that the fields referenced cannot
1096 overlap for any pair of objects. */
1097
1098 static bool
1099 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1100 {
1101 if (!flag_strict_aliasing
1102 || !x || !y
1103 || TREE_CODE (x) != COMPONENT_REF
1104 || TREE_CODE (y) != COMPONENT_REF)
1105 return false;
1106
1107 auto_vec<const_tree, 16> fieldsx;
1108 while (TREE_CODE (x) == COMPONENT_REF)
1109 {
1110 tree field = TREE_OPERAND (x, 1);
1111 tree type = DECL_FIELD_CONTEXT (field);
1112 if (TREE_CODE (type) == RECORD_TYPE)
1113 fieldsx.safe_push (field);
1114 x = TREE_OPERAND (x, 0);
1115 }
1116 if (fieldsx.length () == 0)
1117 return false;
1118 auto_vec<const_tree, 16> fieldsy;
1119 while (TREE_CODE (y) == COMPONENT_REF)
1120 {
1121 tree field = TREE_OPERAND (y, 1);
1122 tree type = DECL_FIELD_CONTEXT (field);
1123 if (TREE_CODE (type) == RECORD_TYPE)
1124 fieldsy.safe_push (field);
1125 y = TREE_OPERAND (y, 0);
1126 }
1127 if (fieldsy.length () == 0)
1128 return false;
1129
1130 /* Most common case first. */
1131 if (fieldsx.length () == 1
1132 && fieldsy.length () == 1)
1133 return ((DECL_FIELD_CONTEXT (fieldsx[0])
1134 == DECL_FIELD_CONTEXT (fieldsy[0]))
1135 && fieldsx[0] != fieldsy[0]
1136 && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));
1137
1138 if (fieldsx.length () == 2)
1139 {
1140 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1141 std::swap (fieldsx[0], fieldsx[1]);
1142 }
1143 else
1144 fieldsx.qsort (ncr_compar);
1145
1146 if (fieldsy.length () == 2)
1147 {
1148 if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1149 std::swap (fieldsy[0], fieldsy[1]);
1150 }
1151 else
1152 fieldsy.qsort (ncr_compar);
1153
1154 unsigned i = 0, j = 0;
1155 do
1156 {
1157 const_tree fieldx = fieldsx[i];
1158 const_tree fieldy = fieldsy[j];
1159 tree typex = DECL_FIELD_CONTEXT (fieldx);
1160 tree typey = DECL_FIELD_CONTEXT (fieldy);
1161 if (typex == typey)
1162 {
1163 /* We're left with accessing different fields of a structure,
1164 no possible overlap. */
1165 if (fieldx != fieldy)
1166 {
1167 /* A field and its representative need to be considered the
1168 same. */
1169 if (DECL_BIT_FIELD_REPRESENTATIVE (fieldx) == fieldy
1170 || DECL_BIT_FIELD_REPRESENTATIVE (fieldy) == fieldx)
1171 return false;
1172 /* Different fields of the same record type cannot overlap.
1173 ??? Bitfields can overlap at RTL level so punt on them. */
1174 if (DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy))
1175 return false;
1176 return true;
1177 }
1178 }
1179 if (TYPE_UID (typex) < TYPE_UID (typey))
1180 {
1181 i++;
1182 if (i == fieldsx.length ())
1183 break;
1184 }
1185 else
1186 {
1187 j++;
1188 if (j == fieldsy.length ())
1189 break;
1190 }
1191 }
1192 while (1);
1193
1194 return false;
1195 }
1196
1197
1198 /* Return true if two memory references based on the variables BASE1
1199 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1200 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1201 if non-NULL are the complete memory reference trees. */
1202
1203 static bool
1204 decl_refs_may_alias_p (tree ref1, tree base1,
1205 poly_int64 offset1, poly_int64 max_size1,
1206 tree ref2, tree base2,
1207 poly_int64 offset2, poly_int64 max_size2)
1208 {
1209 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
1210
1211 /* If both references are based on different variables, they cannot alias. */
1212 if (compare_base_decls (base1, base2) == 0)
1213 return false;
1214
1215 /* If both references are based on the same variable, they cannot alias if
1216 the accesses do not overlap. */
1217 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1218 return false;
1219
1220 /* For components with variable position, the above test isn't sufficient,
1221 so we disambiguate component references manually. */
1222 if (ref1 && ref2
1223 && handled_component_p (ref1) && handled_component_p (ref2)
1224 && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
1225 return false;
1226
1227 return true;
1228 }
1229
1230 /* Return true if an indirect reference based on *PTR1 constrained
1231 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
1232 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
1233 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1234 in which case they are computed on-demand. REF1 and REF2
1235 if non-NULL are the complete memory reference trees. */
1236
1237 static bool
1238 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1239 poly_int64 offset1, poly_int64 max_size1,
1240 alias_set_type ref1_alias_set,
1241 alias_set_type base1_alias_set,
1242 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1243 poly_int64 offset2, poly_int64 max_size2,
1244 alias_set_type ref2_alias_set,
1245 alias_set_type base2_alias_set, bool tbaa_p)
1246 {
1247 tree ptr1;
1248 tree ptrtype1, dbase2;
1249
1250 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1251 || TREE_CODE (base1) == TARGET_MEM_REF)
1252 && DECL_P (base2));
1253
1254 ptr1 = TREE_OPERAND (base1, 0);
1255 poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
1256
1257 /* If only one reference is based on a variable, they cannot alias if
1258 the pointer access is beyond the extent of the variable access.
1259 (the pointer base cannot validly point to an offset less than zero
1260 of the variable).
1261 ??? IVOPTs creates bases that do not honor this restriction,
1262 so do not apply this optimization for TARGET_MEM_REFs. */
1263 if (TREE_CODE (base1) != TARGET_MEM_REF
1264 && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
1265 return false;
1266 /* They also cannot alias if the pointer may not point to the decl. */
1267 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
1268 return false;
1269
1270 /* Disambiguations that rely on strict aliasing rules follow. */
1271 if (!flag_strict_aliasing || !tbaa_p)
1272 return true;
1273
1274 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1275
1276 /* If the alias set for a pointer access is zero all bets are off. */
1277 if (base1_alias_set == 0)
1278 return true;
1279
1280 /* When we are trying to disambiguate an access with a pointer dereference
1281 as base versus one with a decl as base we can use both the size
1282 of the decl and its dynamic type for extra disambiguation.
1283 ??? We do not know anything about the dynamic type of the decl
1284 other than that its alias-set contains base2_alias_set as a subset
1285 which does not help us here. */
1286 /* As we know nothing useful about the dynamic type of the decl just
1287 use the usual conflict check rather than a subset test.
1288 ??? We could introduce -fvery-strict-aliasing when the language
1289 does not allow decls to have a dynamic type that differs from their
1290 static type. Then we can check
1291 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
1292 if (base1_alias_set != base2_alias_set
1293 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1294 return false;
1295 /* If the size of the access relevant for TBAA through the pointer
1296 is bigger than the size of the decl we can't possibly access the
1297 decl via that pointer. */
1298 if (/* ??? This in turn may run afoul when a decl of type T which is
1299 a member of union type U is accessed through a pointer to
1300 type U and sizeof T is smaller than sizeof U. */
1301 TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
1302 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
1303 && compare_sizes (DECL_SIZE (base2),
1304 TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
1305 return false;
1306
1307 if (!ref2)
1308 return true;
1309
1310 /* If the decl is accessed via a MEM_REF, reconstruct the base
1311 we can use for TBAA and an appropriately adjusted offset. */
1312 dbase2 = ref2;
1313 while (handled_component_p (dbase2))
1314 dbase2 = TREE_OPERAND (dbase2, 0);
1315 poly_int64 doffset1 = offset1;
1316 poly_offset_int doffset2 = offset2;
1317 if (TREE_CODE (dbase2) == MEM_REF
1318 || TREE_CODE (dbase2) == TARGET_MEM_REF)
1319 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
1320
1321 /* If either reference is view-converted, give up now. */
1322 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
1323 || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
1324 return true;
1325
1326 /* If both references are through the same type, they do not alias
1327 if the accesses do not overlap. This does extra disambiguation
1328 for mixed/pointer accesses but requires strict aliasing.
1329 For MEM_REFs we require that the component-ref offset we computed
1330 is relative to the start of the type which we ensure by
1331 comparing rvalue and access type and disregarding the constant
1332 pointer offset. */
1333 if ((TREE_CODE (base1) != TARGET_MEM_REF
1334 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1335 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
1336 return ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2);
1337
1338 if (ref1 && ref2
1339 && nonoverlapping_component_refs_p (ref1, ref2))
1340 return false;
1341
1342 /* Do access-path based disambiguation. */
1343 if (ref1 && ref2
1344 && (handled_component_p (ref1) || handled_component_p (ref2)))
1345 return aliasing_component_refs_p (ref1,
1346 ref1_alias_set, base1_alias_set,
1347 offset1, max_size1,
1348 ref2,
1349 ref2_alias_set, base2_alias_set,
1350 offset2, max_size2, true);
1351
1352 return true;
1353 }
1354
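/* Illustrative example (i and p are placeholders): for

     int i;
     struct big { int a[4]; } *p;

   a store to *p cannot clobber i. Even when the points-to solution of
   p is too coarse to exclude i, the size-based check above fires:
   compare_sizes (DECL_SIZE (i), TYPE_SIZE (struct big)) is negative,
   so the decl is too small to be accessed through that pointer. */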
1355 /* Return true if two indirect references based on *PTR1
1356 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1357 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
1358 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1359 in which case they are computed on-demand. REF1 and REF2
1360 if non-NULL are the complete memory reference trees. */
1361
1362 static bool
1363 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1364 poly_int64 offset1, poly_int64 max_size1,
1365 alias_set_type ref1_alias_set,
1366 alias_set_type base1_alias_set,
1367 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1368 poly_int64 offset2, poly_int64 max_size2,
1369 alias_set_type ref2_alias_set,
1370 alias_set_type base2_alias_set, bool tbaa_p)
1371 {
1372 tree ptr1;
1373 tree ptr2;
1374 tree ptrtype1, ptrtype2;
1375
1376 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1377 || TREE_CODE (base1) == TARGET_MEM_REF)
1378 && (TREE_CODE (base2) == MEM_REF
1379 || TREE_CODE (base2) == TARGET_MEM_REF));
1380
1381 ptr1 = TREE_OPERAND (base1, 0);
1382 ptr2 = TREE_OPERAND (base2, 0);
1383
1384 /* If both bases are based on pointers they cannot alias if they may not
1385 point to the same memory object or if they point to the same object
1386 and the accesses do not overlap. */
1387 if ((!cfun || gimple_in_ssa_p (cfun))
1388 && operand_equal_p (ptr1, ptr2, 0)
1389 && (((TREE_CODE (base1) != TARGET_MEM_REF
1390 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1391 && (TREE_CODE (base2) != TARGET_MEM_REF
1392 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
1393 || (TREE_CODE (base1) == TARGET_MEM_REF
1394 && TREE_CODE (base2) == TARGET_MEM_REF
1395 && (TMR_STEP (base1) == TMR_STEP (base2)
1396 || (TMR_STEP (base1) && TMR_STEP (base2)
1397 && operand_equal_p (TMR_STEP (base1),
1398 TMR_STEP (base2), 0)))
1399 && (TMR_INDEX (base1) == TMR_INDEX (base2)
1400 || (TMR_INDEX (base1) && TMR_INDEX (base2)
1401 && operand_equal_p (TMR_INDEX (base1),
1402 TMR_INDEX (base2), 0)))
1403 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
1404 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
1405 && operand_equal_p (TMR_INDEX2 (base1),
1406 TMR_INDEX2 (base2), 0))))))
1407 {
1408 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
1409 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
1410 return ranges_maybe_overlap_p (offset1 + moff1, max_size1,
1411 offset2 + moff2, max_size2);
1412 }
1413 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
1414 return false;
1415
1416 /* Disambiguations that rely on strict aliasing rules follow. */
1417 if (!flag_strict_aliasing || !tbaa_p)
1418 return true;
1419
1420 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1421 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
1422
1423 /* If the alias set for a pointer access is zero all bets are off. */
1424 if (base1_alias_set == 0
1425 || base2_alias_set == 0)
1426 return true;
1427
1428 /* If both references are through the same type, they do not alias
1429 if the accesses do not overlap. This does extra disambiguation
1430 for mixed/pointer accesses but requires strict aliasing. */
1431 if ((TREE_CODE (base1) != TARGET_MEM_REF
1432 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1433 && (TREE_CODE (base2) != TARGET_MEM_REF
1434 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
1435 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
1436 && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
1437 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
1438 TREE_TYPE (ptrtype2)) == 1
1439 /* But avoid treating arrays as "objects", instead assume they
1440 can overlap by an exact multiple of their element size. */
1441 && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
1442 return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);
1443
1444 /* Do type-based disambiguation. */
1445 if (base1_alias_set != base2_alias_set
1446 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1447 return false;
1448
1449 /* If either reference is view-converted, give up now. */
1450 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
1451 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
1452 return true;
1453
1454 if (ref1 && ref2
1455 && nonoverlapping_component_refs_p (ref1, ref2))
1456 return false;
1457
1458 /* Do access-path based disambiguation. */
1459 if (ref1 && ref2
1460 && (handled_component_p (ref1) || handled_component_p (ref2)))
1461 return aliasing_component_refs_p (ref1,
1462 ref1_alias_set, base1_alias_set,
1463 offset1, max_size1,
1464 ref2,
1465 ref2_alias_set, base2_alias_set,
1466 offset2, max_size2, false);
1467
1468 return true;
1469 }
1470
1471 /* Return true, if the two memory references REF1 and REF2 may alias. */
1472
1473 bool
1474 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1475 {
1476 tree base1, base2;
1477 poly_int64 offset1 = 0, offset2 = 0;
1478 poly_int64 max_size1 = -1, max_size2 = -1;
1479 bool var1_p, var2_p, ind1_p, ind2_p;
1480
1481 gcc_checking_assert ((!ref1->ref
1482 || TREE_CODE (ref1->ref) == SSA_NAME
1483 || DECL_P (ref1->ref)
1484 || TREE_CODE (ref1->ref) == STRING_CST
1485 || handled_component_p (ref1->ref)
1486 || TREE_CODE (ref1->ref) == MEM_REF
1487 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
1488 && (!ref2->ref
1489 || TREE_CODE (ref2->ref) == SSA_NAME
1490 || DECL_P (ref2->ref)
1491 || TREE_CODE (ref2->ref) == STRING_CST
1492 || handled_component_p (ref2->ref)
1493 || TREE_CODE (ref2->ref) == MEM_REF
1494 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
1495
1496 /* Decompose the references into their base objects and the access. */
1497 base1 = ao_ref_base (ref1);
1498 offset1 = ref1->offset;
1499 max_size1 = ref1->max_size;
1500 base2 = ao_ref_base (ref2);
1501 offset2 = ref2->offset;
1502 max_size2 = ref2->max_size;
1503
1504 /* We can end up with registers or constants as bases for example from
1505 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
1506 which is seen as a struct copy. */
1507 if (TREE_CODE (base1) == SSA_NAME
1508 || TREE_CODE (base1) == CONST_DECL
1509 || TREE_CODE (base1) == CONSTRUCTOR
1510 || TREE_CODE (base1) == ADDR_EXPR
1511 || CONSTANT_CLASS_P (base1)
1512 || TREE_CODE (base2) == SSA_NAME
1513 || TREE_CODE (base2) == CONST_DECL
1514 || TREE_CODE (base2) == CONSTRUCTOR
1515 || TREE_CODE (base2) == ADDR_EXPR
1516 || CONSTANT_CLASS_P (base2))
1517 return false;
1518
1519 /* We can end up referring to code via function and label decls.
1520 As we likely do not properly track code aliases, conservatively
1521 bail out. */
1522 if (TREE_CODE (base1) == FUNCTION_DECL
1523 || TREE_CODE (base1) == LABEL_DECL
1524 || TREE_CODE (base2) == FUNCTION_DECL
1525 || TREE_CODE (base2) == LABEL_DECL)
1526 return true;
1527
1528 /* Two volatile accesses always conflict. */
1529 if (ref1->volatile_p
1530 && ref2->volatile_p)
1531 return true;
1532
1533 /* Defer to simple offset based disambiguation if we have
1534 references based on two decls. Do this before deferring to
1535 TBAA to handle must-alias cases in conformance with the
1536 GCC extension of allowing type-punning through unions. */
1537 var1_p = DECL_P (base1);
1538 var2_p = DECL_P (base2);
1539 if (var1_p && var2_p)
1540 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
1541 ref2->ref, base2, offset2, max_size2);
1542
1543 /* Handle restrict based accesses.
1544 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
1545 here. */
1546 tree rbase1 = base1;
1547 tree rbase2 = base2;
1548 if (var1_p)
1549 {
1550 rbase1 = ref1->ref;
1551 if (rbase1)
1552 while (handled_component_p (rbase1))
1553 rbase1 = TREE_OPERAND (rbase1, 0);
1554 }
1555 if (var2_p)
1556 {
1557 rbase2 = ref2->ref;
1558 if (rbase2)
1559 while (handled_component_p (rbase2))
1560 rbase2 = TREE_OPERAND (rbase2, 0);
1561 }
1562 if (rbase1 && rbase2
1563 && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
1564 && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
1565 /* If the accesses are in the same restrict clique... */
1566 && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
1567 /* But based on different pointers they do not alias. */
1568 && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
1569 return false;
1570
1571 ind1_p = (TREE_CODE (base1) == MEM_REF
1572 || TREE_CODE (base1) == TARGET_MEM_REF);
1573 ind2_p = (TREE_CODE (base2) == MEM_REF
1574 || TREE_CODE (base2) == TARGET_MEM_REF);
1575
1576 /* Canonicalize the pointer-vs-decl case. */
1577 if (ind1_p && var2_p)
1578 {
1579 std::swap (offset1, offset2);
1580 std::swap (max_size1, max_size2);
1581 std::swap (base1, base2);
1582 std::swap (ref1, ref2);
1583 var1_p = true;
1584 ind1_p = false;
1585 var2_p = false;
1586 ind2_p = true;
1587 }
1588
1589 /* First defer to TBAA if possible. */
1590 if (tbaa_p
1591 && flag_strict_aliasing
1592 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
1593 ao_ref_alias_set (ref2)))
1594 return false;
1595
1596 /* If the reference is based on a pointer that points to memory
1597 that may not be written to then the other reference cannot possibly
1598 clobber it. */
1599 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
1600 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
1601 || (ind1_p
1602 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
1603 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
1604 return false;
1605
1606 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
1607 if (var1_p && ind2_p)
1608 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
1609 offset2, max_size2,
1610 ao_ref_alias_set (ref2),
1611 ao_ref_base_alias_set (ref2),
1612 ref1->ref, base1,
1613 offset1, max_size1,
1614 ao_ref_alias_set (ref1),
1615 ao_ref_base_alias_set (ref1),
1616 tbaa_p);
1617 else if (ind1_p && ind2_p)
1618 return indirect_refs_may_alias_p (ref1->ref, base1,
1619 offset1, max_size1,
1620 ao_ref_alias_set (ref1),
1621 ao_ref_base_alias_set (ref1),
1622 ref2->ref, base2,
1623 offset2, max_size2,
1624 ao_ref_alias_set (ref2),
1625 ao_ref_base_alias_set (ref2),
1626 tbaa_p);
1627
1628 gcc_unreachable ();
1629 }
1630
1631 static bool
1632 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
1633 {
1634 ao_ref r1;
1635 ao_ref_init (&r1, ref1);
1636 return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
1637 }
1638
1639 bool
1640 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
1641 {
1642 ao_ref r1, r2;
1643 bool res;
1644 ao_ref_init (&r1, ref1);
1645 ao_ref_init (&r2, ref2);
1646 res = refs_may_alias_p_1 (&r1, &r2, tbaa_p);
1647 if (res)
1648 ++alias_stats.refs_may_alias_p_may_alias;
1649 else
1650 ++alias_stats.refs_may_alias_p_no_alias;
1651 return res;
1652 }
1653
1654 /* Returns true if there is an anti-dependence for the STORE that
1655 executes after the LOAD. */
1656
1657 bool
1658 refs_anti_dependent_p (tree load, tree store)
1659 {
1660 ao_ref r1, r2;
1661 ao_ref_init (&r1, load);
1662 ao_ref_init (&r2, store);
1663 return refs_may_alias_p_1 (&r1, &r2, false);
1664 }
1665
1666 /* Returns true if there is an output dependence for the stores
1667 STORE1 and STORE2. */
1668
1669 bool
1670 refs_output_dependent_p (tree store1, tree store2)
1671 {
1672 ao_ref r1, r2;
1673 ao_ref_init (&r1, store1);
1674 ao_ref_init (&r2, store2);
1675 return refs_may_alias_p_1 (&r1, &r2, false);
1676 }
1677
1678 /* If the call CALL may use the memory reference REF return true,
1679 otherwise return false. */
1680
1681 static bool
1682 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
1683 {
1684 tree base, callee;
1685 unsigned i;
1686 int flags = gimple_call_flags (call);
1687
1688 /* Const functions without a static chain do not implicitly use memory. */
1689 if (!gimple_call_chain (call)
1690 && (flags & (ECF_CONST|ECF_NOVOPS)))
1691 goto process_args;
1692
1693 base = ao_ref_base (ref);
1694 if (!base)
1695 return true;
1696
1697 /* A call that is not without side-effects might involve volatile
1698 accesses and thus conflicts with all other volatile accesses. */
1699 if (ref->volatile_p)
1700 return true;
1701
1702 /* If the reference is based on a decl that is not aliased the call
1703 cannot possibly use it. */
1704 if (DECL_P (base)
1705 && !may_be_aliased (base)
1706 /* But local statics can be used through recursion. */
1707 && !is_global_var (base))
1708 goto process_args;
1709
1710 callee = gimple_call_fndecl (call);
1711
1712 /* Handle those builtin functions explicitly that do not act as
1713 escape points. See tree-ssa-structalias.c:find_func_aliases
1714 for the list of builtins we might need to handle here. */
1715 if (callee != NULL_TREE
1716 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1717 switch (DECL_FUNCTION_CODE (callee))
1718 {
1719 /* All the following functions read memory pointed to by
1720 their second argument. strcat/strncat additionally
1721 reads memory pointed to by the first argument. */
1722 case BUILT_IN_STRCAT:
1723 case BUILT_IN_STRNCAT:
1724 {
1725 ao_ref dref;
1726 ao_ref_init_from_ptr_and_size (&dref,
1727 gimple_call_arg (call, 0),
1728 NULL_TREE);
1729 if (refs_may_alias_p_1 (&dref, ref, false))
1730 return true;
1731 }
1732 /* FALLTHRU */
1733 case BUILT_IN_STRCPY:
1734 case BUILT_IN_STRNCPY:
1735 case BUILT_IN_MEMCPY:
1736 case BUILT_IN_MEMMOVE:
1737 case BUILT_IN_MEMPCPY:
1738 case BUILT_IN_STPCPY:
1739 case BUILT_IN_STPNCPY:
1740 case BUILT_IN_TM_MEMCPY:
1741 case BUILT_IN_TM_MEMMOVE:
1742 {
1743 ao_ref dref;
1744 tree size = NULL_TREE;
1745 if (gimple_call_num_args (call) == 3)
1746 size = gimple_call_arg (call, 2);
1747 ao_ref_init_from_ptr_and_size (&dref,
1748 gimple_call_arg (call, 1),
1749 size);
1750 return refs_may_alias_p_1 (&dref, ref, false);
1751 }
1752 case BUILT_IN_STRCAT_CHK:
1753 case BUILT_IN_STRNCAT_CHK:
1754 {
1755 ao_ref dref;
1756 ao_ref_init_from_ptr_and_size (&dref,
1757 gimple_call_arg (call, 0),
1758 NULL_TREE);
1759 if (refs_may_alias_p_1 (&dref, ref, false))
1760 return true;
1761 }
1762 /* FALLTHRU */
1763 case BUILT_IN_STRCPY_CHK:
1764 case BUILT_IN_STRNCPY_CHK:
1765 case BUILT_IN_MEMCPY_CHK:
1766 case BUILT_IN_MEMMOVE_CHK:
1767 case BUILT_IN_MEMPCPY_CHK:
1768 case BUILT_IN_STPCPY_CHK:
1769 case BUILT_IN_STPNCPY_CHK:
1770 {
1771 ao_ref dref;
1772 tree size = NULL_TREE;
1773 if (gimple_call_num_args (call) == 4)
1774 size = gimple_call_arg (call, 2);
1775 ao_ref_init_from_ptr_and_size (&dref,
1776 gimple_call_arg (call, 1),
1777 size);
1778 return refs_may_alias_p_1 (&dref, ref, false);
1779 }
1780 case BUILT_IN_BCOPY:
1781 {
1782 ao_ref dref;
1783 tree size = gimple_call_arg (call, 2);
1784 ao_ref_init_from_ptr_and_size (&dref,
1785 gimple_call_arg (call, 0),
1786 size);
1787 return refs_may_alias_p_1 (&dref, ref, false);
1788 }
1789
1790 /* The following functions read memory pointed to by their
1791 first argument. */
1792 CASE_BUILT_IN_TM_LOAD (1):
1793 CASE_BUILT_IN_TM_LOAD (2):
1794 CASE_BUILT_IN_TM_LOAD (4):
1795 CASE_BUILT_IN_TM_LOAD (8):
1796 CASE_BUILT_IN_TM_LOAD (FLOAT):
1797 CASE_BUILT_IN_TM_LOAD (DOUBLE):
1798 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
1799 CASE_BUILT_IN_TM_LOAD (M64):
1800 CASE_BUILT_IN_TM_LOAD (M128):
1801 CASE_BUILT_IN_TM_LOAD (M256):
1802 case BUILT_IN_TM_LOG:
1803 case BUILT_IN_TM_LOG_1:
1804 case BUILT_IN_TM_LOG_2:
1805 case BUILT_IN_TM_LOG_4:
1806 case BUILT_IN_TM_LOG_8:
1807 case BUILT_IN_TM_LOG_FLOAT:
1808 case BUILT_IN_TM_LOG_DOUBLE:
1809 case BUILT_IN_TM_LOG_LDOUBLE:
1810 case BUILT_IN_TM_LOG_M64:
1811 case BUILT_IN_TM_LOG_M128:
1812 case BUILT_IN_TM_LOG_M256:
1813 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
1814
1815 /* These read memory pointed to by the first argument. */
1816 case BUILT_IN_STRDUP:
1817 case BUILT_IN_STRNDUP:
1818 case BUILT_IN_REALLOC:
1819 {
1820 ao_ref dref;
1821 tree size = NULL_TREE;
1822 if (gimple_call_num_args (call) == 2)
1823 size = gimple_call_arg (call, 1);
1824 ao_ref_init_from_ptr_and_size (&dref,
1825 gimple_call_arg (call, 0),
1826 size);
1827 return refs_may_alias_p_1 (&dref, ref, false);
1828 }
1829 /* These read memory pointed to by the first argument. */
1830 case BUILT_IN_INDEX:
1831 case BUILT_IN_STRCHR:
1832 case BUILT_IN_STRRCHR:
1833 {
1834 ao_ref dref;
1835 ao_ref_init_from_ptr_and_size (&dref,
1836 gimple_call_arg (call, 0),
1837 NULL_TREE);
1838 return refs_may_alias_p_1 (&dref, ref, false);
1839 }
1840 /* These read memory pointed to by the first argument with size
1841 in the third argument. */
1842 case BUILT_IN_MEMCHR:
1843 {
1844 ao_ref dref;
1845 ao_ref_init_from_ptr_and_size (&dref,
1846 gimple_call_arg (call, 0),
1847 gimple_call_arg (call, 2));
1848 return refs_may_alias_p_1 (&dref, ref, false);
1849 }
1850 /* These read memory pointed to by the first and second arguments. */
1851 case BUILT_IN_STRSTR:
1852 case BUILT_IN_STRPBRK:
1853 {
1854 ao_ref dref;
1855 ao_ref_init_from_ptr_and_size (&dref,
1856 gimple_call_arg (call, 0),
1857 NULL_TREE);
1858 if (refs_may_alias_p_1 (&dref, ref, false))
1859 return true;
1860 ao_ref_init_from_ptr_and_size (&dref,
1861 gimple_call_arg (call, 1),
1862 NULL_TREE);
1863 return refs_may_alias_p_1 (&dref, ref, false);
1864 }
1865
1866 /* The following builtins do not read from memory. */
1867 case BUILT_IN_FREE:
1868 case BUILT_IN_MALLOC:
1869 case BUILT_IN_POSIX_MEMALIGN:
1870 case BUILT_IN_ALIGNED_ALLOC:
1871 case BUILT_IN_CALLOC:
1872 CASE_BUILT_IN_ALLOCA:
1873 case BUILT_IN_STACK_SAVE:
1874 case BUILT_IN_STACK_RESTORE:
1875 case BUILT_IN_MEMSET:
1876 case BUILT_IN_TM_MEMSET:
1877 case BUILT_IN_MEMSET_CHK:
1878 case BUILT_IN_FREXP:
1879 case BUILT_IN_FREXPF:
1880 case BUILT_IN_FREXPL:
1881 case BUILT_IN_GAMMA_R:
1882 case BUILT_IN_GAMMAF_R:
1883 case BUILT_IN_GAMMAL_R:
1884 case BUILT_IN_LGAMMA_R:
1885 case BUILT_IN_LGAMMAF_R:
1886 case BUILT_IN_LGAMMAL_R:
1887 case BUILT_IN_MODF:
1888 case BUILT_IN_MODFF:
1889 case BUILT_IN_MODFL:
1890 case BUILT_IN_REMQUO:
1891 case BUILT_IN_REMQUOF:
1892 case BUILT_IN_REMQUOL:
1893 case BUILT_IN_SINCOS:
1894 case BUILT_IN_SINCOSF:
1895 case BUILT_IN_SINCOSL:
1896 case BUILT_IN_ASSUME_ALIGNED:
1897 case BUILT_IN_VA_END:
1898 return false;
1899 /* __sync_* builtins and some OpenMP builtins act as threading
1900 barriers. */
1901 #undef DEF_SYNC_BUILTIN
1902 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1903 #include "sync-builtins.def"
1904 #undef DEF_SYNC_BUILTIN
1905 case BUILT_IN_GOMP_ATOMIC_START:
1906 case BUILT_IN_GOMP_ATOMIC_END:
1907 case BUILT_IN_GOMP_BARRIER:
1908 case BUILT_IN_GOMP_BARRIER_CANCEL:
1909 case BUILT_IN_GOMP_TASKWAIT:
1910 case BUILT_IN_GOMP_TASKGROUP_END:
1911 case BUILT_IN_GOMP_CRITICAL_START:
1912 case BUILT_IN_GOMP_CRITICAL_END:
1913 case BUILT_IN_GOMP_CRITICAL_NAME_START:
1914 case BUILT_IN_GOMP_CRITICAL_NAME_END:
1915 case BUILT_IN_GOMP_LOOP_END:
1916 case BUILT_IN_GOMP_LOOP_END_CANCEL:
1917 case BUILT_IN_GOMP_ORDERED_START:
1918 case BUILT_IN_GOMP_ORDERED_END:
1919 case BUILT_IN_GOMP_SECTIONS_END:
1920 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
1921 case BUILT_IN_GOMP_SINGLE_COPY_START:
1922 case BUILT_IN_GOMP_SINGLE_COPY_END:
1923 return true;
1924
1925 default:
1926 /* Fallthru to general call handling. */;
1927 }
1928
1929 /* Check if base is a global static variable that is not read
1930 by the function. */
1931 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
1932 {
1933 struct cgraph_node *node = cgraph_node::get (callee);
1934 bitmap not_read;
1935
1936 /* FIXME: Callee can be an OMP builtin that does not have a call graph
1937 node yet. We should enforce that there are nodes for all decls in the
1938 IL and remove this check instead. */
1939 if (node
1940 && (not_read = ipa_reference_get_not_read_global (node))
1941 && bitmap_bit_p (not_read, ipa_reference_var_uid (base)))
1942 goto process_args;
1943 }
1944
1945 /* Check if the base variable is call-used. */
1946 if (DECL_P (base))
1947 {
1948 if (pt_solution_includes (gimple_call_use_set (call), base))
1949 return true;
1950 }
1951 else if ((TREE_CODE (base) == MEM_REF
1952 || TREE_CODE (base) == TARGET_MEM_REF)
1953 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1954 {
1955 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1956 if (!pi)
1957 return true;
1958
1959 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
1960 return true;
1961 }
1962 else
1963 return true;
1964
1965 /* Inspect call arguments for passed-by-value aliases. */
1966 process_args:
1967 for (i = 0; i < gimple_call_num_args (call); ++i)
1968 {
1969 tree op = gimple_call_arg (call, i);
1970 int flags = gimple_call_arg_flags (call, i);
1971
1972 if (flags & EAF_UNUSED)
1973 continue;
1974
1975 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1976 op = TREE_OPERAND (op, 0);
1977
1978 if (TREE_CODE (op) != SSA_NAME
1979 && !is_gimple_min_invariant (op))
1980 {
1981 ao_ref r;
1982 ao_ref_init (&r, op);
1983 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
1984 return true;
1985 }
1986 }
1987
1988 return false;
1989 }
1990
1991 static bool
1992 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
1993 {
1994 bool res;
1995 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
1996 if (res)
1997 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
1998 else
1999 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2000 return res;
2001 }
2002
2003
2004 /* If the statement STMT may use the memory reference REF return
2005 true, otherwise return false. */
2006
2007 bool
2008 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2009 {
2010 if (is_gimple_assign (stmt))
2011 {
2012 tree rhs;
2013
2014 /* Assignments that reference memory are always single assignments. */
2015 if (!gimple_assign_single_p (stmt))
2016 return false;
2017
2018 rhs = gimple_assign_rhs1 (stmt);
2019 if (is_gimple_reg (rhs)
2020 || is_gimple_min_invariant (rhs)
2021 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2022 return false;
2023
2024 return refs_may_alias_p (rhs, ref, tbaa_p);
2025 }
2026 else if (is_gimple_call (stmt))
2027 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2028 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2029 {
2030 tree retval = gimple_return_retval (return_stmt);
2031 if (retval
2032 && TREE_CODE (retval) != SSA_NAME
2033 && !is_gimple_min_invariant (retval)
2034 && refs_may_alias_p (retval, ref, tbaa_p))
2035 return true;
2036 /* If ref escapes the function then the return acts as a use. */
2037 tree base = ao_ref_base (ref);
2038 if (!base)
2039 ;
2040 else if (DECL_P (base))
2041 return is_global_var (base);
2042 else if (TREE_CODE (base) == MEM_REF
2043 || TREE_CODE (base) == TARGET_MEM_REF)
2044 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
2045 return false;
2046 }
2047
2048 return true;
2049 }
2050
2051 bool
2052 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
2053 {
2054 ao_ref r;
2055 ao_ref_init (&r, ref);
2056 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
2057 }
2058
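/* Illustrative sketch, not part of the upstream file: a typical client
   builds an ao_ref for the memory written by a store and then asks whether
   some other statement may read it.  STORE is assumed to be a GIMPLE
   assignment to memory; the helper name is hypothetical.  */

static bool
example_stmt_reads_stored_memory_p (gimple *store, gimple *stmt)
{
  ao_ref lhs_ref;
  ao_ref_init (&lhs_ref, gimple_assign_lhs (store));
  /* The final argument enables type-based (TBAA) disambiguation.  */
  return ref_maybe_used_by_stmt_p (stmt, &lhs_ref, true);
}
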
2059 /* If the call in statement CALL may clobber the memory reference REF
2060 return true, otherwise return false. */
2061
2062 bool
2063 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
2064 {
2065 tree base;
2066 tree callee;
2067
2068 /* If the call is pure or const it cannot clobber anything. */
2069 if (gimple_call_flags (call)
2070 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
2071 return false;
2072 if (gimple_call_internal_p (call))
2073 switch (gimple_call_internal_fn (call))
2074 {
2075 /* Treat these internal calls like ECF_PURE for aliasing;
2076 they don't write to any memory the program should care about.
2077 They have other important side-effects and read memory,
2078 so they can't be ECF_NOVOPS. */
2079 case IFN_UBSAN_NULL:
2080 case IFN_UBSAN_BOUNDS:
2081 case IFN_UBSAN_VPTR:
2082 case IFN_UBSAN_OBJECT_SIZE:
2083 case IFN_UBSAN_PTR:
2084 case IFN_ASAN_CHECK:
2085 return false;
2086 default:
2087 break;
2088 }
2089
2090 base = ao_ref_base (ref);
2091 if (!base)
2092 return true;
2093
2094 if (TREE_CODE (base) == SSA_NAME
2095 || CONSTANT_CLASS_P (base))
2096 return false;
2097
2098 /* A call that is not without side-effects might involve volatile
2099 accesses and thus conflicts with all other volatile accesses. */
2100 if (ref->volatile_p)
2101 return true;
2102
2103 /* If the reference is based on a decl that is not aliased the call
2104 cannot possibly clobber it. */
2105 if (DECL_P (base)
2106 && !may_be_aliased (base)
2107 /* But local non-readonly statics can be modified through recursion
2108 or the call may implement a threading barrier which we must
2109 treat as may-def. */
2110 && (TREE_READONLY (base)
2111 || !is_global_var (base)))
2112 return false;
2113
2114 /* If the reference is based on a pointer that points to memory
2115 that may not be written to then the call cannot possibly clobber it. */
2116 if ((TREE_CODE (base) == MEM_REF
2117 || TREE_CODE (base) == TARGET_MEM_REF)
2118 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
2119 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
2120 return false;
2121
2122 callee = gimple_call_fndecl (call);
2123
2124 /* Explicitly handle those builtin functions that do not act as
2125 escape points. See tree-ssa-structalias.c:find_func_aliases
2126 for the list of builtins we might need to handle here. */
2127 if (callee != NULL_TREE
2128 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2129 switch (DECL_FUNCTION_CODE (callee))
2130 {
2131 /* All the following functions clobber memory pointed to by
2132 their first argument. */
2133 case BUILT_IN_STRCPY:
2134 case BUILT_IN_STRNCPY:
2135 case BUILT_IN_MEMCPY:
2136 case BUILT_IN_MEMMOVE:
2137 case BUILT_IN_MEMPCPY:
2138 case BUILT_IN_STPCPY:
2139 case BUILT_IN_STPNCPY:
2140 case BUILT_IN_STRCAT:
2141 case BUILT_IN_STRNCAT:
2142 case BUILT_IN_MEMSET:
2143 case BUILT_IN_TM_MEMSET:
2144 CASE_BUILT_IN_TM_STORE (1):
2145 CASE_BUILT_IN_TM_STORE (2):
2146 CASE_BUILT_IN_TM_STORE (4):
2147 CASE_BUILT_IN_TM_STORE (8):
2148 CASE_BUILT_IN_TM_STORE (FLOAT):
2149 CASE_BUILT_IN_TM_STORE (DOUBLE):
2150 CASE_BUILT_IN_TM_STORE (LDOUBLE):
2151 CASE_BUILT_IN_TM_STORE (M64):
2152 CASE_BUILT_IN_TM_STORE (M128):
2153 CASE_BUILT_IN_TM_STORE (M256):
2154 case BUILT_IN_TM_MEMCPY:
2155 case BUILT_IN_TM_MEMMOVE:
2156 {
2157 ao_ref dref;
2158 tree size = NULL_TREE;
2159 /* Don't pass in a size for strncat, as the maximum size stored
2160 is strlen (dest) + n + 1 bytes instead of n (that is, n + 1
2161 bytes starting at dest + strlen (dest)), and strlen (dest) isn't
2162 known. */
2163 if (gimple_call_num_args (call) == 3
2164 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
2165 size = gimple_call_arg (call, 2);
2166 ao_ref_init_from_ptr_and_size (&dref,
2167 gimple_call_arg (call, 0),
2168 size);
2169 return refs_may_alias_p_1 (&dref, ref, false);
2170 }
2171 case BUILT_IN_STRCPY_CHK:
2172 case BUILT_IN_STRNCPY_CHK:
2173 case BUILT_IN_MEMCPY_CHK:
2174 case BUILT_IN_MEMMOVE_CHK:
2175 case BUILT_IN_MEMPCPY_CHK:
2176 case BUILT_IN_STPCPY_CHK:
2177 case BUILT_IN_STPNCPY_CHK:
2178 case BUILT_IN_STRCAT_CHK:
2179 case BUILT_IN_STRNCAT_CHK:
2180 case BUILT_IN_MEMSET_CHK:
2181 {
2182 ao_ref dref;
2183 tree size = NULL_TREE;
2184 /* Don't pass in a size for __strncat_chk, as the maximum size stored
2185 is strlen (dest) + n + 1 bytes instead of n (that is, n + 1
2186 bytes starting at dest + strlen (dest)), and strlen (dest) isn't
2187 known. */
2188 if (gimple_call_num_args (call) == 4
2189 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
2190 size = gimple_call_arg (call, 2);
2191 ao_ref_init_from_ptr_and_size (&dref,
2192 gimple_call_arg (call, 0),
2193 size);
2194 return refs_may_alias_p_1 (&dref, ref, false);
2195 }
2196 case BUILT_IN_BCOPY:
2197 {
2198 ao_ref dref;
2199 tree size = gimple_call_arg (call, 2);
2200 ao_ref_init_from_ptr_and_size (&dref,
2201 gimple_call_arg (call, 1),
2202 size);
2203 return refs_may_alias_p_1 (&dref, ref, false);
2204 }
2205 /* Allocating memory does not have any side-effects apart from
2206 being the definition point for the pointer. */
2207 case BUILT_IN_MALLOC:
2208 case BUILT_IN_ALIGNED_ALLOC:
2209 case BUILT_IN_CALLOC:
2210 case BUILT_IN_STRDUP:
2211 case BUILT_IN_STRNDUP:
2212 /* Unix98 specifies that errno is set on allocation failure. */
2213 if (flag_errno_math
2214 && targetm.ref_may_alias_errno (ref))
2215 return true;
2216 return false;
2217 case BUILT_IN_STACK_SAVE:
2218 CASE_BUILT_IN_ALLOCA:
2219 case BUILT_IN_ASSUME_ALIGNED:
2220 return false;
2221 /* But posix_memalign stores a pointer into the memory pointed to
2222 by its first argument. */
2223 case BUILT_IN_POSIX_MEMALIGN:
2224 {
2225 tree ptrptr = gimple_call_arg (call, 0);
2226 ao_ref dref;
2227 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2228 TYPE_SIZE_UNIT (ptr_type_node));
2229 return (refs_may_alias_p_1 (&dref, ref, false)
2230 || (flag_errno_math
2231 && targetm.ref_may_alias_errno (ref)));
2232 }
2233 /* Freeing memory kills the pointed-to memory. More importantly
2234 the call has to serve as a barrier for moving loads and stores
2235 across it. */
2236 case BUILT_IN_FREE:
2237 case BUILT_IN_VA_END:
2238 {
2239 tree ptr = gimple_call_arg (call, 0);
2240 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2241 }
2242 /* Realloc serves both as allocation point and deallocation point. */
2243 case BUILT_IN_REALLOC:
2244 {
2245 tree ptr = gimple_call_arg (call, 0);
2246 /* Unix98 specifies that errno is set on allocation failure. */
2247 return ((flag_errno_math
2248 && targetm.ref_may_alias_errno (ref))
2249 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2250 }
2251 case BUILT_IN_GAMMA_R:
2252 case BUILT_IN_GAMMAF_R:
2253 case BUILT_IN_GAMMAL_R:
2254 case BUILT_IN_LGAMMA_R:
2255 case BUILT_IN_LGAMMAF_R:
2256 case BUILT_IN_LGAMMAL_R:
2257 {
2258 tree out = gimple_call_arg (call, 1);
2259 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2260 return true;
2261 if (flag_errno_math)
2262 break;
2263 return false;
2264 }
2265 case BUILT_IN_FREXP:
2266 case BUILT_IN_FREXPF:
2267 case BUILT_IN_FREXPL:
2268 case BUILT_IN_MODF:
2269 case BUILT_IN_MODFF:
2270 case BUILT_IN_MODFL:
2271 {
2272 tree out = gimple_call_arg (call, 1);
2273 return ptr_deref_may_alias_ref_p_1 (out, ref);
2274 }
2275 case BUILT_IN_REMQUO:
2276 case BUILT_IN_REMQUOF:
2277 case BUILT_IN_REMQUOL:
2278 {
2279 tree out = gimple_call_arg (call, 2);
2280 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2281 return true;
2282 if (flag_errno_math)
2283 break;
2284 return false;
2285 }
2286 case BUILT_IN_SINCOS:
2287 case BUILT_IN_SINCOSF:
2288 case BUILT_IN_SINCOSL:
2289 {
2290 tree sin = gimple_call_arg (call, 1);
2291 tree cos = gimple_call_arg (call, 2);
2292 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2293 || ptr_deref_may_alias_ref_p_1 (cos, ref));
2294 }
2295 /* __sync_* builtins and some OpenMP builtins act as threading
2296 barriers. */
2297 #undef DEF_SYNC_BUILTIN
2298 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2299 #include "sync-builtins.def"
2300 #undef DEF_SYNC_BUILTIN
2301 case BUILT_IN_GOMP_ATOMIC_START:
2302 case BUILT_IN_GOMP_ATOMIC_END:
2303 case BUILT_IN_GOMP_BARRIER:
2304 case BUILT_IN_GOMP_BARRIER_CANCEL:
2305 case BUILT_IN_GOMP_TASKWAIT:
2306 case BUILT_IN_GOMP_TASKGROUP_END:
2307 case BUILT_IN_GOMP_CRITICAL_START:
2308 case BUILT_IN_GOMP_CRITICAL_END:
2309 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2310 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2311 case BUILT_IN_GOMP_LOOP_END:
2312 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2313 case BUILT_IN_GOMP_ORDERED_START:
2314 case BUILT_IN_GOMP_ORDERED_END:
2315 case BUILT_IN_GOMP_SECTIONS_END:
2316 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2317 case BUILT_IN_GOMP_SINGLE_COPY_START:
2318 case BUILT_IN_GOMP_SINGLE_COPY_END:
2319 return true;
2320 default:
2321 /* Fallthru to general call handling. */;
2322 }
2323
2324 /* Check if base is a global static variable that is not written
2325 by the function. */
2326 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2327 {
2328 struct cgraph_node *node = cgraph_node::get (callee);
2329 bitmap not_written;
2330
2331 if (node
2332 && (not_written = ipa_reference_get_not_written_global (node))
2333 && bitmap_bit_p (not_written, ipa_reference_var_uid (base)))
2334 return false;
2335 }
2336
2337 /* Check if the base variable is call-clobbered. */
2338 if (DECL_P (base))
2339 return pt_solution_includes (gimple_call_clobber_set (call), base);
2340 else if ((TREE_CODE (base) == MEM_REF
2341 || TREE_CODE (base) == TARGET_MEM_REF)
2342 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2343 {
2344 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2345 if (!pi)
2346 return true;
2347
2348 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2349 }
2350
2351 return true;
2352 }
2353
2354 /* If the call in statement CALL may clobber the memory reference REF
2355 return true, otherwise return false. */
2356
2357 bool
2358 call_may_clobber_ref_p (gcall *call, tree ref)
2359 {
2360 bool res;
2361 ao_ref r;
2362 ao_ref_init (&r, ref);
2363 res = call_may_clobber_ref_p_1 (call, &r);
2364 if (res)
2365 ++alias_stats.call_may_clobber_ref_p_may_alias;
2366 else
2367 ++alias_stats.call_may_clobber_ref_p_no_alias;
2368 return res;
2369 }
2370
2371
2372 /* If the statement STMT may clobber the memory reference REF return true,
2373 otherwise return false. */
2374
2375 bool
2376 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
2377 {
2378 if (is_gimple_call (stmt))
2379 {
2380 tree lhs = gimple_call_lhs (stmt);
2381 if (lhs
2382 && TREE_CODE (lhs) != SSA_NAME)
2383 {
2384 ao_ref r;
2385 ao_ref_init (&r, lhs);
2386 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
2387 return true;
2388 }
2389
2390 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
2391 }
2392 else if (gimple_assign_single_p (stmt))
2393 {
2394 tree lhs = gimple_assign_lhs (stmt);
2395 if (TREE_CODE (lhs) != SSA_NAME)
2396 {
2397 ao_ref r;
2398 ao_ref_init (&r, lhs);
2399 return refs_may_alias_p_1 (ref, &r, tbaa_p);
2400 }
2401 }
2402 else if (gimple_code (stmt) == GIMPLE_ASM)
2403 return true;
2404
2405 return false;
2406 }
2407
2408 bool
2409 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
2410 {
2411 ao_ref r;
2412 ao_ref_init (&r, ref);
2413 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
2414 }
2415
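/* Illustrative sketch, not part of the upstream file: a pass might combine
   the two oracles to ask whether STMT can interact with the memory
   designated by REF at all, i.e. read or write it.  The helper name is
   hypothetical.  */

static bool
example_stmt_may_touch_ref_p (gimple *stmt, tree ref)
{
  return (ref_maybe_used_by_stmt_p (stmt, ref, true)
	  || stmt_may_clobber_ref_p (stmt, ref, true));
}
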
2416 /* Return true if store1 and store2 described by corresponding tuples
2417 <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
2418 address. */
2419
2420 static bool
2421 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
2422 poly_int64 max_size1,
2423 tree base2, poly_int64 offset2, poly_int64 size2,
2424 poly_int64 max_size2)
2425 {
2426 /* Offsets need to be 0. */
2427 if (maybe_ne (offset1, 0)
2428 || maybe_ne (offset2, 0))
2429 return false;
2430
2431 bool base1_obj_p = SSA_VAR_P (base1);
2432 bool base2_obj_p = SSA_VAR_P (base2);
2433
2434 /* We need one object. */
2435 if (base1_obj_p == base2_obj_p)
2436 return false;
2437 tree obj = base1_obj_p ? base1 : base2;
2438
2439 /* And we need one MEM_REF. */
2440 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
2441 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
2442 if (base1_memref_p == base2_memref_p)
2443 return false;
2444 tree memref = base1_memref_p ? base1 : base2;
2445
2446 /* Sizes need to be valid. */
2447 if (!known_size_p (max_size1)
2448 || !known_size_p (max_size2)
2449 || !known_size_p (size1)
2450 || !known_size_p (size2))
2451 return false;
2452
2453 /* Max_size needs to match size. */
2454 if (maybe_ne (max_size1, size1)
2455 || maybe_ne (max_size2, size2))
2456 return false;
2457
2458 /* Sizes need to match. */
2459 if (maybe_ne (size1, size2))
2460 return false;
2461
2462
2463 /* Check that memref is a store to pointer with singleton points-to info. */
2464 if (!integer_zerop (TREE_OPERAND (memref, 1)))
2465 return false;
2466 tree ptr = TREE_OPERAND (memref, 0);
2467 if (TREE_CODE (ptr) != SSA_NAME)
2468 return false;
2469 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2470 unsigned int pt_uid;
2471 if (pi == NULL
2472 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
2473 return false;
2474
2475 /* Be conservative with non-call exceptions when the address might
2476 be NULL. */
2477 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
2478 return false;
2479
2480 /* Check that ptr points relative to obj. */
2481 unsigned int obj_uid = DECL_PT_UID (obj);
2482 if (obj_uid != pt_uid)
2483 return false;
2484
2485 /* Check that the object size is the same as the store size. That ensures
2486 that ptr points to the start of obj. */
2487 return (DECL_SIZE (obj)
2488 && poly_int_tree_p (DECL_SIZE (obj))
2489 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
2490 }
2491
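/* For instance (an illustrative example, not from the upstream file), given
   a 4-byte variable 'a' and a pointer p_1 whose points-to set is the
   singleton { a }, the stores

       a = x_2;      described by <a, 0, 32, 32>
       *p_1 = y_3;   described by <MEM[p_1], 0, 32, 32>

   pass all of the checks above, so both are known to write exactly the
   same four bytes.  */
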
2492 /* If STMT kills the memory reference REF return true, otherwise
2493 return false. */
2494
2495 bool
2496 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
2497 {
2498 if (!ao_ref_base (ref))
2499 return false;
2500
2501 if (gimple_has_lhs (stmt)
2502 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2503 /* The assignment is not necessarily carried out if it can throw
2504 and we can catch it in the current function, where we could inspect
2505 the previous value.
2506 ??? We only need to care about the RHS throwing. For aggregate
2507 assignments or similar calls and non-call exceptions the LHS
2508 might throw as well. */
2509 && !stmt_can_throw_internal (cfun, stmt))
2510 {
2511 tree lhs = gimple_get_lhs (stmt);
2512 /* If LHS is literally a base of the access we are done. */
2513 if (ref->ref)
2514 {
2515 tree base = ref->ref;
2516 tree innermost_dropped_array_ref = NULL_TREE;
2517 if (handled_component_p (base))
2518 {
2519 tree saved_lhs0 = NULL_TREE;
2520 if (handled_component_p (lhs))
2521 {
2522 saved_lhs0 = TREE_OPERAND (lhs, 0);
2523 TREE_OPERAND (lhs, 0) = integer_zero_node;
2524 }
2525 do
2526 {
2527 /* Just compare the outermost handled component; if
2528 they are equal we have found a possible common
2529 base. */
2530 tree saved_base0 = TREE_OPERAND (base, 0);
2531 TREE_OPERAND (base, 0) = integer_zero_node;
2532 bool res = operand_equal_p (lhs, base, 0);
2533 TREE_OPERAND (base, 0) = saved_base0;
2534 if (res)
2535 break;
2536 /* Remember if we drop an array-ref; we need to
2537 double-check that it is not at struct end. */
2538 if (TREE_CODE (base) == ARRAY_REF
2539 || TREE_CODE (base) == ARRAY_RANGE_REF)
2540 innermost_dropped_array_ref = base;
2541 /* Otherwise drop handled components of the access. */
2542 base = saved_base0;
2543 }
2544 while (handled_component_p (base));
2545 if (saved_lhs0)
2546 TREE_OPERAND (lhs, 0) = saved_lhs0;
2547 }
2548 /* Finally check if the lhs has the same address and size as the
2549 base candidate of the access. Watch out if we have dropped
2550 an array-ref that was at struct end; this means ref->ref may
2551 be outside of the TYPE_SIZE of its base. */
2552 if ((! innermost_dropped_array_ref
2553 || ! array_at_struct_end_p (innermost_dropped_array_ref))
2554 && (lhs == base
2555 || (((TYPE_SIZE (TREE_TYPE (lhs))
2556 == TYPE_SIZE (TREE_TYPE (base)))
2557 || (TYPE_SIZE (TREE_TYPE (lhs))
2558 && TYPE_SIZE (TREE_TYPE (base))
2559 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
2560 TYPE_SIZE (TREE_TYPE (base)),
2561 0)))
2562 && operand_equal_p (lhs, base,
2563 OEP_ADDRESS_OF
2564 | OEP_MATCH_SIDE_EFFECTS))))
2565 return true;
2566 }
2567
2568 /* Now look for non-literal equal bases, restricting ourselves to
2569 handling constant offset and size. */
2570 /* For a must-alias check we need to be able to constrain
2571 the access properly. */
2572 if (!ref->max_size_known_p ())
2573 return false;
2574 poly_int64 size, offset, max_size, ref_offset = ref->offset;
2575 bool reverse;
2576 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
2577 &reverse);
2578 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2579 so base == ref->base does not always hold. */
2580 if (base != ref->base)
2581 {
2582 /* Try using points-to info. */
2583 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
2584 ref->offset, ref->size, ref->max_size))
2585 return true;
2586
2587 /* If both base and ref->base are MEM_REFs, only compare the
2588 first operand, and if the second operands aren't equal constants,
2589 try to add the offsets into offset and ref_offset. */
2590 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2591 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2592 {
2593 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2594 TREE_OPERAND (ref->base, 1)))
2595 {
2596 poly_offset_int off1 = mem_ref_offset (base);
2597 off1 <<= LOG2_BITS_PER_UNIT;
2598 off1 += offset;
2599 poly_offset_int off2 = mem_ref_offset (ref->base);
2600 off2 <<= LOG2_BITS_PER_UNIT;
2601 off2 += ref_offset;
2602 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
2603 size = -1;
2604 }
2605 }
2606 else
2607 size = -1;
2608 }
2609 /* For a must-alias check we need to be able to constrain
2610 the access properly. */
2611 if (known_eq (size, max_size)
2612 && known_subrange_p (ref_offset, ref->max_size, offset, size))
2613 return true;
2614 }
2615
2616 if (is_gimple_call (stmt))
2617 {
2618 tree callee = gimple_call_fndecl (stmt);
2619 if (callee != NULL_TREE
2620 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2621 switch (DECL_FUNCTION_CODE (callee))
2622 {
2623 case BUILT_IN_FREE:
2624 {
2625 tree ptr = gimple_call_arg (stmt, 0);
2626 tree base = ao_ref_base (ref);
2627 if (base && TREE_CODE (base) == MEM_REF
2628 && TREE_OPERAND (base, 0) == ptr)
2629 return true;
2630 break;
2631 }
2632
2633 case BUILT_IN_MEMCPY:
2634 case BUILT_IN_MEMPCPY:
2635 case BUILT_IN_MEMMOVE:
2636 case BUILT_IN_MEMSET:
2637 case BUILT_IN_MEMCPY_CHK:
2638 case BUILT_IN_MEMPCPY_CHK:
2639 case BUILT_IN_MEMMOVE_CHK:
2640 case BUILT_IN_MEMSET_CHK:
2641 case BUILT_IN_STRNCPY:
2642 case BUILT_IN_STPNCPY:
2643 {
2644 /* For a must-alias check we need to be able to constrain
2645 the access properly. */
2646 if (!ref->max_size_known_p ())
2647 return false;
2648 tree dest = gimple_call_arg (stmt, 0);
2649 tree len = gimple_call_arg (stmt, 2);
2650 if (!poly_int_tree_p (len))
2651 return false;
2652 tree rbase = ref->base;
2653 poly_offset_int roffset = ref->offset;
2654 ao_ref dref;
2655 ao_ref_init_from_ptr_and_size (&dref, dest, len);
2656 tree base = ao_ref_base (&dref);
2657 poly_offset_int offset = dref.offset;
2658 if (!base || !known_size_p (dref.size))
2659 return false;
2660 if (TREE_CODE (base) == MEM_REF)
2661 {
2662 if (TREE_CODE (rbase) != MEM_REF)
2663 return false;
2664 // Compare pointers.
2665 offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
2666 roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
2667 base = TREE_OPERAND (base, 0);
2668 rbase = TREE_OPERAND (rbase, 0);
2669 }
2670 if (base == rbase
2671 && known_subrange_p (roffset, ref->max_size, offset,
2672 wi::to_poly_offset (len)
2673 << LOG2_BITS_PER_UNIT))
2674 return true;
2675 break;
2676 }
2677
2678 case BUILT_IN_VA_END:
2679 {
2680 tree ptr = gimple_call_arg (stmt, 0);
2681 if (TREE_CODE (ptr) == ADDR_EXPR)
2682 {
2683 tree base = ao_ref_base (ref);
2684 if (TREE_OPERAND (ptr, 0) == base)
2685 return true;
2686 }
2687 break;
2688 }
2689
2690 default:;
2691 }
2692 }
2693 return false;
2694 }
2695
2696 bool
2697 stmt_kills_ref_p (gimple *stmt, tree ref)
2698 {
2699 ao_ref r;
2700 ao_ref_init (&r, ref);
2701 return stmt_kills_ref_p (stmt, &r);
2702 }
2703
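/* Illustrative sketch, not part of the upstream file: the kind of question
   dead-store elimination asks.  If LATER kills everything EARLIER stores,
   and nothing in between reads it, EARLIER is dead.  EARLIER is assumed to
   be a GIMPLE assignment to memory; the helper name is hypothetical.  */

static bool
example_later_store_kills_earlier_p (gimple *earlier, gimple *later)
{
  ao_ref ref;
  ao_ref_init (&ref, gimple_assign_lhs (earlier));
  return stmt_kills_ref_p (later, &ref);
}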
2704
2705 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2706 TARGET or a statement clobbering the memory reference REF, in which
2707 case false is returned. The walk starts with VUSE, one argument of PHI. */
2708
2709 static bool
2710 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
2711 ao_ref *ref, tree vuse, unsigned int &limit, bitmap *visited,
2712 bool abort_on_visited,
2713 void *(*translate)(ao_ref *, tree, void *, bool *),
2714 void *data)
2715 {
2716 basic_block bb = gimple_bb (phi);
2717
2718 if (!*visited)
2719 *visited = BITMAP_ALLOC (NULL);
2720
2721 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2722
2723 /* Walk until we hit the target. */
2724 while (vuse != target)
2725 {
2726 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2727 /* If we are searching for the target VUSE by walking up to
2728 TARGET_BB dominating the original PHI we are finished once
2729 we reach a default def or a definition in a block dominating
2730 that block. Update TARGET and return. */
2731 if (!target
2732 && (gimple_nop_p (def_stmt)
2733 || dominated_by_p (CDI_DOMINATORS,
2734 target_bb, gimple_bb (def_stmt))))
2735 {
2736 target = vuse;
2737 return true;
2738 }
2739
2740 /* Recurse for PHI nodes. */
2741 if (gimple_code (def_stmt) == GIMPLE_PHI)
2742 {
2743 /* An already visited PHI node ends the walk successfully. */
2744 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2745 return !abort_on_visited;
2746 vuse = get_continuation_for_phi (def_stmt, ref, limit,
2747 visited, abort_on_visited,
2748 translate, data);
2749 if (!vuse)
2750 return false;
2751 continue;
2752 }
2753 else if (gimple_nop_p (def_stmt))
2754 return false;
2755 else
2756 {
2757 /* A clobbering statement or the end of the IL ends the walk with failure. */
2758 if ((int)limit <= 0)
2759 return false;
2760 --limit;
2761 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2762 {
2763 bool disambiguate_only = true;
2764 if (translate
2765 && (*translate) (ref, vuse, data, &disambiguate_only) == NULL)
2766 ;
2767 else
2768 return false;
2769 }
2770 }
2771 /* If we reach a new basic-block see if we already skipped it
2772 in a previous walk that ended successfully. */
2773 if (gimple_bb (def_stmt) != bb)
2774 {
2775 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2776 return !abort_on_visited;
2777 bb = gimple_bb (def_stmt);
2778 }
2779 vuse = gimple_vuse (def_stmt);
2780 }
2781 return true;
2782 }
2783
2784
2785 /* Starting from a PHI node for the virtual operand of the memory reference
2786 REF, find a continuation virtual operand that allows us to continue walking
2787 statements dominating PHI, skipping only statements that cannot possibly
2788 clobber REF. Decrements LIMIT for each alias disambiguation done
2789 and aborts the walk, returning NULL_TREE if it reaches zero.
2790 Returns NULL_TREE if no suitable virtual operand can be found. */
2791
2792 tree
2793 get_continuation_for_phi (gimple *phi, ao_ref *ref,
2794 unsigned int &limit, bitmap *visited,
2795 bool abort_on_visited,
2796 void *(*translate)(ao_ref *, tree, void *, bool *),
2797 void *data)
2798 {
2799 unsigned nargs = gimple_phi_num_args (phi);
2800
2801 /* We can simply look through a single-argument PHI. */
2802 if (nargs == 1)
2803 return PHI_ARG_DEF (phi, 0);
2804
2805 /* For two or more arguments try to pairwise skip non-aliasing code
2806 until we hit the phi argument definition that dominates the others. */
2807 basic_block phi_bb = gimple_bb (phi);
2808 tree arg0, arg1;
2809 unsigned i;
2810
2811 /* Find a candidate for the virtual operand whose definition
2812 dominates those of all others. */
2813 /* First look if any of the args themselves satisfy this. */
2814 for (i = 0; i < nargs; ++i)
2815 {
2816 arg0 = PHI_ARG_DEF (phi, i);
2817 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
2818 break;
2819 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
2820 if (def_bb != phi_bb
2821 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
2822 break;
2823 arg0 = NULL_TREE;
2824 }
2825 /* If not, see if we can reach such a candidate by walking defs
2826 until we hit the immediate dominator. maybe_skip_until will
2827 do that for us. */
2828 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
2829
2830 /* Then check against the (to be) found candidate. */
2831 for (i = 0; i < nargs; ++i)
2832 {
2833 arg1 = PHI_ARG_DEF (phi, i);
2834 if (arg1 == arg0)
2835 ;
2836 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, limit, visited,
2837 abort_on_visited,
2838 /* Do not translate when walking over
2839 backedges. */
2840 dominated_by_p
2841 (CDI_DOMINATORS,
2842 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
2843 phi_bb)
2844 ? NULL : translate, data))
2845 return NULL_TREE;
2846 }
2847
2848 return arg0;
2849 }
2850
2851 /* Based on the memory reference REF and its virtual use VUSE call
2852 WALKER for each virtual use that is equivalent to VUSE, including VUSE
2853 itself. That is, for each virtual use whose defining statement
2854 does not clobber REF.
2855
2856 WALKER is called with REF, the current virtual use and DATA. If
2857 WALKER returns non-NULL the walk stops and its result is returned.
2858 At the end of a non-successful walk NULL is returned.
2859
2860 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
2861 use whose definition is a statement that may clobber REF, and DATA.
2862 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2863 If TRANSLATE returns non-NULL the walk stops and its result is returned.
2864 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
2865 to adjust REF and *DATA to make that valid.
2866
2867 VALUEIZE if non-NULL is called with the next VUSE that is considered
2868 and its return value is substituted for it. This can be used to
2869 implement optimistic value-numbering for example. Note that the
2870 VUSE argument is assumed to be valueized already.
2871
2872 LIMIT specifies the number of alias queries we are allowed to do;
2873 the walk stops when it reaches zero and NULL is returned. LIMIT
2874 is decremented by the number of alias queries (plus adjustments
2875 done by the callbacks) upon return.
2876
2877 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
2878
2879 void *
2880 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
2881 void *(*walker)(ao_ref *, tree, void *),
2882 void *(*translate)(ao_ref *, tree, void *, bool *),
2883 tree (*valueize)(tree),
2884 unsigned &limit, void *data)
2885 {
2886 bitmap visited = NULL;
2887 void *res;
2888 bool translated = false;
2889
2890 timevar_push (TV_ALIAS_STMT_WALK);
2891
2892 do
2893 {
2894 gimple *def_stmt;
2895
2896 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2897 res = (*walker) (ref, vuse, data);
2898 /* Abort walk. */
2899 if (res == (void *)-1)
2900 {
2901 res = NULL;
2902 break;
2903 }
2904 /* Lookup succeeded. */
2905 else if (res != NULL)
2906 break;
2907
2908 if (valueize)
2909 {
2910 vuse = valueize (vuse);
2911 if (!vuse)
2912 {
2913 res = NULL;
2914 break;
2915 }
2916 }
2917 def_stmt = SSA_NAME_DEF_STMT (vuse);
2918 if (gimple_nop_p (def_stmt))
2919 break;
2920 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2921 vuse = get_continuation_for_phi (def_stmt, ref, limit,
2922 &visited, translated, translate, data);
2923 else
2924 {
2925 if ((int)limit <= 0)
2926 {
2927 res = NULL;
2928 break;
2929 }
2930 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2931 {
2932 if (!translate)
2933 break;
2934 bool disambiguate_only = false;
2935 res = (*translate) (ref, vuse, data, &disambiguate_only);
2936 /* Failed lookup and translation. */
2937 if (res == (void *)-1)
2938 {
2939 res = NULL;
2940 break;
2941 }
2942 /* Lookup succeeded. */
2943 else if (res != NULL)
2944 break;
2945 /* Translation succeeded, continue walking. */
2946 translated = translated || !disambiguate_only;
2947 }
2948 vuse = gimple_vuse (def_stmt);
2949 }
2950 }
2951 while (vuse);
2952
2953 if (visited)
2954 BITMAP_FREE (visited);
2955
2956 timevar_pop (TV_ALIAS_STMT_WALK);
2957
2958 return res;
2959 }
2960
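/* Illustrative sketch, not part of the upstream file: a minimal pair of
   walker callback and caller.  The walker stops successfully when the walk
   reaches a particular VUSE passed through DATA; the caller then knows no
   statement in between may clobber REF.  STMT is assumed to carry a virtual
   use operand; the names and the query budget of 100 are hypothetical.  */

static void *
example_find_vuse_cb (ao_ref *ref ATTRIBUTE_UNUSED, tree vuse, void *data)
{
  if (vuse == *(tree *) data)
    return vuse;	/* Non-NULL stops the walk successfully.  */
  return NULL;		/* NULL continues past this non-clobbering def.  */
}

static bool
example_vuse_reachable_p (ao_ref *ref, gimple *stmt, tree wanted_vuse)
{
  unsigned limit = 100;
  return walk_non_aliased_vuses (ref, gimple_vuse (stmt),
				 example_find_vuse_cb,
				 /*translate=*/NULL, /*valueize=*/NULL,
				 limit, &wanted_vuse) != NULL;
}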
2961
2962 /* Based on the memory reference REF call WALKER for each vdef whose
2963 defining statement may clobber REF, starting with VDEF. If REF
2964 is NULL_TREE, each defining statement is visited.
2965
2966 WALKER is called with REF, the current vdef and DATA. If WALKER
2967 returns true the walk is stopped, otherwise it continues.
2968
2969 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
2970 The pointer may be NULL and then we do not track this information.
2971
2972 At PHI nodes walk_aliased_vdefs forks into one walk for each
2973 PHI argument (but only one walk continues on merge points); the
2974 return value is true if any of the walks was successful.
2975
2976 The function returns the number of statements walked or -1 if
2977 LIMIT stmts were walked and the walk was aborted at this point.
2978 If LIMIT is zero the walk is not aborted. */
2979
2980 static int
2981 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
2982 bool (*walker)(ao_ref *, tree, void *), void *data,
2983 bitmap *visited, unsigned int cnt,
2984 bool *function_entry_reached, unsigned limit)
2985 {
2986 do
2987 {
2988 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
2989
2990 if (*visited
2991 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
2992 return cnt;
2993
2994 if (gimple_nop_p (def_stmt))
2995 {
2996 if (function_entry_reached)
2997 *function_entry_reached = true;
2998 return cnt;
2999 }
3000 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3001 {
3002 unsigned i;
3003 if (!*visited)
3004 *visited = BITMAP_ALLOC (NULL);
3005 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3006 {
3007 int res = walk_aliased_vdefs_1 (ref,
3008 gimple_phi_arg_def (def_stmt, i),
3009 walker, data, visited, cnt,
3010 function_entry_reached, limit);
3011 if (res == -1)
3012 return -1;
3013 cnt = res;
3014 }
3015 return cnt;
3016 }
3017
3018 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3019 cnt++;
3020 if (cnt == limit)
3021 return -1;
3022 if ((!ref
3023 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
3024 && (*walker) (ref, vdef, data))
3025 return cnt;
3026
3027 vdef = gimple_vuse (def_stmt);
3028 }
3029 while (1);
3030 }
3031
3032 int
3033 walk_aliased_vdefs (ao_ref *ref, tree vdef,
3034 bool (*walker)(ao_ref *, tree, void *), void *data,
3035 bitmap *visited,
3036 bool *function_entry_reached, unsigned int limit)
3037 {
3038 bitmap local_visited = NULL;
3039 int ret;
3040
3041 timevar_push (TV_ALIAS_STMT_WALK);
3042
3043 if (function_entry_reached)
3044 *function_entry_reached = false;
3045
3046 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
3047 visited ? visited : &local_visited, 0,
3048 function_entry_reached, limit);
3049 if (local_visited)
3050 BITMAP_FREE (local_visited);
3051
3052 timevar_pop (TV_ALIAS_STMT_WALK);
3053
3054 return ret;
3055 }
3056
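/* Illustrative sketch, not part of the upstream file: ask whether any
   statement reachable backwards from STMT's virtual use may clobber REF.
   STMT is assumed to carry a virtual use operand; the names and the limit
   of 100 alias queries are hypothetical.  */

static bool
example_record_clobber_cb (ao_ref *ref ATTRIBUTE_UNUSED,
			   tree vdef ATTRIBUTE_UNUSED, void *data)
{
  *(bool *) data = true;
  return true;	/* Returning true stops this walk.  */
}

static bool
example_ref_clobbered_before_p (ao_ref *ref, gimple *stmt)
{
  bool clobbered = false;
  int ret = walk_aliased_vdefs (ref, gimple_vuse (stmt),
				example_record_clobber_cb, &clobbered,
				/*visited=*/NULL,
				/*function_entry_reached=*/NULL, 100);
  /* A walk cut short by the limit proves nothing; be conservative.  */
  return ret == -1 || clobbered;
}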