/* Alias analysis for trees.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "langhooks.h"
#include "dumpfile.h"
#include "tree-eh.h"
#include "tree-dfa.h"
#include "ipa-reference.h"
#include "varasm.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */


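/* As an illustrative usage sketch (STMT and EXPR stand for values a
   pass already has; this is not a prescribed idiom), a pass that
   wants to hoist a load from EXPR across STMT would ask

     if (!stmt_may_clobber_ref_p (stmt, expr))
       ... STMT cannot write to *EXPR, the load may move ...

   and, symmetrically, use ref_maybe_used_by_stmt_p to check whether
   STMT may read the memory before moving a store across it.  */
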
/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  fprintf (s, "  aliasing_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.aliasing_component_refs_p_no_alias,
	   alias_stats.aliasing_component_refs_p_no_alias
	   + alias_stats.aliasing_component_refs_p_may_alias);
  dump_alias_stats_in_alias_c (s);
}


/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

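/* For example (a sketch with hypothetical locals x and y), if
   data-dependence analysis feeds in PTR = &x, the ADDR_EXPR case
   above resolves the base to the decl x; compare_base_decls (x, y)
   then returns 0 for the unrelated decl y and the query returns
   false, i.e. the dereference cannot touch y.  */
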
/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions when the two
     pointers are the same.  In this case we still want to say both
     pointers alias, so shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}

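/* As an example (illustrative only), when data-dependence analysis
   feeds in the expressions p_1 + 4 and p_2 + 8, the POINTER_PLUS_EXPR
   stripping above reduces the query to ptr_derefs_may_alias_p (p_1, p_2);
   the constant offsets do not disambiguate anything at this level,
   that is the job of the offset-based checks on ao_refs.  */
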
/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to an SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which need points-to adjustments to track them
     in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that restrict-pointed-to
	 may be in fact obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */

  return false;
}

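/* An illustrative example: for a local

     int x;

   and an SSA pointer p_1, the comparison &x != p_1 can be folded to
   true when the points-to set of p_1 neither contains x nor any
   restrict or interposable variables.  */
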
/* Returns whether a reference based on BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage, do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}


/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}


/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  || pt->vars_contains_escaped
	  || pt->vars_contains_escaped_heap
	  || pt->vars_contains_restrict)
	{
	  const char *comma = "";
	  fprintf (file, " (");
	  if (pt->vars_contains_nonlocal)
	    {
	      fprintf (file, "nonlocal");
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped)
	    {
	      fprintf (file, "%sescaped", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped_heap)
	    {
	      fprintf (file, "%sescaped heap", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_restrict)
	    {
	      fprintf (file, "%srestrict", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_interposable)
	    fprintf (file, "%sinterposable", comma);
	  fprintf (file, ")");
	}
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}


/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}

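/* For example, the builtin handling in ref_maybe_used_by_call_p_1
   below models the source of a memcpy (d_1, s_2, n_3) call this way
   (a sketch):

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, s_2, n_3);

   yielding a reference that covers n_3 bytes starting at *s_2 and
   can be fed to refs_may_alias_p_1.  */
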
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison, bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}

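/* An illustrative reading of the tri-state: int vs. const int or a
   typedef thereof share a TYPE_MAIN_VARIANT and yield 1; int vs.
   float have different canonical types and different alias sets and
   yield 0; two distinct array types such as int[4] vs. int[] hit
   the array special-case and yield -1 (cannot decide).  */
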
/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   poly_int64 offset2, poly_int64 max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p, same_p2;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  if (same_p == 1)
    {
      poly_int64 offadj, sztmp, msztmp;
      bool reverse;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      if (ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	{
	  ++alias_stats.aliasing_component_refs_p_may_alias;
	  return true;
	}
      else
	{
	  ++alias_stats.aliasing_component_refs_p_no_alias;
	  return false;
	}
    }

  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p2 = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  if (same_p2 == 1)
    {
      poly_int64 offadj, sztmp, msztmp;
      bool reverse;

      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      if (ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	{
	  ++alias_stats.aliasing_component_refs_p_may_alias;
	  return true;
	}
      else
	{
	  ++alias_stats.aliasing_component_refs_p_no_alias;
	  return false;
	}
    }

  /* In the remaining test we assume that there is no overlapping type
     at all.  So if we are unsure, we need to give up.  */
  if (same_p == -1 || same_p2 == -1)
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl
      && (base2_alias_set == ref1_alias_set
	  || alias_set_subset_of (base2_alias_set, ref1_alias_set)))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  ++alias_stats.aliasing_component_refs_p_no_alias;
  return false;
}

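/* To make the example in the head comment concrete: for q->i and
   p->a.j the first search finds type1 == struct A at p->a in the
   access path of ref2.  Rebasing both offsets to the start of the
   common struct A gives offset 0 for i and (assuming a 4-byte int)
   offset 4 for j, so the ranges cannot overlap and the accesses are
   disambiguated.  */
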
/* Return true if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.  */

static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	return false;
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	return false;
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* Bases must be either same or uncomparable.  */
  gcc_checking_assert (ref1 == ref2
		       || (DECL_P (ref1) && DECL_P (ref2)
			   && compare_base_decls (ref1, ref2) != 0));

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will be precisely false.  */
  while (true)
    {
      do
	{
	  if (component_refs1.is_empty ())
	    return false;
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    return false;
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	return false;

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	return false;

      if (field1 != field2)
	{
	  /* A field and its representative need to be considered the
	     same.  */
	  if (DECL_BIT_FIELD_REPRESENTATIVE (field1) == field2
	      || DECL_BIT_FIELD_REPRESENTATIVE (field2) == field1)
	    return false;
	  /* Different fields of the same record type cannot overlap.
	     ??? Bitfields can overlap at RTL level so punt on them.  */
	  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
	    return false;
	  return true;
	}
    }

  return false;
}

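/* For example (a sketch), with

     struct S { int f; int g; } s;
     union U { int f; float g; } u;

   s.f vs. s.g pops two COMPONENT_REFs of the same rank whose fields
   differ within the same RECORD_TYPE, so the accesses are
   disambiguated; u.f vs. u.g hits the union check and the function
   conservatively returns false.  */
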
/* qsort compare function to sort FIELD_DECLs after their
   DECL_FIELD_CONTEXT TYPE_UID.  */

static inline int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
  unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}

/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  if (!flag_strict_aliasing
      || !x || !y
      || TREE_CODE (x) != COMPONENT_REF
      || TREE_CODE (y) != COMPONENT_REF)
    return false;

  auto_vec<const_tree, 16> fieldsx;
  while (TREE_CODE (x) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (x, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsx.safe_push (field);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    return false;
  auto_vec<const_tree, 16> fieldsy;
  while (TREE_CODE (y) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (y, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsy.safe_push (field);
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    return false;

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    return ((DECL_FIELD_CONTEXT (fieldsx[0])
	     == DECL_FIELD_CONTEXT (fieldsy[0]))
	    && fieldsx[0] != fieldsy[0]
	    && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));

  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	std::swap (fieldsx[0], fieldsx[1]);
    }
  else
    fieldsx.qsort (ncr_compar);

  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	std::swap (fieldsy[0], fieldsy[1]);
    }
  else
    fieldsy.qsort (ncr_compar);

  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];
      tree typex = DECL_FIELD_CONTEXT (fieldx);
      tree typey = DECL_FIELD_CONTEXT (fieldy);
      if (typex == typey)
	{
	  /* We're left with accessing different fields of a structure,
	     no possible overlap.  */
	  if (fieldx != fieldy)
	    {
	      /* A field and its representative need to be considered the
		 same.  */
	      if (DECL_BIT_FIELD_REPRESENTATIVE (fieldx) == fieldy
		  || DECL_BIT_FIELD_REPRESENTATIVE (fieldy) == fieldx)
		return false;
	      /* Different fields of the same record type cannot overlap.
		 ??? Bitfields can overlap at RTL level so punt on them.  */
	      if (DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy))
		return false;
	      return true;
	    }
	}
      if (TYPE_UID (typex) < TYPE_UID (typey))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  return false;
}


/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       poly_int64 offset1, poly_int64 max_size1,
		       tree ref2, tree base2,
		       poly_int64 offset2, poly_int64 max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot alias.  */
  if (compare_base_decls (base1, base2) == 0)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
    return false;

  return true;
}

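/* As a trivial example, with

     struct S { int f; int g; } a, b;

   a.f vs. b.g is disambiguated by compare_base_decls, and a.f vs.
   a.g by the non-overlapping offset ranges within the common base;
   only variable-position components need the component-ref walk
   above.  */
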
/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       poly_int64 offset1, poly_int64 max_size1,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       poly_int64 offset2, poly_int64 max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);
  poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0)
    return true;

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && poly_int_tree_p (DECL_SIZE (base2))
      && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (ptrtype1)))
      /* ??? This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && known_lt (wi::to_poly_widest (DECL_SIZE (base2)),
		   wi::to_poly_widest (TYPE_SIZE (TREE_TYPE (ptrtype1)))))
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  poly_int64 doffset1 = offset1;
  poly_offset_int doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}

/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   poly_int64 offset1, poly_int64 max_size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   poly_int64 offset2, poly_int64 max_size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
      poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
      return ranges_maybe_overlap_p (offset1 + moff1, max_size1,
				     offset2 + moff2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0
      || base2_alias_set == 0)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1
      /* But avoid treating arrays as "objects", instead assume they
	 can overlap by an exact multiple of their element size.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
    return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, false);

  return true;
}

/* Return true if the two memory references REF1 and REF2 may alias.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  poly_int64 offset1 = 0, offset2 = 0;
  poly_int64 max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
	*D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases, conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
				  ref2->ref, base2, offset2, max_size2);

  /* Handle restrict based accesses.
     ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
  tree rbase1 = base1;
  tree rbase2 = base2;
  if (var1_p)
    {
      rbase1 = ref1->ref;
      if (rbase1)
	while (handled_component_p (rbase1))
	  rbase1 = TREE_OPERAND (rbase1, 0);
    }
  if (var2_p)
    {
      rbase2 = ref2->ref;
      if (rbase2)
	while (handled_component_p (rbase2))
	  rbase2 = TREE_OPERAND (rbase2, 0);
    }
  if (rbase1 && rbase2
      && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
      && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
      /* If the accesses are in the same restrict clique... */
      && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
      /* But based on different pointers they do not alias.  */
      && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
    return false;

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      std::swap (offset1, offset2);
      std::swap (max_size1, max_size2);
      std::swap (base1, base2);
      std::swap (ref1, ref2);
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* If the reference is based on a pointer that points to memory
     that may not be written to then the other reference cannot possibly
     clobber it.  */
  if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
       && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
      || (ind1_p
	  && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
	  && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2,
					  ao_ref_alias_set (ref2),
					  ao_ref_base_alias_set (ref2),
					  ref1->ref, base1,
					  offset1, max_size1,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1,
				      ao_ref_alias_set (ref1),
				      ao_ref_base_alias_set (ref1),
				      ref2->ref, base2,
				      offset2, max_size2,
				      ao_ref_alias_set (ref2),
				      ao_ref_base_alias_set (ref2),
				      tbaa_p);

  gcc_unreachable ();
}

static bool
refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
{
  ao_ref r1;
  ao_ref_init (&r1, ref1);
  return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
}

bool
refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
{
  ao_ref r1, r2;
  bool res;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  res = refs_may_alias_p_1 (&r1, &r2, tbaa_p);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}

/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

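/* Note that both dependence predicates above pass tbaa_p = false:
   a store may change the dynamic type of the accessed memory, so
   for ordering queries like these, type-based rules are not used
   and only the points-to and offset machinery disambiguates.  One
   illustrative consequence is that a *(int *)p_1 load followed by a
   *(float *)p_1 store is still reported as anti-dependent under
   -fstrict-aliasing.  */
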
1607 /* If the call CALL may use the memory reference REF return true,
1608 otherwise return false. */
1609
1610 static bool
1611 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
1612 {
1613 tree base, callee;
1614 unsigned i;
1615 int flags = gimple_call_flags (call);
1616
1617 /* Const functions without a static chain do not implicitly use memory. */
1618 if (!gimple_call_chain (call)
1619 && (flags & (ECF_CONST|ECF_NOVOPS)))
1620 goto process_args;
1621
1622 base = ao_ref_base (ref);
1623 if (!base)
1624 return true;
1625
1626 /* A call that is not without side-effects might involve volatile
1627 accesses and thus conflicts with all other volatile accesses. */
1628 if (ref->volatile_p)
1629 return true;
1630
1631 /* If the reference is based on a decl that is not aliased the call
1632 cannot possibly use it. */
1633 if (DECL_P (base)
1634 && !may_be_aliased (base)
1635 /* But local statics can be used through recursion. */
1636 && !is_global_var (base))
1637 goto process_args;
1638
1639 callee = gimple_call_fndecl (call);
1640
1641 /* Handle those builtin functions explicitly that do not act as
1642 escape points. See tree-ssa-structalias.c:find_func_aliases
1643 for the list of builtins we might need to handle here. */
1644 if (callee != NULL_TREE
1645 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1646 switch (DECL_FUNCTION_CODE (callee))
1647 {
1648 /* All the following functions read memory pointed to by
1649 their second argument. strcat/strncat additionally
1650 reads memory pointed to by the first argument. */
1651 case BUILT_IN_STRCAT:
1652 case BUILT_IN_STRNCAT:
1653 {
1654 ao_ref dref;
1655 ao_ref_init_from_ptr_and_size (&dref,
1656 gimple_call_arg (call, 0),
1657 NULL_TREE);
1658 if (refs_may_alias_p_1 (&dref, ref, false))
1659 return true;
1660 }
1661 /* FALLTHRU */
1662 case BUILT_IN_STRCPY:
1663 case BUILT_IN_STRNCPY:
1664 case BUILT_IN_MEMCPY:
1665 case BUILT_IN_MEMMOVE:
1666 case BUILT_IN_MEMPCPY:
1667 case BUILT_IN_STPCPY:
1668 case BUILT_IN_STPNCPY:
1669 case BUILT_IN_TM_MEMCPY:
1670 case BUILT_IN_TM_MEMMOVE:
1671 {
1672 ao_ref dref;
1673 tree size = NULL_TREE;
1674 if (gimple_call_num_args (call) == 3)
1675 size = gimple_call_arg (call, 2);
1676 ao_ref_init_from_ptr_and_size (&dref,
1677 gimple_call_arg (call, 1),
1678 size);
1679 return refs_may_alias_p_1 (&dref, ref, false);
1680 }
1681 case BUILT_IN_STRCAT_CHK:
1682 case BUILT_IN_STRNCAT_CHK:
1683 {
1684 ao_ref dref;
1685 ao_ref_init_from_ptr_and_size (&dref,
1686 gimple_call_arg (call, 0),
1687 NULL_TREE);
1688 if (refs_may_alias_p_1 (&dref, ref, false))
1689 return true;
1690 }
1691 /* FALLTHRU */
1692 case BUILT_IN_STRCPY_CHK:
1693 case BUILT_IN_STRNCPY_CHK:
1694 case BUILT_IN_MEMCPY_CHK:
1695 case BUILT_IN_MEMMOVE_CHK:
1696 case BUILT_IN_MEMPCPY_CHK:
1697 case BUILT_IN_STPCPY_CHK:
1698 case BUILT_IN_STPNCPY_CHK:
1699 {
1700 ao_ref dref;
1701 tree size = NULL_TREE;
1702 if (gimple_call_num_args (call) == 4)
1703 size = gimple_call_arg (call, 2);
1704 ao_ref_init_from_ptr_and_size (&dref,
1705 gimple_call_arg (call, 1),
1706 size);
1707 return refs_may_alias_p_1 (&dref, ref, false);
1708 }
1709 case BUILT_IN_BCOPY:
1710 {
1711 ao_ref dref;
1712 tree size = gimple_call_arg (call, 2);
1713 ao_ref_init_from_ptr_and_size (&dref,
1714 gimple_call_arg (call, 0),
1715 size);
1716 return refs_may_alias_p_1 (&dref, ref, false);
1717 }
1718
1719 /* The following functions read memory pointed to by their
1720 first argument. */
1721 CASE_BUILT_IN_TM_LOAD (1):
1722 CASE_BUILT_IN_TM_LOAD (2):
1723 CASE_BUILT_IN_TM_LOAD (4):
1724 CASE_BUILT_IN_TM_LOAD (8):
1725 CASE_BUILT_IN_TM_LOAD (FLOAT):
1726 CASE_BUILT_IN_TM_LOAD (DOUBLE):
1727 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
1728 CASE_BUILT_IN_TM_LOAD (M64):
1729 CASE_BUILT_IN_TM_LOAD (M128):
1730 CASE_BUILT_IN_TM_LOAD (M256):
1731 case BUILT_IN_TM_LOG:
1732 case BUILT_IN_TM_LOG_1:
1733 case BUILT_IN_TM_LOG_2:
1734 case BUILT_IN_TM_LOG_4:
1735 case BUILT_IN_TM_LOG_8:
1736 case BUILT_IN_TM_LOG_FLOAT:
1737 case BUILT_IN_TM_LOG_DOUBLE:
1738 case BUILT_IN_TM_LOG_LDOUBLE:
1739 case BUILT_IN_TM_LOG_M64:
1740 case BUILT_IN_TM_LOG_M128:
1741 case BUILT_IN_TM_LOG_M256:
1742 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
1743
1744 /* These read memory pointed to by the first argument. */
1745 case BUILT_IN_STRDUP:
1746 case BUILT_IN_STRNDUP:
1747 case BUILT_IN_REALLOC:
1748 {
1749 ao_ref dref;
1750 tree size = NULL_TREE;
1751 if (gimple_call_num_args (call) == 2)
1752 size = gimple_call_arg (call, 1);
1753 ao_ref_init_from_ptr_and_size (&dref,
1754 gimple_call_arg (call, 0),
1755 size);
1756 return refs_may_alias_p_1 (&dref, ref, false);
1757 }
1758 /* These read memory pointed to by the first argument. */
1759 case BUILT_IN_INDEX:
1760 case BUILT_IN_STRCHR:
1761 case BUILT_IN_STRRCHR:
1762 {
1763 ao_ref dref;
1764 ao_ref_init_from_ptr_and_size (&dref,
1765 gimple_call_arg (call, 0),
1766 NULL_TREE);
1767 return refs_may_alias_p_1 (&dref, ref, false);
1768 }
1769 /* These read memory pointed to by the first argument with size
1770 in the third argument. */
1771 case BUILT_IN_MEMCHR:
1772 {
1773 ao_ref dref;
1774 ao_ref_init_from_ptr_and_size (&dref,
1775 gimple_call_arg (call, 0),
1776 gimple_call_arg (call, 2));
1777 return refs_may_alias_p_1 (&dref, ref, false);
1778 }
1779 /* These read memory pointed to by the first and second arguments. */
1780 case BUILT_IN_STRSTR:
1781 case BUILT_IN_STRPBRK:
1782 {
1783 ao_ref dref;
1784 ao_ref_init_from_ptr_and_size (&dref,
1785 gimple_call_arg (call, 0),
1786 NULL_TREE);
1787 if (refs_may_alias_p_1 (&dref, ref, false))
1788 return true;
1789 ao_ref_init_from_ptr_and_size (&dref,
1790 gimple_call_arg (call, 1),
1791 NULL_TREE);
1792 return refs_may_alias_p_1 (&dref, ref, false);
1793 }
1794
1795 /* The following builtins do not read from memory. */
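	    /* E.g. memset only writes, and frexp only stores through its
	       second argument; whether such stores clobber REF is handled
	       by call_may_clobber_ref_p_1, not here.  */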
1796 case BUILT_IN_FREE:
1797 case BUILT_IN_MALLOC:
1798 case BUILT_IN_POSIX_MEMALIGN:
1799 case BUILT_IN_ALIGNED_ALLOC:
1800 case BUILT_IN_CALLOC:
1801 CASE_BUILT_IN_ALLOCA:
1802 case BUILT_IN_STACK_SAVE:
1803 case BUILT_IN_STACK_RESTORE:
1804 case BUILT_IN_MEMSET:
1805 case BUILT_IN_TM_MEMSET:
1806 case BUILT_IN_MEMSET_CHK:
1807 case BUILT_IN_FREXP:
1808 case BUILT_IN_FREXPF:
1809 case BUILT_IN_FREXPL:
1810 case BUILT_IN_GAMMA_R:
1811 case BUILT_IN_GAMMAF_R:
1812 case BUILT_IN_GAMMAL_R:
1813 case BUILT_IN_LGAMMA_R:
1814 case BUILT_IN_LGAMMAF_R:
1815 case BUILT_IN_LGAMMAL_R:
1816 case BUILT_IN_MODF:
1817 case BUILT_IN_MODFF:
1818 case BUILT_IN_MODFL:
1819 case BUILT_IN_REMQUO:
1820 case BUILT_IN_REMQUOF:
1821 case BUILT_IN_REMQUOL:
1822 case BUILT_IN_SINCOS:
1823 case BUILT_IN_SINCOSF:
1824 case BUILT_IN_SINCOSL:
1825 case BUILT_IN_ASSUME_ALIGNED:
1826 case BUILT_IN_VA_END:
1827 return false;
1828 /* __sync_* builtins and some OpenMP builtins act as threading
1829 barriers. */
1830 #undef DEF_SYNC_BUILTIN
1831 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1832 #include "sync-builtins.def"
1833 #undef DEF_SYNC_BUILTIN
1834 case BUILT_IN_GOMP_ATOMIC_START:
1835 case BUILT_IN_GOMP_ATOMIC_END:
1836 case BUILT_IN_GOMP_BARRIER:
1837 case BUILT_IN_GOMP_BARRIER_CANCEL:
1838 case BUILT_IN_GOMP_TASKWAIT:
1839 case BUILT_IN_GOMP_TASKGROUP_END:
1840 case BUILT_IN_GOMP_CRITICAL_START:
1841 case BUILT_IN_GOMP_CRITICAL_END:
1842 case BUILT_IN_GOMP_CRITICAL_NAME_START:
1843 case BUILT_IN_GOMP_CRITICAL_NAME_END:
1844 case BUILT_IN_GOMP_LOOP_END:
1845 case BUILT_IN_GOMP_LOOP_END_CANCEL:
1846 case BUILT_IN_GOMP_ORDERED_START:
1847 case BUILT_IN_GOMP_ORDERED_END:
1848 case BUILT_IN_GOMP_SECTIONS_END:
1849 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
1850 case BUILT_IN_GOMP_SINGLE_COPY_START:
1851 case BUILT_IN_GOMP_SINGLE_COPY_END:
1852 return true;
1853
1854 default:
1855 /* Fallthru to general call handling. */;
1856 }
1857
1858 /* Check if base is a global static variable that is not read
1859 by the function. */
1860 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
1861 {
1862 struct cgraph_node *node = cgraph_node::get (callee);
1863 bitmap not_read;
1864
1865 /* FIXME: Callee can be an OMP builtin that does not have a call graph
1866 node yet. We should enforce that there are nodes for all decls in the
1867 IL and remove this check instead. */
1868 if (node
1869 && (not_read = ipa_reference_get_not_read_global (node))
1870 && bitmap_bit_p (not_read, ipa_reference_var_uid (base)))
1871 goto process_args;
1872 }
1873
1874 /* Check if the base variable is call-used. */
1875 if (DECL_P (base))
1876 {
1877 if (pt_solution_includes (gimple_call_use_set (call), base))
1878 return true;
1879 }
1880 else if ((TREE_CODE (base) == MEM_REF
1881 || TREE_CODE (base) == TARGET_MEM_REF)
1882 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1883 {
1884 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1885 if (!pi)
1886 return true;
1887
1888 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
1889 return true;
1890 }
1891 else
1892 return true;
1893
1894 /* Inspect call arguments for passed-by-value aliases. */
1895 process_args:
1896 for (i = 0; i < gimple_call_num_args (call); ++i)
1897 {
1898 tree op = gimple_call_arg (call, i);
1899 int flags = gimple_call_arg_flags (call, i);
1900
1901 if (flags & EAF_UNUSED)
1902 continue;
1903
1904 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1905 op = TREE_OPERAND (op, 0);
1906
1907 if (TREE_CODE (op) != SSA_NAME
1908 && !is_gimple_min_invariant (op))
1909 {
1910 ao_ref r;
1911 ao_ref_init (&r, op);
1912 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
1913 return true;
1914 }
1915 }
1916
1917 return false;
1918 }
1919
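/* Wrapper around ref_maybe_used_by_call_p_1 that bumps the alias_stats
   counters according to the result.  */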
1920 static bool
1921 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
1922 {
1923 bool res;
1924 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
1925 if (res)
1926 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
1927 else
1928 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
1929 return res;
1930 }
1931
1932
1933 /* If the statement STMT may use the memory reference REF return
1934 true, otherwise return false. */
1935
1936 bool
1937 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
1938 {
1939 if (is_gimple_assign (stmt))
1940 {
1941 tree rhs;
1942
1943       /* All assignments that reference memory are single assignments.  */
1944 if (!gimple_assign_single_p (stmt))
1945 return false;
1946
1947 rhs = gimple_assign_rhs1 (stmt);
1948 if (is_gimple_reg (rhs)
1949 || is_gimple_min_invariant (rhs)
1950 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
1951 return false;
1952
1953 return refs_may_alias_p (rhs, ref, tbaa_p);
1954 }
1955 else if (is_gimple_call (stmt))
1956 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
1957 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
1958 {
1959 tree retval = gimple_return_retval (return_stmt);
1960 if (retval
1961 && TREE_CODE (retval) != SSA_NAME
1962 && !is_gimple_min_invariant (retval)
1963 && refs_may_alias_p (retval, ref, tbaa_p))
1964 return true;
1965 /* If ref escapes the function then the return acts as a use. */
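	  /* E.g. memory reachable through a global variable is visible to
	     the caller once the function returns.  */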
1966 tree base = ao_ref_base (ref);
1967 if (!base)
1968 ;
1969 else if (DECL_P (base))
1970 return is_global_var (base);
1971 else if (TREE_CODE (base) == MEM_REF
1972 || TREE_CODE (base) == TARGET_MEM_REF)
1973 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
1974 return false;
1975 }
1976
1977 return true;
1978 }
1979
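/* As above, but with the memory reference REF given as a tree.  */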
1980 bool
1981 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
1982 {
1983 ao_ref r;
1984 ao_ref_init (&r, ref);
1985 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
1986 }
1987
1988 /* If the call in statement CALL may clobber the memory reference REF
1989 return true, otherwise return false. */
1990
1991 bool
1992 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
1993 {
1994 tree base;
1995 tree callee;
1996
1997 /* If the call is pure or const it cannot clobber anything. */
1998 if (gimple_call_flags (call)
1999 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
2000 return false;
2001 if (gimple_call_internal_p (call))
2002 switch (gimple_call_internal_fn (call))
2003 {
2004       /* Treat these internal calls like ECF_PURE for aliasing:
2005 	 they don't write to any memory the program should care about.
2006 	 They do have other important side-effects and read memory,
2007 	 so they can't be ECF_NOVOPS.  */
2008 case IFN_UBSAN_NULL:
2009 case IFN_UBSAN_BOUNDS:
2010 case IFN_UBSAN_VPTR:
2011 case IFN_UBSAN_OBJECT_SIZE:
2012 case IFN_UBSAN_PTR:
2013 case IFN_ASAN_CHECK:
2014 return false;
2015 default:
2016 break;
2017 }
2018
2019 base = ao_ref_base (ref);
2020 if (!base)
2021 return true;
2022
2023 if (TREE_CODE (base) == SSA_NAME
2024 || CONSTANT_CLASS_P (base))
2025 return false;
2026
2027   /* A call that is not free of side-effects might involve volatile
2028      accesses and thus conflicts with all other volatile accesses.  */
2029 if (ref->volatile_p)
2030 return true;
2031
2032 /* If the reference is based on a decl that is not aliased the call
2033 cannot possibly clobber it. */
2034 if (DECL_P (base)
2035 && !may_be_aliased (base)
2036 /* But local non-readonly statics can be modified through recursion
2037 or the call may implement a threading barrier which we must
2038 treat as may-def. */
2039 && (TREE_READONLY (base)
2040 || !is_global_var (base)))
2041 return false;
2042
2043 /* If the reference is based on a pointer that points to memory
2044 that may not be written to then the call cannot possibly clobber it. */
2045 if ((TREE_CODE (base) == MEM_REF
2046 || TREE_CODE (base) == TARGET_MEM_REF)
2047 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
2048 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
2049 return false;
2050
2051 callee = gimple_call_fndecl (call);
2052
2053 /* Handle those builtin functions explicitly that do not act as
2054 escape points. See tree-ssa-structalias.c:find_func_aliases
2055 for the list of builtins we might need to handle here. */
2056 if (callee != NULL_TREE
2057 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2058 switch (DECL_FUNCTION_CODE (callee))
2059 {
2060 /* All the following functions clobber memory pointed to by
2061 their first argument. */
2062 case BUILT_IN_STRCPY:
2063 case BUILT_IN_STRNCPY:
2064 case BUILT_IN_MEMCPY:
2065 case BUILT_IN_MEMMOVE:
2066 case BUILT_IN_MEMPCPY:
2067 case BUILT_IN_STPCPY:
2068 case BUILT_IN_STPNCPY:
2069 case BUILT_IN_STRCAT:
2070 case BUILT_IN_STRNCAT:
2071 case BUILT_IN_MEMSET:
2072 case BUILT_IN_TM_MEMSET:
2073 CASE_BUILT_IN_TM_STORE (1):
2074 CASE_BUILT_IN_TM_STORE (2):
2075 CASE_BUILT_IN_TM_STORE (4):
2076 CASE_BUILT_IN_TM_STORE (8):
2077 CASE_BUILT_IN_TM_STORE (FLOAT):
2078 CASE_BUILT_IN_TM_STORE (DOUBLE):
2079 CASE_BUILT_IN_TM_STORE (LDOUBLE):
2080 CASE_BUILT_IN_TM_STORE (M64):
2081 CASE_BUILT_IN_TM_STORE (M128):
2082 CASE_BUILT_IN_TM_STORE (M256):
2083 case BUILT_IN_TM_MEMCPY:
2084 case BUILT_IN_TM_MEMMOVE:
2085 {
2086 ao_ref dref;
2087 tree size = NULL_TREE;
2088 	    /* Don't pass in a size for strncat: it writes up to n + 1
2089 	       bytes starting at dest + strlen (dest), so the maximum
2090 	       clobbered size is strlen (dest) + n + 1 rather than n,
2091 	       and strlen (dest) isn't known.  */
2092 if (gimple_call_num_args (call) == 3
2093 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
2094 size = gimple_call_arg (call, 2);
2095 ao_ref_init_from_ptr_and_size (&dref,
2096 gimple_call_arg (call, 0),
2097 size);
2098 return refs_may_alias_p_1 (&dref, ref, false);
2099 }
2100 case BUILT_IN_STRCPY_CHK:
2101 case BUILT_IN_STRNCPY_CHK:
2102 case BUILT_IN_MEMCPY_CHK:
2103 case BUILT_IN_MEMMOVE_CHK:
2104 case BUILT_IN_MEMPCPY_CHK:
2105 case BUILT_IN_STPCPY_CHK:
2106 case BUILT_IN_STPNCPY_CHK:
2107 case BUILT_IN_STRCAT_CHK:
2108 case BUILT_IN_STRNCAT_CHK:
2109 case BUILT_IN_MEMSET_CHK:
2110 {
2111 ao_ref dref;
2112 tree size = NULL_TREE;
2113 	    /* Likewise, don't pass in a size for __strncat_chk: it writes
2114 	       up to n + 1 bytes starting at dest + strlen (dest), so the
2115 	       maximum clobbered size is strlen (dest) + n + 1 rather than
2116 	       n, and strlen (dest) isn't known.  */
2117 if (gimple_call_num_args (call) == 4
2118 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
2119 size = gimple_call_arg (call, 2);
2120 ao_ref_init_from_ptr_and_size (&dref,
2121 gimple_call_arg (call, 0),
2122 size);
2123 return refs_may_alias_p_1 (&dref, ref, false);
2124 }
2125 case BUILT_IN_BCOPY:
2126 {
2127 ao_ref dref;
2128 tree size = gimple_call_arg (call, 2);
2129 ao_ref_init_from_ptr_and_size (&dref,
2130 gimple_call_arg (call, 1),
2131 size);
2132 return refs_may_alias_p_1 (&dref, ref, false);
2133 }
2134 /* Allocating memory does not have any side-effects apart from
2135 being the definition point for the pointer. */
2136 case BUILT_IN_MALLOC:
2137 case BUILT_IN_ALIGNED_ALLOC:
2138 case BUILT_IN_CALLOC:
2139 case BUILT_IN_STRDUP:
2140 case BUILT_IN_STRNDUP:
2141 /* Unix98 specifies that errno is set on allocation failure. */
2142 if (flag_errno_math
2143 && targetm.ref_may_alias_errno (ref))
2144 return true;
2145 return false;
2146 case BUILT_IN_STACK_SAVE:
2147 CASE_BUILT_IN_ALLOCA:
2148 case BUILT_IN_ASSUME_ALIGNED:
2149 return false;
2150 /* But posix_memalign stores a pointer into the memory pointed to
2151 by its first argument. */
2152 case BUILT_IN_POSIX_MEMALIGN:
2153 {
2154 tree ptrptr = gimple_call_arg (call, 0);
2155 ao_ref dref;
2156 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2157 TYPE_SIZE_UNIT (ptr_type_node));
2158 return (refs_may_alias_p_1 (&dref, ref, false)
2159 || (flag_errno_math
2160 && targetm.ref_may_alias_errno (ref)));
2161 }
2162 /* Freeing memory kills the pointed-to memory. More importantly
2163 the call has to serve as a barrier for moving loads and stores
2164 across it. */
2165 case BUILT_IN_FREE:
2166 case BUILT_IN_VA_END:
2167 {
2168 tree ptr = gimple_call_arg (call, 0);
2169 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2170 }
2171 /* Realloc serves both as allocation point and deallocation point. */
2172 case BUILT_IN_REALLOC:
2173 {
2174 tree ptr = gimple_call_arg (call, 0);
2175 /* Unix98 specifies that errno is set on allocation failure. */
2176 return ((flag_errno_math
2177 && targetm.ref_may_alias_errno (ref))
2178 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2179 }
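	    /* The *gamma_r functions store the sign through their second
	       argument, e.g. lgamma_r (x, &signp); with -fmath-errno they
	       may additionally set errno, in which case we punt to the
	       general call handling below.  */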
2180 case BUILT_IN_GAMMA_R:
2181 case BUILT_IN_GAMMAF_R:
2182 case BUILT_IN_GAMMAL_R:
2183 case BUILT_IN_LGAMMA_R:
2184 case BUILT_IN_LGAMMAF_R:
2185 case BUILT_IN_LGAMMAL_R:
2186 {
2187 tree out = gimple_call_arg (call, 1);
2188 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2189 return true;
2190 if (flag_errno_math)
2191 break;
2192 return false;
2193 }
2194 case BUILT_IN_FREXP:
2195 case BUILT_IN_FREXPF:
2196 case BUILT_IN_FREXPL:
2197 case BUILT_IN_MODF:
2198 case BUILT_IN_MODFF:
2199 case BUILT_IN_MODFL:
2200 {
2201 tree out = gimple_call_arg (call, 1);
2202 return ptr_deref_may_alias_ref_p_1 (out, ref);
2203 }
2204 case BUILT_IN_REMQUO:
2205 case BUILT_IN_REMQUOF:
2206 case BUILT_IN_REMQUOL:
2207 {
2208 tree out = gimple_call_arg (call, 2);
2209 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2210 return true;
2211 if (flag_errno_math)
2212 break;
2213 return false;
2214 }
2215 case BUILT_IN_SINCOS:
2216 case BUILT_IN_SINCOSF:
2217 case BUILT_IN_SINCOSL:
2218 {
2219 tree sin = gimple_call_arg (call, 1);
2220 tree cos = gimple_call_arg (call, 2);
2221 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2222 || ptr_deref_may_alias_ref_p_1 (cos, ref));
2223 }
2224 /* __sync_* builtins and some OpenMP builtins act as threading
2225 barriers. */
2226 #undef DEF_SYNC_BUILTIN
2227 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2228 #include "sync-builtins.def"
2229 #undef DEF_SYNC_BUILTIN
2230 case BUILT_IN_GOMP_ATOMIC_START:
2231 case BUILT_IN_GOMP_ATOMIC_END:
2232 case BUILT_IN_GOMP_BARRIER:
2233 case BUILT_IN_GOMP_BARRIER_CANCEL:
2234 case BUILT_IN_GOMP_TASKWAIT:
2235 case BUILT_IN_GOMP_TASKGROUP_END:
2236 case BUILT_IN_GOMP_CRITICAL_START:
2237 case BUILT_IN_GOMP_CRITICAL_END:
2238 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2239 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2240 case BUILT_IN_GOMP_LOOP_END:
2241 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2242 case BUILT_IN_GOMP_ORDERED_START:
2243 case BUILT_IN_GOMP_ORDERED_END:
2244 case BUILT_IN_GOMP_SECTIONS_END:
2245 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2246 case BUILT_IN_GOMP_SINGLE_COPY_START:
2247 case BUILT_IN_GOMP_SINGLE_COPY_END:
2248 return true;
2249 default:
2250 /* Fallthru to general call handling. */;
2251 }
2252
2253 /* Check if base is a global static variable that is not written
2254 by the function. */
2255 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2256 {
2257 struct cgraph_node *node = cgraph_node::get (callee);
2258 bitmap not_written;
2259
2260 if (node
2261 && (not_written = ipa_reference_get_not_written_global (node))
2262 && bitmap_bit_p (not_written, ipa_reference_var_uid (base)))
2263 return false;
2264 }
2265
2266 /* Check if the base variable is call-clobbered. */
2267 if (DECL_P (base))
2268 return pt_solution_includes (gimple_call_clobber_set (call), base);
2269 else if ((TREE_CODE (base) == MEM_REF
2270 || TREE_CODE (base) == TARGET_MEM_REF)
2271 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2272 {
2273 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2274 if (!pi)
2275 return true;
2276
2277 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2278 }
2279
2280 return true;
2281 }
2282
2283 /* If the call in statement CALL may clobber the memory reference REF
2284 return true, otherwise return false. */
2285
2286 bool
2287 call_may_clobber_ref_p (gcall *call, tree ref)
2288 {
2289 bool res;
2290 ao_ref r;
2291 ao_ref_init (&r, ref);
2292 res = call_may_clobber_ref_p_1 (call, &r);
2293 if (res)
2294 ++alias_stats.call_may_clobber_ref_p_may_alias;
2295 else
2296 ++alias_stats.call_may_clobber_ref_p_no_alias;
2297 return res;
2298 }
2299
2300
2301 /* If the statement STMT may clobber the memory reference REF return true,
2302 otherwise return false. */
2303
2304 bool
2305 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
2306 {
2307 if (is_gimple_call (stmt))
2308 {
2309 tree lhs = gimple_call_lhs (stmt);
2310 if (lhs
2311 && TREE_CODE (lhs) != SSA_NAME)
2312 {
2313 ao_ref r;
2314 ao_ref_init (&r, lhs);
2315 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
2316 return true;
2317 }
2318
2319 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
2320 }
2321 else if (gimple_assign_single_p (stmt))
2322 {
2323 tree lhs = gimple_assign_lhs (stmt);
2324 if (TREE_CODE (lhs) != SSA_NAME)
2325 {
2326 ao_ref r;
2327 ao_ref_init (&r, lhs);
2328 return refs_may_alias_p_1 (ref, &r, tbaa_p);
2329 }
2330 }
2331 else if (gimple_code (stmt) == GIMPLE_ASM)
2332 return true;
2333
2334 return false;
2335 }
2336
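/* Convenience wrapper around stmt_may_clobber_ref_p_1 taking the
   reference as a tree.  */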
2337 bool
2338 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
2339 {
2340 ao_ref r;
2341 ao_ref_init (&r, ref);
2342 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
2343 }
2344
2345 /* Return true if store1 and store2 described by corresponding tuples
2346 <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
2347 address. */
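
/* A sketch of the situation same_addr_size_stores_p recognizes (names
   hypothetical):

	int x;			(obj, DECL_SIZE matching the store size)
	p_1 = &x;		(points-to of p_1 is the singleton { x })
	*p_1 = 1;		(store with base MEM_REF[p_1], offset 0)
	x = 2;			(store with base x, offset 0)

   One base is a plain object, the other a MEM_REF whose pointer provably
   points to the start of that object, so both stores hit the same address
   with the same size.  */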
2348
2349 static bool
2350 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
2351 poly_int64 max_size1,
2352 tree base2, poly_int64 offset2, poly_int64 size2,
2353 poly_int64 max_size2)
2354 {
2355 /* Offsets need to be 0. */
2356 if (maybe_ne (offset1, 0)
2357 || maybe_ne (offset2, 0))
2358 return false;
2359
2360 bool base1_obj_p = SSA_VAR_P (base1);
2361 bool base2_obj_p = SSA_VAR_P (base2);
2362
2363   /* Exactly one of the two bases must be a plain object.  */
2364 if (base1_obj_p == base2_obj_p)
2365 return false;
2366 tree obj = base1_obj_p ? base1 : base2;
2367
2368   /* And exactly one of the bases must be a MEM_REF.  */
2369 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
2370 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
2371 if (base1_memref_p == base2_memref_p)
2372 return false;
2373 tree memref = base1_memref_p ? base1 : base2;
2374
2375 /* Sizes need to be valid. */
2376 if (!known_size_p (max_size1)
2377 || !known_size_p (max_size2)
2378 || !known_size_p (size1)
2379 || !known_size_p (size2))
2380 return false;
2381
2382 /* Max_size needs to match size. */
2383 if (maybe_ne (max_size1, size1)
2384 || maybe_ne (max_size2, size2))
2385 return false;
2386
2387 /* Sizes need to match. */
2388 if (maybe_ne (size1, size2))
2389 return false;
2391
2392   /* Check that memref is a store through a pointer with singleton points-to info.  */
2393 if (!integer_zerop (TREE_OPERAND (memref, 1)))
2394 return false;
2395 tree ptr = TREE_OPERAND (memref, 0);
2396 if (TREE_CODE (ptr) != SSA_NAME)
2397 return false;
2398 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2399 unsigned int pt_uid;
2400 if (pi == NULL
2401 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
2402 return false;
2403
2404 /* Be conservative with non-call exceptions when the address might
2405 be NULL. */
2406 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
2407 return false;
2408
2409 /* Check that ptr points relative to obj. */
2410 unsigned int obj_uid = DECL_PT_UID (obj);
2411 if (obj_uid != pt_uid)
2412 return false;
2413
2414   /* Check that the object size is the same as the store size.  That
2415      ensures that ptr points to the start of obj.  */
2416 return (DECL_SIZE (obj)
2417 && poly_int_tree_p (DECL_SIZE (obj))
2418 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
2419 }
2420
2421 /* If STMT kills the memory reference REF return true, otherwise
2422 return false. */
2423
2424 bool
2425 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
2426 {
2427 if (!ao_ref_base (ref))
2428 return false;
2429
2430 if (gimple_has_lhs (stmt)
2431 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2432       /* The assignment is not necessarily carried out if it can throw
2433 	 and we can catch it in the current function, where we could
2434 	 then inspect the previous value.
2435 	 ???  We only need to care about the RHS throwing.  For aggregate
2436 	 assignments or similar calls and non-call exceptions the LHS
2437 	 might throw as well.  */
2438 && !stmt_can_throw_internal (cfun, stmt))
2439 {
2440 tree lhs = gimple_get_lhs (stmt);
2441 /* If LHS is literally a base of the access we are done. */
2442 if (ref->ref)
2443 {
2444 tree base = ref->ref;
2445 tree innermost_dropped_array_ref = NULL_TREE;
2446 if (handled_component_p (base))
2447 {
2448 tree saved_lhs0 = NULL_TREE;
2449 if (handled_component_p (lhs))
2450 {
2451 saved_lhs0 = TREE_OPERAND (lhs, 0);
2452 TREE_OPERAND (lhs, 0) = integer_zero_node;
2453 }
2454 do
2455 {
2456 		  /* Just compare the outermost handled component; if
2457 		     they are equal we have found a possible common
2458 		     base.  */
2459 tree saved_base0 = TREE_OPERAND (base, 0);
2460 TREE_OPERAND (base, 0) = integer_zero_node;
2461 bool res = operand_equal_p (lhs, base, 0);
2462 TREE_OPERAND (base, 0) = saved_base0;
2463 if (res)
2464 break;
2465 /* Remember if we drop an array-ref that we need to
2466 double-check not being at struct end. */
2467 if (TREE_CODE (base) == ARRAY_REF
2468 || TREE_CODE (base) == ARRAY_RANGE_REF)
2469 innermost_dropped_array_ref = base;
2470 /* Otherwise drop handled components of the access. */
2471 base = saved_base0;
2472 }
2473 while (handled_component_p (base));
2474 if (saved_lhs0)
2475 TREE_OPERAND (lhs, 0) = saved_lhs0;
2476 }
2477 	  /* Finally check if the lhs has the same address and size as the
2478 	     base candidate of the access.  Watch out if we have dropped
2479 	     an array-ref that was at struct end; this means ref->ref may
2480 	     be outside of the TYPE_SIZE of its base.  */
2481 if ((! innermost_dropped_array_ref
2482 || ! array_at_struct_end_p (innermost_dropped_array_ref))
2483 && (lhs == base
2484 || (((TYPE_SIZE (TREE_TYPE (lhs))
2485 == TYPE_SIZE (TREE_TYPE (base)))
2486 || (TYPE_SIZE (TREE_TYPE (lhs))
2487 && TYPE_SIZE (TREE_TYPE (base))
2488 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
2489 TYPE_SIZE (TREE_TYPE (base)),
2490 0)))
2491 && operand_equal_p (lhs, base,
2492 OEP_ADDRESS_OF
2493 | OEP_MATCH_SIDE_EFFECTS))))
2494 return true;
2495 }
2496
2497       /* Now look for non-literal equal bases, restricting ourselves to
2498 	 constant offset and size.  */
2499 /* For a must-alias check we need to be able to constrain
2500 the access properly. */
2501 if (!ref->max_size_known_p ())
2502 return false;
2503 poly_int64 size, offset, max_size, ref_offset = ref->offset;
2504 bool reverse;
2505 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
2506 &reverse);
2507 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2508 so base == ref->base does not always hold. */
2509 if (base != ref->base)
2510 {
2511 /* Try using points-to info. */
2512 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
2513 ref->offset, ref->size, ref->max_size))
2514 return true;
2515
2516 	  /* If both base and ref->base are MEM_REFs, only compare the
2517 	     first operand, and if the second operands aren't equal constants,
2518 	     try to add the offsets into offset and ref_offset.  */
2519 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2520 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2521 {
2522 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2523 TREE_OPERAND (ref->base, 1)))
2524 {
2525 poly_offset_int off1 = mem_ref_offset (base);
2526 off1 <<= LOG2_BITS_PER_UNIT;
2527 off1 += offset;
2528 poly_offset_int off2 = mem_ref_offset (ref->base);
2529 off2 <<= LOG2_BITS_PER_UNIT;
2530 off2 += ref_offset;
2531 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
2532 size = -1;
2533 }
2534 }
2535 else
2536 size = -1;
2537 }
2538 /* For a must-alias check we need to be able to constrain
2539 the access properly. */
2540 if (known_eq (size, max_size)
2541 && known_subrange_p (ref_offset, ref->max_size, offset, size))
2542 return true;
2543 }
2544
2545 if (is_gimple_call (stmt))
2546 {
2547 tree callee = gimple_call_fndecl (stmt);
2548 if (callee != NULL_TREE
2549 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2550 switch (DECL_FUNCTION_CODE (callee))
2551 {
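	  /* E.g. free (p_1) kills a reference whose base is MEM_REF[p_1];
	     the pointed-to memory must not be accessed afterwards.  */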
2552 case BUILT_IN_FREE:
2553 {
2554 tree ptr = gimple_call_arg (stmt, 0);
2555 tree base = ao_ref_base (ref);
2556 if (base && TREE_CODE (base) == MEM_REF
2557 && TREE_OPERAND (base, 0) == ptr)
2558 return true;
2559 break;
2560 }
2561
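	  /* A store of LEN bytes through DEST kills REF if REF's extent is
	     provably a subrange of the stored bytes; e.g. a 16-byte memcpy
	     to p kills a 4-byte access at p + 8, since bytes [8, 12) lie
	     within [0, 16).  */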
2562 case BUILT_IN_MEMCPY:
2563 case BUILT_IN_MEMPCPY:
2564 case BUILT_IN_MEMMOVE:
2565 case BUILT_IN_MEMSET:
2566 case BUILT_IN_MEMCPY_CHK:
2567 case BUILT_IN_MEMPCPY_CHK:
2568 case BUILT_IN_MEMMOVE_CHK:
2569 case BUILT_IN_MEMSET_CHK:
2570 case BUILT_IN_STRNCPY:
2571 case BUILT_IN_STPNCPY:
2572 {
2573 /* For a must-alias check we need to be able to constrain
2574 the access properly. */
2575 if (!ref->max_size_known_p ())
2576 return false;
2577 tree dest = gimple_call_arg (stmt, 0);
2578 tree len = gimple_call_arg (stmt, 2);
2579 if (!poly_int_tree_p (len))
2580 return false;
2581 tree rbase = ref->base;
2582 poly_offset_int roffset = ref->offset;
2583 ao_ref dref;
2584 ao_ref_init_from_ptr_and_size (&dref, dest, len);
2585 tree base = ao_ref_base (&dref);
2586 poly_offset_int offset = dref.offset;
2587 if (!base || !known_size_p (dref.size))
2588 return false;
2589 if (TREE_CODE (base) == MEM_REF)
2590 {
2591 if (TREE_CODE (rbase) != MEM_REF)
2592 return false;
2593 // Compare pointers.
2594 offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
2595 roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
2596 base = TREE_OPERAND (base, 0);
2597 rbase = TREE_OPERAND (rbase, 0);
2598 }
2599 if (base == rbase
2600 && known_subrange_p (roffset, ref->max_size, offset,
2601 wi::to_poly_offset (len)
2602 << LOG2_BITS_PER_UNIT))
2603 return true;
2604 break;
2605 }
2606
2607 case BUILT_IN_VA_END:
2608 {
2609 tree ptr = gimple_call_arg (stmt, 0);
2610 if (TREE_CODE (ptr) == ADDR_EXPR)
2611 {
2612 tree base = ao_ref_base (ref);
2613 if (TREE_OPERAND (ptr, 0) == base)
2614 return true;
2615 }
2616 break;
2617 }
2618
2619 default:;
2620 }
2621 }
2622 return false;
2623 }
2624
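/* As above, but with the reference REF given as a tree.  */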
2625 bool
2626 stmt_kills_ref_p (gimple *stmt, tree ref)
2627 {
2628 ao_ref r;
2629 ao_ref_init (&r, ref);
2630 return stmt_kills_ref_p (stmt, &r);
2631 }
2632
2633
2634 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2635    TARGET, or a statement clobbering the memory reference REF, in which
2636    case false is returned.  The walk starts with VUSE, one argument of PHI.  */
2637
2638 static bool
2639 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
2640 ao_ref *ref, tree vuse, unsigned int &limit, bitmap *visited,
2641 bool abort_on_visited,
2642 void *(*translate)(ao_ref *, tree, void *, bool *),
2643 void *data)
2644 {
2645 basic_block bb = gimple_bb (phi);
2646
2647 if (!*visited)
2648 *visited = BITMAP_ALLOC (NULL);
2649
2650 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2651
2652 /* Walk until we hit the target. */
2653 while (vuse != target)
2654 {
2655 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2656 /* If we are searching for the target VUSE by walking up to
2657 TARGET_BB dominating the original PHI we are finished once
2658 we reach a default def or a definition in a block dominating
2659 that block. Update TARGET and return. */
2660 if (!target
2661 && (gimple_nop_p (def_stmt)
2662 || dominated_by_p (CDI_DOMINATORS,
2663 target_bb, gimple_bb (def_stmt))))
2664 {
2665 target = vuse;
2666 return true;
2667 }
2668
2669 /* Recurse for PHI nodes. */
2670 if (gimple_code (def_stmt) == GIMPLE_PHI)
2671 {
2672 /* An already visited PHI node ends the walk successfully. */
2673 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2674 return !abort_on_visited;
2675 vuse = get_continuation_for_phi (def_stmt, ref, limit,
2676 visited, abort_on_visited,
2677 translate, data);
2678 if (!vuse)
2679 return false;
2680 continue;
2681 }
2682 else if (gimple_nop_p (def_stmt))
2683 return false;
2684 else
2685 {
2686 	  /* A clobbering statement or the end of the IL ends the walk unsuccessfully.  */
2687 if ((int)limit <= 0)
2688 return false;
2689 --limit;
2690 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2691 {
2692 bool disambiguate_only = true;
2693 if (translate
2694 && (*translate) (ref, vuse, data, &disambiguate_only) == NULL)
2695 ;
2696 else
2697 return false;
2698 }
2699 }
2700 /* If we reach a new basic-block see if we already skipped it
2701 in a previous walk that ended successfully. */
2702 if (gimple_bb (def_stmt) != bb)
2703 {
2704 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2705 return !abort_on_visited;
2706 bb = gimple_bb (def_stmt);
2707 }
2708 vuse = gimple_vuse (def_stmt);
2709 }
2710 return true;
2711 }
2712
2713
2714 /* Starting from a PHI node for the virtual operand of the memory reference
2715    REF, find a continuation virtual operand that allows walking to continue
2716    over the statements dominating PHI, skipping only statements that cannot
2717    possibly clobber REF.  Decrements LIMIT for each alias disambiguation
2718    done and aborts the walk, returning NULL_TREE, if it reaches zero.
2719    Returns NULL_TREE if no suitable virtual operand can be found.  */
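
/* For example (a sketch; SSA names hypothetical), given

     # .MEM_5 = PHI <.MEM_2(3), .MEM_4(4)>

   where the definition of .MEM_2 dominates the PHI block, .MEM_2 is the
   candidate, and if the walk from .MEM_4 back to .MEM_2 meets only
   statements that cannot clobber REF, .MEM_2 is returned as the
   continuation.  */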
2720
2721 tree
2722 get_continuation_for_phi (gimple *phi, ao_ref *ref,
2723 unsigned int &limit, bitmap *visited,
2724 bool abort_on_visited,
2725 void *(*translate)(ao_ref *, tree, void *, bool *),
2726 void *data)
2727 {
2728 unsigned nargs = gimple_phi_num_args (phi);
2729
2730   /* We can simply look through a single-argument PHI.  */
2731 if (nargs == 1)
2732 return PHI_ARG_DEF (phi, 0);
2733
2734   /* For two or more arguments try to pairwise skip non-aliasing code
2735      until we hit the phi argument definition that dominates the others.  */
2736 basic_block phi_bb = gimple_bb (phi);
2737 tree arg0, arg1;
2738 unsigned i;
2739
2740   /* Find a candidate for the virtual operand whose definition
2741      dominates those of all others.  */
2742   /* First check whether any of the args themselves satisfy this.  */
2743 for (i = 0; i < nargs; ++i)
2744 {
2745 arg0 = PHI_ARG_DEF (phi, i);
2746 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
2747 break;
2748 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
2749 if (def_bb != phi_bb
2750 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
2751 break;
2752 arg0 = NULL_TREE;
2753 }
2754   /* If not, see whether we can reach such a candidate by walking defs
2755      until we hit the immediate dominator.  maybe_skip_until will
2756      do that for us.  */
2757 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
2758
2759 /* Then check against the (to be) found candidate. */
2760 for (i = 0; i < nargs; ++i)
2761 {
2762 arg1 = PHI_ARG_DEF (phi, i);
2763 if (arg1 == arg0)
2764 ;
2765 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, limit, visited,
2766 abort_on_visited,
2767 /* Do not translate when walking over
2768 backedges. */
2769 dominated_by_p
2770 (CDI_DOMINATORS,
2771 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
2772 phi_bb)
2773 ? NULL : translate, data))
2774 return NULL_TREE;
2775 }
2776
2777 return arg0;
2778 }
2779
2780 /* Based on the memory reference REF and its virtual use VUSE, call
2781    WALKER for each virtual use that is equivalent to VUSE, including VUSE
2782    itself.  That is, for each virtual use whose defining statement
2783    does not clobber REF.
2784
2785 WALKER is called with REF, the current virtual use and DATA. If
2786 WALKER returns non-NULL the walk stops and its result is returned.
2787 At the end of a non-successful walk NULL is returned.
2788
2789    TRANSLATE, if non-NULL, is called with a pointer to REF, the virtual
2790    use whose defining statement may clobber REF, and DATA.
2791    If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2792    If TRANSLATE returns non-NULL the walk stops and its result is returned.
2793    If TRANSLATE returns NULL the walk continues, and TRANSLATE is supposed
2794    to have adjusted REF and *DATA to make that valid.
2795
2796    VALUEIZE, if non-NULL, is called with the next VUSE that is considered,
2797    and its return value is substituted for it.  This can be used to
2798    implement optimistic value-numbering, for example.  Note that the
2799    VUSE argument is assumed to be valueized already.
2800
2801    LIMIT specifies the number of alias queries we are allowed to do;
2802    the walk stops when it reaches zero and NULL is returned.  LIMIT
2803    is decremented by the number of alias queries (plus adjustments
2804    done by the callbacks) upon return.
2805
2806 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
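
/* A minimal caller sketch (STOP_AT_INTERESTING, INTERESTING_VUSE_P and
   REF_TREE are hypothetical):

     static void *
     stop_at_interesting (ao_ref *ref, tree vuse, void *data)
     {
       // Return non-NULL to stop the walk at this VUSE.
       return interesting_vuse_p (vuse) ? (void *) vuse : NULL;
     }

     ao_ref r;
     ao_ref_init (&r, ref_tree);   // ref_tree: some memory reference tree
     unsigned limit = 100;         // caller-chosen alias-query budget
     void *res = walk_non_aliased_vuses (&r, vuse, stop_at_interesting,
					 NULL, NULL, limit, NULL);  */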
2807
2808 void *
2809 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
2810 void *(*walker)(ao_ref *, tree, void *),
2811 void *(*translate)(ao_ref *, tree, void *, bool *),
2812 tree (*valueize)(tree),
2813 unsigned &limit, void *data)
2814 {
2815 bitmap visited = NULL;
2816 void *res;
2817 bool translated = false;
2818
2819 timevar_push (TV_ALIAS_STMT_WALK);
2820
2821 do
2822 {
2823 gimple *def_stmt;
2824
2825 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2826 res = (*walker) (ref, vuse, data);
2827 /* Abort walk. */
2828 if (res == (void *)-1)
2829 {
2830 res = NULL;
2831 break;
2832 }
2833 /* Lookup succeeded. */
2834 else if (res != NULL)
2835 break;
2836
2837 if (valueize)
2838 {
2839 vuse = valueize (vuse);
2840 if (!vuse)
2841 {
2842 res = NULL;
2843 break;
2844 }
2845 }
2846 def_stmt = SSA_NAME_DEF_STMT (vuse);
2847 if (gimple_nop_p (def_stmt))
2848 break;
2849 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2850 vuse = get_continuation_for_phi (def_stmt, ref, limit,
2851 &visited, translated, translate, data);
2852 else
2853 {
2854 if ((int)limit <= 0)
2855 {
2856 res = NULL;
2857 break;
2858 }
2859 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2860 {
2861 if (!translate)
2862 break;
2863 bool disambiguate_only = false;
2864 res = (*translate) (ref, vuse, data, &disambiguate_only);
2865 /* Failed lookup and translation. */
2866 if (res == (void *)-1)
2867 {
2868 res = NULL;
2869 break;
2870 }
2871 /* Lookup succeeded. */
2872 else if (res != NULL)
2873 break;
2874 /* Translation succeeded, continue walking. */
2875 translated = translated || !disambiguate_only;
2876 }
2877 vuse = gimple_vuse (def_stmt);
2878 }
2879 }
2880 while (vuse);
2881
2882 if (visited)
2883 BITMAP_FREE (visited);
2884
2885 timevar_pop (TV_ALIAS_STMT_WALK);
2886
2887 return res;
2888 }
2889
2890
2891 /* Based on the memory reference REF call WALKER for each vdef whose
2892 defining statement may clobber REF, starting with VDEF. If REF
2893 is NULL_TREE, each defining statement is visited.
2894
2895 WALKER is called with REF, the current vdef and DATA. If WALKER
2896 returns true the walk is stopped, otherwise it continues.
2897
2898    If function entry is reached, *FUNCTION_ENTRY_REACHED is set to true.
2899    The pointer may be NULL, in which case this information is not tracked.
2900
2901    At PHI nodes walk_aliased_vdefs forks into one walk for each
2902    PHI argument (but only one walk continues on merge points); the
2903    whole walk is aborted as soon as any of the forked walks is.
2904
2905 The function returns the number of statements walked or -1 if
2906 LIMIT stmts were walked and the walk was aborted at this point.
2907 If LIMIT is zero the walk is not aborted. */
2908
2909 static int
2910 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
2911 bool (*walker)(ao_ref *, tree, void *), void *data,
2912 bitmap *visited, unsigned int cnt,
2913 bool *function_entry_reached, unsigned limit)
2914 {
2915 do
2916 {
2917 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
2918
2919 if (*visited
2920 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
2921 return cnt;
2922
2923 if (gimple_nop_p (def_stmt))
2924 {
2925 if (function_entry_reached)
2926 *function_entry_reached = true;
2927 return cnt;
2928 }
2929 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2930 {
2931 unsigned i;
2932 if (!*visited)
2933 *visited = BITMAP_ALLOC (NULL);
2934 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
2935 {
2936 int res = walk_aliased_vdefs_1 (ref,
2937 gimple_phi_arg_def (def_stmt, i),
2938 walker, data, visited, cnt,
2939 function_entry_reached, limit);
2940 if (res == -1)
2941 return -1;
2942 cnt = res;
2943 }
2944 return cnt;
2945 }
2946
2947 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2948 cnt++;
2949 if (cnt == limit)
2950 return -1;
2951 if ((!ref
2952 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
2953 && (*walker) (ref, vdef, data))
2954 return cnt;
2955
2956 vdef = gimple_vuse (def_stmt);
2957 }
2958 while (1);
2959 }
2960
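/* Entry point for the above; see the interface comment before
   walk_aliased_vdefs_1.  Allocates a local visited bitmap if the
   caller does not supply one.  */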
2961 int
2962 walk_aliased_vdefs (ao_ref *ref, tree vdef,
2963 bool (*walker)(ao_ref *, tree, void *), void *data,
2964 bitmap *visited,
2965 bool *function_entry_reached, unsigned int limit)
2966 {
2967 bitmap local_visited = NULL;
2968 int ret;
2969
2970 timevar_push (TV_ALIAS_STMT_WALK);
2971
2972 if (function_entry_reached)
2973 *function_entry_reached = false;
2974
2975 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
2976 visited ? visited : &local_visited, 0,
2977 function_entry_reached, limit);
2978 if (local_visited)
2979 BITMAP_FREE (local_visited);
2980
2981 timevar_pop (TV_ALIAS_STMT_WALK);
2982
2983 return ret;
2984 }
2985