gcc/tree-ssa-alias.c
/* Alias analysis for trees.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "target.h"
#include "basic-block.h"
#include "timevar.h"
#include "ggc.h"
#include "langhooks.h"
#include "flags.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "tree-dump.h"
#include "gimple.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "convert.h"
#include "params.h"
#include "vec.h"
#include "bitmap.h"
#include "vecprim.h"
#include "pointer-set.h"
#include "alloc-pool.h"
#include "tree-ssa-alias.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function; its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
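
/* A usage sketch (hypothetical caller, not code from this file):
   a pass that wants to move a load across a statement STMT can
   combine the entry-points above, where REF is the load's
   reference tree:

       if (!ref_maybe_used_by_stmt_p (stmt, ref)
           && !stmt_may_clobber_ref_p (stmt, ref))
         move_the_load ();

   where move_the_load stands for the pass's own action.  Both
   queries are conservative: a true answer only means the accesses
   could not be disambiguated, not that they certainly conflict.  */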


/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
           HOST_WIDE_INT_PRINT_DEC" disambiguations, "
           HOST_WIDE_INT_PRINT_DEC" queries\n",
           alias_stats.refs_may_alias_p_no_alias,
           alias_stats.refs_may_alias_p_no_alias
           + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
           HOST_WIDE_INT_PRINT_DEC" disambiguations, "
           HOST_WIDE_INT_PRINT_DEC" queries\n",
           alias_stats.ref_maybe_used_by_call_p_no_alias,
           alias_stats.ref_maybe_used_by_call_p_no_alias
           + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
           HOST_WIDE_INT_PRINT_DEC" disambiguations, "
           HOST_WIDE_INT_PRINT_DEC" queries\n",
           alias_stats.call_may_clobber_ref_p_no_alias,
           alias_stats.call_may_clobber_ref_p_no_alias
           + alias_stats.call_may_clobber_ref_p_may_alias);
}


/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (TREE_CODE (decl) != VAR_DECL
          && TREE_CODE (decl) != PARM_DECL
          && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
        {
          ptr = TREE_OPERAND (ptr, 0);
        }
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
          && (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF))
        ptr = TREE_OPERAND (base, 0);
      else if (base
               && DECL_P (base))
        return base == decl;
      else if (base
               && CONSTANT_CLASS_P (base))
        return false;
      else
        return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
        {
          ptr1 = TREE_OPERAND (ptr1, 0);
        }
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
        {
          ptr2 = TREE_OPERAND (ptr2, 0);
        }
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
          && (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF))
        return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
               && DECL_P (base))
        return ptr_deref_may_alias_decl_p (ptr2, base);
      else
        return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
          && (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF))
        return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
               && DECL_P (base))
        return ptr_deref_may_alias_decl_p (ptr1, base);
      else
        return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for the same
     pointer.  In this case we still want to say both pointers alias,
     so shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
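
/* For example (an illustrative sketch, not code from this file):
   given SSA pointers p and q, a caller can disambiguate the
   dereferences with

       if (!ptr_derefs_may_alias_p (p, q))
         mark_independent ();

   where mark_independent stands for the caller's own action.  As the
   code above shows, the query disregards pointer offsetting, so p and
   p + 4 are treated as the same pointer; offset-based and TBAA-based
   disambiguation remain the caller's responsibility.  */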

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Return true if REF may refer to global memory.  */

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
           || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage, do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
              && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}


/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  size_t i;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  referenced_var_iterator rvi;
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
    {
      if (may_be_aliased (var))
        dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (ptr == NULL_TREE
          || SSA_NAME_IN_FREE_LIST (ptr))
        continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
        dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}


/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_global)
        fprintf (file, " (includes global vars)");
    }
}

/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}


/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
                                       &ref->max_size);
  return ref->base;
}
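
/* A minimal usage sketch (hypothetical caller, not code from this
   file):

       ao_ref r;
       ao_ref_init (&r, ref);
       base = ao_ref_base (&r);

   After the ao_ref_base call, r.base, r.offset, r.size and r.max_size
   are valid; re-using one ao_ref across several oracle queries avoids
   recomputing this decomposition each time.  */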

/* Returns the base object alias set of the memory reference *REF.  */

static alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE the size
   is assumed to be unknown.  The access is assumed to be only to or
   after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  HOST_WIDE_INT t1, t2;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == ADDR_EXPR)
    ref->base = get_ref_base_and_extent (TREE_OPERAND (ptr, 0),
                                         &ref->offset, &t1, &t2);
  else
    {
      ref->base = build2 (MEM_REF, char_type_node,
                          ptr, null_pointer_node);
      ref->offset = 0;
    }
  if (size
      && host_integerp (size, 0)
      && TREE_INT_CST_LOW (size) * 8 / 8 == TREE_INT_CST_LOW (size))
    ref->max_size = ref->size = TREE_INT_CST_LOW (size) * 8;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
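
/* For example, the call handling below uses this to model the source
   of a memcpy (dst, src, n) call as a reference of n bytes starting
   at *src (see ref_maybe_used_by_call_p_1), roughly:

       ao_ref dref;
       ao_ref_init_from_ptr_and_size (&dref,
                                      gimple_call_arg (call, 1),
                                      gimple_call_arg (call, 2));

   with the size degrading to unknown when the argument is not a
   suitable integer constant.  */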

/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison, bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
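
/* Callers must distinguish all three results, as in
   aliasing_component_refs_p below:

       same_p = same_type_for_tbaa (type1, type2);
       if (same_p == -1)
         return true;

   i.e. an "unknown" answer is treated like "may alias", while only a
   definite 1 licenses offset-based disambiguation on a common base.  */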

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
                           alias_set_type ref1_alias_set,
                           alias_set_type base1_alias_set,
                           HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
                           tree ref2,
                           alias_set_type ref2_alias_set,
                           alias_set_type base2_alias_set,
                           HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
                           bool ref2_is_decl)
{
  /* If one reference is a component reference through pointers, try to find
     a common base and apply offset-based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
         && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
         && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
            || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}

/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  */

static bool
decl_refs_may_alias_p (tree base1,
                       HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
                       tree base2,
                       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot alias.  */
  if (base1 != base2)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
}
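
/* Concretely (illustrative numbers assuming a 32-bit int): for

       struct { int i; int j; } s;

   a write to s.i covers the bit range [0, 32) and a read of s.j the
   range [32, 64), so ranges_overlap_p returns false and the accesses
   are disambiguated even though both are based on the same decl S.  */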

/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
                               HOST_WIDE_INT offset1,
                               HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
                               alias_set_type ref1_alias_set,
                               alias_set_type base1_alias_set,
                               tree ref2 ATTRIBUTE_UNUSED, tree base2,
                               HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
                               alias_set_type ref2_alias_set,
                               alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;
  HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
  HOST_WIDE_INT doffset1, doffset2;
  double_int moff;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
                        || TREE_CODE (base1) == TARGET_MEM_REF)
                       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);

  /* The offset embedded in MEM_REFs can be negative.  Bias them
     so that the resulting offset adjustment is positive.  */
  moff = mem_ref_offset (base1);
  moff = double_int_lshift (moff,
                            BITS_PER_UNIT == 8
                            ? 3 : exact_log2 (BITS_PER_UNIT),
                            HOST_BITS_PER_DOUBLE_INT, true);
  if (double_int_negative_p (moff))
    offset2p += double_int_neg (moff).low;
  else
    offset1p += moff.low;

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_alias_set (base2);

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
      /* ??? This in turn may run afoul when a decl of type T which is
         a member of union type U is accessed through a pointer to
         type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  doffset1 = offset1;
  doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      double_int moff = mem_ref_offset (dbase2);
      moff = double_int_lshift (moff,
                                BITS_PER_UNIT == 8
                                ? 3 : exact_log2 (BITS_PER_UNIT),
                                HOST_BITS_PER_DOUBLE_INT, true);
      if (double_int_negative_p (moff))
        doffset1 -= double_int_neg (moff).low;
      else
        doffset2 -= moff.low;
    }

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
                                      ref1_alias_set, base1_alias_set,
                                      offset1, max_size1,
                                      ref2,
                                      ref2_alias_set, base2_alias_set,
                                      offset2, max_size2, true);

  return true;
}

/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
                           HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
                           alias_set_type ref1_alias_set,
                           alias_set_type base1_alias_set,
                           tree ref2 ATTRIBUTE_UNUSED, tree base2,
                           HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
                           alias_set_type ref2_alias_set,
                           alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
                        || TREE_CODE (base1) == TARGET_MEM_REF)
                       && (TREE_CODE (base2) == MEM_REF
                           || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
            || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
           && (TREE_CODE (base2) != TARGET_MEM_REF
               || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
          || (TREE_CODE (base1) == TARGET_MEM_REF
              && TREE_CODE (base2) == TARGET_MEM_REF
              && (TMR_STEP (base1) == TMR_STEP (base2)
                  || (TMR_STEP (base1) && TMR_STEP (base2)
                      && operand_equal_p (TMR_STEP (base1),
                                          TMR_STEP (base2), 0)))
              && (TMR_INDEX (base1) == TMR_INDEX (base2)
                  || (TMR_INDEX (base1) && TMR_INDEX (base2)
                      && operand_equal_p (TMR_INDEX (base1),
                                          TMR_INDEX (base2), 0)))
              && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
                  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
                      && operand_equal_p (TMR_INDEX2 (base1),
                                          TMR_INDEX2 (base2), 0))))))
    {
      double_int moff;
      /* The offset embedded in MEM_REFs can be negative.  Bias them
         so that the resulting offset adjustment is positive.  */
      moff = mem_ref_offset (base1);
      moff = double_int_lshift (moff,
                                BITS_PER_UNIT == 8
                                ? 3 : exact_log2 (BITS_PER_UNIT),
                                HOST_BITS_PER_DOUBLE_INT, true);
      if (double_int_negative_p (moff))
        offset2 += double_int_neg (moff).low;
      else
        offset1 += moff.low;
      moff = mem_ref_offset (base2);
      moff = double_int_lshift (moff,
                                BITS_PER_UNIT == 8
                                ? 3 : exact_log2 (BITS_PER_UNIT),
                                HOST_BITS_PER_DOUBLE_INT, true);
      if (double_int_negative_p (moff))
        offset1 += double_int_neg (moff).low;
      else
        offset2 += moff.low;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_deref_alias_set (ptrtype2);
  if (base2_alias_set == 0)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
          || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
                             TREE_TYPE (ptrtype2)) == 1)
    return ranges_overlap_p (offset1, max_size1, offset2, max_size2);

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1)
    return aliasing_component_refs_p (ref1,
                                      ref1_alias_set, base1_alias_set,
                                      offset1, max_size1,
                                      ref2,
                                      ref2_alias_set, base2_alias_set,
                                      offset2, max_size2, false);

  return true;
}

/* Return true if the two memory references REF1 and REF2 may alias.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  HOST_WIDE_INT offset1 = 0, offset2 = 0;
  HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
                        || TREE_CODE (ref1->ref) == SSA_NAME
                        || DECL_P (ref1->ref)
                        || TREE_CODE (ref1->ref) == STRING_CST
                        || handled_component_p (ref1->ref)
                        || TREE_CODE (ref1->ref) == MEM_REF
                        || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
                       && (!ref2->ref
                           || TREE_CODE (ref2->ref) == SSA_NAME
                           || DECL_P (ref2->ref)
                           || TREE_CODE (ref2->ref) == STRING_CST
                           || handled_component_p (ref2->ref)
                           || TREE_CODE (ref2->ref) == MEM_REF
                           || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases, conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset-based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (base1, offset1, max_size1,
                                  base2, offset2, max_size2);

  ind1_p = (TREE_CODE (base1) == MEM_REF
            || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
            || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      HOST_WIDE_INT tmp1;
      tree tmp2;
      ao_ref *tmp3;
      tmp1 = offset1; offset1 = offset2; offset2 = tmp1;
      tmp1 = max_size1; max_size1 = max_size2; max_size2 = tmp1;
      tmp2 = base1; base1 = base2; base2 = tmp2;
      tmp3 = ref1; ref1 = ref2; ref2 = tmp3;
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
                                 ao_ref_alias_set (ref2)))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
                                          offset2, max_size2,
                                          ao_ref_alias_set (ref2), -1,
                                          ref1->ref, base1,
                                          offset1, max_size1,
                                          ao_ref_alias_set (ref1),
                                          ao_ref_base_alias_set (ref1),
                                          tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
                                      offset1, max_size1,
                                      ao_ref_alias_set (ref1), -1,
                                      ref2->ref, base2,
                                      offset2, max_size2,
                                      ao_ref_alias_set (ref2), -1,
                                      tbaa_p);

  /* We really do not want to end up here, but returning true is safe.  */
#ifdef ENABLE_CHECKING
  gcc_unreachable ();
#else
  return true;
#endif
}

bool
refs_may_alias_p (tree ref1, tree ref2)
{
  ao_ref r1, r2;
  bool res;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  res = refs_may_alias_p_1 (&r1, &r2, true);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}
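
/* A typical use (hypothetical caller): two reference trees are
   checked for independence directly, without building ao_refs by
   hand, e.g.

       bool independent = !refs_may_alias_p (lhs, rhs);

   Unlike the _1 worker this wrapper always applies TBAA and updates
   the statistics reported by dump_alias_stats.  */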

/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  */

static bool
ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const functions without a static chain do not implicitly use memory.  */
  if (!gimple_call_chain (call)
      && (flags & (ECF_CONST|ECF_NOVOPS)))
    goto process_args;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      /* All the following functions read memory pointed to by
         their second argument.  strcat/strncat additionally
         read memory pointed to by the first argument.  */
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
        {
          ao_ref dref;
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 0),
                                         NULL_TREE);
          if (refs_may_alias_p_1 (&dref, ref, false))
            return true;
        }
        /* FALLTHRU */
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
        {
          ao_ref dref;
          tree size = NULL_TREE;
          if (gimple_call_num_args (call) == 3)
            size = gimple_call_arg (call, 2);
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 1),
                                         size);
          return refs_may_alias_p_1 (&dref, ref, false);
        }
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
        {
          ao_ref dref;
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 0),
                                         NULL_TREE);
          if (refs_may_alias_p_1 (&dref, ref, false))
            return true;
        }
        /* FALLTHRU */
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMPCPY_CHK:
      case BUILT_IN_STPCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
        {
          ao_ref dref;
          tree size = NULL_TREE;
          if (gimple_call_num_args (call) == 4)
            size = gimple_call_arg (call, 2);
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 1),
                                         size);
          return refs_may_alias_p_1 (&dref, ref, false);
        }
      case BUILT_IN_BCOPY:
        {
          ao_ref dref;
          tree size = gimple_call_arg (call, 2);
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 0),
                                         size);
          return refs_may_alias_p_1 (&dref, ref, false);
        }

      /* The following functions read memory pointed to by their
         first argument.  */
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
      case BUILT_IN_TM_LOG:
      case BUILT_IN_TM_LOG_1:
      case BUILT_IN_TM_LOG_2:
      case BUILT_IN_TM_LOG_4:
      case BUILT_IN_TM_LOG_8:
      case BUILT_IN_TM_LOG_FLOAT:
      case BUILT_IN_TM_LOG_DOUBLE:
      case BUILT_IN_TM_LOG_LDOUBLE:
      case BUILT_IN_TM_LOG_M64:
      case BUILT_IN_TM_LOG_M128:
      case BUILT_IN_TM_LOG_M256:
        return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);

      /* These read memory pointed to by the first argument.  */
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
        {
          ao_ref dref;
          tree size = NULL_TREE;
          if (gimple_call_num_args (call) == 2)
            size = gimple_call_arg (call, 1);
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 0),
                                         size);
          return refs_may_alias_p_1 (&dref, ref, false);
        }
      /* The following builtins do not read from memory.  */
      case BUILT_IN_FREE:
      case BUILT_IN_MALLOC:
      case BUILT_IN_CALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_TM_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_VA_END:
        return false;
      /* __sync_* builtins and some OpenMP builtins act as threading
         barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
      case BUILT_IN_GOMP_ATOMIC_START:
      case BUILT_IN_GOMP_ATOMIC_END:
      case BUILT_IN_GOMP_BARRIER:
      case BUILT_IN_GOMP_TASKWAIT:
      case BUILT_IN_GOMP_CRITICAL_START:
      case BUILT_IN_GOMP_CRITICAL_END:
      case BUILT_IN_GOMP_CRITICAL_NAME_START:
      case BUILT_IN_GOMP_CRITICAL_NAME_END:
      case BUILT_IN_GOMP_LOOP_END:
      case BUILT_IN_GOMP_ORDERED_START:
      case BUILT_IN_GOMP_ORDERED_END:
      case BUILT_IN_GOMP_PARALLEL_END:
      case BUILT_IN_GOMP_SECTIONS_END:
      case BUILT_IN_GOMP_SINGLE_COPY_START:
      case BUILT_IN_GOMP_SINGLE_COPY_END:
        return true;

      default:
        /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not read
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_get_node (callee);
      bitmap not_read;

      /* FIXME: Callee can be an OMP builtin that does not have a call graph
         node yet.  We should enforce that there are nodes for all decls in the
         IL and remove this check instead.  */
      if (node
          && (not_read = ipa_reference_get_not_read_global (node))
          && bitmap_bit_p (not_read, DECL_UID (base)))
        goto process_args;
    }

  /* Check if the base variable is call-used.  */
  if (DECL_P (base))
    {
      if (pt_solution_includes (gimple_call_use_set (call), base))
        return true;
    }
  else if ((TREE_CODE (base) == MEM_REF
            || TREE_CODE (base) == TARGET_MEM_REF)
           && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
        return true;

      if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
        return true;
    }
  else
    return true;

  /* Inspect call arguments for passed-by-value aliases.  */
process_args:
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree op = gimple_call_arg (call, i);
      int flags = gimple_call_arg_flags (call, i);

      if (flags & EAF_UNUSED)
        continue;

      if (TREE_CODE (op) == WITH_SIZE_EXPR)
        op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) != SSA_NAME
          && !is_gimple_min_invariant (op))
        {
          ao_ref r;
          ao_ref_init (&r, op);
          if (refs_may_alias_p_1 (&r, ref, true))
            return true;
        }
    }

  return false;
}

static bool
ref_maybe_used_by_call_p (gimple call, tree ref)
{
  ao_ref r;
  bool res;
  ao_ref_init (&r, ref);
  res = ref_maybe_used_by_call_p_1 (call, &r);
  if (res)
    ++alias_stats.ref_maybe_used_by_call_p_may_alias;
  else
    ++alias_stats.ref_maybe_used_by_call_p_no_alias;
  return res;
}


/* If the statement STMT may use the memory reference REF return
   true, otherwise return false.  */

bool
ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
{
  if (is_gimple_assign (stmt))
    {
      tree rhs;

      /* Assignments that touch memory are always single
         (gimple_assign_single_p).  */
      if (!gimple_assign_single_p (stmt))
        return false;

      rhs = gimple_assign_rhs1 (stmt);
      if (is_gimple_reg (rhs)
          || is_gimple_min_invariant (rhs)
          || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
        return false;

      return refs_may_alias_p (rhs, ref);
    }
  else if (is_gimple_call (stmt))
    return ref_maybe_used_by_call_p (stmt, ref);
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      tree retval = gimple_return_retval (stmt);
      tree base;
      if (retval
          && TREE_CODE (retval) != SSA_NAME
          && !is_gimple_min_invariant (retval)
          && refs_may_alias_p (retval, ref))
        return true;
      /* If ref escapes the function then the return acts as a use.  */
      base = get_base_address (ref);
      if (!base)
        ;
      else if (DECL_P (base))
        return is_global_var (base);
      else if (TREE_CODE (base) == MEM_REF
               || TREE_CODE (base) == TARGET_MEM_REF)
        return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
      return false;
    }

  return true;
}

/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

static bool
call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
{
  tree base;
  tree callee;

  /* If the call is pure or const it cannot clobber anything.  */
  if (gimple_call_flags (call)
      & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
    return false;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  if (TREE_CODE (base) == SSA_NAME
      || CONSTANT_CLASS_P (base))
    return false;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly clobber it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local non-readonly statics can be modified through recursion
         or the call may implement a threading barrier which we must
         treat as may-def.  */
      && (TREE_READONLY (base)
          || !is_global_var (base)))
    return false;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      /* All the following functions clobber memory pointed to by
         their first argument.  */
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMSET:
      case BUILT_IN_TM_MEMSET:
      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
        {
          ao_ref dref;
          tree size = NULL_TREE;
          /* Don't pass in size for strncat, as the maximum size
             is strlen (dest) + n + 1 instead of n, resp.
             n + 1 at dest + strlen (dest), but strlen (dest) isn't
             known.  */
          if (gimple_call_num_args (call) == 3
              && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
            size = gimple_call_arg (call, 2);
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 0),
                                         size);
          return refs_may_alias_p_1 (&dref, ref, false);
        }
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMPCPY_CHK:
      case BUILT_IN_STPCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_MEMSET_CHK:
        {
          ao_ref dref;
          tree size = NULL_TREE;
          /* Don't pass in size for __strncat_chk, as the maximum size
             is strlen (dest) + n + 1 instead of n, resp.
             n + 1 at dest + strlen (dest), but strlen (dest) isn't
             known.  */
          if (gimple_call_num_args (call) == 4
              && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
            size = gimple_call_arg (call, 2);
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 0),
                                         size);
          return refs_may_alias_p_1 (&dref, ref, false);
        }
      case BUILT_IN_BCOPY:
        {
          ao_ref dref;
          tree size = gimple_call_arg (call, 2);
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 1),
                                         size);
          return refs_may_alias_p_1 (&dref, ref, false);
        }
      /* Allocating memory does not have any side-effects apart from
         being the definition point for the pointer.  */
      case BUILT_IN_MALLOC:
      case BUILT_IN_CALLOC:
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
        /* Unix98 specifies that errno is set on allocation failure.  */
        if (flag_errno_math
            && targetm.ref_may_alias_errno (ref))
          return true;
        return false;
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_ASSUME_ALIGNED:
        return false;
      /* Freeing memory kills the pointed-to memory.  More importantly
         the call has to serve as a barrier for moving loads and stores
         across it.  */
      case BUILT_IN_FREE:
      case BUILT_IN_VA_END:
        {
          tree ptr = gimple_call_arg (call, 0);
          return ptr_deref_may_alias_ref_p_1 (ptr, ref);
        }
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
        {
          tree out = gimple_call_arg (call, 1);
          if (ptr_deref_may_alias_ref_p_1 (out, ref))
            return true;
          if (flag_errno_math)
            break;
          return false;
        }
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
        {
          tree out = gimple_call_arg (call, 1);
          return ptr_deref_may_alias_ref_p_1 (out, ref);
        }
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
        {
          tree out = gimple_call_arg (call, 2);
          if (ptr_deref_may_alias_ref_p_1 (out, ref))
            return true;
          if (flag_errno_math)
            break;
          return false;
        }
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
        {
          tree sin = gimple_call_arg (call, 1);
          tree cos = gimple_call_arg (call, 2);
          return (ptr_deref_may_alias_ref_p_1 (sin, ref)
                  || ptr_deref_may_alias_ref_p_1 (cos, ref));
        }
      /* __sync_* builtins and some OpenMP builtins act as threading
         barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
      case BUILT_IN_GOMP_ATOMIC_START:
      case BUILT_IN_GOMP_ATOMIC_END:
      case BUILT_IN_GOMP_BARRIER:
      case BUILT_IN_GOMP_TASKWAIT:
      case BUILT_IN_GOMP_CRITICAL_START:
      case BUILT_IN_GOMP_CRITICAL_END:
      case BUILT_IN_GOMP_CRITICAL_NAME_START:
      case BUILT_IN_GOMP_CRITICAL_NAME_END:
      case BUILT_IN_GOMP_LOOP_END:
      case BUILT_IN_GOMP_ORDERED_START:
      case BUILT_IN_GOMP_ORDERED_END:
      case BUILT_IN_GOMP_PARALLEL_END:
      case BUILT_IN_GOMP_SECTIONS_END:
      case BUILT_IN_GOMP_SINGLE_COPY_START:
      case BUILT_IN_GOMP_SINGLE_COPY_END:
        return true;
      default:
        /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not written
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_get_node (callee);
      bitmap not_written;

      if (node
          && (not_written = ipa_reference_get_not_written_global (node))
          && bitmap_bit_p (not_written, DECL_UID (base)))
        return false;
    }

  /* Check if the base variable is call-clobbered.  */
  if (DECL_P (base))
    return pt_solution_includes (gimple_call_clobber_set (call), base);
  else if ((TREE_CODE (base) == MEM_REF
            || TREE_CODE (base) == TARGET_MEM_REF)
           && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
        return true;

      return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
    }

  return true;
}
1759
1760 /* If the call in statement CALL may clobber the memory reference REF
1761 return true, otherwise return false. */
1762
1763 bool
1764 call_may_clobber_ref_p (gimple call, tree ref)
1765 {
1766 bool res;
1767 ao_ref r;
1768 ao_ref_init (&r, ref);
1769 res = call_may_clobber_ref_p_1 (call, &r);
1770 if (res)
1771 ++alias_stats.call_may_clobber_ref_p_may_alias;
1772 else
1773 ++alias_stats.call_may_clobber_ref_p_no_alias;
1774 return res;
1775 }
1776
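/* Illustrative sketch (hypothetical helper, not part of the upstream
   file): callers that have a full GIMPLE_CALL statement and also care
   about the store to its LHS should use stmt_may_clobber_ref_p below;
   this wrapper spells out the difference using the tree-level oracle. */

static bool
call_or_lhs_may_clobber_p (gimple call, tree ref)
{
  tree lhs = gimple_call_lhs (call);
  /* A non-SSA LHS is a store into memory and has to be disambiguated
     separately; call_may_clobber_ref_p does not consider it. */
  if (lhs
      && TREE_CODE (lhs) != SSA_NAME
      && refs_may_alias_p (lhs, ref))
    return true;
  return call_may_clobber_ref_p (call, ref);
}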
1777
1778 /* If the statement STMT may clobber the memory reference REF return true,
1779 otherwise return false. */
1780
1781 bool
1782 stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
1783 {
1784 if (is_gimple_call (stmt))
1785 {
1786 tree lhs = gimple_call_lhs (stmt);
1787 if (lhs
1788 && TREE_CODE (lhs) != SSA_NAME)
1789 {
1790 ao_ref r;
1791 ao_ref_init (&r, lhs);
1792 if (refs_may_alias_p_1 (ref, &r, true))
1793 return true;
1794 }
1795
1796 return call_may_clobber_ref_p_1 (stmt, ref);
1797 }
1798 else if (gimple_assign_single_p (stmt))
1799 {
1800 tree lhs = gimple_assign_lhs (stmt);
1801 if (TREE_CODE (lhs) != SSA_NAME)
1802 {
1803 ao_ref r;
1804 ao_ref_init (&r, lhs);
1805 return refs_may_alias_p_1 (ref, &r, true);
1806 }
1807 }
1808 else if (gimple_code (stmt) == GIMPLE_ASM)
1809 return true;
1810
1811 return false;
1812 }
1813
1814 bool
1815 stmt_may_clobber_ref_p (gimple stmt, tree ref)
1816 {
1817 ao_ref r;
1818 ao_ref_init (&r, ref);
1819 return stmt_may_clobber_ref_p_1 (stmt, &r);
1820 }
1821
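/* Illustrative sketch (hypothetical helper): a typical consumer of
   stmt_may_clobber_ref_p walks the virtual use-def chain backwards
   from a load and stops at the first statement that may write into
   the reference. */

static gimple
first_clobbering_stmt (gimple load_stmt, tree ref)
{
  tree vuse = gimple_vuse (load_stmt);
  while (vuse)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* Give up at the function entry or at a merge point; the
         walkers further down handle PHI nodes properly. */
      if (gimple_nop_p (def_stmt)
          || gimple_code (def_stmt) == GIMPLE_PHI)
        return NULL;
      if (stmt_may_clobber_ref_p (def_stmt, ref))
        return def_stmt;
      vuse = gimple_vuse (def_stmt);
    }
  return NULL;
}
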
1822 /* If STMT kills the memory reference REF return true, otherwise
1823 return false. */
1824
1825 static bool
1826 stmt_kills_ref_p_1 (gimple stmt, ao_ref *ref)
1827 {
1828 /* For a must-alias check we need to be able to constrain
1829 the access properly. */
1830 ao_ref_base (ref);
1831 if (ref->max_size == -1)
1832 return false;
1833
1834 if (gimple_has_lhs (stmt)
1835 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
1836 /* The assignment is not necessarily carried out if it can throw
1837 and we can catch it in the current function, where we could inspect
1838 the previous value.
1839 ??? We only need to care about the RHS throwing. For aggregate
1840 assignments or similar calls and non-call exceptions the LHS
1841 might throw as well. */
1842 && !stmt_can_throw_internal (stmt))
1843 {
1844 tree base, lhs = gimple_get_lhs (stmt);
1845 HOST_WIDE_INT size, offset, max_size;
1846 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
1847 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
1848 so base == ref->base does not always hold. */
1849 if (base == ref->base)
1850 {
1851 /* For a must-alias check we need to be able to constrain
1852 the access properly. */
1853 if (size != -1 && size == max_size)
1854 {
1855 if (offset <= ref->offset
1856 && offset + size >= ref->offset + ref->max_size)
1857 return true;
1858 }
1859 }
1860 }
1861
1862 if (is_gimple_call (stmt))
1863 {
1864 tree callee = gimple_call_fndecl (stmt);
1865 if (callee != NULL_TREE
1866 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
1867 switch (DECL_FUNCTION_CODE (callee))
1868 {
1869 case BUILT_IN_MEMCPY:
1870 case BUILT_IN_MEMPCPY:
1871 case BUILT_IN_MEMMOVE:
1872 case BUILT_IN_MEMSET:
1873 case BUILT_IN_MEMCPY_CHK:
1874 case BUILT_IN_MEMPCPY_CHK:
1875 case BUILT_IN_MEMMOVE_CHK:
1876 case BUILT_IN_MEMSET_CHK:
1877 {
1878 tree dest = gimple_call_arg (stmt, 0);
1879 tree len = gimple_call_arg (stmt, 2);
1880 tree base = NULL_TREE;
1881 HOST_WIDE_INT offset = 0;
1882 if (!host_integerp (len, 0))
1883 return false;
1884 if (TREE_CODE (dest) == ADDR_EXPR)
1885 base = get_addr_base_and_unit_offset (TREE_OPERAND (dest, 0),
1886 &offset);
1887 else if (TREE_CODE (dest) == SSA_NAME)
1888 base = dest;
1889 if (base
1890 && base == ao_ref_base (ref))
1891 {
1892 HOST_WIDE_INT size = TREE_INT_CST_LOW (len);
1893 if (offset <= ref->offset / BITS_PER_UNIT
1894 && (offset + size
1895 >= ((ref->offset + ref->max_size + BITS_PER_UNIT - 1)
1896 / BITS_PER_UNIT)))
1897 return true;
1898 }
1899 break;
1900 }
1901
1902 case BUILT_IN_VA_END:
1903 {
1904 tree ptr = gimple_call_arg (stmt, 0);
1905 if (TREE_CODE (ptr) == ADDR_EXPR)
1906 {
1907 tree base = ao_ref_base (ref);
1908 if (TREE_OPERAND (ptr, 0) == base)
1909 return true;
1910 }
1911 break;
1912 }
1913
1914 default:;
1915 }
1916 }
1917 return false;
1918 }
1919
1920 bool
1921 stmt_kills_ref_p (gimple stmt, tree ref)
1922 {
1923 ao_ref r;
1924 ao_ref_init (&r, ref);
1925 return stmt_kills_ref_p_1 (stmt, &r);
1926 }
1927
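/* Illustrative sketch (hypothetical helper): the classical consumer of
   stmt_kills_ref_p is dead store elimination - an earlier store is dead
   if a later statement overwrites every byte of it and nothing in
   between reads the location. Only the kill test is shown here. */

static bool
later_stmt_kills_store_p (gimple earlier_store, gimple later_stmt)
{
  tree lhs = gimple_get_lhs (earlier_store);
  if (!lhs || TREE_CODE (lhs) == SSA_NAME)
    return false;
  return stmt_kills_ref_p (later_stmt, lhs);
}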
1928
1929 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
1930 TARGET, or a statement clobbering the memory reference REF, in which
1931 case false is returned. The walk starts with VUSE, one argument of PHI. */
1932
1933 static bool
1934 maybe_skip_until (gimple phi, tree target, ao_ref *ref,
1935 tree vuse, bitmap *visited)
1936 {
1937 basic_block bb = gimple_bb (phi);
1938
1939 if (!*visited)
1940 *visited = BITMAP_ALLOC (NULL);
1941
1942 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
1943
1944 /* Walk until we hit the target. */
1945 while (vuse != target)
1946 {
1947 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1948 /* Recurse for PHI nodes. */
1949 if (gimple_code (def_stmt) == GIMPLE_PHI)
1950 {
1951 /* An already visited PHI node ends the walk successfully. */
1952 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
1953 return true;
1954 vuse = get_continuation_for_phi (def_stmt, ref, visited);
1955 if (!vuse)
1956 return false;
1957 continue;
1958 }
1959 /* A clobbering statement or the end of the IL ends the walk, failing. */
1960 else if (gimple_nop_p (def_stmt)
1961 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
1962 return false;
1963 /* If we reach a new basic-block, see if we already skipped it
1964 in a previous walk that ended successfully. */
1965 if (gimple_bb (def_stmt) != bb)
1966 {
1967 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
1968 return true;
1969 bb = gimple_bb (def_stmt);
1970 }
1971 vuse = gimple_vuse (def_stmt);
1972 }
1973 return true;
1974 }
1975
1976 /* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
1977 until we hit the phi argument definition that dominates the other one.
1978 Return that, or NULL_TREE if there is no such definition. */
1979
1980 static tree
1981 get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
1982 ao_ref *ref, bitmap *visited)
1983 {
1984 gimple def0 = SSA_NAME_DEF_STMT (arg0);
1985 gimple def1 = SSA_NAME_DEF_STMT (arg1);
1986 tree common_vuse;
1987
1988 if (arg0 == arg1)
1989 return arg0;
1990 else if (gimple_nop_p (def0)
1991 || (!gimple_nop_p (def1)
1992 && dominated_by_p (CDI_DOMINATORS,
1993 gimple_bb (def1), gimple_bb (def0))))
1994 {
1995 if (maybe_skip_until (phi, arg0, ref, arg1, visited))
1996 return arg0;
1997 }
1998 else if (gimple_nop_p (def1)
1999 || dominated_by_p (CDI_DOMINATORS,
2000 gimple_bb (def0), gimple_bb (def1)))
2001 {
2002 if (maybe_skip_until (phi, arg1, ref, arg0, visited))
2003 return arg1;
2004 }
2005 /* Special case of a diamond:
2006 MEM_1 = ...
2007 goto (cond) ? L1 : L2
2008 L1: store1 = ... #MEM_2 = vuse(MEM_1)
2009 goto L3
2010 L2: store2 = ... #MEM_3 = vuse(MEM_1)
2011 L3: MEM_4 = PHI<MEM_2, MEM_3>
2012 We were called with the PHI at L3; MEM_2 and MEM_3 don't
2013 dominate each other, but we can still easily skip this PHI node
2014 if we recognize that the vuse MEM operand is the same for both,
2015 and that we can skip both statements (they don't clobber us).
2016 This is still linear. Don't use maybe_skip_until, as that might
2017 potentially be slow. */
2018 else if ((common_vuse = gimple_vuse (def0))
2019 && common_vuse == gimple_vuse (def1))
2020 {
2021 if (!stmt_may_clobber_ref_p_1 (def0, ref)
2022 && !stmt_may_clobber_ref_p_1 (def1, ref))
2023 return common_vuse;
2024 }
2025
2026 return NULL_TREE;
2027 }
2028
2029
2030 /* Starting from a PHI node for the virtual operand of the memory reference
2031 REF, find a continuation virtual operand that allows us to continue walking
2032 statements dominating PHI, skipping only statements that cannot possibly
2033 clobber REF. Returns NULL_TREE if no suitable virtual operand can
2034 be found. */
2035
2036 tree
2037 get_continuation_for_phi (gimple phi, ao_ref *ref, bitmap *visited)
2038 {
2039 unsigned nargs = gimple_phi_num_args (phi);
2040
2041 /* We can simply look through a single-argument PHI. */
2042 if (nargs == 1)
2043 return PHI_ARG_DEF (phi, 0);
2044
2045 /* For two or more arguments try to pairwise skip non-aliasing code
2046 until we hit the phi argument definition that dominates all the others. */
2047 else if (nargs >= 2)
2048 {
2049 tree arg0, arg1;
2050 unsigned i;
2051
2052 /* Find a candidate for the virtual operand whose definition
2053 dominates those of all others. */
2054 arg0 = PHI_ARG_DEF (phi, 0);
2055 if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
2056 for (i = 1; i < nargs; ++i)
2057 {
2058 arg1 = PHI_ARG_DEF (phi, i);
2059 if (SSA_NAME_IS_DEFAULT_DEF (arg1))
2060 {
2061 arg0 = arg1;
2062 break;
2063 }
2064 if (dominated_by_p (CDI_DOMINATORS,
2065 gimple_bb (SSA_NAME_DEF_STMT (arg0)),
2066 gimple_bb (SSA_NAME_DEF_STMT (arg1))))
2067 arg0 = arg1;
2068 }
2069
2070 /* Then pairwise reduce against the found candidate. */
2071 for (i = 0; i < nargs; ++i)
2072 {
2073 arg1 = PHI_ARG_DEF (phi, i);
2074 arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref, visited);
2075 if (!arg0)
2076 return NULL_TREE;
2077 }
2078
2079 return arg0;
2080 }
2081
2082 return NULL_TREE;
2083 }
2084
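/* Illustrative sketch (hypothetical helper, mirroring what
   walk_non_aliased_vuses does below): stepping over a PHI while
   searching the virtual use-def chain for a version of memory that is
   equivalent for REF. */

static tree
skip_phi_for_ref (gimple phi, ao_ref *ref)
{
  bitmap visited = NULL;
  tree continuation = get_continuation_for_phi (phi, ref, &visited);
  if (visited)
    BITMAP_FREE (visited);
  /* NULL_TREE means some path reaching PHI may clobber REF, so the
     walk cannot continue above the PHI. */
  return continuation;
}
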
2085 /* Based on the memory reference REF and its virtual use VUSE call
2086 WALKER for each virtual use that is equivalent to VUSE, including VUSE
2087 itself. That is, for each virtual use whose defining statement
2088 does not clobber REF.
2089
2090 WALKER is called with REF, the current virtual use and DATA. If
2091 WALKER returns non-NULL the walk stops and its result is returned.
2092 At the end of a non-successful walk NULL is returned.
2093
2094 TRANSLATE, if non-NULL, is called with a pointer to REF, the virtual
2095 use whose definition is a statement that may clobber REF, and DATA.
2096 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2097 If TRANSLATE returns non-NULL the walk stops and its result is returned.
2098 If TRANSLATE returns NULL the walk continues; TRANSLATE is then expected
2099 to have adjusted REF and *DATA to make continuing valid.
2100
2101 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
2102
2103 void *
2104 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
2105 void *(*walker)(ao_ref *, tree, void *),
2106 void *(*translate)(ao_ref *, tree, void *), void *data)
2107 {
2108 bitmap visited = NULL;
2109 void *res;
2110
2111 timevar_push (TV_ALIAS_STMT_WALK);
2112
2113 do
2114 {
2115 gimple def_stmt;
2116
2117 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2118 res = (*walker) (ref, vuse, data);
2119 if (res)
2120 break;
2121
2122 def_stmt = SSA_NAME_DEF_STMT (vuse);
2123 if (gimple_nop_p (def_stmt))
2124 break;
2125 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2126 vuse = get_continuation_for_phi (def_stmt, ref, &visited);
2127 else
2128 {
2129 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2130 {
2131 if (!translate)
2132 break;
2133 res = (*translate) (ref, vuse, data);
2134 /* Failed lookup and translation. */
2135 if (res == (void *)-1)
2136 {
2137 res = NULL;
2138 break;
2139 }
2140 /* Lookup succeeded. */
2141 else if (res != NULL)
2142 break;
2143 /* Translation succeeded, continue walking. */
2144 }
2145 vuse = gimple_vuse (def_stmt);
2146 }
2147 }
2148 while (vuse);
2149
2150 if (visited)
2151 BITMAP_FREE (visited);
2152
2153 timevar_pop (TV_ALIAS_STMT_WALK);
2154
2155 return res;
2156 }
2157
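/* Illustrative sketch (hypothetical callback): record the oldest
   virtual operand that is still equivalent to the initial VUSE for
   REF. Returning NULL from the walker keeps the walk going until a
   clobbering definition or the function entry is hit. */

static void *
record_last_vuse_cb (ao_ref *ref ATTRIBUTE_UNUSED, tree vuse, void *data)
{
  *(tree *) data = vuse;
  return NULL;
}

/* A caller would then do something like

     ao_ref r;
     tree last = vuse;
     ao_ref_init (&r, ref);
     walk_non_aliased_vuses (&r, vuse, record_last_vuse_cb, NULL, &last);

   and afterwards find in LAST the earliest equivalent virtual operand,
   usable e.g. to value-number the load. */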
2158
2159 /* Based on the memory reference REF call WALKER for each vdef whose
2160 defining statement may clobber REF, starting with VDEF. If REF
2161 is NULL, each defining statement is visited.
2162
2163 WALKER is called with REF, the current vdef and DATA. If WALKER
2164 returns true the walk is stopped, otherwise it continues.
2165
2166 At PHI nodes walk_aliased_vdefs forks into one walk for each
2167 PHI argument (but only one walk continues on merge points).
2169
2170 The function returns the number of statements walked. */
2171
2172 static unsigned int
2173 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
2174 bool (*walker)(ao_ref *, tree, void *), void *data,
2175 bitmap *visited, unsigned int cnt)
2176 {
2177 do
2178 {
2179 gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
2180
2181 if (*visited
2182 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
2183 return cnt;
2184
2185 if (gimple_nop_p (def_stmt))
2186 return cnt;
2187 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2188 {
2189 unsigned i;
2190 if (!*visited)
2191 *visited = BITMAP_ALLOC (NULL);
2192 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
2193 cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
2194 walker, data, visited, 0);
2195 return cnt;
2196 }
2197
2198 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2199 cnt++;
2200 if ((!ref
2201 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
2202 && (*walker) (ref, vdef, data))
2203 return cnt;
2204
2205 vdef = gimple_vuse (def_stmt);
2206 }
2207 while (1);
2208 }
2209
2210 unsigned int
2211 walk_aliased_vdefs (ao_ref *ref, tree vdef,
2212 bool (*walker)(ao_ref *, tree, void *), void *data,
2213 bitmap *visited)
2214 {
2215 bitmap local_visited = NULL;
2216 unsigned int ret;
2217
2218 timevar_push (TV_ALIAS_STMT_WALK);
2219
2220 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
2221 visited ? visited : &local_visited, 0);
2222 if (local_visited)
2223 BITMAP_FREE (local_visited);
2224
2225 timevar_pop (TV_ALIAS_STMT_WALK);
2226
2227 return ret;
2228 }
2229
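/* Illustrative sketch (hypothetical callback): check whether any
   statement between a virtual use and the function entry may clobber
   REF. The walker is only invoked for vdefs whose statement may
   clobber REF, so the first invocation already answers the question. */

static bool
found_clobber_cb (ao_ref *ref ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
                  void *data)
{
  *(bool *) data = true;
  /* Returning true stops the walk. */
  return true;
}

/* A caller would use it as

     bool clobbered = false;
     ao_ref r;
     ao_ref_init (&r, ref);
     walk_aliased_vdefs (&r, gimple_vuse (stmt), found_clobber_cb,
                         &clobbered, NULL);  */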