Don't use #if inside C test expression.
[gcc.git] / gcc / alias.c
1 /* Alias analysis for GNU C
2 Copyright (C) 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
3 Contributed by John Carr (jfc@mit.edu).
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "function.h"
28 #include "expr.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
32 #include "flags.h"
33 #include "output.h"
34 #include "toplev.h"
35 #include "cselib.h"
36 #include "splay-tree.h"
37 #include "ggc.h"
38
39 /* The alias sets assigned to MEMs assist the back-end in determining
40 which MEMs can alias which other MEMs. In general, two MEMs in
41 different alias sets cannot alias each other, with one important
42 exception. Consider something like:
43
44 struct S {int i; double d; };
45
46 a store to an `S' can alias something of either type `int' or type
47 `double'. (However, a store to an `int' cannot alias a `double'
48 and vice versa.) We indicate this via a tree structure that looks
49 like:
50 struct S
51 / \
52 / \
53 |/_ _\|
54 int double
55
56 (The arrows are directed and point downwards.)
57 In this situation we say the alias set for `struct S' is the
58 `superset' and that those for `int' and `double' are `subsets'.
59
60 To see whether two alias sets can point to the same memory, we must
61 see if either alias set is a subset of the other. We need not trace
 62 past immediate descendants, however, since we propagate all
63 grandchildren up one level.
64
65 Alias set zero is implicitly a superset of all other alias sets.
 66 However, there is no actual entry for alias set zero. It is an
67 error to attempt to explicitly construct a subset of zero. */
68
/* An alias set entry records, for a single alias set, all the alias
   sets known to be its subsets.  Entries live in the file-level
   `alias_sets' splay tree, keyed by alias set number.  */
typedef struct alias_set_entry
{
  /* The alias set number, as stored in MEM_ALIAS_SET.  */
  HOST_WIDE_INT alias_set;

  /* The children of the alias set.  These are not just the immediate
     children, but, in fact, all descendants.  So, if we have:

       struct T { struct S s; float f; }

     continuing our example above, the children here will be all of
     `int', `double', `float', and `struct S'.  */
  splay_tree children;

  /* Nonzero if would have a child of zero: this effectively makes this
     alias set the same as alias set zero.  */
  int has_zero_child;
} *alias_set_entry;
87
88 static int rtx_equal_for_memref_p PARAMS ((rtx, rtx));
89 static rtx find_symbolic_term PARAMS ((rtx));
90 rtx get_addr PARAMS ((rtx));
91 static int memrefs_conflict_p PARAMS ((int, rtx, int, rtx,
92 HOST_WIDE_INT));
93 static void record_set PARAMS ((rtx, rtx, void *));
94 static rtx find_base_term PARAMS ((rtx));
95 static int base_alias_check PARAMS ((rtx, rtx, enum machine_mode,
96 enum machine_mode));
97 static int handled_component_p PARAMS ((tree));
98 static int can_address_p PARAMS ((tree));
99 static rtx find_base_value PARAMS ((rtx));
100 static int mems_in_disjoint_alias_sets_p PARAMS ((rtx, rtx));
101 static int insert_subset_children PARAMS ((splay_tree_node, void*));
102 static tree find_base_decl PARAMS ((tree));
103 static alias_set_entry get_alias_set_entry PARAMS ((HOST_WIDE_INT));
104 static rtx fixed_scalar_and_varying_struct_p PARAMS ((rtx, rtx, rtx, rtx,
105 int (*) (rtx, int)));
106 static int aliases_everything_p PARAMS ((rtx));
107 static int write_dependence_p PARAMS ((rtx, rtx, int));
108 static int nonlocal_mentioned_p PARAMS ((rtx));
109
110 /* Set up all info needed to perform alias analysis on memory references. */
111
112 /* Returns the size in bytes of the mode of X. */
113 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
114
115 /* Returns nonzero if MEM1 and MEM2 do not alias because they are in
116 different alias sets. We ignore alias sets in functions making use
117 of variable arguments because the va_arg macros on some systems are
118 not legal ANSI C. */
119 #define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
120 mems_in_disjoint_alias_sets_p (MEM1, MEM2)
121
122 /* Cap the number of passes we make over the insns propagating alias
123 information through set chains. 10 is a completely arbitrary choice. */
124 #define MAX_ALIAS_LOOP_PASSES 10
125
126 /* reg_base_value[N] gives an address to which register N is related.
127 If all sets after the first add or subtract to the current value
128 or otherwise modify it so it does not point to a different top level
129 object, reg_base_value[N] is equal to the address part of the source
130 of the first set.
131
132 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
133 expressions represent certain special values: function arguments and
134 the stack, frame, and argument pointers.
135
136 The contents of an ADDRESS is not normally used, the mode of the
137 ADDRESS determines whether the ADDRESS is a function argument or some
138 other special value. Pointer equality, not rtx_equal_p, determines whether
139 two ADDRESS expressions refer to the same base address.
140
141 The only use of the contents of an ADDRESS is for determining if the
142 current function performs nonlocal memory references for the
143 purposes of marking the function as a constant function. */
144
145 static rtx *reg_base_value;
146 static rtx *new_reg_base_value;
147 static unsigned int reg_base_value_size; /* size of reg_base_value array */
148
149 #define REG_BASE_VALUE(X) \
150 (REGNO (X) < reg_base_value_size \
151 ? reg_base_value[REGNO (X)] : 0)
152
153 /* Vector of known invariant relationships between registers. Set in
154 loop unrolling. Indexed by register number, if nonzero the value
155 is an expression describing this register in terms of another.
156
157 The length of this array is REG_BASE_VALUE_SIZE.
158
159 Because this array contains only pseudo registers it has no effect
160 after reload. */
161 static rtx *alias_invariant;
162
163 /* Vector indexed by N giving the initial (unchanging) value known for
164 pseudo-register N. This array is initialized in
165 init_alias_analysis, and does not change until end_alias_analysis
166 is called. */
167 rtx *reg_known_value;
168
169 /* Indicates number of valid entries in reg_known_value. */
170 static unsigned int reg_known_value_size;
171
172 /* Vector recording for each reg_known_value whether it is due to a
173 REG_EQUIV note. Future passes (viz., reload) may replace the
174 pseudo with the equivalent expression and so we account for the
175 dependences that would be introduced if that happens.
176
177 The REG_EQUIV notes created in assign_parms may mention the arg
178 pointer, and there are explicit insns in the RTL that modify the
179 arg pointer. Thus we must ensure that such insns don't get
180 scheduled across each other because that would invalidate the
181 REG_EQUIV notes. One could argue that the REG_EQUIV notes are
182 wrong, but solving the problem in the scheduler will likely give
183 better code, so we do it here. */
184 char *reg_known_equiv_p;
185
186 /* True when scanning insns from the start of the rtl to the
187 NOTE_INSN_FUNCTION_BEG note. */
188 static int copying_arguments;
189
190 /* The splay-tree used to store the various alias set entries. */
191 static splay_tree alias_sets;
192 \f
193 /* Returns a pointer to the alias set entry for ALIAS_SET, if there is
194 such an entry, or NULL otherwise. */
195
196 static alias_set_entry
197 get_alias_set_entry (alias_set)
198 HOST_WIDE_INT alias_set;
199 {
200 splay_tree_node sn
201 = splay_tree_lookup (alias_sets, (splay_tree_key) alias_set);
202
203 return sn != 0 ? ((alias_set_entry) sn->value) : 0;
204 }
205
/* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
   the two MEMs cannot alias each other.  */

static int
mems_in_disjoint_alias_sets_p (mem1, mem2)
     rtx mem1;
     rtx mem2;
{
#ifdef ENABLE_CHECKING
/* Perform a basic sanity check.  Namely, that there are no alias sets
   if we're not using strict aliasing.  This helps to catch bugs
   whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
   where a MEM is allocated in some way other than by the use of
   gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared.  If we begin to
   use alias sets to indicate that spilled registers cannot alias each
   other, we might need to remove this check.  */
  if (! flag_strict_aliasing
      && (MEM_ALIAS_SET (mem1) != 0 || MEM_ALIAS_SET (mem2) != 0))
    abort ();
#endif

  /* Two MEMs are disjoint exactly when their alias sets do not
     conflict.  */
  return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
}
229
230 /* Insert the NODE into the splay tree given by DATA. Used by
231 record_alias_subset via splay_tree_foreach. */
232
233 static int
234 insert_subset_children (node, data)
235 splay_tree_node node;
236 void *data;
237 {
238 splay_tree_insert ((splay_tree) data, node->key, node->value);
239
240 return 0;
241 }
242
243 /* Return 1 if the two specified alias sets may conflict. */
244
245 int
246 alias_sets_conflict_p (set1, set2)
247 HOST_WIDE_INT set1, set2;
248 {
249 alias_set_entry ase;
250
251 /* If have no alias set information for one of the operands, we have
252 to assume it can alias anything. */
253 if (set1 == 0 || set2 == 0
254 /* If the two alias sets are the same, they may alias. */
255 || set1 == set2)
256 return 1;
257
258 /* See if the first alias set is a subset of the second. */
259 ase = get_alias_set_entry (set1);
260 if (ase != 0
261 && (ase->has_zero_child
262 || splay_tree_lookup (ase->children,
263 (splay_tree_key) set2)))
264 return 1;
265
266 /* Now do the same, but with the alias sets reversed. */
267 ase = get_alias_set_entry (set2);
268 if (ase != 0
269 && (ase->has_zero_child
270 || splay_tree_lookup (ase->children,
271 (splay_tree_key) set1)))
272 return 1;
273
274 /* The two alias sets are distinct and neither one is the
275 child of the other. Therefore, they cannot alias. */
276 return 0;
277 }
278 \f
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (mem, set)
     rtx mem;
     HOST_WIDE_INT set;
{
  /* We would like to do this test but can't yet since when converting a
     REG to a MEM, the alias set field is undefined.  */
#if 0
  /* If the new and old alias sets don't conflict, something is wrong.  */
  if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
    abort ();
#endif

  MEM_ALIAS_SET (mem) = set;
}
296 \f
297 /* Return 1 if TYPE is a RECORD_TYPE, UNION_TYPE, or QUAL_UNION_TYPE and has
298 has any readonly fields. If any of the fields have types that
299 contain readonly fields, return true as well. */
300
301 int
302 readonly_fields_p (type)
303 tree type;
304 {
305 tree field;
306
307 if (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
308 && TREE_CODE (type) != QUAL_UNION_TYPE)
309 return 0;
310
311 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
312 if (TREE_CODE (field) == FIELD_DECL
313 && (TREE_READONLY (field)
314 || readonly_fields_p (TREE_TYPE (field))))
315 return 1;
316
317 return 0;
318 }
319 \f
320 /* Return 1 if any MEM object of type T1 will always conflict (using the
321 dependency routines in this file) with any MEM object of type T2.
322 This is used when allocating temporary storage. If T1 and/or T2 are
323 NULL_TREE, it means we know nothing about the storage. */
324
325 int
326 objects_must_conflict_p (t1, t2)
327 tree t1, t2;
328 {
329 /* If neither has a type specified, we don't know if they'll conflict
330 because we may be using them to store objects of various types, for
331 example the argument and local variables areas of inlined functions. */
332 if (t1 == 0 && t2 == 0)
333 return 0;
334
335 /* If one or the other has readonly fields or is readonly,
336 then they may not conflict. */
337 if ((t1 != 0 && readonly_fields_p (t1))
338 || (t2 != 0 && readonly_fields_p (t2))
339 || (t1 != 0 && TYPE_READONLY (t1))
340 || (t2 != 0 && TYPE_READONLY (t2)))
341 return 0;
342
343 /* If they are the same type, they must conflict. */
344 if (t1 == t2
345 /* Likewise if both are volatile. */
346 || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
347 return 1;
348
349 /* If one is aggregate and the other is scalar then they may not
350 conflict. */
351 if ((t1 != 0 && AGGREGATE_TYPE_P (t1))
352 != (t2 != 0 && AGGREGATE_TYPE_P (t2)))
353 return 0;
354
355 /* Otherwise they conflict only if the alias sets conflict. */
356 return alias_sets_conflict_p (t1 ? get_alias_set (t1) : 0,
357 t2 ? get_alias_set (t2) : 0);
358 }
359 \f
360 /* T is an expression with pointer type. Find the DECL on which this
361 expression is based. (For example, in `a[i]' this would be `a'.)
362 If there is no such DECL, or a unique decl cannot be determined,
363 NULL_TREE is retured. */
364
365 static tree
366 find_base_decl (t)
367 tree t;
368 {
369 tree d0, d1, d2;
370
371 if (t == 0 || t == error_mark_node || ! POINTER_TYPE_P (TREE_TYPE (t)))
372 return 0;
373
374 /* If this is a declaration, return it. */
375 if (TREE_CODE_CLASS (TREE_CODE (t)) == 'd')
376 return t;
377
378 /* Handle general expressions. It would be nice to deal with
379 COMPONENT_REFs here. If we could tell that `a' and `b' were the
380 same, then `a->f' and `b->f' are also the same. */
381 switch (TREE_CODE_CLASS (TREE_CODE (t)))
382 {
383 case '1':
384 return find_base_decl (TREE_OPERAND (t, 0));
385
386 case '2':
387 /* Return 0 if found in neither or both are the same. */
388 d0 = find_base_decl (TREE_OPERAND (t, 0));
389 d1 = find_base_decl (TREE_OPERAND (t, 1));
390 if (d0 == d1)
391 return d0;
392 else if (d0 == 0)
393 return d1;
394 else if (d1 == 0)
395 return d0;
396 else
397 return 0;
398
399 case '3':
400 d0 = find_base_decl (TREE_OPERAND (t, 0));
401 d1 = find_base_decl (TREE_OPERAND (t, 1));
402 d0 = find_base_decl (TREE_OPERAND (t, 0));
403 d2 = find_base_decl (TREE_OPERAND (t, 2));
404
405 /* Set any nonzero values from the last, then from the first. */
406 if (d1 == 0) d1 = d2;
407 if (d0 == 0) d0 = d1;
408 if (d1 == 0) d1 = d0;
409 if (d2 == 0) d2 = d1;
410
411 /* At this point all are nonzero or all are zero. If all three are the
412 same, return it. Otherwise, return zero. */
413 return (d0 == d1 && d1 == d2) ? d0 : 0;
414
415 default:
416 return 0;
417 }
418 }
419
420 /* Return 1 if T is an expression that get_inner_reference handles. */
421
422 static int
423 handled_component_p (t)
424 tree t;
425 {
426 switch (TREE_CODE (t))
427 {
428 case BIT_FIELD_REF:
429 case COMPONENT_REF:
430 case ARRAY_REF:
431 case ARRAY_RANGE_REF:
432 case NON_LVALUE_EXPR:
433 return 1;
434
435 case NOP_EXPR:
436 case CONVERT_EXPR:
437 return (TYPE_MODE (TREE_TYPE (t))
438 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
439
440 default:
441 return 0;
442 }
443 }
444
445 /* Return 1 if all the nested component references handled by
446 get_inner_reference in T are such that we can address the object in T. */
447
448 static int
449 can_address_p (t)
450 tree t;
451 {
452 /* If we're at the end, it is vacuously addressable. */
453 if (! handled_component_p (t))
454 return 1;
455
456 /* Bitfields are never addressable. */
457 else if (TREE_CODE (t) == BIT_FIELD_REF)
458 return 0;
459
460 else if (TREE_CODE (t) == COMPONENT_REF
461 && ! DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1))
462 && can_address_p (TREE_OPERAND (t, 0)))
463 return 1;
464
465 else if ((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
466 && ! TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0)))
467 && can_address_p (TREE_OPERAND (t, 0)))
468 return 1;
469
470 return 0;
471 }
472
/* Return the alias set for T, which may be either a type or an
   expression.  Call language-specific routine (lang_get_alias_set)
   for help, if needed.  Returns 0 ("aliases everything") when strict
   aliasing is disabled or T is erroneous.  */

HOST_WIDE_INT
get_alias_set (t)
     tree t;
{
  tree orig_t;
  HOST_WIDE_INT set;

  /* If we're not doing any alias analysis, just assume everything
     aliases everything else.  Also return 0 if this or its type is
     an error.  */
  if (! flag_strict_aliasing || t == error_mark_node
      || (! TYPE_P (t)
	  && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
    return 0;

  /* We can be passed either an expression or a type.  This and the
     language-specific routine may make mutually-recursive calls to
     each other to figure out what to do.  At each juncture, we see if
     this is a tree that the language may need to handle specially.
     First handle things that aren't types and start by removing nops
     since we care only about the actual object.  */
  if (! TYPE_P (t))
    {
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
	     || TREE_CODE (t) == NON_LVALUE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* Now give the language a chance to do something but record what we
	 gave it this time.  */
      orig_t = t;
      if ((set = lang_get_alias_set (t)) != -1)
	return set;

      /* Now loop the same way as get_inner_reference and get the alias
	 set to use.  Pick up the outermost object that we could have
	 a pointer to.  */
      while (handled_component_p (t) && ! can_address_p (t))
	t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == INDIRECT_REF)
	{
	  /* Check for accesses through restrict-qualified pointers.  */
	  tree decl = find_base_decl (TREE_OPERAND (t, 0));

	  if (decl && DECL_POINTER_ALIAS_SET_KNOWN_P (decl))
	    /* We use the alias set indicated in the declaration.  */
	    return DECL_POINTER_ALIAS_SET (decl);

	  /* If we have an INDIRECT_REF via a void pointer, we don't
	     know anything about what that might alias.  */
	  if (TREE_CODE (TREE_TYPE (t)) == VOID_TYPE)
	    return 0;
	}

      /* Give the language another chance to do something special, now
	 that the expression has possibly been simplified above.  */
      if (orig_t != t
	  && (set = lang_get_alias_set (t)) != -1)
	return set;

      /* Now all we care about is the type.  */
      t = TREE_TYPE (t);
    }

  /* Variant qualifiers don't affect the alias set, so get the main
     variant.  If this is a type with a known alias set, return it.  */
  t = TYPE_MAIN_VARIANT (t);
  if (TYPE_P (t) && TYPE_ALIAS_SET_KNOWN_P (t))
    return TYPE_ALIAS_SET (t);

  /* See if the language has special handling for this type.  */
  if ((set = lang_get_alias_set (t)) != -1)
    {
      /* If the alias set is now known, we are done.  */
      if (TYPE_ALIAS_SET_KNOWN_P (t))
	return TYPE_ALIAS_SET (t);
    }

  /* There are no objects of FUNCTION_TYPE, so there's no point in
     using up an alias set for them.  (There are, of course, pointers
     and references to functions, but that's different.)  */
  else if (TREE_CODE (t) == FUNCTION_TYPE)
    set = 0;
  else
    /* Otherwise make a new alias set for this type.  */
    set = new_alias_set ();

  /* Cache the result on the type for subsequent calls.  */
  TYPE_ALIAS_SET (t) = set;

  /* If this is an aggregate type, we must record any component aliasing
     information.  */
  if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    record_component_aliases (t);

  return set;
}
571
572 /* Return a brand-new alias set. */
573
574 HOST_WIDE_INT
575 new_alias_set ()
576 {
577 static HOST_WIDE_INT last_alias_set;
578
579 if (flag_strict_aliasing)
580 return ++last_alias_set;
581 else
582 return 0;
583 }
584
585 /* Indicate that things in SUBSET can alias things in SUPERSET, but
586 not vice versa. For example, in C, a store to an `int' can alias a
587 structure containing an `int', but not vice versa. Here, the
588 structure would be the SUPERSET and `int' the SUBSET. This
589 function should be called only once per SUPERSET/SUBSET pair.
590
591 It is illegal for SUPERSET to be zero; everything is implicitly a
592 subset of alias set zero. */
593
594 void
595 record_alias_subset (superset, subset)
596 HOST_WIDE_INT superset;
597 HOST_WIDE_INT subset;
598 {
599 alias_set_entry superset_entry;
600 alias_set_entry subset_entry;
601
602 if (superset == 0)
603 abort ();
604
605 superset_entry = get_alias_set_entry (superset);
606 if (superset_entry == 0)
607 {
608 /* Create an entry for the SUPERSET, so that we have a place to
609 attach the SUBSET. */
610 superset_entry
611 = (alias_set_entry) xmalloc (sizeof (struct alias_set_entry));
612 superset_entry->alias_set = superset;
613 superset_entry->children
614 = splay_tree_new (splay_tree_compare_ints, 0, 0);
615 superset_entry->has_zero_child = 0;
616 splay_tree_insert (alias_sets, (splay_tree_key) superset,
617 (splay_tree_value) superset_entry);
618 }
619
620 if (subset == 0)
621 superset_entry->has_zero_child = 1;
622 else
623 {
624 subset_entry = get_alias_set_entry (subset);
625 /* If there is an entry for the subset, enter all of its children
626 (if they are not already present) as children of the SUPERSET. */
627 if (subset_entry)
628 {
629 if (subset_entry->has_zero_child)
630 superset_entry->has_zero_child = 1;
631
632 splay_tree_foreach (subset_entry->children, insert_subset_children,
633 superset_entry->children);
634 }
635
636 /* Enter the SUBSET itself as a child of the SUPERSET. */
637 splay_tree_insert (superset_entry->children,
638 (splay_tree_key) subset, 0);
639 }
640 }
641
642 /* Record that component types of TYPE, if any, are part of that type for
643 aliasing purposes. For record types, we only record component types
644 for fields that are marked addressable. For array types, we always
645 record the component types, so the front end should not call this
646 function if the individual component aren't addressable. */
647
648 void
649 record_component_aliases (type)
650 tree type;
651 {
652 HOST_WIDE_INT superset = get_alias_set (type);
653 tree field;
654
655 if (superset == 0)
656 return;
657
658 switch (TREE_CODE (type))
659 {
660 case ARRAY_TYPE:
661 if (! TYPE_NONALIASED_COMPONENT (type))
662 record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
663 break;
664
665 case RECORD_TYPE:
666 case UNION_TYPE:
667 case QUAL_UNION_TYPE:
668 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
669 if (TREE_CODE (field) == FIELD_DECL && ! DECL_NONADDRESSABLE_P (field))
670 record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
671 break;
672
673 case COMPLEX_TYPE:
674 record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
675 break;
676
677 default:
678 break;
679 }
680 }
681
682 /* Allocate an alias set for use in storing and reading from the varargs
683 spill area. */
684
685 HOST_WIDE_INT
686 get_varargs_alias_set ()
687 {
688 static HOST_WIDE_INT set = -1;
689
690 if (set == -1)
691 set = new_alias_set ();
692
693 return set;
694 }
695
696 /* Likewise, but used for the fixed portions of the frame, e.g., register
697 save areas. */
698
699 HOST_WIDE_INT
700 get_frame_alias_set ()
701 {
702 static HOST_WIDE_INT set = -1;
703
704 if (set == -1)
705 set = new_alias_set ();
706
707 return set;
708 }
709
/* Inside SRC, the source of a SET, find a base address.  Returns a
   SYMBOL_REF, LABEL_REF, ADDRESS, or REG rtx identifying the base, or
   0 when no base address can be determined.  */

static rtx
find_base_value (src)
     register rtx src;
{
  unsigned int regno;
  switch (GET_CODE (src))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      /* Symbols and labels are themselves base addresses.  */
      return src;

    case REG:
      regno = REGNO (src);
      /* At the start of a function, argument registers have known base
	 values which may be lost later.  Returning an ADDRESS
	 expression here allows optimization based on argument values
	 even when the argument registers are used for other purposes.  */
      if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
	return new_reg_base_value[regno];

      /* If a pseudo has a known base value, return it.  Do not do this
	 for hard regs since it can result in a circular dependency
	 chain for registers which have values at function entry.

	 The test above is not sufficient because the scheduler may move
	 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN.  */
      if (regno >= FIRST_PSEUDO_REGISTER
	  && regno < reg_base_value_size
	  && reg_base_value[regno])
	return reg_base_value[regno];

      return src;

    case MEM:
      /* Check for an argument passed in memory.  Only record in the
	 copying-arguments block; it is too hard to track changes
	 otherwise.  */
      if (copying_arguments
	  && (XEXP (src, 0) == arg_pointer_rtx
	      || (GET_CODE (XEXP (src, 0)) == PLUS
		  && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
	return gen_rtx_ADDRESS (VOIDmode, src);
      return 0;

    case CONST:
      /* Look inside the CONST; only PLUS/MINUS bodies are handled,
	 by falling through to the case below.  */
      src = XEXP (src, 0);
      if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
	break;

      /* ... fall through ...  */

    case PLUS:
    case MINUS:
      {
	rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);

	/* If either operand is a REG, then see if we already have
	   a known value for it.  */
	if (GET_CODE (src_0) == REG)
	  {
	    temp = find_base_value (src_0);
	    if (temp != 0)
	      src_0 = temp;
	  }

	if (GET_CODE (src_1) == REG)
	  {
	    temp = find_base_value (src_1);
	    if (temp!= 0)
	      src_1 = temp;
	  }

	/* Guess which operand is the base address:
	   If either operand is a symbol, then it is the base.  If
	   either operand is a CONST_INT, then the other is the base.  */
	if (GET_CODE (src_1) == CONST_INT || CONSTANT_P (src_0))
	  return find_base_value (src_0);
	else if (GET_CODE (src_0) == CONST_INT || CONSTANT_P (src_1))
	  return find_base_value (src_1);

	/* This might not be necessary anymore:
	   If either operand is a REG that is a known pointer, then it
	   is the base.  */
	else if (GET_CODE (src_0) == REG && REG_POINTER (src_0))
	  return find_base_value (src_0);
	else if (GET_CODE (src_1) == REG && REG_POINTER (src_1))
	  return find_base_value (src_1);

	/* Neither operand is recognizably the base.  */
	return 0;
      }

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_value (XEXP (src, 1));

    case AND:
      /* If the second operand is constant set the base
	 address to the first operand.  (ANDs with nonzero constants
	 are presumably alignment masks — TODO confirm.)  */
      if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
	return find_base_value (XEXP (src, 0));
      return 0;

    case TRUNCATE:
      /* A truncation to narrower than a pointer loses base
	 information.  */
      if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
	break;
      /* Fall through.  */
    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* used for NT/Alpha pointers */
    case HIGH:
      return find_base_value (XEXP (src, 0));

    default:
      break;
    }

  return 0;
}
830
/* Called from init_alias_analysis indirectly through note_stores.  */

/* While scanning insns to find base values, reg_seen[N] is nonzero if
   register N has been set in this function.  */
static char *reg_seen;

/* Addresses which are known not to alias anything else are identified
   by a unique integer.  */
static int unique_id;

/* note_stores callback: update new_reg_base_value for the store of SET
   (a SET or CLOBBER rtx, or null) to DEST.  DATA is unused.  */

static void
record_set (dest, set, data)
     rtx dest, set;
     void *data ATTRIBUTE_UNUSED;
{
  register unsigned regno;
  rtx src;

  /* Only register destinations carry base-value information.  */
  if (GET_CODE (dest) != REG)
    return;

  regno = REGNO (dest);

  if (regno >= reg_base_value_size)
    abort ();

  if (set)
    {
      /* A CLOBBER wipes out any old value but does not prevent a previously
	 unset register from acquiring a base address (i.e. reg_seen is not
	 set).  */
      if (GET_CODE (set) == CLOBBER)
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      src = SET_SRC (set);
    }
  else
    {
      /* No SET rtx was supplied.  On the first such event give the
	 register a fresh, unique address that cannot alias anything
	 else; on any later one, forget the base value entirely.  */
      if (reg_seen[regno])
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      reg_seen[regno] = 1;
      new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
						   GEN_INT (unique_id++));
      return;
    }

  /* This is not the first set.  If the new value is not related to the
     old value, forget the base value.  Note that the following code is
     not detected:
     extern int x, y;  int *p = &x; p += (&y-&x);
     ANSI C does not allow computing the difference of addresses
     of distinct top level objects.  */
  if (new_reg_base_value[regno])
    switch (GET_CODE (src))
      {
      case LO_SUM:
      case MINUS:
	/* Keep the old base only if the register itself is one of the
	   operands (i.e. this is an adjustment of the same pointer).  */
	if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
	  new_reg_base_value[regno] = 0;
	break;
      case PLUS:
	/* If the value we add in the PLUS is also a valid base value,
	   this might be the actual base value, and the original value
	   an index.  */
	{
	  rtx other = NULL_RTX;

	  if (XEXP (src, 0) == dest)
	    other = XEXP (src, 1);
	  else if (XEXP (src, 1) == dest)
	    other = XEXP (src, 0);

	  if (! other || find_base_value (other))
	    new_reg_base_value[regno] = 0;
	  break;
	}
      case AND:
	/* ANDing the register with a constant preserves its base.  */
	if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
	  new_reg_base_value[regno] = 0;
	break;
      default:
	new_reg_base_value[regno] = 0;
	break;
      }
  /* If this is the first set of a register, record the value.  */
  else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
	   && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
    new_reg_base_value[regno] = find_base_value (src);

  reg_seen[regno] = 1;
}
927
928 /* Called from loop optimization when a new pseudo-register is
929 created. It indicates that REGNO is being set to VAL. f INVARIANT
930 is true then this value also describes an invariant relationship
931 which can be used to deduce that two registers with unknown values
932 are different. */
933
934 void
935 record_base_value (regno, val, invariant)
936 unsigned int regno;
937 rtx val;
938 int invariant;
939 {
940 if (regno >= reg_base_value_size)
941 return;
942
943 if (invariant && alias_invariant)
944 alias_invariant[regno] = val;
945
946 if (GET_CODE (val) == REG)
947 {
948 if (REGNO (val) < reg_base_value_size)
949 reg_base_value[regno] = reg_base_value[REGNO (val)];
950
951 return;
952 }
953
954 reg_base_value[regno] = find_base_value (val);
955 }
956
957 /* Returns a canonical version of X, from the point of view alias
958 analysis. (For example, if X is a MEM whose address is a register,
959 and the register has a known value (say a SYMBOL_REF), then a MEM
960 whose address is the SYMBOL_REF is returned.) */
961
962 rtx
963 canon_rtx (x)
964 rtx x;
965 {
966 /* Recursively look for equivalences. */
967 if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER
968 && REGNO (x) < reg_known_value_size)
969 return reg_known_value[REGNO (x)] == x
970 ? x : canon_rtx (reg_known_value[REGNO (x)]);
971 else if (GET_CODE (x) == PLUS)
972 {
973 rtx x0 = canon_rtx (XEXP (x, 0));
974 rtx x1 = canon_rtx (XEXP (x, 1));
975
976 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
977 {
978 if (GET_CODE (x0) == CONST_INT)
979 return plus_constant (x1, INTVAL (x0));
980 else if (GET_CODE (x1) == CONST_INT)
981 return plus_constant (x0, INTVAL (x1));
982 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
983 }
984 }
985
986 /* This gives us much better alias analysis when called from
987 the loop optimizer. Note we want to leave the original
988 MEM alone, but need to return the canonicalized MEM with
989 all the flags with their original values. */
990 else if (GET_CODE (x) == MEM)
991 x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));
992
993 return x;
994 }
995
996 /* Return 1 if X and Y are identical-looking rtx's.
997
998 We use the data in reg_known_value above to see if two registers with
999 different numbers are, in fact, equivalent. */
1000
static int
rtx_equal_for_memref_p (x, y)
     rtx x, y;
{
  register int i;
  register int j;
  register enum rtx_code code;
  register const char *fmt;

  /* Two null pointers are equal; a null pointer never equals a
     non-null one.  */
  if (x == 0 && y == 0)
    return 1;
  if (x == 0 || y == 0)
    return 0;

  /* Canonicalize both operands first so register equivalences recorded
     in reg_known_value are taken into account.  */
  x = canon_rtx (x);
  y = canon_rtx (y);

  if (x == y)
    return 1;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Some RTL can be compared without a recursive examination.  */
  switch (code)
    {
    case REG:
      return REGNO (x) == REGNO (y);

    case LABEL_REF:
      /* Label rtx's are shared, so pointer comparison suffices.  */
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      /* Symbol name strings are shared, so pointer comparison
	 suffices here too.  */
      return XSTR (x, 0) == XSTR (y, 0);

    case CONST_INT:
    case CONST_DOUBLE:
      /* There's no need to compare the contents of CONST_DOUBLEs or
	 CONST_INTs because pointer equality is a good enough
	 comparison for these nodes.  */
      return 0;

    case ADDRESSOF:
      return (XINT (x, 1) == XINT (y, 1)
	      && rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0)));

    default:
      break;
    }

  /* For commutative operations, the RTX match if the operand match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
    return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.

     Limit cases to types which actually appear in addresses.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_memref_p (XVECEXP (x, i, j),
					XVECEXP (y, i, j)) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_memref_p (XEXP (x, i), XEXP (y, i)) == 0)
	    return 0;
	  break;

	  /* This can happen for asm operands.  */
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	/* This can happen for an asm which clobbers memory.  */
	case '0':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  abort ();
	}
    }
  return 1;
}
1123
1124 /* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
1125 X and return it, or return 0 if none found. */
1126
1127 static rtx
1128 find_symbolic_term (x)
1129 rtx x;
1130 {
1131 register int i;
1132 register enum rtx_code code;
1133 register const char *fmt;
1134
1135 code = GET_CODE (x);
1136 if (code == SYMBOL_REF || code == LABEL_REF)
1137 return x;
1138 if (GET_RTX_CLASS (code) == 'o')
1139 return 0;
1140
1141 fmt = GET_RTX_FORMAT (code);
1142 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1143 {
1144 rtx t;
1145
1146 if (fmt[i] == 'e')
1147 {
1148 t = find_symbolic_term (XEXP (x, i));
1149 if (t != 0)
1150 return t;
1151 }
1152 else if (fmt[i] == 'E')
1153 break;
1154 }
1155 return 0;
1156 }
1157
/* Return the base term of address X -- the SYMBOL_REF, LABEL_REF or
   (ADDRESS ...) rtx the address is ultimately based on -- or 0 if no
   base can be determined.  */

static rtx
find_base_term (x)
     register rtx x;
{
  cselib_val *val;
  struct elt_loc_list *l;

#if defined (FIND_BASE_TERM)
  /* Try machine-dependent ways to find the base term.  */
  x = FIND_BASE_TERM (x);
#endif

  switch (GET_CODE (x))
    {
    case REG:
      return REG_BASE_VALUE (x);

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* Used for Alpha/NT pointers */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
      /* These codes do not change the underlying base; look through
	 to the operand.  */
      return find_base_term (XEXP (x, 0));

    case VALUE:
      /* A cselib value: try each of its recorded locations in turn.  */
      val = CSELIB_VAL_PTR (x);
      for (l = val->locs; l; l = l->next)
	if ((x = find_base_term (l->loc)) != 0)
	  return x;
      return 0;

    case CONST:
      x = XEXP (x, 0);
      if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
	return 0;
      /* fall through */
    case LO_SUM:
    case PLUS:
    case MINUS:
      {
	rtx tmp1 = XEXP (x, 0);
	rtx tmp2 = XEXP (x, 1);

	/* This is a little bit tricky since we have to determine which of
	   the two operands represents the real base address.  Otherwise this
	   routine may return the index register instead of the base register.

	   That may cause us to believe no aliasing was possible, when in
	   fact aliasing is possible.

	   We use a few simple tests to guess the base register.  Additional
	   tests can certainly be added.  For example, if one of the operands
	   is a shift or multiply, then it must be the index register and the
	   other operand is the base register.  */

	if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
	  return find_base_term (tmp2);

	/* If either operand is known to be a pointer, then use it
	   to determine the base term.  */
	if (REG_P (tmp1) && REG_POINTER (tmp1))
	  return find_base_term (tmp1);

	if (REG_P (tmp2) && REG_POINTER (tmp2))
	  return find_base_term (tmp2);

	/* Neither operand was known to be a pointer.  Go ahead and find the
	   base term for both operands.  */
	tmp1 = find_base_term (tmp1);
	tmp2 = find_base_term (tmp2);

	/* If either base term is named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (tmp1 != 0
	    && (GET_CODE (tmp1) == SYMBOL_REF
		|| GET_CODE (tmp1) == LABEL_REF
		|| (GET_CODE (tmp1) == ADDRESS
		    && GET_MODE (tmp1) != VOIDmode)))
	  return tmp1;

	if (tmp2 != 0
	    && (GET_CODE (tmp2) == SYMBOL_REF
		|| GET_CODE (tmp2) == LABEL_REF
		|| (GET_CODE (tmp2) == ADDRESS
		    && GET_MODE (tmp2) != VOIDmode)))
	  return tmp2;

	/* We could not determine which of the two operands was the
	   base register and which was the index.  So we can determine
	   nothing from the base alias check.  */
	return 0;
      }

    case AND:
      /* AND addresses are used for aligned accesses; the base comes
	 from the register operand.  */
      if (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (XEXP (x, 1)) == CONST_INT)
	return REG_BASE_VALUE (XEXP (x, 0));
      return 0;

    case SYMBOL_REF:
    case LABEL_REF:
      return x;

    case ADDRESSOF:
      /* An (ADDRESSOF ...) always refers to the frame.  */
      return REG_BASE_VALUE (frame_pointer_rtx);

    default:
      return 0;
    }
}
1270
1271 /* Return 0 if the addresses X and Y are known to point to different
1272 objects, 1 if they might be pointers to the same object. */
1273
static int
base_alias_check (x, y, x_mode, y_mode)
     rtx x, y;
     enum machine_mode x_mode, y_mode;
{
  rtx x_base = find_base_term (x);
  rtx y_base = find_base_term (y);

  /* If the address itself has no known base see if a known equivalent
     value has one.  If either address still has no known base, nothing
     is known about aliasing.  */
  if (x_base == 0)
    {
      rtx x_c;

      if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
	return 1;

      x_base = find_base_term (x_c);
      if (x_base == 0)
	return 1;
    }

  if (y_base == 0)
    {
      rtx y_c;
      if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
	return 1;

      y_base = find_base_term (y_c);
      if (y_base == 0)
	return 1;
    }

  /* If the base addresses are equal nothing is known about aliasing.  */
  if (rtx_equal_p (x_base, y_base))
    return 1;

  /* The base addresses of the read and write are different expressions.
     If they are both symbols and they are not accessed via AND, there is
     no conflict.  We can bring knowledge of object alignment into play
     here.  For example, on alpha, "char a, b;" can alias one another,
     though "char a; long b;" cannot.  */
  if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
    {
      if (GET_CODE (x) == AND && GET_CODE (y) == AND)
	return 1;
      /* An AND access may reach outside its nominal object: it only
	 fails to conflict when the alignment mask keeps the access
	 within the other reference's unit size.  */
      if (GET_CODE (x) == AND
	  && (GET_CODE (XEXP (x, 1)) != CONST_INT
	      || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
	return 1;
      if (GET_CODE (y) == AND
	  && (GET_CODE (XEXP (y, 1)) != CONST_INT
	      || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
	return 1;
      /* Differing symbols never alias.  */
      return 0;
    }

  /* If one address is a stack reference there can be no alias:
     stack references using different base registers do not alias,
     a stack reference can not alias a parameter, and a stack reference
     can not alias a global.  */
  if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
      || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
    return 0;

  if (! flag_argument_noalias)
    return 1;

  if (flag_argument_noalias > 1)
    return 0;

  /* Weak noalias assertion (arguments are distinct, but may match globals).  */
  return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
}
1350
1351 /* Convert the address X into something we can use. This is done by returning
1352 it unchanged unless it is a value; in the latter case we call cselib to get
1353 a more useful rtx. */
1354
1355 rtx
1356 get_addr (x)
1357 rtx x;
1358 {
1359 cselib_val *v;
1360 struct elt_loc_list *l;
1361
1362 if (GET_CODE (x) != VALUE)
1363 return x;
1364 v = CSELIB_VAL_PTR (x);
1365 for (l = v->locs; l; l = l->next)
1366 if (CONSTANT_P (l->loc))
1367 return l->loc;
1368 for (l = v->locs; l; l = l->next)
1369 if (GET_CODE (l->loc) != REG && GET_CODE (l->loc) != MEM)
1370 return l->loc;
1371 if (v->locs)
1372 return v->locs->loc;
1373 return x;
1374 }
1375
1376 /* Return the address of the (N_REFS + 1)th memory reference to ADDR
1377 where SIZE is the size in bytes of the memory reference. If ADDR
1378 is not modified by the memory reference then ADDR is returned. */
1379
1380 rtx
1381 addr_side_effect_eval (addr, size, n_refs)
1382 rtx addr;
1383 int size;
1384 int n_refs;
1385 {
1386 int offset = 0;
1387
1388 switch (GET_CODE (addr))
1389 {
1390 case PRE_INC:
1391 offset = (n_refs + 1) * size;
1392 break;
1393 case PRE_DEC:
1394 offset = -(n_refs + 1) * size;
1395 break;
1396 case POST_INC:
1397 offset = n_refs * size;
1398 break;
1399 case POST_DEC:
1400 offset = -n_refs * size;
1401 break;
1402
1403 default:
1404 return addr;
1405 }
1406
1407 if (offset)
1408 addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0), GEN_INT (offset));
1409 else
1410 addr = XEXP (addr, 0);
1411
1412 return addr;
1413 }
1414
1415 /* Return nonzero if X and Y (memory addresses) could reference the
1416 same location in memory. C is an offset accumulator. When
1417 C is nonzero, we are testing aliases between X and Y + C.
1418 XSIZE is the size in bytes of the X reference,
1419 similarly YSIZE is the size in bytes for Y.
1420
1421 If XSIZE or YSIZE is zero, we do not know the amount of memory being
1422 referenced (the reference was BLKmode), so make the most pessimistic
1423 assumptions.
1424
1425 If XSIZE or YSIZE is negative, we may access memory outside the object
1426 being referenced as a side effect. This can happen when using AND to
1427 align memory references, as is done on the Alpha.
1428
1429 Nice to notice that varying addresses cannot conflict with fp if no
1430 local variables had their addresses taken, but that's too hard now. */
1431
static int
memrefs_conflict_p (xsize, x, ysize, y, c)
     register rtx x, y;
     int xsize, ysize;
     HOST_WIDE_INT c;
{
  /* Resolve cselib VALUEs to usable addresses first, then strip
     HIGH/LO_SUM wrappers and canonicalize, accounting for any
     auto-increment side effects.  */
  if (GET_CODE (x) == VALUE)
    x = get_addr (x);
  if (GET_CODE (y) == VALUE)
    y = get_addr (y);
  if (GET_CODE (x) == HIGH)
    x = XEXP (x, 0);
  else if (GET_CODE (x) == LO_SUM)
    x = XEXP (x, 1);
  else
    x = canon_rtx (addr_side_effect_eval (x, xsize, 0));
  if (GET_CODE (y) == HIGH)
    y = XEXP (y, 0);
  else if (GET_CODE (y) == LO_SUM)
    y = XEXP (y, 1);
  else
    y = canon_rtx (addr_side_effect_eval (y, ysize, 0));

  if (rtx_equal_for_memref_p (x, y))
    {
      /* Identical addresses: the references conflict unless the
	 offset C places one entirely past the other.  */
      if (xsize <= 0 || ysize <= 0)
	return 1;
      if (c >= 0 && xsize > c)
	return 1;
      if (c < 0 && ysize+c > 0)
	return 1;
      return 0;
    }

  /* This code used to check for conflicts involving stack references and
     globals but the base address alias code now handles these cases.  */

  if (GET_CODE (x) == PLUS)
    {
      /* The fact that X is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      if (GET_CODE (y) == PLUS)
	{
	  /* The fact that Y is canonicalized means that this
	     PLUS rtx is canonicalized.  */
	  rtx y0 = XEXP (y, 0);
	  rtx y1 = XEXP (y, 1);

	  /* Cancel matching operands and fold constant displacements
	     into the accumulated offset C.  */
	  if (rtx_equal_for_memref_p (x1, y1))
	    return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	  if (rtx_equal_for_memref_p (x0, y0))
	    return memrefs_conflict_p (xsize, x1, ysize, y1, c);
	  if (GET_CODE (x1) == CONST_INT)
	    {
	      if (GET_CODE (y1) == CONST_INT)
		return memrefs_conflict_p (xsize, x0, ysize, y0,
					   c - INTVAL (x1) + INTVAL (y1));
	      else
		return memrefs_conflict_p (xsize, x0, ysize, y,
					   c - INTVAL (x1));
	    }
	  else if (GET_CODE (y1) == CONST_INT)
	    return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));

	  return 1;
	}
      else if (GET_CODE (x1) == CONST_INT)
	return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
    }
  else if (GET_CODE (y) == PLUS)
    {
      /* The fact that Y is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx y0 = XEXP (y, 0);
      rtx y1 = XEXP (y, 1);

      if (GET_CODE (y1) == CONST_INT)
	return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
      else
	return 1;
    }

  if (GET_CODE (x) == GET_CODE (y))
    switch (GET_CODE (x))
      {
      case MULT:
	{
	  /* Handle cases where we expect the second operands to be the
	     same, and check only whether the first operand would conflict
	     or not.  */
	  rtx x0, y0;
	  rtx x1 = canon_rtx (XEXP (x, 1));
	  rtx y1 = canon_rtx (XEXP (y, 1));
	  if (! rtx_equal_for_memref_p (x1, y1))
	    return 1;
	  x0 = canon_rtx (XEXP (x, 0));
	  y0 = canon_rtx (XEXP (y, 0));
	  if (rtx_equal_for_memref_p (x0, y0))
	    return (xsize == 0 || ysize == 0
		    || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));

	  /* Can't properly adjust our sizes.  */
	  if (GET_CODE (x1) != CONST_INT)
	    return 1;
	  /* Scale the sizes and offset down by the common multiplier
	     and recurse on the first operands.  */
	  xsize /= INTVAL (x1);
	  ysize /= INTVAL (x1);
	  c /= INTVAL (x1);
	  return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	}

      case REG:
	/* Are these registers known not to be equal?  */
	if (alias_invariant)
	  {
	    unsigned int r_x = REGNO (x), r_y = REGNO (y);
	    rtx i_x, i_y;	/* invariant relationships of X and Y */

	    i_x = r_x >= reg_base_value_size ? 0 : alias_invariant[r_x];
	    i_y = r_y >= reg_base_value_size ? 0 : alias_invariant[r_y];

	    if (i_x == 0 && i_y == 0)
	      break;

	    if (! memrefs_conflict_p (xsize, i_x ? i_x : x,
				      ysize, i_y ? i_y : y, c))
	      return 0;
	  }
	break;

      default:
	break;
      }

  /* Treat an access through an AND (e.g. a subword access on an Alpha)
     as an access with indeterminate size.  Assume that references
     besides AND are aligned, so if the size of the other reference is
     at least as large as the alignment, assume no other overlap.  */
  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
	xsize = -1;
      return memrefs_conflict_p (xsize, XEXP (x, 0), ysize, y, c);
    }
  if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
    {
      /* ??? If we are indexing far enough into the array/structure, we
	 may yet be able to determine that we can not overlap.  But we
	 also need to know that we are far enough from the end not to overlap
	 a following reference, so we do nothing with that for now.  */
      if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
	ysize = -1;
      return memrefs_conflict_p (xsize, x, ysize, XEXP (y, 0), c);
    }

  /* ADDRESSOF addresses only conflict with the frame pointer or other
     ADDRESSOF rtx's, and then only when a size is unknown.  */
  if (GET_CODE (x) == ADDRESSOF)
    {
      if (y == frame_pointer_rtx
	  || GET_CODE (y) == ADDRESSOF)
	return xsize <= 0 || ysize <= 0;
    }
  if (GET_CODE (y) == ADDRESSOF)
    {
      if (x == frame_pointer_rtx)
	return xsize <= 0 || ysize <= 0;
    }

  if (CONSTANT_P (x))
    {
      if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
	{
	  /* Two absolute addresses: overlap is a pure range check.  */
	  c += (INTVAL (y) - INTVAL (x));
	  return (xsize <= 0 || ysize <= 0
		  || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
	}

      if (GET_CODE (x) == CONST)
	{
	  if (GET_CODE (y) == CONST)
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, canon_rtx (XEXP (y, 0)), c);
	  else
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, y, c);
	}
      if (GET_CODE (y) == CONST)
	return memrefs_conflict_p (xsize, x, ysize,
				   canon_rtx (XEXP (y, 0)), c);

      if (CONSTANT_P (y))
	return (xsize <= 0 || ysize <= 0
		|| (rtx_equal_for_memref_p (x, y)
		    && ((c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));

      return 1;
    }
  /* Nothing more is known; conservatively assume a conflict.  */
  return 1;
}
1632
1633 /* Functions to compute memory dependencies.
1634
1635 Since we process the insns in execution order, we can build tables
1636 to keep track of what registers are fixed (and not aliased), what registers
1637 are varying in known ways, and what registers are varying in unknown
1638 ways.
1639
1640 If both memory references are volatile, then there must always be a
1641 dependence between the two references, since their order can not be
1642 changed. A volatile and non-volatile reference can be interchanged
1643 though.
1644
1645 A MEM_IN_STRUCT reference at a non-AND varying address can never
1646 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We
1647 also must allow AND addresses, because they may generate accesses
1648 outside the object being referenced. This is used to generate
1649 aligned addresses from unaligned addresses, for instance, the alpha
1650 storeqi_unaligned pattern. */
1651
1652 /* Read dependence: X is read after read in MEM takes place. There can
1653 only be a dependence here if both reads are volatile. */
1654
1655 int
1656 read_dependence (mem, x)
1657 rtx mem;
1658 rtx x;
1659 {
1660 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
1661 }
1662
1663 /* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
1664 MEM2 is a reference to a structure at a varying address, or returns
1665 MEM2 if vice versa. Otherwise, returns NULL_RTX. If a non-NULL
1666 value is returned MEM1 and MEM2 can never alias. VARIES_P is used
1667 to decide whether or not an address may vary; it should return
1668 nonzero whenever variation is possible.
1669 MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2. */
1670
1671 static rtx
1672 fixed_scalar_and_varying_struct_p (mem1, mem2, mem1_addr, mem2_addr, varies_p)
1673 rtx mem1, mem2;
1674 rtx mem1_addr, mem2_addr;
1675 int (*varies_p) PARAMS ((rtx, int));
1676 {
1677 if (! flag_strict_aliasing)
1678 return NULL_RTX;
1679
1680 if (MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
1681 && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
1682 /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
1683 varying address. */
1684 return mem1;
1685
1686 if (MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
1687 && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
1688 /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
1689 varying address. */
1690 return mem2;
1691
1692 return NULL_RTX;
1693 }
1694
1695 /* Returns nonzero if something about the mode or address format MEM1
1696 indicates that it might well alias *anything*. */
1697
1698 static int
1699 aliases_everything_p (mem)
1700 rtx mem;
1701 {
1702 if (GET_CODE (XEXP (mem, 0)) == AND)
1703 /* If the address is an AND, its very hard to know at what it is
1704 actually pointing. */
1705 return 1;
1706
1707 return 0;
1708 }
1709
1710 /* True dependence: X is read after store in MEM takes place. */
1711
int
true_dependence (mem, mem_mode, x, varies)
     rtx mem;
     enum machine_mode mem_mode;
     rtx x;
     int (*varies) PARAMS ((rtx, int));
{
  register rtx x_addr, mem_addr;
  rtx base;

  /* Two volatile references must keep their order.  */
  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  /* Unchanging memory can't conflict with non-unchanging memory.
     A non-unchanging read can conflict with a non-unchanging write.
     An unchanging read can conflict with an unchanging write since
     there may be a single store to this address to initialize it.
     Note that an unchanging store can conflict with a non-unchanging read
     since we have to make conservative assumptions when we have a
     record with readonly fields and we are copying the whole thing.
     Just fall through to the code below to resolve potential conflicts.
     This won't handle all cases optimally, but the possible performance
     loss should be negligible.  */
  if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
    return 0;

  if (mem_mode == VOIDmode)
    mem_mode = GET_MODE (mem);

  x_addr = get_addr (XEXP (x, 0));
  mem_addr = get_addr (XEXP (mem, 0));

  /* A read based on a label or constant-pool symbol cannot be
     clobbered by any store.  */
  base = find_base_term (x_addr);
  if (base && (GET_CODE (base) == LABEL_REF
	       || (GET_CODE (base) == SYMBOL_REF
		   && CONSTANT_POOL_ADDRESS_P (base))))
    return 0;

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
    return 0;

  x_addr = canon_rtx (x_addr);
  mem_addr = canon_rtx (mem_addr);

  if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
			    SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  if (aliases_everything_p (x))
    return 1;

  /* We cannot use aliases_everything_p to test MEM, since we must look
     at MEM_MODE, rather than GET_MODE (MEM).  */
  if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
    return 1;

  /* In true_dependence we also allow BLKmode to alias anything.  Why
     don't we do this in anti_dependence and output_dependence?  */
  if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
    return 1;

  return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
					      varies);
}
1779
1780 /* Canonical true dependence: X is read after store in MEM takes place.
1781 Variant of true_dependece which assumes MEM has already been
1782 canonicalized (hence we no longer do that here).
1783 The mem_addr argument has been added, since true_dependence computed
1784 this value prior to canonicalizing. */
1785
int
canon_true_dependence (mem, mem_mode, mem_addr, x, varies)
     rtx mem, mem_addr, x;
     enum machine_mode mem_mode;
     int (*varies) PARAMS ((rtx, int));
{
  register rtx x_addr;

  /* Two volatile references must keep their order.  */
  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  /* If X is an unchanging read, then it can't possibly conflict with any
     non-unchanging store.  It may conflict with an unchanging write though,
     because there may be a single store to this address to initialize it.
     Just fall through to the code below to resolve the case where we have
     both an unchanging read and an unchanging write.  This won't handle all
     cases optimally, but the possible performance loss should be
     negligible.  */
  if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
    return 0;

  x_addr = get_addr (XEXP (x, 0));

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
    return 0;

  x_addr = canon_rtx (x_addr);
  if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
			    SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  if (aliases_everything_p (x))
    return 1;

  /* We cannot use aliases_everything_p to test MEM, since we must look
     at MEM_MODE, rather than GET_MODE (MEM).  */
  if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
    return 1;

  /* In true_dependence we also allow BLKmode to alias anything.  Why
     don't we do this in anti_dependence and output_dependence?  */
  if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
    return 1;

  return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
					      varies);
}
1836
1837 /* Returns non-zero if a write to X might alias a previous read from
1838 (or, if WRITEP is non-zero, a write to) MEM. */
1839
static int
write_dependence_p (mem, x, writep)
     rtx mem;
     rtx x;
     int writep;
{
  rtx x_addr, mem_addr;
  rtx fixed_scalar;
  rtx base;

  /* Two volatile references must keep their order.  */
  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  /* Unchanging memory can't conflict with non-unchanging memory.  */
  if (RTX_UNCHANGING_P (x) != RTX_UNCHANGING_P (mem))
    return 0;

  /* If MEM is an unchanging read, then it can't possibly conflict with
     the store to X, because there is at most one store to MEM, and it must
     have occurred somewhere before MEM.  */
  if (! writep && RTX_UNCHANGING_P (mem))
    return 0;

  x_addr = get_addr (XEXP (x, 0));
  mem_addr = get_addr (XEXP (mem, 0));

  if (! writep)
    {
      /* A read based on a label or constant-pool symbol cannot be
	 clobbered by any store.  */
      base = find_base_term (mem_addr);
      if (base && (GET_CODE (base) == LABEL_REF
		   || (GET_CODE (base) == SYMBOL_REF
		       && CONSTANT_POOL_ADDRESS_P (base))))
	return 0;
    }

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
			  GET_MODE (mem)))
    return 0;

  x_addr = canon_rtx (x_addr);
  mem_addr = canon_rtx (mem_addr);

  if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
			   SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  /* Finally, a fixed-address scalar cannot conflict with a
     varying-address structure reference (and vice versa), unless the
     other reference may alias everything.  */
  fixed_scalar
    = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
					 rtx_addr_varies_p);

  return (!(fixed_scalar == mem && !aliases_everything_p (x))
	  && !(fixed_scalar == x && !aliases_everything_p (mem)));
}
1896
1897 /* Anti dependence: X is written after read in MEM takes place. */
1898
1899 int
1900 anti_dependence (mem, x)
1901 rtx mem;
1902 rtx x;
1903 {
1904 return write_dependence_p (mem, x, /*writep=*/0);
1905 }
1906
1907 /* Output dependence: X is written after store in MEM takes place. */
1908
1909 int
1910 output_dependence (mem, x)
1911 register rtx mem;
1912 register rtx x;
1913 {
1914 return write_dependence_p (mem, x, /*writep=*/1);
1915 }
1916
1917 /* Returns non-zero if X mentions something which is not
1918 local to the function and is not constant. */
1919
1920 static int
1921 nonlocal_mentioned_p (x)
1922 rtx x;
1923 {
1924 rtx base;
1925 register RTX_CODE code;
1926 int regno;
1927
1928 code = GET_CODE (x);
1929
1930 if (GET_RTX_CLASS (code) == 'i')
1931 {
1932 /* Constant functions can be constant if they don't use
1933 scratch memory used to mark function w/o side effects. */
1934 if (code == CALL_INSN && CONST_CALL_P (x))
1935 {
1936 x = CALL_INSN_FUNCTION_USAGE (x);
1937 if (x == 0)
1938 return 0;
1939 }
1940 else
1941 x = PATTERN (x);
1942 code = GET_CODE (x);
1943 }
1944
1945 switch (code)
1946 {
1947 case SUBREG:
1948 if (GET_CODE (SUBREG_REG (x)) == REG)
1949 {
1950 /* Global registers are not local. */
1951 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
1952 && global_regs[subreg_regno (x)])
1953 return 1;
1954 return 0;
1955 }
1956 break;
1957
1958 case REG:
1959 regno = REGNO (x);
1960 /* Global registers are not local. */
1961 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
1962 return 1;
1963 return 0;
1964
1965 case SCRATCH:
1966 case PC:
1967 case CC0:
1968 case CONST_INT:
1969 case CONST_DOUBLE:
1970 case CONST:
1971 case LABEL_REF:
1972 return 0;
1973
1974 case SYMBOL_REF:
1975 /* Constants in the function's constants pool are constant. */
1976 if (CONSTANT_POOL_ADDRESS_P (x))
1977 return 0;
1978 return 1;
1979
1980 case CALL:
1981 /* Non-constant calls and recursion are not local. */
1982 return 1;
1983
1984 case MEM:
1985 /* Be overly conservative and consider any volatile memory
1986 reference as not local. */
1987 if (MEM_VOLATILE_P (x))
1988 return 1;
1989 base = find_base_term (XEXP (x, 0));
1990 if (base)
1991 {
1992 /* A Pmode ADDRESS could be a reference via the structure value
1993 address or static chain. Such memory references are nonlocal.
1994
1995 Thus, we have to examine the contents of the ADDRESS to find
1996 out if this is a local reference or not. */
1997 if (GET_CODE (base) == ADDRESS
1998 && GET_MODE (base) == Pmode
1999 && (XEXP (base, 0) == stack_pointer_rtx
2000 || XEXP (base, 0) == arg_pointer_rtx
2001 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2002 || XEXP (base, 0) == hard_frame_pointer_rtx
2003 #endif
2004 || XEXP (base, 0) == frame_pointer_rtx))
2005 return 0;
2006 /* Constants in the function's constant pool are constant. */
2007 if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
2008 return 0;
2009 }
2010 return 1;
2011
2012 case UNSPEC_VOLATILE:
2013 case ASM_INPUT:
2014 return 1;
2015
2016 case ASM_OPERANDS:
2017 if (MEM_VOLATILE_P (x))
2018 return 1;
2019
2020 /* FALLTHROUGH */
2021
2022 default:
2023 break;
2024 }
2025
2026 /* Recursively scan the operands of this expression. */
2027
2028 {
2029 register const char *fmt = GET_RTX_FORMAT (code);
2030 register int i;
2031
2032 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2033 {
2034 if (fmt[i] == 'e' && XEXP (x, i))
2035 {
2036 if (nonlocal_mentioned_p (XEXP (x, i)))
2037 return 1;
2038 }
2039 else if (fmt[i] == 'E')
2040 {
2041 register int j;
2042 for (j = 0; j < XVECLEN (x, i); j++)
2043 if (nonlocal_mentioned_p (XVECEXP (x, i, j)))
2044 return 1;
2045 }
2046 }
2047 }
2048
2049 return 0;
2050 }
2051
2052 /* Mark the function if it is constant. */
2053
void
mark_constant_function ()
{
  rtx insn;
  int nonlocal_mentioned;

  /* Functions that are public, already marked readonly or pure,
     volatile, or that return void are not candidates.  */
  if (TREE_PUBLIC (current_function_decl)
      || TREE_READONLY (current_function_decl)
      || DECL_IS_PURE (current_function_decl)
      || TREE_THIS_VOLATILE (current_function_decl)
      || TYPE_MODE (TREE_TYPE (current_function_decl)) == VOIDmode)
    return;

  /* A loop might not return which counts as a side effect.  */
  if (mark_dfs_back_edges ())
    return;

  nonlocal_mentioned = 0;

  init_alias_analysis ();

  /* Determine if this is a constant function.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && nonlocal_mentioned_p (insn))
      {
	nonlocal_mentioned = 1;
	break;
      }

  end_alias_analysis ();

  /* Mark the function.  */

  if (! nonlocal_mentioned)
    TREE_READONLY (current_function_decl) = 1;
}
2091
2092
2093 static HARD_REG_SET argument_registers;
2094
2095 void
2096 init_alias_once ()
2097 {
2098 register int i;
2099
2100 #ifndef OUTGOING_REGNO
2101 #define OUTGOING_REGNO(N) N
2102 #endif
2103 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2104 /* Check whether this register can hold an incoming pointer
2105 argument. FUNCTION_ARG_REGNO_P tests outgoing register
2106 numbers, so translate if necessary due to register windows. */
2107 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
2108 && HARD_REGNO_MODE_OK (i, Pmode))
2109 SET_HARD_REG_BIT (argument_registers, i);
2110
2111 alias_sets = splay_tree_new (splay_tree_compare_ints, 0, 0);
2112 }
2113
/* Initialize the aliasing machinery.  Initialize the REG_KNOWN_VALUE
   array.  Iterates over the insn stream (possibly several times) to
   compute reg_base_value and reg_known_value for every register.  */

void
init_alias_analysis ()
{
  int maxreg = max_reg_num ();
  int changed, pass;
  register int i;
  register unsigned int ui;
  register rtx insn;

  reg_known_value_size = maxreg;

  /* These two arrays are only ever indexed by pseudo register numbers
     (>= FIRST_PSEUDO_REGISTER), so bias the base pointer downward to
     avoid allocating unused entries for the hard registers.  The biased
     pointer itself is never dereferenced below FIRST_PSEUDO_REGISTER.  */
  reg_known_value
    = (rtx *) xcalloc ((maxreg - FIRST_PSEUDO_REGISTER), sizeof (rtx))
    - FIRST_PSEUDO_REGISTER;
  reg_known_equiv_p
    = (char*) xcalloc ((maxreg - FIRST_PSEUDO_REGISTER), sizeof (char))
    - FIRST_PSEUDO_REGISTER;

  /* Overallocate reg_base_value to allow some growth during loop
     optimization.  Loop unrolling can create a large number of
     registers.  */
  reg_base_value_size = maxreg * 2;
  reg_base_value = (rtx *) xcalloc (reg_base_value_size, sizeof (rtx));
  ggc_add_rtx_root (reg_base_value, reg_base_value_size);

  new_reg_base_value = (rtx *) xmalloc (reg_base_value_size * sizeof (rtx));
  reg_seen = (char *) xmalloc (reg_base_value_size);
  if (! reload_completed && flag_unroll_loops)
    {
      /* ??? Why are we realloc'ing if we're just going to zero it?  */
      alias_invariant = (rtx *)xrealloc (alias_invariant,
					 reg_base_value_size * sizeof (rtx));
      memset ((char *)alias_invariant, 0, reg_base_value_size * sizeof (rtx));
    }

  /* The basic idea is that each pass through this loop will use the
     "constant" information from the previous pass to propagate alias
     information through another level of assignments.

     This could get expensive if the assignment chains are long.  Maybe
     we should throttle the number of iterations, possibly based on
     the optimization level or flag_expensive_optimizations.

     We could propagate more information in the first pass by making use
     of REG_N_SETS to determine immediately that the alias information
     for a pseudo is "constant".

     A program with an uninitialized variable can cause an infinite loop
     here.  Instead of doing a full dataflow analysis to detect such problems
     we just cap the number of iterations for the loop.

     The state of the arrays for the set chain in question does not matter
     since the program has undefined behavior.  */

  pass = 0;
  do
    {
      /* Assume nothing will change this iteration of the loop.  */
      changed = 0;

      /* We want to assign the same IDs each iteration of this loop, so
	 start counting from zero each iteration of the loop.  */
      unique_id = 0;

      /* We're at the start of the function each iteration through the
	 loop, so we're copying arguments.  */
      copying_arguments = 1;

      /* Wipe the potential alias information clean for this pass.  */
      memset ((char *) new_reg_base_value, 0, reg_base_value_size * sizeof (rtx));

      /* Wipe the reg_seen array clean.  */
      memset ((char *) reg_seen, 0, reg_base_value_size);

      /* Mark all hard registers which may contain an address.
	 The stack, frame and argument pointers may contain an address.
	 An argument register which can hold a Pmode value may contain
	 an address even if it is not in BASE_REGS.

	 The address expression is VOIDmode for an argument and
	 Pmode for other registers.  */

      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (argument_registers, i))
	  new_reg_base_value[i] = gen_rtx_ADDRESS (VOIDmode,
						   gen_rtx_REG (Pmode, i));

      new_reg_base_value[STACK_POINTER_REGNUM]
	= gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
      new_reg_base_value[ARG_POINTER_REGNUM]
	= gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
      new_reg_base_value[FRAME_POINTER_REGNUM]
	= gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      new_reg_base_value[HARD_FRAME_POINTER_REGNUM]
	= gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
#endif

      /* Walk the insns adding values to the new_reg_base_value array.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (INSN_P (insn))
	    {
	      rtx note, set;

#if defined (HAVE_prologue) || defined (HAVE_epilogue)
	      /* The prologue/epilogue insns are not threaded onto the
		 insn chain until after reload has completed.  Thus,
		 there is no sense wasting time checking if INSN is in
		 the prologue/epilogue until after reload has completed.  */
	      if (reload_completed
		  && prologue_epilogue_contains (insn))
		continue;
#endif

	      /* If this insn has a noalias note, process it, Otherwise,
		 scan for sets.  A simple set will have no side effects
		 which could change the base value of any other register.  */

	      if (GET_CODE (PATTERN (insn)) == SET
		  && REG_NOTES (insn) != 0
		  && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
		record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
	      else
		note_stores (PATTERN (insn), record_set, NULL);

	      set = single_set (insn);

	      if (set != 0
		  && GET_CODE (SET_DEST (set)) == REG
		  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
		{
		  unsigned int regno = REGNO (SET_DEST (set));
		  rtx src = SET_SRC (set);

		  /* Prefer the value from a REG_EQUAL note (valid only
		     when the register is set once) or a REG_EQUIV note,
		     provided it is invariant and does not mention the
		     destination itself.  Note the embedded assignments
		     to NOTE in the condition.  */
		  if (REG_NOTES (insn) != 0
		      && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
			   && REG_N_SETS (regno) == 1)
			  || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
		      && GET_CODE (XEXP (note, 0)) != EXPR_LIST
		      && ! rtx_varies_p (XEXP (note, 0), 1)
		      && ! reg_overlap_mentioned_p (SET_DEST (set), XEXP (note, 0)))
		    {
		      reg_known_value[regno] = XEXP (note, 0);
		      reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
		    }
		  /* (reg2 = reg1 + const): if reg1's value is already
		     known, fold the addition so reg2's value is known
		     too.  This is how values propagate across passes.  */
		  else if (REG_N_SETS (regno) == 1
			   && GET_CODE (src) == PLUS
			   && GET_CODE (XEXP (src, 0)) == REG
			   && REGNO (XEXP (src, 0)) >= FIRST_PSEUDO_REGISTER
			   && (reg_known_value[REGNO (XEXP (src, 0))])
			   && GET_CODE (XEXP (src, 1)) == CONST_INT)
		    {
		      rtx op0 = XEXP (src, 0);
		      op0 = reg_known_value[REGNO (op0)];
		      reg_known_value[regno]
			= plus_constant (op0, INTVAL (XEXP (src, 1)));
		      reg_known_equiv_p[regno] = 0;
		    }
		  /* A single set from an invariant source: record the
		     source itself as the known value.  */
		  else if (REG_N_SETS (regno) == 1
			   && ! rtx_varies_p (src, 1))
		    {
		      reg_known_value[regno] = src;
		      reg_known_equiv_p[regno] = 0;
		    }
		}
	    }
	  else if (GET_CODE (insn) == NOTE
		   && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
	    copying_arguments = 0;
	}

      /* Now propagate values from new_reg_base_value to reg_base_value.  */
      for (ui = 0; ui < reg_base_value_size; ui++)
	{
	  if (new_reg_base_value[ui]
	      && new_reg_base_value[ui] != reg_base_value[ui]
	      && ! rtx_equal_p (new_reg_base_value[ui], reg_base_value[ui]))
	    {
	      reg_base_value[ui] = new_reg_base_value[ui];
	      changed = 1;
	    }
	}
    }
  while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);

  /* Fill in the remaining entries.  */
  for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
    if (reg_known_value[i] == 0)
      reg_known_value[i] = regno_reg_rtx[i];

  /* Simplify the reg_base_value array so that no register refers to
     another register, except to special registers indirectly through
     ADDRESS expressions.

     In theory this loop can take as long as O(registers^2), but unless
     there are very long dependency chains it will run in close to linear
     time.

     This loop may not be needed any longer now that the main loop does
     a better job at propagating alias information.  */
  pass = 0;
  do
    {
      changed = 0;
      pass++;
      for (ui = 0; ui < reg_base_value_size; ui++)
	{
	  rtx base = reg_base_value[ui];
	  if (base && GET_CODE (base) == REG)
	    {
	      unsigned int base_regno = REGNO (base);
	      if (base_regno == ui)		/* register set from itself */
		reg_base_value[ui] = 0;
	      else
		reg_base_value[ui] = reg_base_value[base_regno];
	      changed = 1;
	    }
	}
    }
  while (changed && pass < MAX_ALIAS_LOOP_PASSES);

  /* Clean up.  The per-pass scratch arrays are not needed between
     invocations; reg_known_value etc. live until end_alias_analysis.  */
  free (new_reg_base_value);
  new_reg_base_value = 0;
  free (reg_seen);
  reg_seen = 0;
}
2345
2346 void
2347 end_alias_analysis ()
2348 {
2349 free (reg_known_value + FIRST_PSEUDO_REGISTER);
2350 reg_known_value = 0;
2351 reg_known_value_size = 0;
2352 free (reg_known_equiv_p + FIRST_PSEUDO_REGISTER);
2353 reg_known_equiv_p = 0;
2354 if (reg_base_value)
2355 {
2356 ggc_del_root (reg_base_value);
2357 free (reg_base_value);
2358 reg_base_value = 0;
2359 }
2360 reg_base_value_size = 0;
2361 if (alias_invariant)
2362 {
2363 free (alias_invariant);
2364 alias_invariant = 0;
2365 }
2366 }