cselib.c (expand_loc): Return sp, fp, hfp or cfa base reg right away if seen.
/* Common subexpression elimination library for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "emit-rtl.h"
#include "diagnostic-core.h"
#include "output.h"
#include "ggc.h"
#include "hashtab.h"
#include "tree-pass.h"
#include "cselib.h"
#include "params.h"
#include "alloc-pool.h"
#include "target.h"
#include "bitmap.h"

/* A list of cselib_val structures.  */
struct elt_list {
  struct elt_list *next;
  cselib_val *elt;
};

static bool cselib_record_memory;
static bool cselib_preserve_constants;
static int entry_and_rtx_equal_p (const void *, const void *);
static hashval_t get_value_hash (const void *);
static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
static void new_elt_loc_list (cselib_val *, rtx);
static void unchain_one_value (cselib_val *);
static void unchain_one_elt_list (struct elt_list **);
static void unchain_one_elt_loc_list (struct elt_loc_list **);
static int discard_useless_locs (void **, void *);
static int discard_useless_values (void **, void *);
static void remove_useless_values (void);
static int rtx_equal_for_cselib_1 (rtx, rtx, enum machine_mode);
static unsigned int cselib_hash_rtx (rtx, int, enum machine_mode);
static cselib_val *new_cselib_val (unsigned int, enum machine_mode, rtx);
static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
static cselib_val *cselib_lookup_mem (rtx, int);
static void cselib_invalidate_regno (unsigned int, enum machine_mode);
static void cselib_invalidate_mem (rtx);
static void cselib_record_set (rtx, cselib_val *, cselib_val *);
static void cselib_record_sets (rtx);

struct expand_value_data
{
  bitmap regs_active;
  cselib_expand_callback callback;
  void *callback_arg;
  bool dummy;
};

static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);

/* There are three ways in which cselib can look up an rtx:
   - for a REG, the reg_values table (which is indexed by regno) is used
   - for a MEM, we recursively look up its address and then follow the
     addr_list of that value
   - for everything else, we compute a hash value and go through the hash
     table.  Since different rtx's can still have the same hash value,
     this involves walking the table entries for a given value and comparing
     the locations of the entries with the rtx we are looking up.  */

/* A table that enables us to look up elts by their value.  */
static htab_t cselib_hash_table;

/* This is a global so we don't have to pass this through every function.
   It is used in new_elt_loc_list to set SETTING_INSN.  */
static rtx cselib_current_insn;

/* The unique id that the next create value will take.  */
static unsigned int next_uid;

/* The number of registers we had when the varrays were last resized.  */
static unsigned int cselib_nregs;

/* Count values without known locations, or with only locations that
   wouldn't have been known except for debug insns.  Whenever this
   grows too big, we remove these useless values from the table.
   Counting values with only debug values is a bit tricky.  We don't
   want to increment n_useless_values when we create a value for a
   debug insn, for this would get n_useless_values out of sync, but we
   want to increment it if all locs in the list that were ever
   referenced in nondebug insns are removed from the list.

   In the general case, once we do that, we'd have to stop accepting
   nondebug expressions in the loc list, to avoid having two values
   equivalent that, without debug insns, would have been made into
   separate values.  However, because debug insns never introduce
   equivalences themselves (no assignments), the only means for
   growing loc lists is through nondebug assignments.  If the locs
   also happen to be referenced in debug insns, it will work just fine.

   A consequence of this is that there's at most one debug-only loc in
   each loc list.  If we keep it in the first entry, testing whether
   we have a debug-only loc list takes O(1).

   Furthermore, since any additional entry in a loc list containing a
   debug loc would have to come from an assignment (nondebug) that
   references both the initial debug loc and the newly-equivalent loc,
   the initial debug loc would be promoted to a nondebug loc, and the
   loc list would not contain debug locs any more.

   So the only case we have to be careful with in order to keep
   n_useless_values in sync between debug and nondebug compilations is
   to avoid incrementing n_useless_values when removing the single loc
   from a value that turns out to not appear outside debug values.  We
   increment n_useless_debug_values instead, and leave such values
   alone until, for other reasons, we garbage-collect useless
   values.  */
static int n_useless_values;
static int n_useless_debug_values;

/* Count values whose locs have been taken exclusively from debug
   insns for the entire life of the value.  */
static int n_debug_values;

/* Number of useless values before we remove them from the hash table.  */
#define MAX_USELESS_VALUES 32

/* This table maps from register number to values.  It does not
   contain pointers to cselib_val structures, but rather elt_lists.
   The purpose is to be able to refer to the same register in
   different modes.  The first element of the list defines the mode in
   which the register was set; if the mode is unknown or the value is
   no longer valid in that mode, ELT will be NULL for the first
   element.  */
static struct elt_list **reg_values;
static unsigned int reg_values_size;
#define REG_VALUES(i) reg_values[i]

/* The largest number of hard regs used by any entry added to the
   REG_VALUES table.  Cleared on each cselib_clear_table() invocation.  */
static unsigned int max_value_regs;

/* Here the set of indices I with REG_VALUES(I) != 0 is saved.  This is used
   in cselib_clear_table() for fast emptying.  */
static unsigned int *used_regs;
static unsigned int n_used_regs;

/* We pass this to cselib_invalidate_mem to invalidate all of
   memory for a non-const call instruction.  */
static GTY(()) rtx callmem;

/* Set by discard_useless_locs if it deleted the last location of any
   value.  */
static int values_became_useless;

/* Used as stop element of the containing_mem list so we can check
   presence in the list by checking the next pointer.  */
static cselib_val dummy_val;

/* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
   that is constant through the whole function and should never be
   eliminated.  */
static cselib_val *cfa_base_preserved_val;
static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;

/* Used to list all values that contain memory reference.
   May or may not contain the useless values - the list is compacted
   each time memory is invalidated.  */
static cselib_val *first_containing_mem = &dummy_val;
static alloc_pool elt_loc_list_pool, elt_list_pool, cselib_val_pool, value_pool;

/* If nonnull, cselib will call this function before freeing useless
   VALUEs.  A VALUE is deemed useless if its "locs" field is null.  */
void (*cselib_discard_hook) (cselib_val *);

/* If nonnull, cselib will call this function before recording sets or
   even clobbering outputs of INSN.  All the recorded sets will be
   represented in the array sets[n_sets].  new_val_min can be used to
   tell whether values present in sets are introduced by this
   instruction.  */
void (*cselib_record_sets_hook) (rtx insn, struct cselib_set *sets,
				 int n_sets);

#define PRESERVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)

\f

/* Allocate a struct elt_list and fill in its two elements with the
   arguments.  */

static inline struct elt_list *
new_elt_list (struct elt_list *next, cselib_val *elt)
{
  struct elt_list *el;
  el = (struct elt_list *) pool_alloc (elt_list_pool);
  el->next = next;
  el->elt = elt;
  return el;
}
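
/* For illustration only (not part of cselib): new_elt_list is a plain
   cons-cell constructor over an alloc pool.  Modulo the pool, it is
   equivalent to this freestanding sketch, with xmalloc standing in
   for pool_alloc:

     struct elt_list *
     prepend (struct elt_list *next, cselib_val *elt)
     {
       struct elt_list *el = xmalloc (sizeof *el);
       el->next = next;     -- new cell points at the old head
       el->elt = elt;
       return el;           -- caller stores this as the new head
     }

   REG_VALUES (regno) is maintained this way: each value seen for a
   register is pushed onto the front of that register's list.  */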

/* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
   list.  */

static inline void
new_elt_loc_list (cselib_val *val, rtx loc)
{
  struct elt_loc_list *el, *next = val->locs;

  gcc_checking_assert (!next || !next->setting_insn
		       || !DEBUG_INSN_P (next->setting_insn)
		       || cselib_current_insn == next->setting_insn);

  /* If we're creating the first loc in a debug insn context, we've
     just created a debug value.  Count it.  */
  if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
    n_debug_values++;

  val = canonical_cselib_val (val);
  next = val->locs;

  if (GET_CODE (loc) == VALUE)
    {
      loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;

      gcc_checking_assert (PRESERVED_VALUE_P (loc)
			   == PRESERVED_VALUE_P (val->val_rtx));

      if (val->val_rtx == loc)
	return;
      else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
	{
	  /* Reverse the insertion.  */
	  new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
	  return;
	}

      gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);

      if (CSELIB_VAL_PTR (loc)->locs)
	{
	  /* Bring all locs from LOC to VAL.  */
	  for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
	    {
	      /* Adjust values that have LOC as canonical so that VAL
		 becomes their canonical.  */
	      if (el->loc && GET_CODE (el->loc) == VALUE)
		{
		  gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
				       == loc);
		  CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
		}
	    }
	  el->next = val->locs;
	  next = val->locs = CSELIB_VAL_PTR (loc)->locs;
	}

      if (CSELIB_VAL_PTR (loc)->addr_list)
	{
	  /* Bring in addr_list into canonical node.  */
	  struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
	  while (last->next)
	    last = last->next;
	  last->next = val->addr_list;
	  val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
	  CSELIB_VAL_PTR (loc)->addr_list = NULL;
	}

      if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
	  && val->next_containing_mem == NULL)
	{
	  /* Add VAL to the containing_mem list after LOC.  LOC will
	     be removed when we notice it doesn't contain any
	     MEMs.  */
	  val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
	  CSELIB_VAL_PTR (loc)->next_containing_mem = val;
	}

      /* Chain LOC back to VAL.  */
      el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
      el->loc = val->val_rtx;
      el->setting_insn = cselib_current_insn;
      el->next = NULL;
      CSELIB_VAL_PTR (loc)->locs = el;
    }

  el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
  el->loc = loc;
  el->setting_insn = cselib_current_insn;
  el->next = next;
  val->locs = el;
}

/* Promote loc L to a nondebug cselib_current_insn if L is marked as
   originating from a debug insn, maintaining the debug values
   count.  */

static inline void
promote_debug_loc (struct elt_loc_list *l)
{
  if (l->setting_insn && DEBUG_INSN_P (l->setting_insn)
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      n_debug_values--;
      l->setting_insn = cselib_current_insn;
      if (cselib_preserve_constants && l->next)
	{
	  gcc_assert (l->next->setting_insn
		      && DEBUG_INSN_P (l->next->setting_insn)
		      && !l->next->next);
	  l->next->setting_insn = cselib_current_insn;
	}
      else
	gcc_assert (!l->next);
    }
}

/* The elt_list at *PL is no longer needed.  Unchain it and free its
   storage.  */

static inline void
unchain_one_elt_list (struct elt_list **pl)
{
  struct elt_list *l = *pl;

  *pl = l->next;
  pool_free (elt_list_pool, l);
}

/* Likewise for elt_loc_lists.  */

static void
unchain_one_elt_loc_list (struct elt_loc_list **pl)
{
  struct elt_loc_list *l = *pl;

  *pl = l->next;
  pool_free (elt_loc_list_pool, l);
}

/* Likewise for cselib_vals.  This also frees the addr_list associated with
   V.  */

static void
unchain_one_value (cselib_val *v)
{
  while (v->addr_list)
    unchain_one_elt_list (&v->addr_list);

  pool_free (cselib_val_pool, v);
}

/* Remove all entries from the hash table.  Also used during
   initialization.  */

void
cselib_clear_table (void)
{
  cselib_reset_table (1);
}

/* Remove from hash table all VALUEs except constants
   and function invariants.  */

static int
preserve_only_constants (void **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = (cselib_val *)*x;
  struct elt_loc_list *l;

  if (v->locs != NULL
      && v->locs->next == NULL)
    {
      if (CONSTANT_P (v->locs->loc)
	  && (GET_CODE (v->locs->loc) != CONST
	      || !references_value_p (v->locs->loc, 0)))
	return 1;
      /* Although a debug expr may be bound to different expressions,
	 we can preserve it as if it were constant, to get unification
	 and proper merging within var-tracking.  */
      if (GET_CODE (v->locs->loc) == DEBUG_EXPR
	  || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
	  || GET_CODE (v->locs->loc) == ENTRY_VALUE
	  || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
	return 1;
      if (cfa_base_preserved_val)
	{
	  if (v == cfa_base_preserved_val)
	    return 1;
	  if (GET_CODE (v->locs->loc) == PLUS
	      && CONST_INT_P (XEXP (v->locs->loc, 1))
	      && XEXP (v->locs->loc, 0) == cfa_base_preserved_val->val_rtx)
	    return 1;
	}
    }

  /* Keep VALUE equivalences around.  */
  for (l = v->locs; l; l = l->next)
    if (GET_CODE (l->loc) == VALUE)
      return 1;

  htab_clear_slot (cselib_hash_table, x);
  return 1;
}

/* Remove all entries from the hash table, arranging for the next
   value to be numbered NUM.  */

void
cselib_reset_table (unsigned int num)
{
  unsigned int i;

  max_value_regs = 0;

  if (cfa_base_preserved_val)
    {
      unsigned int regno = cfa_base_preserved_regno;
      unsigned int new_used_regs = 0;
      for (i = 0; i < n_used_regs; i++)
	if (used_regs[i] == regno)
	  {
	    new_used_regs = 1;
	    continue;
	  }
	else
	  REG_VALUES (used_regs[i]) = 0;
      gcc_assert (new_used_regs == 1);
      n_used_regs = new_used_regs;
      used_regs[0] = regno;
      max_value_regs
	= hard_regno_nregs[regno][GET_MODE (cfa_base_preserved_val->locs->loc)];
    }
  else
    {
      for (i = 0; i < n_used_regs; i++)
	REG_VALUES (used_regs[i]) = 0;
      n_used_regs = 0;
    }

  if (cselib_preserve_constants)
    htab_traverse (cselib_hash_table, preserve_only_constants, NULL);
  else
    htab_empty (cselib_hash_table);

  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;

  next_uid = num;

  first_containing_mem = &dummy_val;
}

/* Return the number of the next value that will be generated.  */

unsigned int
cselib_get_next_uid (void)
{
  return next_uid;
}

/* See the documentation of cselib_find_slot below.  */
static enum machine_mode find_slot_memmode;

/* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
   INSERTing if requested.  When X is part of the address of a MEM,
   MEMMODE should specify the mode of the MEM.  While searching the
   table, MEMMODE is held in FIND_SLOT_MEMMODE, so that autoinc RTXs
   in X can be resolved.  */

static void **
cselib_find_slot (rtx x, hashval_t hash, enum insert_option insert,
		  enum machine_mode memmode)
{
  void **slot;
  find_slot_memmode = memmode;
  slot = htab_find_slot_with_hash (cselib_hash_table, x, hash, insert);
  find_slot_memmode = VOIDmode;
  return slot;
}

/* The equality test for our hash table.  The first argument ENTRY is a table
   element (i.e. a cselib_val), while the second arg X is an rtx.  We know
   that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
   CONST of an appropriate mode.  */

static int
entry_and_rtx_equal_p (const void *entry, const void *x_arg)
{
  struct elt_loc_list *l;
  const cselib_val *const v = (const cselib_val *) entry;
  rtx x = CONST_CAST_RTX ((const_rtx)x_arg);
  enum machine_mode mode = GET_MODE (x);

  gcc_assert (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
	      && (mode != VOIDmode || GET_CODE (x) != CONST_DOUBLE));

  if (mode != GET_MODE (v->val_rtx))
    return 0;

  /* Unwrap X if necessary.  */
  if (GET_CODE (x) == CONST
      && (CONST_INT_P (XEXP (x, 0))
	  || GET_CODE (XEXP (x, 0)) == CONST_FIXED
	  || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
    x = XEXP (x, 0);

  /* We don't guarantee that distinct rtx's have different hash values,
     so we need to do a comparison.  */
  for (l = v->locs; l; l = l->next)
    if (rtx_equal_for_cselib_1 (l->loc, x, find_slot_memmode))
      {
	promote_debug_loc (l);
	return 1;
      }

  return 0;
}

/* The hash function for our hash table.  The value is always computed with
   cselib_hash_rtx when adding an element; this function just extracts the
   hash value from a cselib_val structure.  */

static hashval_t
get_value_hash (const void *entry)
{
  const cselib_val *const v = (const cselib_val *) entry;
  return v->hash;
}

/* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
   only return true for values which point to a cselib_val whose value
   element has been set to zero, which implies the cselib_val will be
   removed.  */

int
references_value_p (const_rtx x, int only_useless)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  if (GET_CODE (x) == VALUE
      && (! only_useless ||
	  (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
    return 1;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
	return 1;
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (references_value_p (XVECEXP (x, i, j), only_useless))
	    return 1;
    }

  return 0;
}

/* For all locations found in X, delete locations that reference useless
   values (i.e. values without any location).  Called through
   htab_traverse.  */

static int
discard_useless_locs (void **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = (cselib_val *)*x;
  struct elt_loc_list **p = &v->locs;
  bool had_locs = v->locs != NULL;
  rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;

  while (*p)
    {
      if (references_value_p ((*p)->loc, 1))
	unchain_one_elt_loc_list (p);
      else
	p = &(*p)->next;
    }

  if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
	n_useless_debug_values++;
      else
	n_useless_values++;
      values_became_useless = 1;
    }
  return 1;
}

/* If X is a value with no locations, remove it from the hashtable.  */

static int
discard_useless_values (void **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = (cselib_val *)*x;

  if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
    {
      if (cselib_discard_hook)
	cselib_discard_hook (v);

      CSELIB_VAL_PTR (v->val_rtx) = NULL;
      htab_clear_slot (cselib_hash_table, x);
      unchain_one_value (v);
      n_useless_values--;
    }

  return 1;
}

/* Clean out useless values (i.e. those which no longer have locations
   associated with them) from the hash table.  */

static void
remove_useless_values (void)
{
  cselib_val **p, *v;

  /* First pass: eliminate locations that reference useless values.
     That in turn can make more values useless.  */
  do
    {
      values_became_useless = 0;
      htab_traverse (cselib_hash_table, discard_useless_locs, 0);
    }
  while (values_became_useless);

  /* Second pass: actually remove the values.  */

  p = &first_containing_mem;
  for (v = *p; v != &dummy_val; v = v->next_containing_mem)
    if (v->locs && v == canonical_cselib_val (v))
      {
	*p = v;
	p = &(*p)->next_containing_mem;
      }
  *p = &dummy_val;

  n_useless_values += n_useless_debug_values;
  n_debug_values -= n_useless_debug_values;
  n_useless_debug_values = 0;

  htab_traverse (cselib_hash_table, discard_useless_values, 0);

  gcc_assert (!n_useless_values);
}
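
/* For illustration: the first pass above must iterate because
   uselessness propagates.  Suppose V3's only loc is
   (plus:SI (value V1) (const_int 4)), V1's only loc is
   (plus:SI (value V2) (const_int 8)), and V2 just lost its last
   location to invalidation.  The first discard_useless_locs
   traversal drops V1's loc (it references the now-useless V2), the
   second drops V3's loc (V1 has become useless too), and a third
   traversal deletes nothing, so the do-while loop stops and
   discard_useless_values can free all three in one sweep.  */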

/* Arrange for a value to not be removed from the hash table even if
   it becomes useless.  */

void
cselib_preserve_value (cselib_val *v)
{
  PRESERVED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is preserved.  */

bool
cselib_preserved_value_p (cselib_val *v)
{
  return PRESERVED_VALUE_P (v->val_rtx);
}

/* Arrange for a REG value to be assumed constant through the whole function,
   never invalidated and preserved across cselib_reset_table calls.  */

void
cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
{
  if (cselib_preserve_constants
      && v->locs
      && REG_P (v->locs->loc))
    {
      cfa_base_preserved_val = v;
      cfa_base_preserved_regno = regno;
    }
}

/* Clean all non-constant expressions in the hash table, but retain
   their values.  */

void
cselib_preserve_only_values (void)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    cselib_invalidate_regno (i, reg_raw_mode[i]);

  cselib_invalidate_mem (callmem);

  remove_useless_values ();

  gcc_assert (first_containing_mem == &dummy_val);
}

/* Return the mode in which a register was last set.  If X is not a
   register, return its mode.  If the mode in which the register was
   set is not known, or the value was already clobbered, return
   VOIDmode.  */

enum machine_mode
cselib_reg_set_mode (const_rtx x)
{
  if (!REG_P (x))
    return GET_MODE (x);

  if (REG_VALUES (REGNO (x)) == NULL
      || REG_VALUES (REGNO (x))->elt == NULL)
    return VOIDmode;

  return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
}

/* Return nonzero if we can prove that X and Y contain the same value, taking
   our gathered information into account.  */

int
rtx_equal_for_cselib_p (rtx x, rtx y)
{
  return rtx_equal_for_cselib_1 (x, y, VOIDmode);
}

/* If x is a PLUS or an autoinc operation, expand the operation,
   storing the offset, if any, in *OFF.  */

static rtx
autoinc_split (rtx x, rtx *off, enum machine_mode memmode)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      *off = XEXP (x, 1);
      return XEXP (x, 0);

    case PRE_DEC:
      if (memmode == VOIDmode)
	return x;

      *off = GEN_INT (-GET_MODE_SIZE (memmode));
      return XEXP (x, 0);
      break;

    case PRE_INC:
      if (memmode == VOIDmode)
	return x;

      *off = GEN_INT (GET_MODE_SIZE (memmode));
      return XEXP (x, 0);

    case PRE_MODIFY:
      return XEXP (x, 1);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return XEXP (x, 0);

    default:
      return x;
    }
}
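
/* For illustration: on a target where SImode is 4 bytes, the SImode
   MEM address (pre_dec (reg 6)) splits into base (reg 6) and
   *off = (const_int -4), the same shape that
   (plus (reg 6) (const_int -4)) splits into.  That is what lets
   rtx_equal_for_cselib_1 identify (mem:SI (pre_dec (reg 6))) with
   (mem:SI (plus (reg 6) (const_int -4))), and cselib_hash_rtx below
   hashes the two forms alike.  With MEMMODE == VOIDmode the autoinc
   is returned unchanged, since its size cannot be determined.  */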

/* Return nonzero if we can prove that X and Y contain the same value,
   taking our gathered information into account.  MEMMODE holds the
   mode of the enclosing MEM, if any, as required to deal with autoinc
   addressing modes.  If X and Y are not (known to be) part of
   addresses, MEMMODE should be VOIDmode.  */

static int
rtx_equal_for_cselib_1 (rtx x, rtx y, enum machine_mode memmode)
{
  enum rtx_code code;
  const char *fmt;
  int i;

  if (REG_P (x) || MEM_P (x))
    {
      cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);

      if (e)
	x = e->val_rtx;
    }

  if (REG_P (y) || MEM_P (y))
    {
      cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);

      if (e)
	y = e->val_rtx;
    }

  if (x == y)
    return 1;

  if (GET_CODE (x) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
      struct elt_loc_list *l;

      if (GET_CODE (y) == VALUE)
	return e == canonical_cselib_val (CSELIB_VAL_PTR (y));

      for (l = e->locs; l; l = l->next)
	{
	  rtx t = l->loc;

	  /* Avoid infinite recursion.  We know we have the canonical
	     value, so we can just skip any values in the equivalence
	     list.  */
	  if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
	    continue;
	  else if (rtx_equal_for_cselib_1 (t, y, memmode))
	    return 1;
	}

      return 0;
    }
  else if (GET_CODE (y) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
      struct elt_loc_list *l;

      for (l = e->locs; l; l = l->next)
	{
	  rtx t = l->loc;

	  if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
	    continue;
	  else if (rtx_equal_for_cselib_1 (x, t, memmode))
	    return 1;
	}

      return 0;
    }

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  if (GET_CODE (x) != GET_CODE (y))
    {
      rtx xorig = x, yorig = y;
      rtx xoff = NULL, yoff = NULL;

      x = autoinc_split (x, &xoff, memmode);
      y = autoinc_split (y, &yoff, memmode);

      if (!xoff != !yoff)
	return 0;

      if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode))
	return 0;

      /* Don't recurse if nothing changed.  */
      if (x != xorig || y != yorig)
	return rtx_equal_for_cselib_1 (x, y, memmode);

      return 0;
    }

  /* These won't be handled correctly by the code below.  */
  switch (GET_CODE (x))
    {
    case CONST_DOUBLE:
    case CONST_FIXED:
    case DEBUG_EXPR:
      return 0;

    case DEBUG_IMPLICIT_PTR:
      return DEBUG_IMPLICIT_PTR_DECL (x)
	     == DEBUG_IMPLICIT_PTR_DECL (y);

    case DEBUG_PARAMETER_REF:
      return DEBUG_PARAMETER_REF_DECL (x)
	     == DEBUG_PARAMETER_REF_DECL (y);

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, so it is undesirable to
	 use rtx_equal_for_cselib_1 to compare the operands.  */
      return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case MEM:
      /* We have to compare any autoinc operations in the addresses
	 using this MEM's mode.  */
      return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x));

    default:
      break;
    }

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'n':
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
					  XVECEXP (y, i, j), memmode))
	      return 0;
	  break;

	case 'e':
	  if (i == 1
	      && targetm.commutative_p (x, UNKNOWN)
	      && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode)
	      && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode))
	    return 1;
	  if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode))
	    return 0;
	  break;

	case 'S':
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	case 't':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}

/* We need to pass down the mode of constants through the hash table
   functions.  For that purpose, wrap them in a CONST of the appropriate
   mode.  */
static rtx
wrap_constant (enum machine_mode mode, rtx x)
{
  if (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
      && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
    return x;
  gcc_assert (mode != VOIDmode);
  return gen_rtx_CONST (mode, x);
}
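
/* For illustration: looking up (const_int 42) in SImode hashes and
   compares the wrapped form (const:SI (const_int 42)), because a bare
   CONST_INT carries no mode of its own.  entry_and_rtx_equal_p peels
   such a CONST off again before comparing locations, so the wrapper
   exists only to make hash-table lookups mode-aware; it never ends up
   in a loc list.  */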

/* Hash an rtx.  Return 0 if we couldn't hash the rtx.
   For registers and memory locations, we look up their cselib_val structure
   and return its VALUE element.
   Possible reasons for returning 0 are: the object is volatile, or we
   couldn't find a register or memory location in the table and CREATE is
   zero.  If CREATE is nonzero, table elts are created for regs and mem.
   N.B. this hash function returns the same hash value for RTXes that
   differ only in the order of operands, thus it is suitable for comparisons
   that take commutativity into account.
   If we wanted to also support associative rules, we'd have to use a different
   strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) .
   MEMMODE indicates the mode of an enclosing MEM, and it's only
   used to compute autoinc values.
   We used to have a MODE argument for hashing for CONST_INTs, but that
   didn't make sense, since it caused spurious hash differences between
	(set (reg:SI 1) (const_int))
	(plus:SI (reg:SI 2) (reg:SI 1))
   and
	(plus:SI (reg:SI 2) (const_int))
   If the mode is important in any context, it must be checked specifically
   in a comparison anyway, since relying on hash differences is unsafe.  */

static unsigned int
cselib_hash_rtx (rtx x, int create, enum machine_mode memmode)
{
  cselib_val *e;
  int i, j;
  enum rtx_code code;
  const char *fmt;
  unsigned int hash = 0;

  code = GET_CODE (x);
  hash += (unsigned) code + (unsigned) GET_MODE (x);

  switch (code)
    {
    case VALUE:
      e = CSELIB_VAL_PTR (x);
      return e->hash;

    case MEM:
    case REG:
      e = cselib_lookup (x, GET_MODE (x), create, memmode);
      if (! e)
	return 0;

      return e->hash;

    case DEBUG_EXPR:
      hash += ((unsigned) DEBUG_EXPR << 7)
	      + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_EXPR;

    case DEBUG_IMPLICIT_PTR:
      hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
	      + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;

    case DEBUG_PARAMETER_REF:
      hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
	      + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, thus try to avoid
	 recursing on argument if ENTRY_VALUE is one of the
	 forms emitted by expand_debug_expr, otherwise
	 ENTRY_VALUE hash would depend on the current value
	 in some register or memory.  */
      if (REG_P (ENTRY_VALUE_EXP (x)))
	hash += (unsigned int) REG
		+ (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
		+ (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
      else if (MEM_P (ENTRY_VALUE_EXP (x))
	       && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
	hash += (unsigned int) MEM
		+ (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
		+ (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
      else
	hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
      return hash ? hash : (unsigned int) ENTRY_VALUE;

    case CONST_INT:
      hash += ((unsigned) CONST_INT << 7) + INTVAL (x);
      return hash ? hash : (unsigned int) CONST_INT;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      else
	hash += ((unsigned) CONST_DOUBLE_LOW (x)
		 + (unsigned) CONST_DOUBLE_HIGH (x));
      return hash ? hash : (unsigned int) CONST_DOUBLE;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash ? hash : (unsigned int) CONST_FIXED;

    case CONST_VECTOR:
      {
	int units;
	rtx elt;

	units = CONST_VECTOR_NUNITS (x);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ELT (x, i);
	    hash += cselib_hash_rtx (elt, 0, memmode);
	  }

	return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
	       + CODE_LABEL_NUMBER (XEXP (x, 0)));
      return hash ? hash : (unsigned int) LABEL_REF;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	unsigned int h = 0;
	const unsigned char *p = (const unsigned char *) XSTR (x, 0);

	while (*p)
	  h += (h << 7) + *p++; /* ??? revisit */

	hash += ((unsigned int) SYMBOL_REF << 7) + h;
	return hash ? hash : (unsigned int) SYMBOL_REF;
      }

    case PRE_DEC:
    case PRE_INC:
      /* We can't compute these without knowing the MEM mode.  */
      gcc_assert (memmode != VOIDmode);
      i = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
	i = -i;
      /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
	 like (mem:MEMMODE (plus (reg) (const_int I))).  */
      hash += (unsigned) PLUS - (unsigned)code
	+ cselib_hash_rtx (XEXP (x, 0), create, memmode)
	+ cselib_hash_rtx (GEN_INT (i), create, memmode);
      return hash ? hash : 1 + (unsigned) PLUS;

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 1), create, memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 0), create, memmode);

    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	return 0;

      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  {
	    rtx tem = XEXP (x, i);
	    unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);

	    if (tem_hash == 0)
	      return 0;

	    hash += tem_hash;
	  }
	  break;
	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      unsigned int tem_hash
		= cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);

	      if (tem_hash == 0)
		return 0;

	      hash += tem_hash;
	    }
	  break;

	case 's':
	  {
	    const unsigned char *p = (const unsigned char *) XSTR (x, i);

	    if (p)
	      while (*p)
		hash += *p++;
	    break;
	  }

	case 'i':
	  hash += XINT (x, i);
	  break;

	case '0':
	case 't':
	  /* unused */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  return hash ? hash : 1 + (unsigned int) GET_CODE (x);
}
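
/* For illustration: subexpression hashes are folded in with '+', so
   the hash is insensitive to operand order, e.g.

     cselib_hash_rtx ((plus:SI (reg:SI 1) (reg:SI 2)), ...)
       == cselib_hash_rtx ((plus:SI (reg:SI 2) (reg:SI 1)), ...)

   This pairs with the 'e' case in rtx_equal_for_cselib_1, which also
   tries the swapped operand order for commutative codes: equal hashes
   put both forms in the same bucket, and the deeper equality test
   then identifies them.  */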

/* Create a new value structure for VALUE and initialize it.  The mode of the
   value is MODE.  */

static inline cselib_val *
new_cselib_val (unsigned int hash, enum machine_mode mode, rtx x)
{
  cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool);

  gcc_assert (hash);
  gcc_assert (next_uid);

  e->hash = hash;
  e->uid = next_uid++;
  /* We use an alloc pool to allocate this RTL construct because it
     accounts for about 8% of the overall memory usage.  We know
     precisely when we can have VALUE RTXen (when cselib is active)
     so we don't need to put them in garbage collected memory.
     ??? Why should a VALUE be an RTX in the first place?  */
  e->val_rtx = (rtx) pool_alloc (value_pool);
  memset (e->val_rtx, 0, RTX_HDR_SIZE);
  PUT_CODE (e->val_rtx, VALUE);
  PUT_MODE (e->val_rtx, mode);
  CSELIB_VAL_PTR (e->val_rtx) = e;
  e->addr_list = 0;
  e->locs = 0;
  e->next_containing_mem = 0;

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
      if (flag_dump_noaddr || flag_dump_unnumbered)
	fputs ("# ", dump_file);
      else
	fprintf (dump_file, "%p ", (void*)e);
      print_rtl_single (dump_file, x);
      fputc ('\n', dump_file);
    }

  return e;
}

/* ADDR_ELT is a value that is used as address.  MEM_ELT is the value that
   contains the data at this address.  X is a MEM that represents the
   value.  Update the two value structures to represent this situation.  */

static void
add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
{
  struct elt_loc_list *l;

  addr_elt = canonical_cselib_val (addr_elt);
  mem_elt = canonical_cselib_val (mem_elt);

  /* Avoid duplicates.  */
  for (l = mem_elt->locs; l; l = l->next)
    if (MEM_P (l->loc)
	&& CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
      {
	promote_debug_loc (l);
	return;
      }

  addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
  new_elt_loc_list (mem_elt,
		    replace_equiv_address_nv (x, addr_elt->val_rtx));
  if (mem_elt->next_containing_mem == NULL)
    {
      mem_elt->next_containing_mem = first_containing_mem;
      first_containing_mem = mem_elt;
    }
}

/* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
   If CREATE, make a new one if we haven't seen it before.  */

static cselib_val *
cselib_lookup_mem (rtx x, int create)
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode addr_mode;
  void **slot;
  cselib_val *addr;
  cselib_val *mem_elt;
  struct elt_list *l;

  if (MEM_VOLATILE_P (x) || mode == BLKmode
      || !cselib_record_memory
      || (FLOAT_MODE_P (mode) && flag_float_store))
    return 0;

  addr_mode = GET_MODE (XEXP (x, 0));
  if (addr_mode == VOIDmode)
    addr_mode = Pmode;

  /* Look up the value for the address.  */
  addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
  if (! addr)
    return 0;

  addr = canonical_cselib_val (addr);
  /* Find a value that describes a value of our mode at that address.  */
  for (l = addr->addr_list; l; l = l->next)
    if (GET_MODE (l->elt->val_rtx) == mode)
      {
	promote_debug_loc (l->elt->locs);
	return l->elt;
      }

  if (! create)
    return 0;

  mem_elt = new_cselib_val (next_uid, mode, x);
  add_mem_for_addr (addr, mem_elt, x);
  slot = cselib_find_slot (wrap_constant (mode, x), mem_elt->hash,
			   INSERT, mode);
  *slot = mem_elt;
  return mem_elt;
}

/* Search through the possible substitutions in P.  We prefer a non-reg
   substitution because this allows us to expand the tree further.  If
   we find just a reg, take the lowest regno.  There may be several
   non-reg results; we just take the first one because they will all
   expand to the same place.  */

static rtx
expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
	    int max_depth)
{
  rtx reg_result = NULL;
  unsigned int regno = UINT_MAX;
  struct elt_loc_list *p_in = p;

  for (; p; p = p->next)
    {
      /* Return these right away to avoid returning stack pointer based
	 expressions for frame pointer and vice versa, which is something
	 that would confuse DSE.  See the comment in cselib_expand_value_rtx_1
	 for more details.  */
      if (REG_P (p->loc)
	  && (REGNO (p->loc) == STACK_POINTER_REGNUM
	      || REGNO (p->loc) == FRAME_POINTER_REGNUM
	      || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
	      || REGNO (p->loc) == cfa_base_preserved_regno))
	return p->loc;
      /* Avoid infinite recursion trying to expand a reg into the
	 same reg.  */
      if ((REG_P (p->loc))
	  && (REGNO (p->loc) < regno)
	  && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
	{
	  reg_result = p->loc;
	  regno = REGNO (p->loc);
	}
      /* Avoid infinite recursion and do not try to expand the
	 value.  */
      else if (GET_CODE (p->loc) == VALUE
	       && CSELIB_VAL_PTR (p->loc)->locs == p_in)
	continue;
      else if (!REG_P (p->loc))
	{
	  rtx result, note;
	  if (dump_file && (dump_flags & TDF_CSELIB))
	    {
	      print_inline_rtx (dump_file, p->loc, 0);
	      fprintf (dump_file, "\n");
	    }
	  if (GET_CODE (p->loc) == LO_SUM
	      && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
	      && p->setting_insn
	      && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
	      && XEXP (note, 0) == XEXP (p->loc, 1))
	    return XEXP (p->loc, 1);
	  result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
	  if (result)
	    return result;
	}
    }

  if (regno != UINT_MAX)
    {
      rtx result;
      if (dump_file && (dump_flags & TDF_CSELIB))
	fprintf (dump_file, "r%d\n", regno);

      result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
      if (result)
	return result;
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      if (reg_result)
	{
	  print_inline_rtx (dump_file, reg_result, 0);
	  fprintf (dump_file, "\n");
	}
      else
	fprintf (dump_file, "NULL\n");
    }
  return reg_result;
}
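
/* For illustration (this is what the early return above is for):
   after elimination a frame slot's address may be recorded both ways,
   e.g. locs = { (reg fp), (plus (reg sp) (const_int 16)) }.  Handing
   a caller the sp-based form when it tracked the slot through the
   frame pointer (or vice versa) would make dse classify the same
   store under two different bases; returning the special base
   register itself as soon as it is seen avoids that, as described in
   cselib_expand_value_rtx_1 below.  */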

/* Forward substitute and expand an expression out to its roots.
   This is the opposite of common subexpression elimination.  Because
   local value numbering is such a weak optimization, the expanded
   expression is pretty much unique (not from a pointer-equality point
   of view but from a tree-shape point of view).

   This function returns NULL if the expansion fails.  The expansion
   will fail if there is no value number for one of the operands or if
   one of the operands has been overwritten between the current insn
   and the beginning of the basic block.  For instance x has no
   expansion in:

   r1 <- r1 + 3
   x <- r1 + 8

   REGS_ACTIVE is a scratch bitmap that should be clear when passed
   in.  It is clear on return.  */

rtx
cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = NULL;
  evd.callback_arg = NULL;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}
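
/* For illustration, a hypothetical caller modeled on dse (not code
   from this file).  REGS_ACTIVE only needs to be empty going in, and
   comes back empty:

     bitmap regs_active = BITMAP_ALLOC (NULL);
     rtx exp = cselib_expand_value_rtx (addr, regs_active, 5);
     if (exp != NULL_RTX)
       {
	 ... use the reg/mem-rooted expansion of ADDR ...
       }
     BITMAP_FREE (regs_active);

   A small max_depth such as 5 keeps address expansion cheap;
   cselib_expand_value_rtx_1 gives up and returns NULL once the
   depth budget is exhausted.  */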

/* Same as cselib_expand_value_rtx, but using a callback to try to
   resolve some expressions.  The CB function should return ORIG if it
   can't or does not want to deal with a certain RTX.  Any other
   return value, including NULL, will be used as the expansion for
   VALUE, without any further changes.  */

rtx
cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
			    cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}

/* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
   or simplified.  Useful to find out whether cselib_expand_value_rtx_cb
   would return NULL or non-NULL, without allocating new rtx.  */

bool
cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
				  cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = true;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
}

/* Internal implementation of cselib_expand_value_rtx and
   cselib_expand_value_rtx_cb.  */

static rtx
cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
			   int max_depth)
{
  rtx copy, scopy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;
  enum machine_mode mode;

  code = GET_CODE (orig);

  /* In the context of dse, if we end up expanding into a huge tree, we
     will not have a useful address, so we might as well just give up
     quickly.  */
  if (max_depth <= 0)
    return NULL;

  switch (code)
    {
    case REG:
      {
	struct elt_list *l = REG_VALUES (REGNO (orig));

	if (l && l->elt == NULL)
	  l = l->next;
	for (; l; l = l->next)
	  if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
	    {
	      rtx result;
	      unsigned regno = REGNO (orig);

	      /* The only thing that we are not willing to do (this
		 is a requirement of dse; if other potential uses need
		 this function we should add a parm to control it) is
		 that we will not substitute the STACK_POINTER_REGNUM,
		 FRAME_POINTER or the HARD_FRAME_POINTER.

		 These expansions confuse the code that notices that
		 stores into the frame go dead at the end of the
		 function and that the frame is not affected by calls
		 to subroutines.  If you allow the
		 STACK_POINTER_REGNUM substitution, then dse will
		 think that parameter pushing also goes dead which is
		 wrong.  If you allow the FRAME_POINTER or the
		 HARD_FRAME_POINTER then you lose the opportunity to
		 make the frame assumptions.  */
	      if (regno == STACK_POINTER_REGNUM
		  || regno == FRAME_POINTER_REGNUM
		  || regno == HARD_FRAME_POINTER_REGNUM
		  || regno == cfa_base_preserved_regno)
		return orig;

	      bitmap_set_bit (evd->regs_active, regno);

	      if (dump_file && (dump_flags & TDF_CSELIB))
		fprintf (dump_file, "expanding: r%d into: ", regno);

	      result = expand_loc (l->elt->locs, evd, max_depth);
	      bitmap_clear_bit (evd->regs_active, regno);

	      if (result)
		return result;
	      else
		return orig;
	    }
      }

    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCHes must be shared because they represent distinct
	 values.  */
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
	return orig;
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

    case SUBREG:
      {
	rtx subreg;

	if (evd->callback)
	  {
	    subreg = evd->callback (orig, evd->regs_active, max_depth,
				    evd->callback_arg);
	    if (subreg != orig)
	      return subreg;
	  }

	subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
					    max_depth - 1);
	if (!subreg)
	  return NULL;
	scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
				     GET_MODE (SUBREG_REG (orig)),
				     SUBREG_BYTE (orig));
	if (scopy == NULL
	    || (GET_CODE (scopy) == SUBREG
		&& !REG_P (SUBREG_REG (scopy))
		&& !MEM_P (SUBREG_REG (scopy))))
	  return NULL;

	return scopy;
      }

    case VALUE:
      {
	rtx result;

	if (dump_file && (dump_flags & TDF_CSELIB))
	  {
	    fputs ("\nexpanding ", dump_file);
	    print_rtl_single (dump_file, orig);
	    fputs (" into...", dump_file);
	  }

	if (evd->callback)
	  {
	    result = evd->callback (orig, evd->regs_active, max_depth,
				    evd->callback_arg);

	    if (result != orig)
	      return result;
	  }

	result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
	return result;
      }

    case DEBUG_EXPR:
      if (evd->callback)
	return evd->callback (orig, evd->regs_active, max_depth,
			      evd->callback_arg);
      return orig;

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  if (evd->dummy)
    copy = NULL;
  else
    copy = shallow_copy_rtx (orig);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  {
	    rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
						    max_depth - 1);
	    if (!result)
	      return NULL;
	    if (copy)
	      XEXP (copy, i) = result;
	  }
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) != NULL)
	  {
	    if (copy)
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (orig, i); j++)
	      {
		rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
							evd, max_depth - 1);
		if (!result)
		  return NULL;
		if (copy)
		  XVECEXP (copy, i, j) = result;
	      }
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (evd->dummy)
    return orig;

  mode = GET_MODE (copy);
  /* If an operand has been simplified into CONST_INT, which doesn't
     have a mode and whose mode isn't derivable from the whole rtx's
     mode, try simplify_*_operation first with the mode taken from the
     original's operand, and as a fallback wrap the CONST_INT into
     gen_rtx_CONST.  */
  scopy = copy;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
	{
	  scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
					    GET_MODE (XEXP (orig, 0)));
	  if (scopy)
	    return scopy;
	}
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      /* These expressions can derive operand modes from the whole rtx's mode.  */
      break;
    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
	{
	  scopy = simplify_ternary_operation (code, mode,
					      GET_MODE (XEXP (orig, 0)),
					      XEXP (copy, 0), XEXP (copy, 1),
					      XEXP (copy, 2));
	  if (scopy)
	    return scopy;
	}
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (copy, 1)) == VOIDmode
	  && (GET_MODE (XEXP (orig, 0)) != VOIDmode
	      || GET_MODE (XEXP (orig, 1)) != VOIDmode))
	{
	  scopy = simplify_relational_operation (code, mode,
						 (GET_MODE (XEXP (orig, 0))
						  != VOIDmode)
						 ? GET_MODE (XEXP (orig, 0))
						 : GET_MODE (XEXP (orig, 1)),
						 XEXP (copy, 0),
						 XEXP (copy, 1));
	  if (scopy)
	    return scopy;
	}
      break;
    default:
      break;
    }
  scopy = simplify_rtx (copy);
  if (scopy)
    return scopy;
  return copy;
}

/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
   with VALUE expressions.  This way, it becomes independent of changes
   to registers and memory.
   X isn't actually modified; if modifications are needed, new rtl is
   allocated.  However, the return value can share rtl with X.
   If X is within a MEM, MEMMODE must be the mode of the MEM.  */

rtx
cselib_subst_to_values (rtx x, enum machine_mode memmode)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  cselib_val *e;
  struct elt_list *l;
  rtx copy = x;
  int i;

  switch (code)
    {
    case REG:
      l = REG_VALUES (REGNO (x));
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
	  return l->elt->val_rtx;

      gcc_unreachable ();

    case MEM:
      e = cselib_lookup_mem (x, 0);
      /* This used to happen for autoincrements, but we deal with them
	 properly now.  Remove the if stmt for the next release.  */
      if (! e)
	{
	  /* Assign a value that doesn't match any other.  */
	  e = new_cselib_val (next_uid, GET_MODE (x), x);
	}
      return e->val_rtx;

    case ENTRY_VALUE:
      e = cselib_lookup (x, GET_MODE (x), 0, memmode);
      if (! e)
	break;
      return e->val_rtx;

    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST_INT:
    case CONST_FIXED:
      return x;

    case PRE_DEC:
    case PRE_INC:
      gcc_assert (memmode != VOIDmode);
      i = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
	i = -i;
      return cselib_subst_to_values (plus_constant (XEXP (x, 0), i),
				     memmode);

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 1), memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 0), memmode);

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx t = cselib_subst_to_values (XEXP (x, i), memmode);

	  if (t != XEXP (x, i))
	    {
	      if (x == copy)
		copy = shallow_copy_rtx (x);
	      XEXP (copy, i) = t;
	    }
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);

	      if (t != XVECEXP (x, i, j))
		{
		  if (XVEC (x, i) == XVEC (copy, i))
		    {
		      if (x == copy)
			copy = shallow_copy_rtx (x);
		      XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
		    }
		  XVECEXP (copy, i, j) = t;
		}
	    }
	}
    }

  return copy;
}
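
/* For illustration: if (reg:SI 1) currently maps to VALUE V1, then

     cselib_subst_to_values ((plus:SI (reg:SI 1) (const_int 8)),
			     VOIDmode)

   returns (plus:SI (value:SI V1) (const_int 8)).  Since only VALUEs
   remain, the result stays meaningful after reg 1 is overwritten.
   Note the copy-on-write above: X itself is never modified, and the
   spine is copied with shallow_copy_rtx only where a child actually
   changed.  */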

/* Look up the rtl expression X in our tables and return the value it
   has.  If CREATE is zero, we return NULL if we don't know the value.
   Otherwise, we create a new one if possible, using mode MODE if X
   doesn't have a mode (i.e. because it's a constant).  When X is part
   of an address, MEMMODE should be the mode of the enclosing MEM if
   we're tracking autoinc expressions.  */

static cselib_val *
cselib_lookup_1 (rtx x, enum machine_mode mode,
		 int create, enum machine_mode memmode)
{
  void **slot;
  cselib_val *e;
  unsigned int hashval;

  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  if (GET_CODE (x) == VALUE)
    return CSELIB_VAL_PTR (x);

  if (REG_P (x))
    {
      struct elt_list *l;
      unsigned int i = REGNO (x);

      l = REG_VALUES (i);
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (mode == GET_MODE (l->elt->val_rtx))
	  {
	    promote_debug_loc (l->elt->locs);
	    return l->elt;
	  }

      if (! create)
	return 0;

      if (i < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = hard_regno_nregs[i][mode];

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      e = new_cselib_val (next_uid, GET_MODE (x), x);
      new_elt_loc_list (e, x);
      if (REG_VALUES (i) == 0)
	{
	  /* Maintain the invariant that the first entry of
	     REG_VALUES, if present, must be the value used to set the
	     register, or NULL.  */
	  used_regs[n_used_regs++] = i;
	  REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
	}
      else if (cselib_preserve_constants
	       && GET_MODE_CLASS (mode) == MODE_INT)
	{
1968 /* During var-tracking, try harder to find equivalences
1969 for SUBREGs. If a setter sets say a DImode register
1970 and user uses that register only in SImode, add a lowpart
1971 subreg location. */
1972 struct elt_list *lwider = NULL;
1973 l = REG_VALUES (i);
1974 if (l && l->elt == NULL)
1975 l = l->next;
1976 for (; l; l = l->next)
1977 if (GET_MODE_CLASS (GET_MODE (l->elt->val_rtx)) == MODE_INT
1978 && GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
1979 > GET_MODE_SIZE (mode)
1980 && (lwider == NULL
1981 || GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
1982 < GET_MODE_SIZE (GET_MODE (lwider->elt->val_rtx))))
1983 {
1984 struct elt_loc_list *el;
1985 if (i < FIRST_PSEUDO_REGISTER
1986 && hard_regno_nregs[i][GET_MODE (l->elt->val_rtx)] != 1)
1987 continue;
1988 for (el = l->elt->locs; el; el = el->next)
1989 if (!REG_P (el->loc))
1990 break;
1991 if (el)
1992 lwider = l;
1993 }
1994 if (lwider)
1995 {
1996 rtx sub = lowpart_subreg (mode, lwider->elt->val_rtx,
1997 GET_MODE (lwider->elt->val_rtx));
1998 if (sub)
1999 new_elt_loc_list (e, sub);
2000 }
2001 }
2002 REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
2003 slot = cselib_find_slot (x, e->hash, INSERT, memmode);
2004 *slot = e;
2005 return e;
2006 }
2007
2008 if (MEM_P (x))
2009 return cselib_lookup_mem (x, create);
2010
2011 hashval = cselib_hash_rtx (x, create, memmode);
2012 /* Can't even create if hashing is not possible. */
2013 if (! hashval)
2014 return 0;
2015
2016 slot = cselib_find_slot (wrap_constant (mode, x), hashval,
2017 create ? INSERT : NO_INSERT, memmode);
2018 if (slot == 0)
2019 return 0;
2020
2021 e = (cselib_val *) *slot;
2022 if (e)
2023 return e;
2024
2025 e = new_cselib_val (hashval, mode, x);
2026
2027 /* We have to fill the slot before calling cselib_subst_to_values:
2028 the hash table is inconsistent until we do so, and
2029 cselib_subst_to_values will need to do lookups. */
2030 *slot = (void *) e;
2031 new_elt_loc_list (e, cselib_subst_to_values (x, memmode));
2032 return e;
2033 }
2034
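/* Editorial illustration (not part of the original sources): with
   cselib_preserve_constants set, if a setter assigned (reg:DI x) and a
   user now reads (reg:SI x), the loop above finds the narrowest wider
   MODE_INT value already recorded for the register and, via
   lowpart_subreg, gives the new SImode value an extra location such as
   (subreg:SI (value:DI ...) 0) on a little-endian target. */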
2035 /* Wrapper for cselib_lookup that indicates X is in INSN. */
2036
2037 cselib_val *
2038 cselib_lookup_from_insn (rtx x, enum machine_mode mode,
2039 int create, enum machine_mode memmode, rtx insn)
2040 {
2041 cselib_val *ret;
2042
2043 gcc_assert (!cselib_current_insn);
2044 cselib_current_insn = insn;
2045
2046 ret = cselib_lookup (x, mode, create, memmode);
2047
2048 cselib_current_insn = NULL;
2049
2050 return ret;
2051 }
2052
2053 /* Wrapper for cselib_lookup_1 that logs the lookup result and
2054 maintains invariants related to debug insns. */
2055
2056 cselib_val *
2057 cselib_lookup (rtx x, enum machine_mode mode,
2058 int create, enum machine_mode memmode)
2059 {
2060 cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);
2061
2062 /* ??? Should we return NULL if we're not to create an entry, the
2063 found loc is a debug loc and cselib_current_insn is not DEBUG?
2064 If so, we should also avoid converting val to non-DEBUG; probably
2065 easiest by setting cselib_current_insn to NULL before the call
2066 above. */
2067
2068 if (dump_file && (dump_flags & TDF_CSELIB))
2069 {
2070 fputs ("cselib lookup ", dump_file);
2071 print_inline_rtx (dump_file, x, 2);
2072 fprintf (dump_file, " => %u:%u\n",
2073 ret ? ret->uid : 0,
2074 ret ? ret->hash : 0);
2075 }
2076
2077 return ret;
2078 }
2079
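/* Editorial sketch (hypothetical, not part of the original sources):
   a client using cselib_lookup with CREATE == 0 to ask whether two
   expressions are already known to evaluate to the same value.
   Pointer equality of the returned cselib_vals is a sufficient test;
   rtx_equal_for_cselib_p is the full equivalence check. */
#if 0
static bool
exprs_have_same_value_p (rtx a, rtx b, enum machine_mode mode)
{
  cselib_val *va = cselib_lookup (a, mode, 0, VOIDmode);
  cselib_val *vb = cselib_lookup (b, mode, 0, VOIDmode);
  return va != NULL && vb != NULL && va == vb;
}
#endif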
2080 /* Invalidate any entries in reg_values that overlap REGNO. This is called
2081 if REGNO is changing. MODE is the mode of the assignment to REGNO, which
2082 is used to determine how many hard registers are being changed. If MODE
2083 is VOIDmode, then only REGNO is being changed; this is used when
2084 invalidating call clobbered registers across a call. */
2085
2086 static void
2087 cselib_invalidate_regno (unsigned int regno, enum machine_mode mode)
2088 {
2089 unsigned int endregno;
2090 unsigned int i;
2091
2092 /* If we see pseudos after reload, something is _wrong_. */
2093 gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
2094 || reg_renumber[regno] < 0);
2095
2096 /* Determine the range of registers that must be invalidated. For
2097 pseudos, only REGNO is affected. For hard regs, we must take MODE
2098 into account, and we must also invalidate lower register numbers
2099 if they contain values that overlap REGNO. */
2100 if (regno < FIRST_PSEUDO_REGISTER)
2101 {
2102 gcc_assert (mode != VOIDmode);
2103
2104 if (regno < max_value_regs)
2105 i = 0;
2106 else
2107 i = regno - max_value_regs;
2108
2109 endregno = end_hard_regno (mode, regno);
2110 }
2111 else
2112 {
2113 i = regno;
2114 endregno = regno + 1;
2115 }
2116
2117 for (; i < endregno; i++)
2118 {
2119 struct elt_list **l = &REG_VALUES (i);
2120
2121 /* Go through all known values for this reg; if it overlaps the range
2122 we're invalidating, remove the value. */
2123 while (*l)
2124 {
2125 cselib_val *v = (*l)->elt;
2126 bool had_locs;
2127 rtx setting_insn;
2128 struct elt_loc_list **p;
2129 unsigned int this_last = i;
2130
2131 if (i < FIRST_PSEUDO_REGISTER && v != NULL)
2132 this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
2133
2134 if (this_last < regno || v == NULL
2135 || (v == cfa_base_preserved_val
2136 && i == cfa_base_preserved_regno))
2137 {
2138 l = &(*l)->next;
2139 continue;
2140 }
2141
2142 /* We have an overlap. */
2143 if (*l == REG_VALUES (i))
2144 {
2145 /* Maintain the invariant that the first entry of
2146 REG_VALUES, if present, must be the value used to set
2147 the register, or NULL. This is also nice because
2148 then we won't push the same regno onto used_regs
2149 multiple times. */
2150 (*l)->elt = NULL;
2151 l = &(*l)->next;
2152 }
2153 else
2154 unchain_one_elt_list (l);
2155
2156 v = canonical_cselib_val (v);
2157
2158 had_locs = v->locs != NULL;
2159 setting_insn = v->locs ? v->locs->setting_insn : NULL;
2160
2161 /* Now, we clear the mapping from value to reg. It must exist, so
2162 this code will crash intentionally if it doesn't. */
2163 for (p = &v->locs; ; p = &(*p)->next)
2164 {
2165 rtx x = (*p)->loc;
2166
2167 if (REG_P (x) && REGNO (x) == i)
2168 {
2169 unchain_one_elt_loc_list (p);
2170 break;
2171 }
2172 }
2173
2174 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2175 {
2176 if (setting_insn && DEBUG_INSN_P (setting_insn))
2177 n_useless_debug_values++;
2178 else
2179 n_useless_values++;
2180 }
2181 }
2182 }
2183 }
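/* Editorial illustration (not part of the original sources): on a
   target where (reg:DI 0) occupies hard registers 0 and 1,
   invalidating register 1 must also remove a value recorded for the
   DImode pair starting at register 0; scanning upwards from
   regno - max_value_regs above is what catches such overlapping
   wider registers. */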
2184 \f
2185 /* Invalidate any locations in the table which are changed because of a
2186 store to MEM_RTX. If this is called because of a non-const call
2187 instruction, MEM_RTX is (mem:BLK const0_rtx). */
2188
2189 static void
2190 cselib_invalidate_mem (rtx mem_rtx)
2191 {
2192 cselib_val **vp, *v, *next;
2193 int num_mems = 0;
2194 rtx mem_addr;
2195
2196 mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
2197 mem_rtx = canon_rtx (mem_rtx);
2198
2199 vp = &first_containing_mem;
2200 for (v = *vp; v != &dummy_val; v = next)
2201 {
2202 bool has_mem = false;
2203 struct elt_loc_list **p = &v->locs;
2204 bool had_locs = v->locs != NULL;
2205 rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;
2206
2207 while (*p)
2208 {
2209 rtx x = (*p)->loc;
2210 cselib_val *addr;
2211 struct elt_list **mem_chain;
2212
2213 /* MEMs may occur in locations only at the top level; below
2214 that every MEM or REG is substituted by its VALUE. */
2215 if (!MEM_P (x))
2216 {
2217 p = &(*p)->next;
2218 continue;
2219 }
2220 if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
2221 && ! canon_true_dependence (mem_rtx, GET_MODE (mem_rtx),
2222 mem_addr, x, NULL_RTX))
2223 {
2224 has_mem = true;
2225 num_mems++;
2226 p = &(*p)->next;
2227 continue;
2228 }
2229
2230 /* This one overlaps. */
2231 /* We must have a mapping from this MEM's address to the
2232 value (E). Remove that, too. */
2233 addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
2234 addr = canonical_cselib_val (addr);
2235 gcc_checking_assert (v == canonical_cselib_val (v));
2236 mem_chain = &addr->addr_list;
2237 for (;;)
2238 {
2239 cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);
2240
2241 if (canon == v)
2242 {
2243 unchain_one_elt_list (mem_chain);
2244 break;
2245 }
2246
2247 /* Record canonicalized elt. */
2248 (*mem_chain)->elt = canon;
2249
2250 mem_chain = &(*mem_chain)->next;
2251 }
2252
2253 unchain_one_elt_loc_list (p);
2254 }
2255
2256 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2257 {
2258 if (setting_insn && DEBUG_INSN_P (setting_insn))
2259 n_useless_debug_values++;
2260 else
2261 n_useless_values++;
2262 }
2263
2264 next = v->next_containing_mem;
2265 if (has_mem)
2266 {
2267 *vp = v;
2268 vp = &(*vp)->next_containing_mem;
2269 }
2270 else
2271 v->next_containing_mem = NULL;
2272 }
2273 *vp = &dummy_val;
2274 }
2275
2276 /* Invalidate DEST, which is being assigned to or clobbered. */
2277
2278 void
2279 cselib_invalidate_rtx (rtx dest)
2280 {
2281 while (GET_CODE (dest) == SUBREG
2282 || GET_CODE (dest) == ZERO_EXTRACT
2283 || GET_CODE (dest) == STRICT_LOW_PART)
2284 dest = XEXP (dest, 0);
2285
2286 if (REG_P (dest))
2287 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
2288 else if (MEM_P (dest))
2289 cselib_invalidate_mem (dest);
2290 }
2291
2292 /* A wrapper for cselib_invalidate_rtx to be called via note_stores. */
2293
2294 static void
2295 cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED,
2296 void *data ATTRIBUTE_UNUSED)
2297 {
2298 cselib_invalidate_rtx (dest);
2299 }
2300
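/* Editorial sketch (hypothetical, not part of the original sources):
   how the wrapper above is driven. cselib_record_sets uses exactly
   this note_stores idiom to invalidate every location a pattern
   stores to. */
#if 0
static void
forget_stores_of (rtx insn)
{
  note_stores (PATTERN (insn), cselib_invalidate_rtx_note_stores, NULL);
}
#endif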
2301 /* Record the result of a SET instruction. DEST is being set; the source
2302 contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT
2303 describes its address. */
2304
2305 static void
2306 cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
2307 {
2308 int dreg = REG_P (dest) ? (int) REGNO (dest) : -1;
2309
2310 if (src_elt == 0 || side_effects_p (dest))
2311 return;
2312
2313 if (dreg >= 0)
2314 {
2315 if (dreg < FIRST_PSEUDO_REGISTER)
2316 {
2317 unsigned int n = hard_regno_nregs[dreg][GET_MODE (dest)];
2318
2319 if (n > max_value_regs)
2320 max_value_regs = n;
2321 }
2322
2323 if (REG_VALUES (dreg) == 0)
2324 {
2325 used_regs[n_used_regs++] = dreg;
2326 REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
2327 }
2328 else
2329 {
2330 /* The register should have been invalidated. */
2331 gcc_assert (REG_VALUES (dreg)->elt == 0);
2332 REG_VALUES (dreg)->elt = src_elt;
2333 }
2334
2335 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
2336 n_useless_values--;
2337 new_elt_loc_list (src_elt, dest);
2338 }
2339 else if (MEM_P (dest) && dest_addr_elt != 0
2340 && cselib_record_memory)
2341 {
2342 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
2343 n_useless_values--;
2344 add_mem_for_addr (dest_addr_elt, src_elt, dest);
2345 }
2346 }
2347
2348 /* Make ELT and X's VALUE equivalent to each other at INSN. */
2349
2350 void
2351 cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx insn)
2352 {
2353 cselib_val *nelt;
2354 rtx save_cselib_current_insn = cselib_current_insn;
2355
2356 gcc_checking_assert (elt);
2357 gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
2358 gcc_checking_assert (!side_effects_p (x));
2359
2360 cselib_current_insn = insn;
2361
2362 nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);
2363
2364 if (nelt != elt)
2365 {
2366 if (!PRESERVED_VALUE_P (nelt->val_rtx))
2367 cselib_preserve_value (nelt);
2368
2369 new_elt_loc_list (nelt, elt->val_rtx);
2370 }
2371
2372 cselib_current_insn = save_cselib_current_insn;
2373 }
2374
2375 /* There is no good way to determine how many elements there can be
2376 in a PARALLEL. Since it's fairly cheap, use a really large number. */
2377 #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
2378
2379 struct cselib_record_autoinc_data
2380 {
2381 struct cselib_set *sets;
2382 int n_sets;
2383 };
2384
2385 /* Callback for for_each_inc_dec. Records in ARG the SETs implied by
2386 autoinc RTXs: the value SRC, plus SRCOFF if non-NULL, is stored in DEST. */
2387
2388 static int
2389 cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
2390 rtx dest, rtx src, rtx srcoff, void *arg)
2391 {
2392 struct cselib_record_autoinc_data *data;
2393 data = (struct cselib_record_autoinc_data *)arg;
2394
2395 data->sets[data->n_sets].dest = dest;
2396
2397 if (srcoff)
2398 data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
2399 else
2400 data->sets[data->n_sets].src = src;
2401
2402 data->n_sets++;
2403
2404 return -1;
2405 }
2406
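/* Editorial illustration (not part of the original sources): for
   (mem:SI (post_inc:SI (reg:SI r))) on a 32-bit target,
   for_each_inc_dec hands this callback DEST = (reg:SI r),
   SRC = (reg:SI r) and SRCOFF = (const_int 4), so the recorded set
   is r := r + 4. */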
2407 /* Record the effects of any sets and autoincs in INSN. */
2408 static void
2409 cselib_record_sets (rtx insn)
2410 {
2411 int n_sets = 0;
2412 int i;
2413 struct cselib_set sets[MAX_SETS];
2414 rtx body;
2415 rtx cond = 0;
2416 int n_sets_before_autoinc;
2417 struct cselib_record_autoinc_data data;
2418
2419 body = PATTERN (insn);
2420 if (GET_CODE (body) == COND_EXEC)
2421 {
2422 cond = COND_EXEC_TEST (body);
2423 body = COND_EXEC_CODE (body);
2424 }
2425
2426 /* Find all sets. */
2427 if (GET_CODE (body) == SET)
2428 {
2429 sets[0].src = SET_SRC (body);
2430 sets[0].dest = SET_DEST (body);
2431 n_sets = 1;
2432 }
2433 else if (GET_CODE (body) == PARALLEL)
2434 {
2435 /* Look through the PARALLEL and record the values being
2436 set, if possible. (CLOBBERs are invalidated later via note_stores.) */
2437 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
2438 {
2439 rtx x = XVECEXP (body, 0, i);
2440
2441 if (GET_CODE (x) == SET)
2442 {
2443 sets[n_sets].src = SET_SRC (x);
2444 sets[n_sets].dest = SET_DEST (x);
2445 n_sets++;
2446 }
2447 }
2448 }
2449
2450 if (n_sets == 1
2451 && MEM_P (sets[0].src)
2452 && !cselib_record_memory
2453 && MEM_READONLY_P (sets[0].src))
2454 {
2455 rtx note = find_reg_equal_equiv_note (insn);
2456
2457 if (note && CONSTANT_P (XEXP (note, 0)))
2458 sets[0].src = XEXP (note, 0);
2459 }
2460
2461 data.sets = sets;
2462 data.n_sets = n_sets_before_autoinc = n_sets;
2463 for_each_inc_dec (&insn, cselib_record_autoinc_cb, &data);
2464 n_sets = data.n_sets;
2465
2466 /* Look up the values that are read. Do this before invalidating the
2467 locations that are written. */
2468 for (i = 0; i < n_sets; i++)
2469 {
2470 rtx dest = sets[i].dest;
2471
2472 /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
2473 the low part after invalidating any knowledge about larger modes. */
2474 if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
2475 sets[i].dest = dest = XEXP (dest, 0);
2476
2477 /* We don't know how to record anything but REG or MEM. */
2478 if (REG_P (dest)
2479 || (MEM_P (dest) && cselib_record_memory))
2480 {
2481 rtx src = sets[i].src;
2482 if (cond)
2483 src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
2484 sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
2485 if (MEM_P (dest))
2486 {
2487 enum machine_mode address_mode
2488 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (dest));
2489
2490 sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
2491 address_mode, 1,
2492 GET_MODE (dest));
2493 }
2494 else
2495 sets[i].dest_addr_elt = 0;
2496 }
2497 }
2498
2499 if (cselib_record_sets_hook)
2500 cselib_record_sets_hook (insn, sets, n_sets);
2501
2502 /* Invalidate all locations written by this insn. Note that the elts we
2503 looked up in the previous loop aren't affected, just some of their
2504 locations may go away. */
2505 note_stores (body, cselib_invalidate_rtx_note_stores, NULL);
2506
2507 for (i = n_sets_before_autoinc; i < n_sets; i++)
2508 cselib_invalidate_rtx (sets[i].dest);
2509
2510 /* If this is an asm, look for duplicate sets. This can happen when the
2511 user uses the same value as an output multiple times. This is valid
2512 if the outputs are not actually used thereafter. Treat this case as
2513 if the value isn't actually set. We do this by smashing the destination
2514 to pc_rtx, so that we won't record the value later. */
2515 if (n_sets >= 2 && asm_noperands (body) >= 0)
2516 {
2517 for (i = 0; i < n_sets; i++)
2518 {
2519 rtx dest = sets[i].dest;
2520 if (REG_P (dest) || MEM_P (dest))
2521 {
2522 int j;
2523 for (j = i + 1; j < n_sets; j++)
2524 if (rtx_equal_p (dest, sets[j].dest))
2525 {
2526 sets[i].dest = pc_rtx;
2527 sets[j].dest = pc_rtx;
2528 }
2529 }
2530 }
2531 }
2532
2533 /* Now enter the equivalences in our tables. */
2534 for (i = 0; i < n_sets; i++)
2535 {
2536 rtx dest = sets[i].dest;
2537 if (REG_P (dest)
2538 || (MEM_P (dest) && cselib_record_memory))
2539 cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
2540 }
2541 }
2542
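/* Editorial sketch (hypothetical, not part of the original sources):
   the shape of a cselib_record_sets_hook callback, which the code
   above invokes once the looked-up sets of INSN are known but before
   they are entered into the tables. */
#if 0
static void
note_recorded_sets (rtx insn ATTRIBUTE_UNUSED,
		    struct cselib_set *sets, int n_sets)
{
  int i;

  for (i = 0; i < n_sets; i++)
    if (dump_file)
      print_inline_rtx (dump_file, sets[i].dest, 2);
}
#endif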
2543 /* Record the effects of INSN. */
2544
2545 void
2546 cselib_process_insn (rtx insn)
2547 {
2548 int i;
2549 rtx x;
2550
2551 cselib_current_insn = insn;
2552
2553 /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp. */
2554 if (LABEL_P (insn)
2555 || (CALL_P (insn)
2556 && find_reg_note (insn, REG_SETJMP, NULL))
2557 || (NONJUMP_INSN_P (insn)
2558 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2559 && MEM_VOLATILE_P (PATTERN (insn))))
2560 {
2561 cselib_reset_table (next_uid);
2562 cselib_current_insn = NULL_RTX;
2563 return;
2564 }
2565
2566 if (! INSN_P (insn))
2567 {
2568 cselib_current_insn = NULL_RTX;
2569 return;
2570 }
2571
2572 /* If this is a call instruction, forget anything stored in a
2573 call clobbered register, or, if this is not a const call, in
2574 memory. */
2575 if (CALL_P (insn))
2576 {
2577 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2578 if (call_used_regs[i]
2579 || (REG_VALUES (i) && REG_VALUES (i)->elt
2580 && HARD_REGNO_CALL_PART_CLOBBERED (i,
2581 GET_MODE (REG_VALUES (i)->elt->val_rtx))))
2582 cselib_invalidate_regno (i, reg_raw_mode[i]);
2583
2584 /* Since it is not clear how cselib is going to be used, be
2585 conservative here and treat looping pure or const functions
2586 as if they were regular functions. */
2587 if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
2588 || !(RTL_CONST_OR_PURE_CALL_P (insn)))
2589 cselib_invalidate_mem (callmem);
2590 }
2591
2592 cselib_record_sets (insn);
2593
2594 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
2595 after we have processed the insn. */
2596 if (CALL_P (insn))
2597 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
2598 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
2599 cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
2600
2601 cselib_current_insn = NULL_RTX;
2602
2603 if (n_useless_values > MAX_USELESS_VALUES
2604 /* remove_useless_values is linear in the hash table size. Avoid
2605 quadratic behavior for very large hashtables with very few
2606 useless elements. */
2607 && ((unsigned int)n_useless_values
2608 > (cselib_hash_table->n_elements
2609 - cselib_hash_table->n_deleted
2610 - n_debug_values) / 4))
2611 remove_useless_values ();
2612 }
2613
2614 /* Initialize cselib for one pass. The caller must also call
2615 init_alias_analysis. */
2616
2617 void
2618 cselib_init (int record_what)
2619 {
2620 elt_list_pool = create_alloc_pool ("elt_list",
2621 sizeof (struct elt_list), 10);
2622 elt_loc_list_pool = create_alloc_pool ("elt_loc_list",
2623 sizeof (struct elt_loc_list), 10);
2624 cselib_val_pool = create_alloc_pool ("cselib_val_list",
2625 sizeof (cselib_val), 10);
2626 value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
2627 cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
2628 cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
2629
2630 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything;
2631 see canon_true_dependence. This is only created once. */
2632 if (! callmem)
2633 callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2634
2635 cselib_nregs = max_reg_num ();
2636
2637 /* We preserve reg_values across passes to avoid clearing and reallocating
2638 the whole array each time; however, reallocate it if it grows too large. */
2639 if (!reg_values || reg_values_size < cselib_nregs
2640 || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
2641 {
2642 free (reg_values);
2643 /* Leave some room for newly emitted instructions so we don't end up
2644 reallocating between passes. */
2645 reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
2646 reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
2647 }
2648 used_regs = XNEWVEC (unsigned int, cselib_nregs);
2649 n_used_regs = 0;
2650 cselib_hash_table = htab_create (31, get_value_hash,
2651 entry_and_rtx_equal_p, NULL);
2652 next_uid = 1;
2653 }
2654
2655 /* Called when the current user is done with cselib. */
2656
2657 void
2658 cselib_finish (void)
2659 {
2660 cselib_discard_hook = NULL;
2661 cselib_preserve_constants = false;
2662 cfa_base_preserved_val = NULL;
2663 cfa_base_preserved_regno = INVALID_REGNUM;
2664 free_alloc_pool (elt_list_pool);
2665 free_alloc_pool (elt_loc_list_pool);
2666 free_alloc_pool (cselib_val_pool);
2667 free_alloc_pool (value_pool);
2668 cselib_clear_table ();
2669 htab_delete (cselib_hash_table);
2670 free (used_regs);
2671 used_regs = 0;
2672 cselib_hash_table = 0;
2673 n_useless_values = 0;
2674 n_useless_debug_values = 0;
2675 n_debug_values = 0;
2676 next_uid = 0;
2677 }
2678
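/* Editorial sketch (hypothetical, not part of the original sources):
   the minimal lifecycle a pass wraps around the API above. Per the
   comment on cselib_init, the caller must also call
   init_alias_analysis beforehand (and end_alias_analysis when done). */
#if 0
static void
example_cselib_walk (rtx first_insn)
{
  rtx insn;

  cselib_init (CSELIB_RECORD_MEMORY);
  for (insn = first_insn; insn; insn = NEXT_INSN (insn))
    cselib_process_insn (insn);
  cselib_finish ();
}
#endif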
2679 /* Dump the cselib_val *X to FILE *INFO. */
2680
2681 static int
2682 dump_cselib_val (void **x, void *info)
2683 {
2684 cselib_val *v = (cselib_val *)*x;
2685 FILE *out = (FILE *)info;
2686 bool need_lf = true;
2687
2688 print_inline_rtx (out, v->val_rtx, 0);
2689
2690 if (v->locs)
2691 {
2692 struct elt_loc_list *l = v->locs;
2693 if (need_lf)
2694 {
2695 fputc ('\n', out);
2696 need_lf = false;
2697 }
2698 fputs (" locs:", out);
2699 do
2700 {
2701 if (l->setting_insn)
2702 fprintf (out, "\n from insn %i ",
2703 INSN_UID (l->setting_insn));
2704 else
2705 fprintf (out, "\n ");
2706 print_inline_rtx (out, l->loc, 4);
2707 }
2708 while ((l = l->next));
2709 fputc ('\n', out);
2710 }
2711 else
2712 {
2713 fputs (" no locs", out);
2714 need_lf = true;
2715 }
2716
2717 if (v->addr_list)
2718 {
2719 struct elt_list *e = v->addr_list;
2720 if (need_lf)
2721 {
2722 fputc ('\n', out);
2723 need_lf = false;
2724 }
2725 fputs (" addr list:", out);
2726 do
2727 {
2728 fputs ("\n ", out);
2729 print_inline_rtx (out, e->elt->val_rtx, 2);
2730 }
2731 while ((e = e->next));
2732 fputc ('\n', out);
2733 }
2734 else
2735 {
2736 fputs (" no addrs", out);
2737 need_lf = true;
2738 }
2739
2740 if (v->next_containing_mem == &dummy_val)
2741 fputs (" last mem\n", out);
2742 else if (v->next_containing_mem)
2743 {
2744 fputs (" next mem ", out);
2745 print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
2746 fputc ('\n', out);
2747 }
2748 else if (need_lf)
2749 fputc ('\n', out);
2750
2751 return 1;
2752 }
2753
2754 /* Dump to OUT everything in the CSELIB table. */
2755
2756 void
2757 dump_cselib_table (FILE *out)
2758 {
2759 fprintf (out, "cselib hash table:\n");
2760 htab_traverse (cselib_hash_table, dump_cselib_val, out);
2761 if (first_containing_mem != &dummy_val)
2762 {
2763 fputs ("first mem ", out);
2764 print_inline_rtx (out, first_containing_mem->val_rtx, 2);
2765 fputc ('\n', out);
2766 }
2767 fprintf (out, "next uid %i\n", next_uid);
2768 }
2769
2770 #include "gt-cselib.h"