1 /* Common subexpression elimination library for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26
27 #include "rtl.h"
28 #include "tree.h"/* FIXME: For hashing DEBUG_EXPR & friends. */
29 #include "tm_p.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "flags.h"
33 #include "insn-config.h"
34 #include "recog.h"
35 #include "function.h"
36 #include "emit-rtl.h"
37 #include "diagnostic-core.h"
38 #include "ggc.h"
39 #include "hashtab.h"
40 #include "dumpfile.h"
41 #include "cselib.h"
42 #include "valtrack.h"
43 #include "params.h"
44 #include "alloc-pool.h"
45 #include "target.h"
46 #include "bitmap.h"
47
48 /* A list of cselib_val structures. */
49 struct elt_list {
50 struct elt_list *next;
51 cselib_val *elt;
52 };
53
54 static bool cselib_record_memory;
55 static bool cselib_preserve_constants;
56 static bool cselib_any_perm_equivs;
57 static int entry_and_rtx_equal_p (const void *, const void *);
58 static hashval_t get_value_hash (const void *);
59 static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
60 static void new_elt_loc_list (cselib_val *, rtx);
61 static void unchain_one_value (cselib_val *);
62 static void unchain_one_elt_list (struct elt_list **);
63 static void unchain_one_elt_loc_list (struct elt_loc_list **);
64 static int discard_useless_locs (void **, void *);
65 static int discard_useless_values (void **, void *);
66 static void remove_useless_values (void);
67 static int rtx_equal_for_cselib_1 (rtx, rtx, enum machine_mode);
68 static unsigned int cselib_hash_rtx (rtx, int, enum machine_mode);
69 static cselib_val *new_cselib_val (unsigned int, enum machine_mode, rtx);
70 static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
71 static cselib_val *cselib_lookup_mem (rtx, int);
72 static void cselib_invalidate_regno (unsigned int, enum machine_mode);
73 static void cselib_invalidate_mem (rtx);
74 static void cselib_record_set (rtx, cselib_val *, cselib_val *);
75 static void cselib_record_sets (rtx);
76
77 struct expand_value_data
78 {
79 bitmap regs_active;
80 cselib_expand_callback callback;
81 void *callback_arg;
82 bool dummy;
83 };
84
85 static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);
86
87 /* There are three ways in which cselib can look up an rtx:
88 - for a REG, the reg_values table (which is indexed by regno) is used
89 - for a MEM, we recursively look up its address and then follow the
90 addr_list of that value
91 - for everything else, we compute a hash value and go through the hash
92 table. Since different rtx's can still have the same hash value,
93 this involves walking the table entries for a given value and comparing
94 the locations of the entries with the rtx we are looking up. */
95
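/* For illustration (the register number is made up): looking up
   (mem:SI (plus:SI (reg:SI 1) (const_int 4))) first finds the VALUE
   recorded for (reg:SI 1) in the reg_values table, then hashes the
   address (plus:SI VALUE (const_int 4)) through the hash table, and
   finally walks the addr_list of that address VALUE looking for an
   SImode MEM.  */
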
96 /* A table that enables us to look up elts by their value. */
97 static htab_t cselib_hash_table;
98
99 /* This is a global so we don't have to pass it through every function.
100 It is used in new_elt_loc_list to set SETTING_INSN. */
101 static rtx cselib_current_insn;
102
103 /* The unique id that the next created value will take. */
104 static unsigned int next_uid;
105
106 /* The number of registers we had when the varrays were last resized. */
107 static unsigned int cselib_nregs;
108
109 /* Count values without known locations, or with only locations that
110 wouldn't have been known except for debug insns. Whenever this
111 grows too big, we remove these useless values from the table.
112
113 Counting values with only debug values is a bit tricky. We don't
114 want to increment n_useless_values when we create a value for a
115 debug insn, for this would get n_useless_values out of sync, but we
116 want to increment it if all locs in the list that were ever referenced
117 in nondebug insns are removed from the list.
118
119 In the general case, once we do that, we'd have to stop accepting
120 nondebug expressions in the loc list, to avoid making equivalent
121 two values that, without debug insns, would have been made into
122 separate values. However, because debug insns never introduce
123 equivalences themselves (no assignments), the only means for
124 growing loc lists is through nondebug assignments. If the locs
125 also happen to be referenced in debug insns, it will work just fine.
126
127 A consequence of this is that there's at most one debug-only loc in
128 each loc list. If we keep it in the first entry, testing whether
129 we have a debug-only loc list takes O(1).
130
131 Furthermore, since any additional entry in a loc list containing a
132 debug loc would have to come from an assignment (nondebug) that
133 references both the initial debug loc and the newly-equivalent loc,
134 the initial debug loc would be promoted to a nondebug loc, and the
135 loc list would not contain debug locs any more.
136
137 So the only case we have to be careful with in order to keep
138 n_useless_values in sync between debug and nondebug compilations is
139 to avoid incrementing n_useless_values when removing the single loc
140 from a value that turns out to not appear outside debug values. We
141 increment n_useless_debug_values instead, and leave such values
142 alone until, for other reasons, we garbage-collect useless
143 values. */
144 static int n_useless_values;
145 static int n_useless_debug_values;
146
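/* A sketch of the accounting above (insn and value are made up): if
   a debug insn is the only thing that ever bound a VALUE to
   (reg:SI 1), the value's loc list holds a single debug-only loc and
   n_useless_values is left alone; when that loc is discarded,
   n_useless_debug_values is incremented instead, keeping
   n_useless_values identical between debug and nondebug
   compilations.  */
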
147 /* Count values whose locs have been taken exclusively from debug
148 insns for the entire life of the value. */
149 static int n_debug_values;
150
151 /* Number of useless values before we remove them from the hash table. */
152 #define MAX_USELESS_VALUES 32
153
154 /* This table maps from register number to values. It does not
155 contain pointers to cselib_val structures, but rather elt_lists.
156 The purpose is to be able to refer to the same register in
157 different modes. The first element of the list defines the mode in
158 which the register was set; if the mode is unknown or the value is
159 no longer valid in that mode, ELT will be NULL for the first
160 element. */
161 static struct elt_list **reg_values;
162 static unsigned int reg_values_size;
163 #define REG_VALUES(i) reg_values[i]
164
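/* For example (register and modes made up): after (set (reg:DI 1) ...)
   the first element of REG_VALUES (1) holds the DImode VALUE of the
   set; a later lookup of (reg:SI 1) appends another element, so the
   same register can be referred to in both modes.  */
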
165 /* The largest number of hard regs used by any entry added to the
166 REG_VALUES table. Cleared on each cselib_clear_table() invocation. */
167 static unsigned int max_value_regs;
168
169 /* The set of indices I for which REG_VALUES(I) != 0 is saved here. This is used
170 in cselib_clear_table() for fast emptying. */
171 static unsigned int *used_regs;
172 static unsigned int n_used_regs;
173
174 /* We pass this to cselib_invalidate_mem to invalidate all of
175 memory for a non-const call instruction. */
176 static GTY(()) rtx callmem;
177
178 /* Set by discard_useless_locs if it deleted the last location of any
179 value. */
180 static int values_became_useless;
181
182 /* Used as the stop element of the containing_mem list so we can check
183 presence in the list by checking the next pointer. */
184 static cselib_val dummy_val;
185
186 /* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
187 that is constant through the whole function and should never be
188 eliminated. */
189 static cselib_val *cfa_base_preserved_val;
190 static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;
191
192 /* Used to list all values that contain memory references.
193 May or may not contain the useless values - the list is compacted
194 each time memory is invalidated. */
195 static cselib_val *first_containing_mem = &dummy_val;
196 static alloc_pool elt_loc_list_pool, elt_list_pool, cselib_val_pool, value_pool;
197
198 /* If nonnull, cselib will call this function before freeing useless
199 VALUEs. A VALUE is deemed useless if its "locs" field is null. */
200 void (*cselib_discard_hook) (cselib_val *);
201
202 /* If nonnull, cselib will call this function before recording sets or
203 even clobbering outputs of INSN. All the recorded sets will be
204 represented in the array sets[n_sets]. new_val_min can be used to
205 tell whether values present in sets are introduced by this
206 instruction. */
207 void (*cselib_record_sets_hook) (rtx insn, struct cselib_set *sets,
208 int n_sets);
209
210 #define PRESERVED_VALUE_P(RTX) \
211 (RTL_FLAG_CHECK1("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
212
213 #define SP_BASED_VALUE_P(RTX) \
214 (RTL_FLAG_CHECK1("SP_BASED_VALUE_P", (RTX), VALUE)->jump)
215
216 \f
217
218 /* Allocate a struct elt_list and fill in its two elements with the
219 arguments. */
220
221 static inline struct elt_list *
222 new_elt_list (struct elt_list *next, cselib_val *elt)
223 {
224 struct elt_list *el;
225 el = (struct elt_list *) pool_alloc (elt_list_pool);
226 el->next = next;
227 el->elt = elt;
228 return el;
229 }
230
231 /* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
232 list. */
233
234 static inline void
235 new_elt_loc_list (cselib_val *val, rtx loc)
236 {
237 struct elt_loc_list *el, *next = val->locs;
238
239 gcc_checking_assert (!next || !next->setting_insn
240 || !DEBUG_INSN_P (next->setting_insn)
241 || cselib_current_insn == next->setting_insn);
242
243 /* If we're creating the first loc in a debug insn context, we've
244 just created a debug value. Count it. */
245 if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
246 n_debug_values++;
247
248 val = canonical_cselib_val (val);
249 next = val->locs;
250
251 if (GET_CODE (loc) == VALUE)
252 {
253 loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;
254
255 gcc_checking_assert (PRESERVED_VALUE_P (loc)
256 == PRESERVED_VALUE_P (val->val_rtx));
257
258 if (val->val_rtx == loc)
259 return;
260 else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
261 {
262 /* Reverse the insertion. */
263 new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
264 return;
265 }
266
267 gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);
268
269 if (CSELIB_VAL_PTR (loc)->locs)
270 {
271 /* Bring all locs from LOC to VAL. */
272 for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
273 {
274 /* Adjust values that have LOC as canonical so that VAL
275 becomes their canonical. */
276 if (el->loc && GET_CODE (el->loc) == VALUE)
277 {
278 gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
279 == loc);
280 CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
281 }
282 }
283 el->next = val->locs;
284 next = val->locs = CSELIB_VAL_PTR (loc)->locs;
285 }
286
287 if (CSELIB_VAL_PTR (loc)->addr_list)
288 {
289 /* Bring the addr_list of LOC into the canonical node. */
290 struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
291 while (last->next)
292 last = last->next;
293 last->next = val->addr_list;
294 val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
295 CSELIB_VAL_PTR (loc)->addr_list = NULL;
296 }
297
298 if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
299 && val->next_containing_mem == NULL)
300 {
301 /* Add VAL to the containing_mem list after LOC. LOC will
302 be removed when we notice it doesn't contain any
303 MEMs. */
304 val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
305 CSELIB_VAL_PTR (loc)->next_containing_mem = val;
306 }
307
308 /* Chain LOC back to VAL. */
309 el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
310 el->loc = val->val_rtx;
311 el->setting_insn = cselib_current_insn;
312 el->next = NULL;
313 CSELIB_VAL_PTR (loc)->locs = el;
314 }
315
316 el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
317 el->loc = loc;
318 el->setting_insn = cselib_current_insn;
319 el->next = next;
320 val->locs = el;
321 }
322
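/* A sketch of the merge performed above (uids made up): if VALUE 9 is
   recorded as a location of VALUE 5, the older value 5 stays
   canonical; value 9's locs and addr_list are spliced into value 5,
   and value 9 is left with a single loc that points back at value 5.
   Recording the equivalence in the other order is simply reversed.  */
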
323 /* Promote loc L to a nondebug cselib_current_insn if L is marked as
324 originating from a debug insn, maintaining the debug values
325 count. */
326
327 static inline void
328 promote_debug_loc (struct elt_loc_list *l)
329 {
330 if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
331 && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
332 {
333 n_debug_values--;
334 l->setting_insn = cselib_current_insn;
335 if (cselib_preserve_constants && l->next)
336 {
337 gcc_assert (l->next->setting_insn
338 && DEBUG_INSN_P (l->next->setting_insn)
339 && !l->next->next);
340 l->next->setting_insn = cselib_current_insn;
341 }
342 else
343 gcc_assert (!l->next);
344 }
345 }
346
347 /* The elt_list at *PL is no longer needed. Unchain it and free its
348 storage. */
349
350 static inline void
351 unchain_one_elt_list (struct elt_list **pl)
352 {
353 struct elt_list *l = *pl;
354
355 *pl = l->next;
356 pool_free (elt_list_pool, l);
357 }
358
359 /* Likewise for elt_loc_lists. */
360
361 static void
362 unchain_one_elt_loc_list (struct elt_loc_list **pl)
363 {
364 struct elt_loc_list *l = *pl;
365
366 *pl = l->next;
367 pool_free (elt_loc_list_pool, l);
368 }
369
370 /* Likewise for cselib_vals. This also frees the addr_list associated with
371 V. */
372
373 static void
374 unchain_one_value (cselib_val *v)
375 {
376 while (v->addr_list)
377 unchain_one_elt_list (&v->addr_list);
378
379 pool_free (cselib_val_pool, v);
380 }
381
382 /* Remove all entries from the hash table. Also used during
383 initialization. */
384
385 void
386 cselib_clear_table (void)
387 {
388 cselib_reset_table (1);
389 }
390
391 /* Return TRUE if V is a constant, a function invariant or a VALUE
392 equivalence; FALSE otherwise. */
393
394 static bool
395 invariant_or_equiv_p (cselib_val *v)
396 {
397 struct elt_loc_list *l;
398
399 if (v == cfa_base_preserved_val)
400 return true;
401
402 /* Keep VALUE equivalences around. */
403 for (l = v->locs; l; l = l->next)
404 if (GET_CODE (l->loc) == VALUE)
405 return true;
406
407 if (v->locs != NULL
408 && v->locs->next == NULL)
409 {
410 if (CONSTANT_P (v->locs->loc)
411 && (GET_CODE (v->locs->loc) != CONST
412 || !references_value_p (v->locs->loc, 0)))
413 return true;
414 /* Although a debug expr may be bound to different expressions,
415 we can preserve it as if it were constant, to get unification
416 and proper merging within var-tracking. */
417 if (GET_CODE (v->locs->loc) == DEBUG_EXPR
418 || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
419 || GET_CODE (v->locs->loc) == ENTRY_VALUE
420 || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
421 return true;
422
423 /* (plus (value V) (const_int C)) is invariant iff V is invariant. */
424 if (GET_CODE (v->locs->loc) == PLUS
425 && CONST_INT_P (XEXP (v->locs->loc, 1))
426 && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
427 && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
428 return true;
429 }
430
431 return false;
432 }
433
434 /* Remove from hash table all VALUEs except constants, function
435 invariants and VALUE equivalences. */
436
437 static int
438 preserve_constants_and_equivs (void **x, void *info ATTRIBUTE_UNUSED)
439 {
440 cselib_val *v = (cselib_val *)*x;
441
442 if (!invariant_or_equiv_p (v))
443 htab_clear_slot (cselib_hash_table, x);
444 return 1;
445 }
446
447 /* Remove all entries from the hash table, arranging for the next
448 value to be numbered NUM. */
449
450 void
451 cselib_reset_table (unsigned int num)
452 {
453 unsigned int i;
454
455 max_value_regs = 0;
456
457 if (cfa_base_preserved_val)
458 {
459 unsigned int regno = cfa_base_preserved_regno;
460 unsigned int new_used_regs = 0;
461 for (i = 0; i < n_used_regs; i++)
462 if (used_regs[i] == regno)
463 {
464 new_used_regs = 1;
465 continue;
466 }
467 else
468 REG_VALUES (used_regs[i]) = 0;
469 gcc_assert (new_used_regs == 1);
470 n_used_regs = new_used_regs;
471 used_regs[0] = regno;
472 max_value_regs
473 = hard_regno_nregs[regno][GET_MODE (cfa_base_preserved_val->locs->loc)];
474 }
475 else
476 {
477 for (i = 0; i < n_used_regs; i++)
478 REG_VALUES (used_regs[i]) = 0;
479 n_used_regs = 0;
480 }
481
482 if (cselib_preserve_constants)
483 htab_traverse (cselib_hash_table, preserve_constants_and_equivs, NULL);
484 else
485 {
486 htab_empty (cselib_hash_table);
487 gcc_checking_assert (!cselib_any_perm_equivs);
488 }
489
490 n_useless_values = 0;
491 n_useless_debug_values = 0;
492 n_debug_values = 0;
493
494 next_uid = num;
495
496 first_containing_mem = &dummy_val;
497 }
498
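/* For instance, with cselib_preserve_constants set (as during
   var-tracking), a reset keeps VALUEs whose single location is a
   constant, a debug expression, or (plus (value V) (const_int C))
   with V itself invariant, i.e. the forms accepted by
   invariant_or_equiv_p above.  */
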
499 /* Return the number of the next value that will be generated. */
500
501 unsigned int
502 cselib_get_next_uid (void)
503 {
504 return next_uid;
505 }
506
507 /* See the documentation of cselib_find_slot below. */
508 static enum machine_mode find_slot_memmode;
509
510 /* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
511 INSERTing if requested. When X is part of the address of a MEM,
512 MEMMODE should specify the mode of the MEM. While searching the
513 table, MEMMODE is held in FIND_SLOT_MEMMODE, so that autoinc RTXs
514 in X can be resolved. */
515
516 static void **
517 cselib_find_slot (rtx x, hashval_t hash, enum insert_option insert,
518 enum machine_mode memmode)
519 {
520 void **slot;
521 find_slot_memmode = memmode;
522 slot = htab_find_slot_with_hash (cselib_hash_table, x, hash, insert);
523 find_slot_memmode = VOIDmode;
524 return slot;
525 }
526
527 /* The equality test for our hash table. The first argument ENTRY is a table
528 element (i.e. a cselib_val), while the second arg X is an rtx. We know
529 that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
530 CONST of an appropriate mode. */
531
532 static int
533 entry_and_rtx_equal_p (const void *entry, const void *x_arg)
534 {
535 struct elt_loc_list *l;
536 const cselib_val *const v = (const cselib_val *) entry;
537 rtx x = CONST_CAST_RTX ((const_rtx)x_arg);
538 enum machine_mode mode = GET_MODE (x);
539
540 gcc_assert (!CONST_SCALAR_INT_P (x) && GET_CODE (x) != CONST_FIXED);
541
542 if (mode != GET_MODE (v->val_rtx))
543 return 0;
544
545 /* Unwrap X if necessary. */
546 if (GET_CODE (x) == CONST
547 && (CONST_SCALAR_INT_P (XEXP (x, 0))
548 || GET_CODE (XEXP (x, 0)) == CONST_FIXED))
549 x = XEXP (x, 0);
550
551 /* We don't guarantee that distinct rtx's have different hash values,
552 so we need to do a comparison. */
553 for (l = v->locs; l; l = l->next)
554 if (rtx_equal_for_cselib_1 (l->loc, x, find_slot_memmode))
555 {
556 promote_debug_loc (l);
557 return 1;
558 }
559
560 return 0;
561 }
562
563 /* The hash function for our hash table. The value is always computed with
564 cselib_hash_rtx when adding an element; this function just extracts the
565 hash value from a cselib_val structure. */
566
567 static hashval_t
568 get_value_hash (const void *entry)
569 {
570 const cselib_val *const v = (const cselib_val *) entry;
571 return v->hash;
572 }
573
574 /* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
575 only return true for values which point to a cselib_val whose value
576 element has been set to zero, which implies the cselib_val will be
577 removed. */
578
579 int
580 references_value_p (const_rtx x, int only_useless)
581 {
582 const enum rtx_code code = GET_CODE (x);
583 const char *fmt = GET_RTX_FORMAT (code);
584 int i, j;
585
586 if (GET_CODE (x) == VALUE
587 && (! only_useless ||
588 (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
589 return 1;
590
591 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
592 {
593 if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
594 return 1;
595 else if (fmt[i] == 'E')
596 for (j = 0; j < XVECLEN (x, i); j++)
597 if (references_value_p (XVECEXP (x, i, j), only_useless))
598 return 1;
599 }
600
601 return 0;
602 }
603
604 /* For all locations found in X, delete locations that reference useless
605 values (i.e. values without any location). Called through
606 htab_traverse. */
607
608 static int
609 discard_useless_locs (void **x, void *info ATTRIBUTE_UNUSED)
610 {
611 cselib_val *v = (cselib_val *)*x;
612 struct elt_loc_list **p = &v->locs;
613 bool had_locs = v->locs != NULL;
614 rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;
615
616 while (*p)
617 {
618 if (references_value_p ((*p)->loc, 1))
619 unchain_one_elt_loc_list (p);
620 else
621 p = &(*p)->next;
622 }
623
624 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
625 {
626 if (setting_insn && DEBUG_INSN_P (setting_insn))
627 n_useless_debug_values++;
628 else
629 n_useless_values++;
630 values_became_useless = 1;
631 }
632 return 1;
633 }
634
635 /* If X is a value with no locations, remove it from the hashtable. */
636
637 static int
638 discard_useless_values (void **x, void *info ATTRIBUTE_UNUSED)
639 {
640 cselib_val *v = (cselib_val *)*x;
641
642 if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
643 {
644 if (cselib_discard_hook)
645 cselib_discard_hook (v);
646
647 CSELIB_VAL_PTR (v->val_rtx) = NULL;
648 htab_clear_slot (cselib_hash_table, x);
649 unchain_one_value (v);
650 n_useless_values--;
651 }
652
653 return 1;
654 }
655
656 /* Clean out useless values (i.e. those which no longer have locations
657 associated with them) from the hash table. */
658
659 static void
660 remove_useless_values (void)
661 {
662 cselib_val **p, *v;
663
664 /* First pass: eliminate locations that reference useless values. That in
665 turn can make more values useless. */
666 do
667 {
668 values_became_useless = 0;
669 htab_traverse (cselib_hash_table, discard_useless_locs, 0);
670 }
671 while (values_became_useless);
672
673 /* Second pass: actually remove the values. */
674
675 p = &first_containing_mem;
676 for (v = *p; v != &dummy_val; v = v->next_containing_mem)
677 if (v->locs && v == canonical_cselib_val (v))
678 {
679 *p = v;
680 p = &(*p)->next_containing_mem;
681 }
682 *p = &dummy_val;
683
684 n_useless_values += n_useless_debug_values;
685 n_debug_values -= n_useless_debug_values;
686 n_useless_debug_values = 0;
687
688 htab_traverse (cselib_hash_table, discard_useless_values, 0);
689
690 gcc_assert (!n_useless_values);
691 }
692
693 /* Arrange for a value to not be removed from the hash table even if
694 it becomes useless. */
695
696 void
697 cselib_preserve_value (cselib_val *v)
698 {
699 PRESERVED_VALUE_P (v->val_rtx) = 1;
700 }
701
702 /* Test whether a value is preserved. */
703
704 bool
705 cselib_preserved_value_p (cselib_val *v)
706 {
707 return PRESERVED_VALUE_P (v->val_rtx);
708 }
709
710 /* Arrange for a REG value to be assumed constant through the whole function,
711 never invalidated and preserved across cselib_reset_table calls. */
712
713 void
714 cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
715 {
716 if (cselib_preserve_constants
717 && v->locs
718 && REG_P (v->locs->loc))
719 {
720 cfa_base_preserved_val = v;
721 cfa_base_preserved_regno = regno;
722 }
723 }
724
725 /* Clean all non-constant expressions in the hash table, but retain
726 their values. */
727
728 void
729 cselib_preserve_only_values (void)
730 {
731 int i;
732
733 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
734 cselib_invalidate_regno (i, reg_raw_mode[i]);
735
736 cselib_invalidate_mem (callmem);
737
738 remove_useless_values ();
739
740 gcc_assert (first_containing_mem == &dummy_val);
741 }
742
743 /* Arrange for a value to be marked as based on stack pointer
744 for find_base_term purposes. */
745
746 void
747 cselib_set_value_sp_based (cselib_val *v)
748 {
749 SP_BASED_VALUE_P (v->val_rtx) = 1;
750 }
751
752 /* Test whether a value is based on stack pointer for
753 find_base_term purposes. */
754
755 bool
756 cselib_sp_based_value_p (cselib_val *v)
757 {
758 return SP_BASED_VALUE_P (v->val_rtx);
759 }
760
761 /* Return the mode in which a register was last set. If X is not a
762 register, return its mode. If the mode in which the register was
763 set is not known, or the value was already clobbered, return
764 VOIDmode. */
765
766 enum machine_mode
767 cselib_reg_set_mode (const_rtx x)
768 {
769 if (!REG_P (x))
770 return GET_MODE (x);
771
772 if (REG_VALUES (REGNO (x)) == NULL
773 || REG_VALUES (REGNO (x))->elt == NULL)
774 return VOIDmode;
775
776 return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
777 }
778
779 /* Return nonzero if we can prove that X and Y contain the same value, taking
780 our gathered information into account. */
781
782 int
783 rtx_equal_for_cselib_p (rtx x, rtx y)
784 {
785 return rtx_equal_for_cselib_1 (x, y, VOIDmode);
786 }
787
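/* For example (register numbers made up): after cselib has processed
   (set (reg:SI 2) (reg:SI 1)), both registers are known to hold the
   same VALUE, so rtx_equal_for_cselib_p ((reg:SI 1), (reg:SI 2))
   returns 1 even though rtx_equal_p would return 0.  */
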
788 /* If X is a PLUS or an autoinc operation, expand the operation,
789 storing the offset, if any, in *OFF. */
790
791 static rtx
792 autoinc_split (rtx x, rtx *off, enum machine_mode memmode)
793 {
794 switch (GET_CODE (x))
795 {
796 case PLUS:
797 *off = XEXP (x, 1);
798 return XEXP (x, 0);
799
800 case PRE_DEC:
801 if (memmode == VOIDmode)
802 return x;
803
804 *off = GEN_INT (-GET_MODE_SIZE (memmode));
805 return XEXP (x, 0);
807
808 case PRE_INC:
809 if (memmode == VOIDmode)
810 return x;
811
812 *off = GEN_INT (GET_MODE_SIZE (memmode));
813 return XEXP (x, 0);
814
815 case PRE_MODIFY:
816 return XEXP (x, 1);
817
818 case POST_DEC:
819 case POST_INC:
820 case POST_MODIFY:
821 return XEXP (x, 0);
822
823 default:
824 return x;
825 }
826 }
827
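/* For example, with MEMMODE == SImode (4 bytes on most targets),
   autoinc_split turns (pre_dec (reg)) into the base (reg) with *OFF
   set to (const_int -4), which is also how such addresses are hashed
   and compared elsewhere in this file.  */
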
828 /* Return nonzero if we can prove that X and Y contain the same value,
829 taking our gathered information into account. MEMMODE holds the
830 mode of the enclosing MEM, if any, as required to deal with autoinc
831 addressing modes. If X and Y are not (known to be) part of
832 addresses, MEMMODE should be VOIDmode. */
833
834 static int
835 rtx_equal_for_cselib_1 (rtx x, rtx y, enum machine_mode memmode)
836 {
837 enum rtx_code code;
838 const char *fmt;
839 int i;
840
841 if (REG_P (x) || MEM_P (x))
842 {
843 cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);
844
845 if (e)
846 x = e->val_rtx;
847 }
848
849 if (REG_P (y) || MEM_P (y))
850 {
851 cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);
852
853 if (e)
854 y = e->val_rtx;
855 }
856
857 if (x == y)
858 return 1;
859
860 if (GET_CODE (x) == VALUE)
861 {
862 cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
863 struct elt_loc_list *l;
864
865 if (GET_CODE (y) == VALUE)
866 return e == canonical_cselib_val (CSELIB_VAL_PTR (y));
867
868 for (l = e->locs; l; l = l->next)
869 {
870 rtx t = l->loc;
871
872 /* Avoid infinite recursion. We know we have the canonical
873 value, so we can just skip any values in the equivalence
874 list. */
875 if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
876 continue;
877 else if (rtx_equal_for_cselib_1 (t, y, memmode))
878 return 1;
879 }
880
881 return 0;
882 }
883 else if (GET_CODE (y) == VALUE)
884 {
885 cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
886 struct elt_loc_list *l;
887
888 for (l = e->locs; l; l = l->next)
889 {
890 rtx t = l->loc;
891
892 if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
893 continue;
894 else if (rtx_equal_for_cselib_1 (x, t, memmode))
895 return 1;
896 }
897
898 return 0;
899 }
900
901 if (GET_MODE (x) != GET_MODE (y))
902 return 0;
903
904 if (GET_CODE (x) != GET_CODE (y))
905 {
906 rtx xorig = x, yorig = y;
907 rtx xoff = NULL, yoff = NULL;
908
909 x = autoinc_split (x, &xoff, memmode);
910 y = autoinc_split (y, &yoff, memmode);
911
912 if (!xoff != !yoff)
913 return 0;
914
915 if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode))
916 return 0;
917
918 /* Don't recurse if nothing changed. */
919 if (x != xorig || y != yorig)
920 return rtx_equal_for_cselib_1 (x, y, memmode);
921
922 return 0;
923 }
924
925 /* These won't be handled correctly by the code below. */
926 switch (GET_CODE (x))
927 {
928 case CONST_DOUBLE:
929 case CONST_FIXED:
930 case DEBUG_EXPR:
931 return 0;
932
933 case DEBUG_IMPLICIT_PTR:
934 return DEBUG_IMPLICIT_PTR_DECL (x)
935 == DEBUG_IMPLICIT_PTR_DECL (y);
936
937 case DEBUG_PARAMETER_REF:
938 return DEBUG_PARAMETER_REF_DECL (x)
939 == DEBUG_PARAMETER_REF_DECL (y);
940
941 case ENTRY_VALUE:
942 /* ENTRY_VALUEs are function invariant, so it is undesirable to
943 use rtx_equal_for_cselib_1 to compare the operands. */
944 return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
945
946 case LABEL_REF:
947 return XEXP (x, 0) == XEXP (y, 0);
948
949 case MEM:
950 /* We have to compare any autoinc operations in the addresses
951 using this MEM's mode. */
952 return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x));
953
954 default:
955 break;
956 }
957
958 code = GET_CODE (x);
959 fmt = GET_RTX_FORMAT (code);
960
961 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
962 {
963 int j;
964
965 switch (fmt[i])
966 {
967 case 'w':
968 if (XWINT (x, i) != XWINT (y, i))
969 return 0;
970 break;
971
972 case 'n':
973 case 'i':
974 if (XINT (x, i) != XINT (y, i))
975 return 0;
976 break;
977
978 case 'V':
979 case 'E':
980 /* Two vectors must have the same length. */
981 if (XVECLEN (x, i) != XVECLEN (y, i))
982 return 0;
983
984 /* And the corresponding elements must match. */
985 for (j = 0; j < XVECLEN (x, i); j++)
986 if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
987 XVECEXP (y, i, j), memmode))
988 return 0;
989 break;
990
991 case 'e':
992 if (i == 1
993 && targetm.commutative_p (x, UNKNOWN)
994 && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode)
995 && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode))
996 return 1;
997 if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode))
998 return 0;
999 break;
1000
1001 case 'S':
1002 case 's':
1003 if (strcmp (XSTR (x, i), XSTR (y, i)))
1004 return 0;
1005 break;
1006
1007 case 'u':
1008 /* These are just backpointers, so they don't matter. */
1009 break;
1010
1011 case '0':
1012 case 't':
1013 break;
1014
1015 /* It is believed that rtx's at this level will never
1016 contain anything but integers and other rtx's,
1017 except for within LABEL_REFs and SYMBOL_REFs. */
1018 default:
1019 gcc_unreachable ();
1020 }
1021 }
1022 return 1;
1023 }
1024
1025 /* We need to pass down the mode of constants through the hash table
1026 functions. For that purpose, wrap them in a CONST of the appropriate
1027 mode. */
1028 static rtx
1029 wrap_constant (enum machine_mode mode, rtx x)
1030 {
1031 if (!CONST_SCALAR_INT_P (x) && GET_CODE (x) != CONST_FIXED)
1032 return x;
1033 gcc_assert (mode != VOIDmode);
1034 return gen_rtx_CONST (mode, x);
1035 }
1036
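/* For example, a (const_int 1) being looked up in SImode is passed to
   the hash table as (const:SI (const_int 1)), so entry_and_rtx_equal_p
   can compare modes; the same constant used in DImode therefore gets
   a distinct value.  */
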
1037 /* Hash an rtx. Return 0 if we couldn't hash the rtx.
1038 For registers and memory locations, we look up their cselib_val structure
1039 and return its VALUE element.
1040 Possible reasons for returning 0 are: the object is volatile, or we couldn't
1041 find a register or memory location in the table and CREATE is zero. If
1042 CREATE is nonzero, table elts are created for regs and mem.
1043 N.B. this hash function returns the same hash value for RTXes that
1044 differ only in the order of operands, thus it is suitable for comparisons
1045 that take commutativity into account.
1046 If we wanted to also support associative rules, we'd have to use a different
1047 strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1).
1048 MEMMODE indicates the mode of an enclosing MEM, and it's only
1049 used to compute autoinc values.
1050 We used to have a MODE argument for hashing for CONST_INTs, but that
1051 didn't make sense, since it caused spurious hash differences between
1052 (set (reg:SI 1) (const_int))
1053 (plus:SI (reg:SI 2) (reg:SI 1))
1054 and
1055 (plus:SI (reg:SI 2) (const_int))
1056 If the mode is important in any context, it must be checked specifically
1057 in a comparison anyway, since relying on hash differences is unsafe. */
1058
1059 static unsigned int
1060 cselib_hash_rtx (rtx x, int create, enum machine_mode memmode)
1061 {
1062 cselib_val *e;
1063 int i, j;
1064 enum rtx_code code;
1065 const char *fmt;
1066 unsigned int hash = 0;
1067
1068 code = GET_CODE (x);
1069 hash += (unsigned) code + (unsigned) GET_MODE (x);
1070
1071 switch (code)
1072 {
1073 case VALUE:
1074 e = CSELIB_VAL_PTR (x);
1075 return e->hash;
1076
1077 case MEM:
1078 case REG:
1079 e = cselib_lookup (x, GET_MODE (x), create, memmode);
1080 if (! e)
1081 return 0;
1082
1083 return e->hash;
1084
1085 case DEBUG_EXPR:
1086 hash += ((unsigned) DEBUG_EXPR << 7)
1087 + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
1088 return hash ? hash : (unsigned int) DEBUG_EXPR;
1089
1090 case DEBUG_IMPLICIT_PTR:
1091 hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
1092 + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
1093 return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;
1094
1095 case DEBUG_PARAMETER_REF:
1096 hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
1097 + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
1098 return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;
1099
1100 case ENTRY_VALUE:
1101 /* ENTRY_VALUEs are function invariant, thus try to avoid
1102 recursing on the argument if ENTRY_VALUE is one of the
1103 forms emitted by expand_debug_expr, otherwise
1104 ENTRY_VALUE hash would depend on the current value
1105 in some register or memory. */
1106 if (REG_P (ENTRY_VALUE_EXP (x)))
1107 hash += (unsigned int) REG
1108 + (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
1109 + (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
1110 else if (MEM_P (ENTRY_VALUE_EXP (x))
1111 && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
1112 hash += (unsigned int) MEM
1113 + (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
1114 + (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
1115 else
1116 hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
1117 return hash ? hash : (unsigned int) ENTRY_VALUE;
1118
1119 case CONST_INT:
1120 hash += ((unsigned) CONST_INT << 7) + INTVAL (x);
1121 return hash ? hash : (unsigned int) CONST_INT;
1122
1123 case CONST_DOUBLE:
1124 /* This is like the general case, except that it only counts
1125 the integers representing the constant. */
1126 hash += (unsigned) code + (unsigned) GET_MODE (x);
1127 if (GET_MODE (x) != VOIDmode)
1128 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
1129 else
1130 hash += ((unsigned) CONST_DOUBLE_LOW (x)
1131 + (unsigned) CONST_DOUBLE_HIGH (x));
1132 return hash ? hash : (unsigned int) CONST_DOUBLE;
1133
1134 case CONST_FIXED:
1135 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1136 hash += fixed_hash (CONST_FIXED_VALUE (x));
1137 return hash ? hash : (unsigned int) CONST_FIXED;
1138
1139 case CONST_VECTOR:
1140 {
1141 int units;
1142 rtx elt;
1143
1144 units = CONST_VECTOR_NUNITS (x);
1145
1146 for (i = 0; i < units; ++i)
1147 {
1148 elt = CONST_VECTOR_ELT (x, i);
1149 hash += cselib_hash_rtx (elt, 0, memmode);
1150 }
1151
1152 return hash;
1153 }
1154
1155 /* Assume there is only one rtx object for any given label. */
1156 case LABEL_REF:
1157 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1158 differences and differences between each stage's debugging dumps. */
1159 hash += (((unsigned int) LABEL_REF << 7)
1160 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1161 return hash ? hash : (unsigned int) LABEL_REF;
1162
1163 case SYMBOL_REF:
1164 {
1165 /* Don't hash on the symbol's address to avoid bootstrap differences.
1166 Different hash values may cause expressions to be recorded in
1167 different orders and thus different registers to be used in the
1168 final assembler. This also avoids differences in the dump files
1169 between various stages. */
1170 unsigned int h = 0;
1171 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1172
1173 while (*p)
1174 h += (h << 7) + *p++; /* ??? revisit */
1175
1176 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1177 return hash ? hash : (unsigned int) SYMBOL_REF;
1178 }
1179
1180 case PRE_DEC:
1181 case PRE_INC:
1182 /* We can't compute these without knowing the MEM mode. */
1183 gcc_assert (memmode != VOIDmode);
1184 i = GET_MODE_SIZE (memmode);
1185 if (code == PRE_DEC)
1186 i = -i;
1187 /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
1188 like (mem:MEMMODE (plus (reg) (const_int I))). */
1189 hash += (unsigned) PLUS - (unsigned)code
1190 + cselib_hash_rtx (XEXP (x, 0), create, memmode)
1191 + cselib_hash_rtx (GEN_INT (i), create, memmode);
1192 return hash ? hash : 1 + (unsigned) PLUS;
1193
1194 case PRE_MODIFY:
1195 gcc_assert (memmode != VOIDmode);
1196 return cselib_hash_rtx (XEXP (x, 1), create, memmode);
1197
1198 case POST_DEC:
1199 case POST_INC:
1200 case POST_MODIFY:
1201 gcc_assert (memmode != VOIDmode);
1202 return cselib_hash_rtx (XEXP (x, 0), create, memmode);
1203
1204 case PC:
1205 case CC0:
1206 case CALL:
1207 case UNSPEC_VOLATILE:
1208 return 0;
1209
1210 case ASM_OPERANDS:
1211 if (MEM_VOLATILE_P (x))
1212 return 0;
1213
1214 break;
1215
1216 default:
1217 break;
1218 }
1219
1220 i = GET_RTX_LENGTH (code) - 1;
1221 fmt = GET_RTX_FORMAT (code);
1222 for (; i >= 0; i--)
1223 {
1224 switch (fmt[i])
1225 {
1226 case 'e':
1227 {
1228 rtx tem = XEXP (x, i);
1229 unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);
1230
1231 if (tem_hash == 0)
1232 return 0;
1233
1234 hash += tem_hash;
1235 }
1236 break;
1237 case 'E':
1238 for (j = 0; j < XVECLEN (x, i); j++)
1239 {
1240 unsigned int tem_hash
1241 = cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);
1242
1243 if (tem_hash == 0)
1244 return 0;
1245
1246 hash += tem_hash;
1247 }
1248 break;
1249
1250 case 's':
1251 {
1252 const unsigned char *p = (const unsigned char *) XSTR (x, i);
1253
1254 if (p)
1255 while (*p)
1256 hash += *p++;
1257 break;
1258 }
1259
1260 case 'i':
1261 hash += XINT (x, i);
1262 break;
1263
1264 case '0':
1265 case 't':
1266 /* unused */
1267 break;
1268
1269 default:
1270 gcc_unreachable ();
1271 }
1272 }
1273
1274 return hash ? hash : 1 + (unsigned int) GET_CODE (x);
1275 }
1276
1277 /* Create a new value structure for VALUE and initialize it. The mode of the
1278 value is MODE. */
1279
1280 static inline cselib_val *
1281 new_cselib_val (unsigned int hash, enum machine_mode mode, rtx x)
1282 {
1283 cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool);
1284
1285 gcc_assert (hash);
1286 gcc_assert (next_uid);
1287
1288 e->hash = hash;
1289 e->uid = next_uid++;
1290 /* We use an alloc pool to allocate this RTL construct because it
1291 accounts for about 8% of the overall memory usage. We know
1292 precisely when we can have VALUE RTXen (when cselib is active)
1293 so we don't need to put them in garbage collected memory.
1294 ??? Why should a VALUE be an RTX in the first place? */
1295 e->val_rtx = (rtx) pool_alloc (value_pool);
1296 memset (e->val_rtx, 0, RTX_HDR_SIZE);
1297 PUT_CODE (e->val_rtx, VALUE);
1298 PUT_MODE (e->val_rtx, mode);
1299 CSELIB_VAL_PTR (e->val_rtx) = e;
1300 e->addr_list = 0;
1301 e->locs = 0;
1302 e->next_containing_mem = 0;
1303
1304 if (dump_file && (dump_flags & TDF_CSELIB))
1305 {
1306 fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
1307 if (flag_dump_noaddr || flag_dump_unnumbered)
1308 fputs ("# ", dump_file);
1309 else
1310 fprintf (dump_file, "%p ", (void*)e);
1311 print_rtl_single (dump_file, x);
1312 fputc ('\n', dump_file);
1313 }
1314
1315 return e;
1316 }
1317
1318 /* ADDR_ELT is a value that is used as an address. MEM_ELT is the value that
1319 contains the data at this address. X is a MEM that represents the
1320 value. Update the two value structures to represent this situation. */
1321
1322 static void
1323 add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
1324 {
1325 struct elt_loc_list *l;
1326
1327 addr_elt = canonical_cselib_val (addr_elt);
1328 mem_elt = canonical_cselib_val (mem_elt);
1329
1330 /* Avoid duplicates. */
1331 for (l = mem_elt->locs; l; l = l->next)
1332 if (MEM_P (l->loc)
1333 && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
1334 {
1335 promote_debug_loc (l);
1336 return;
1337 }
1338
1339 addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
1340 new_elt_loc_list (mem_elt,
1341 replace_equiv_address_nv (x, addr_elt->val_rtx));
1342 if (mem_elt->next_containing_mem == NULL)
1343 {
1344 mem_elt->next_containing_mem = first_containing_mem;
1345 first_containing_mem = mem_elt;
1346 }
1347 }
1348
1349 /* Subroutine of cselib_lookup. Return a value for X, which is a MEM rtx.
1350 If CREATE, make a new one if we haven't seen it before. */
1351
1352 static cselib_val *
1353 cselib_lookup_mem (rtx x, int create)
1354 {
1355 enum machine_mode mode = GET_MODE (x);
1356 enum machine_mode addr_mode;
1357 void **slot;
1358 cselib_val *addr;
1359 cselib_val *mem_elt;
1360 struct elt_list *l;
1361
1362 if (MEM_VOLATILE_P (x) || mode == BLKmode
1363 || !cselib_record_memory
1364 || (FLOAT_MODE_P (mode) && flag_float_store))
1365 return 0;
1366
1367 addr_mode = GET_MODE (XEXP (x, 0));
1368 if (addr_mode == VOIDmode)
1369 addr_mode = Pmode;
1370
1371 /* Look up the value for the address. */
1372 addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
1373 if (! addr)
1374 return 0;
1375
1376 addr = canonical_cselib_val (addr);
1377 /* Find a value that describes a value of our mode at that address. */
1378 for (l = addr->addr_list; l; l = l->next)
1379 if (GET_MODE (l->elt->val_rtx) == mode)
1380 {
1381 promote_debug_loc (l->elt->locs);
1382 return l->elt;
1383 }
1384
1385 if (! create)
1386 return 0;
1387
1388 mem_elt = new_cselib_val (next_uid, mode, x);
1389 add_mem_for_addr (addr, mem_elt, x);
1390 slot = cselib_find_slot (wrap_constant (mode, x), mem_elt->hash,
1391 INSERT, mode);
1392 *slot = mem_elt;
1393 return mem_elt;
1394 }
1395
1396 /* Search through the possible substitutions in P. We prefer a non-reg
1397 substitution because this allows us to expand the tree further. If
1398 we find just a reg, take the lowest regno. There may be several
1399 non-reg results; we just take the first one because they will all
1400 expand to the same place. */
1401
1402 static rtx
1403 expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
1404 int max_depth)
1405 {
1406 rtx reg_result = NULL;
1407 unsigned int regno = UINT_MAX;
1408 struct elt_loc_list *p_in = p;
1409
1410 for (; p; p = p->next)
1411 {
1412 /* Return these right away to avoid returning stack pointer based
1413 expressions for the frame pointer and vice versa, which is something
1414 that would confuse DSE. See the comment in cselib_expand_value_rtx_1
1415 for more details. */
1416 if (REG_P (p->loc)
1417 && (REGNO (p->loc) == STACK_POINTER_REGNUM
1418 || REGNO (p->loc) == FRAME_POINTER_REGNUM
1419 || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
1420 || REGNO (p->loc) == cfa_base_preserved_regno))
1421 return p->loc;
1422 /* Avoid infinite recursion trying to expand a reg into
1423 the same reg. */
1424 if ((REG_P (p->loc))
1425 && (REGNO (p->loc) < regno)
1426 && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
1427 {
1428 reg_result = p->loc;
1429 regno = REGNO (p->loc);
1430 }
1431 /* Avoid infinite recursion and do not try to expand the
1432 value. */
1433 else if (GET_CODE (p->loc) == VALUE
1434 && CSELIB_VAL_PTR (p->loc)->locs == p_in)
1435 continue;
1436 else if (!REG_P (p->loc))
1437 {
1438 rtx result, note;
1439 if (dump_file && (dump_flags & TDF_CSELIB))
1440 {
1441 print_inline_rtx (dump_file, p->loc, 0);
1442 fprintf (dump_file, "\n");
1443 }
1444 if (GET_CODE (p->loc) == LO_SUM
1445 && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
1446 && p->setting_insn
1447 && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
1448 && XEXP (note, 0) == XEXP (p->loc, 1))
1449 return XEXP (p->loc, 1);
1450 result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
1451 if (result)
1452 return result;
1453 }
1454
1455 }
1456
1457 if (regno != UINT_MAX)
1458 {
1459 rtx result;
1460 if (dump_file && (dump_flags & TDF_CSELIB))
1461 fprintf (dump_file, "r%d\n", regno);
1462
1463 result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
1464 if (result)
1465 return result;
1466 }
1467
1468 if (dump_file && (dump_flags & TDF_CSELIB))
1469 {
1470 if (reg_result)
1471 {
1472 print_inline_rtx (dump_file, reg_result, 0);
1473 fprintf (dump_file, "\n");
1474 }
1475 else
1476 fprintf (dump_file, "NULL\n");
1477 }
1478 return reg_result;
1479 }
1480
1481
1482 /* Forward substitute and expand an expression out to its roots.
1483 This is the opposite of common subexpression elimination. Because
1484 local value numbering is such a weak optimization, the expanded
1485 expression is pretty much unique (not from a pointer-equality point
1486 of view but from a tree shape point of view).
1487
1488 This function returns NULL if the expansion fails. The expansion
1489 will fail if there is no value number for one of the operands or if
1490 one of the operands has been overwritten between the current insn
1491 and the beginning of the basic block. For instance x has no
1492 expansion in:
1493
1494 r1 <- r1 + 3
1495 x <- r1 + 8
1496
1497 REGS_ACTIVE is a scratch bitmap that should be clear when passed in.
1498 It is left clear on return. */
1499
1500 rtx
1501 cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
1502 {
1503 struct expand_value_data evd;
1504
1505 evd.regs_active = regs_active;
1506 evd.callback = NULL;
1507 evd.callback_arg = NULL;
1508 evd.dummy = false;
1509
1510 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1511 }
1512
1513 /* Same as cselib_expand_value_rtx, but using a callback to try to
1514 resolve some expressions. The CB function should return ORIG if it
1515 can't or does not want to deal with a certain RTX. Any other
1516 return value, including NULL, will be used as the expansion for
1517 VALUE, without any further changes. */
1518
1519 rtx
1520 cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1521 cselib_expand_callback cb, void *data)
1522 {
1523 struct expand_value_data evd;
1524
1525 evd.regs_active = regs_active;
1526 evd.callback = cb;
1527 evd.callback_arg = data;
1528 evd.dummy = false;
1529
1530 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1531 }
1532
1533 /* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
1534 or simplified. Useful to find out whether cselib_expand_value_rtx_cb
1535 would return NULL or non-NULL, without allocating new rtx. */
1536
1537 bool
1538 cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1539 cselib_expand_callback cb, void *data)
1540 {
1541 struct expand_value_data evd;
1542
1543 evd.regs_active = regs_active;
1544 evd.callback = cb;
1545 evd.callback_arg = data;
1546 evd.dummy = true;
1547
1548 return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
1549 }
1550
1551 /* Internal implementation of cselib_expand_value_rtx and
1552 cselib_expand_value_rtx_cb. */
1553
1554 static rtx
1555 cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
1556 int max_depth)
1557 {
1558 rtx copy, scopy;
1559 int i, j;
1560 RTX_CODE code;
1561 const char *format_ptr;
1562 enum machine_mode mode;
1563
1564 code = GET_CODE (orig);
1565
1566 /* In the context of dse, if we end up expanding into a huge tree, we
1567 will not have a useful address, so we might as well just give up
1568 quickly. */
1569 if (max_depth <= 0)
1570 return NULL;
1571
1572 switch (code)
1573 {
1574 case REG:
1575 {
1576 struct elt_list *l = REG_VALUES (REGNO (orig));
1577
1578 if (l && l->elt == NULL)
1579 l = l->next;
1580 for (; l; l = l->next)
1581 if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
1582 {
1583 rtx result;
1584 unsigned regno = REGNO (orig);
1585
1586 /* The only thing that we are not willing to do (this
1587 is a requirement of dse and if other potential uses
1588 need this function we should add a parm to control
1589 it) is that we will not substitute the
1590 STACK_POINTER_REGNUM, FRAME_POINTER or the
1591 HARD_FRAME_POINTER.
1592
1593 These expansions confuse the code that notices that
1594 stores into the frame go dead at the end of the
1595 function and that the frame is not affected by calls
1596 to subroutines. If you allow the
1597 STACK_POINTER_REGNUM substitution, then dse will
1598 think that parameter pushing also goes dead which is
1599 wrong. If you allow the FRAME_POINTER or the
1600 HARD_FRAME_POINTER then you lose the opportunity to
1601 make the frame assumptions. */
1602 if (regno == STACK_POINTER_REGNUM
1603 || regno == FRAME_POINTER_REGNUM
1604 || regno == HARD_FRAME_POINTER_REGNUM
1605 || regno == cfa_base_preserved_regno)
1606 return orig;
1607
1608 bitmap_set_bit (evd->regs_active, regno);
1609
1610 if (dump_file && (dump_flags & TDF_CSELIB))
1611 fprintf (dump_file, "expanding: r%d into: ", regno);
1612
1613 result = expand_loc (l->elt->locs, evd, max_depth);
1614 bitmap_clear_bit (evd->regs_active, regno);
1615
1616 if (result)
1617 return result;
1618 else
1619 return orig;
1620 }
1621 }
1622
1623 CASE_CONST_ANY:
1624 case SYMBOL_REF:
1625 case CODE_LABEL:
1626 case PC:
1627 case CC0:
1628 case SCRATCH:
1629 /* SCRATCH rtxs must be shared because they represent distinct values. */
1630 return orig;
1631 case CLOBBER:
1632 if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
1633 return orig;
1634 break;
1635
1636 case CONST:
1637 if (shared_const_p (orig))
1638 return orig;
1639 break;
1640
1641 case SUBREG:
1642 {
1643 rtx subreg;
1644
1645 if (evd->callback)
1646 {
1647 subreg = evd->callback (orig, evd->regs_active, max_depth,
1648 evd->callback_arg);
1649 if (subreg != orig)
1650 return subreg;
1651 }
1652
1653 subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
1654 max_depth - 1);
1655 if (!subreg)
1656 return NULL;
1657 scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
1658 GET_MODE (SUBREG_REG (orig)),
1659 SUBREG_BYTE (orig));
1660 if (scopy == NULL
1661 || (GET_CODE (scopy) == SUBREG
1662 && !REG_P (SUBREG_REG (scopy))
1663 && !MEM_P (SUBREG_REG (scopy))))
1664 return NULL;
1665
1666 return scopy;
1667 }
1668
1669 case VALUE:
1670 {
1671 rtx result;
1672
1673 if (dump_file && (dump_flags & TDF_CSELIB))
1674 {
1675 fputs ("\nexpanding ", dump_file);
1676 print_rtl_single (dump_file, orig);
1677 fputs (" into...", dump_file);
1678 }
1679
1680 if (evd->callback)
1681 {
1682 result = evd->callback (orig, evd->regs_active, max_depth,
1683 evd->callback_arg);
1684
1685 if (result != orig)
1686 return result;
1687 }
1688
1689 result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
1690 return result;
1691 }
1692
1693 case DEBUG_EXPR:
1694 if (evd->callback)
1695 return evd->callback (orig, evd->regs_active, max_depth,
1696 evd->callback_arg);
1697 return orig;
1698
1699 default:
1700 break;
1701 }
1702
1703 /* Copy the various flags, fields, and other information. We assume
1704 that all fields need copying, and then clear the fields that should
1705 not be copied. That is the sensible default behavior, and forces
1706 us to explicitly document why we are *not* copying a flag. */
1707 if (evd->dummy)
1708 copy = NULL;
1709 else
1710 copy = shallow_copy_rtx (orig);
1711
1712 format_ptr = GET_RTX_FORMAT (code);
1713
1714 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1715 switch (*format_ptr++)
1716 {
1717 case 'e':
1718 if (XEXP (orig, i) != NULL)
1719 {
1720 rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
1721 max_depth - 1);
1722 if (!result)
1723 return NULL;
1724 if (copy)
1725 XEXP (copy, i) = result;
1726 }
1727 break;
1728
1729 case 'E':
1730 case 'V':
1731 if (XVEC (orig, i) != NULL)
1732 {
1733 if (copy)
1734 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
1735 for (j = 0; j < XVECLEN (orig, i); j++)
1736 {
1737 rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
1738 evd, max_depth - 1);
1739 if (!result)
1740 return NULL;
1741 if (copy)
1742 XVECEXP (copy, i, j) = result;
1743 }
1744 }
1745 break;
1746
1747 case 't':
1748 case 'w':
1749 case 'i':
1750 case 's':
1751 case 'S':
1752 case 'T':
1753 case 'u':
1754 case 'B':
1755 case '0':
1756 /* These are left unchanged. */
1757 break;
1758
1759 default:
1760 gcc_unreachable ();
1761 }
1762
1763 if (evd->dummy)
1764 return orig;
1765
1766 mode = GET_MODE (copy);
1767 /* If an operand has been simplified into CONST_INT, which doesn't
1768 have a mode and the mode isn't derivable from the whole rtx's mode,
1769 try simplify_*_operation first with the mode of the original operand
1770 and as a fallback wrap CONST_INT into gen_rtx_CONST. */
1771 scopy = copy;
1772 switch (GET_RTX_CLASS (code))
1773 {
1774 case RTX_UNARY:
1775 if (CONST_INT_P (XEXP (copy, 0))
1776 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1777 {
1778 scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
1779 GET_MODE (XEXP (orig, 0)));
1780 if (scopy)
1781 return scopy;
1782 }
1783 break;
1784 case RTX_COMM_ARITH:
1785 case RTX_BIN_ARITH:
1786 /* These expressions can derive operand modes from the whole rtx's mode. */
1787 break;
1788 case RTX_TERNARY:
1789 case RTX_BITFIELD_OPS:
1790 if (CONST_INT_P (XEXP (copy, 0))
1791 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1792 {
1793 scopy = simplify_ternary_operation (code, mode,
1794 GET_MODE (XEXP (orig, 0)),
1795 XEXP (copy, 0), XEXP (copy, 1),
1796 XEXP (copy, 2));
1797 if (scopy)
1798 return scopy;
1799 }
1800 break;
1801 case RTX_COMPARE:
1802 case RTX_COMM_COMPARE:
1803 if (CONST_INT_P (XEXP (copy, 0))
1804 && GET_MODE (XEXP (copy, 1)) == VOIDmode
1805 && (GET_MODE (XEXP (orig, 0)) != VOIDmode
1806 || GET_MODE (XEXP (orig, 1)) != VOIDmode))
1807 {
1808 scopy = simplify_relational_operation (code, mode,
1809 (GET_MODE (XEXP (orig, 0))
1810 != VOIDmode)
1811 ? GET_MODE (XEXP (orig, 0))
1812 : GET_MODE (XEXP (orig, 1)),
1813 XEXP (copy, 0),
1814 XEXP (copy, 1));
1815 if (scopy)
1816 return scopy;
1817 }
1818 break;
1819 default:
1820 break;
1821 }
1822 scopy = simplify_rtx (copy);
1823 if (scopy)
1824 return scopy;
1825 return copy;
1826 }
1827
1828 /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
1829 with VALUE expressions. This way, it becomes independent of changes
1830 to registers and memory.
1831 X isn't actually modified; if modifications are needed, new rtl is
1832 allocated. However, the return value can share rtl with X.
1833 If X is within a MEM, MEMMODE must be the mode of the MEM. */
1834
1835 rtx
1836 cselib_subst_to_values (rtx x, enum machine_mode memmode)
1837 {
1838 enum rtx_code code = GET_CODE (x);
1839 const char *fmt = GET_RTX_FORMAT (code);
1840 cselib_val *e;
1841 struct elt_list *l;
1842 rtx copy = x;
1843 int i;
1844
1845 switch (code)
1846 {
1847 case REG:
1848 l = REG_VALUES (REGNO (x));
1849 if (l && l->elt == NULL)
1850 l = l->next;
1851 for (; l; l = l->next)
1852 if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
1853 return l->elt->val_rtx;
1854
1855 gcc_unreachable ();
1856
1857 case MEM:
1858 e = cselib_lookup_mem (x, 0);
1859 /* This used to happen for autoincrements, but we deal with them
1860 properly now. Remove the if stmt for the next release. */
1861 if (! e)
1862 {
1863 /* Assign a value that doesn't match any other. */
1864 e = new_cselib_val (next_uid, GET_MODE (x), x);
1865 }
1866 return e->val_rtx;
1867
1868 case ENTRY_VALUE:
1869 e = cselib_lookup (x, GET_MODE (x), 0, memmode);
1870 if (! e)
1871 break;
1872 return e->val_rtx;
1873
1874 CASE_CONST_ANY:
1875 return x;
1876
1877 case PRE_DEC:
1878 case PRE_INC:
1879 gcc_assert (memmode != VOIDmode);
1880 i = GET_MODE_SIZE (memmode);
1881 if (code == PRE_DEC)
1882 i = -i;
1883 return cselib_subst_to_values (plus_constant (GET_MODE (x),
1884 XEXP (x, 0), i),
1885 memmode);
1886
1887 case PRE_MODIFY:
1888 gcc_assert (memmode != VOIDmode);
1889 return cselib_subst_to_values (XEXP (x, 1), memmode);
1890
1891 case POST_DEC:
1892 case POST_INC:
1893 case POST_MODIFY:
1894 gcc_assert (memmode != VOIDmode);
1895 return cselib_subst_to_values (XEXP (x, 0), memmode);
1896
1897 default:
1898 break;
1899 }
1900
1901 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1902 {
1903 if (fmt[i] == 'e')
1904 {
1905 rtx t = cselib_subst_to_values (XEXP (x, i), memmode);
1906
1907 if (t != XEXP (x, i))
1908 {
1909 if (x == copy)
1910 copy = shallow_copy_rtx (x);
1911 XEXP (copy, i) = t;
1912 }
1913 }
1914 else if (fmt[i] == 'E')
1915 {
1916 int j;
1917
1918 for (j = 0; j < XVECLEN (x, i); j++)
1919 {
1920 rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);
1921
1922 if (t != XVECEXP (x, i, j))
1923 {
1924 if (XVEC (x, i) == XVEC (copy, i))
1925 {
1926 if (x == copy)
1927 copy = shallow_copy_rtx (x);
1928 XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
1929 }
1930 XVECEXP (copy, i, j) = t;
1931 }
1932 }
1933 }
1934 }
1935
1936 return copy;
1937 }
1938
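/* Illustrative usage sketch, not part of this file's API: canonicalize
   the address of MEM against the current cselib state.  Inside a MEM,
   the MEM's own mode must be passed as MEMMODE so that autoinc codes
   such as PRE_DEC above know the access size.  This assumes every
   register appearing in the address has already been given a VALUE,
   as the REG case above requires.  */
static rtx ATTRIBUTE_UNUSED
cselib_canon_address_sketch (rtx mem)
{
  return cselib_subst_to_values (XEXP (mem, 0), GET_MODE (mem));
}
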
1939 /* Wrapper for cselib_subst_to_values that indicates that X occurs in INSN. */
1940
1941 rtx
1942 cselib_subst_to_values_from_insn (rtx x, enum machine_mode memmode, rtx insn)
1943 {
1944 rtx ret;
1945 gcc_assert (!cselib_current_insn);
1946 cselib_current_insn = insn;
1947 ret = cselib_subst_to_values (x, memmode);
1948 cselib_current_insn = NULL;
1949 return ret;
1950 }
1951
1952 /* Look up the rtl expression X in our tables and return the value it
1953 has. If CREATE is zero, we return NULL if we don't know the value.
1954 Otherwise, we create a new one if possible, using mode MODE if X
1955 doesn't have a mode (i.e. because it's a constant). When X is part
1956 of an address, MEMMODE should be the mode of the enclosing MEM if
1957 we're tracking autoinc expressions. */
1958
1959 static cselib_val *
1960 cselib_lookup_1 (rtx x, enum machine_mode mode,
1961 int create, enum machine_mode memmode)
1962 {
1963 void **slot;
1964 cselib_val *e;
1965 unsigned int hashval;
1966
1967 if (GET_MODE (x) != VOIDmode)
1968 mode = GET_MODE (x);
1969
1970 if (GET_CODE (x) == VALUE)
1971 return CSELIB_VAL_PTR (x);
1972
1973 if (REG_P (x))
1974 {
1975 struct elt_list *l;
1976 unsigned int i = REGNO (x);
1977
1978 l = REG_VALUES (i);
1979 if (l && l->elt == NULL)
1980 l = l->next;
1981 for (; l; l = l->next)
1982 if (mode == GET_MODE (l->elt->val_rtx))
1983 {
1984 promote_debug_loc (l->elt->locs);
1985 return l->elt;
1986 }
1987
1988 if (! create)
1989 return 0;
1990
1991 if (i < FIRST_PSEUDO_REGISTER)
1992 {
1993 unsigned int n = hard_regno_nregs[i][mode];
1994
1995 if (n > max_value_regs)
1996 max_value_regs = n;
1997 }
1998
1999 e = new_cselib_val (next_uid, GET_MODE (x), x);
2000 new_elt_loc_list (e, x);
2001 if (REG_VALUES (i) == 0)
2002 {
2003 /* Maintain the invariant that the first entry of
2004 REG_VALUES, if present, must be the value used to set the
2005 register, or NULL. */
2006 used_regs[n_used_regs++] = i;
2007 REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
2008 }
2009 else if (cselib_preserve_constants
2010 && GET_MODE_CLASS (mode) == MODE_INT)
2011 {
2012 /* During var-tracking, try harder to find equivalences
2013 for SUBREGs. If a setter sets, say, a DImode register
2014 and that register is only ever used in SImode, add a
2015 lowpart subreg location. */
2016 struct elt_list *lwider = NULL;
2017 l = REG_VALUES (i);
2018 if (l && l->elt == NULL)
2019 l = l->next;
2020 for (; l; l = l->next)
2021 if (GET_MODE_CLASS (GET_MODE (l->elt->val_rtx)) == MODE_INT
2022 && GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
2023 > GET_MODE_SIZE (mode)
2024 && (lwider == NULL
2025 || GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
2026 < GET_MODE_SIZE (GET_MODE (lwider->elt->val_rtx))))
2027 {
2028 struct elt_loc_list *el;
2029 if (i < FIRST_PSEUDO_REGISTER
2030 && hard_regno_nregs[i][GET_MODE (l->elt->val_rtx)] != 1)
2031 continue;
2032 for (el = l->elt->locs; el; el = el->next)
2033 if (!REG_P (el->loc))
2034 break;
2035 if (el)
2036 lwider = l;
2037 }
2038 if (lwider)
2039 {
2040 rtx sub = lowpart_subreg (mode, lwider->elt->val_rtx,
2041 GET_MODE (lwider->elt->val_rtx));
2042 if (sub)
2043 new_elt_loc_list (e, sub);
2044 }
2045 }
2046 REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
2047 slot = cselib_find_slot (x, e->hash, INSERT, memmode);
2048 *slot = e;
2049 return e;
2050 }
2051
2052 if (MEM_P (x))
2053 return cselib_lookup_mem (x, create);
2054
2055 hashval = cselib_hash_rtx (x, create, memmode);
2056 /* We can't even create an entry if hashing is not possible. */
2057 if (! hashval)
2058 return 0;
2059
2060 slot = cselib_find_slot (wrap_constant (mode, x), hashval,
2061 create ? INSERT : NO_INSERT, memmode);
2062 if (slot == 0)
2063 return 0;
2064
2065 e = (cselib_val *) *slot;
2066 if (e)
2067 return e;
2068
2069 e = new_cselib_val (hashval, mode, x);
2070
2071 /* We have to fill the slot before calling cselib_subst_to_values:
2072 the hash table is inconsistent until we do so, and
2073 cselib_subst_to_values will need to do lookups. */
2074 *slot = (void *) e;
2075 new_elt_loc_list (e, cselib_subst_to_values (x, memmode));
2076 return e;
2077 }
2078
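/* Example (illustrative): with cselib_preserve_constants enabled,
   suppose (set (reg:DI 1) ...) gave (reg:DI 1) a DImode VALUE.  A
   later created SImode lookup of (reg:SI 1) then also records the
   lowpart subreg of that wider VALUE as a location, linking the
   narrow use back to the wide set for var-tracking.  */
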
2079 /* Wrapper for cselib_lookup that indicates that X occurs in INSN. */
2080
2081 cselib_val *
2082 cselib_lookup_from_insn (rtx x, enum machine_mode mode,
2083 int create, enum machine_mode memmode, rtx insn)
2084 {
2085 cselib_val *ret;
2086
2087 gcc_assert (!cselib_current_insn);
2088 cselib_current_insn = insn;
2089
2090 ret = cselib_lookup (x, mode, create, memmode);
2091
2092 cselib_current_insn = NULL;
2093
2094 return ret;
2095 }
2096
2097 /* Wrapper for cselib_lookup_1 that logs the lookup result and
2098 maintains invariants related to debug insns. */
2099
2100 cselib_val *
2101 cselib_lookup (rtx x, enum machine_mode mode,
2102 int create, enum machine_mode memmode)
2103 {
2104 cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);
2105
2106 /* ??? Should we return NULL if we're not supposed to create an
2107 entry, the found loc is a debug loc and cselib_current_insn is
2108 not a debug insn? If so, we should also avoid converting val
2109 to non-DEBUG; probably easiest by setting cselib_current_insn
2110 to NULL before the call above. */
2111
2112 if (dump_file && (dump_flags & TDF_CSELIB))
2113 {
2114 fputs ("cselib lookup ", dump_file);
2115 print_inline_rtx (dump_file, x, 2);
2116 fprintf (dump_file, " => %u:%u\n",
2117 ret ? ret->uid : 0,
2118 ret ? ret->hash : 0);
2119 }
2120
2121 return ret;
2122 }
2123
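/* Illustrative usage sketch with a hypothetical helper: test whether
   two registers are currently known to hold the same value.  VALUEs
   are shared, so pointer equality of the canonicalized elements is
   the equality test.  */
static bool ATTRIBUTE_UNUSED
regs_share_value_sketch (rtx reg1, rtx reg2)
{
  cselib_val *v1 = cselib_lookup (reg1, GET_MODE (reg1), 0, VOIDmode);
  cselib_val *v2 = cselib_lookup (reg2, GET_MODE (reg2), 0, VOIDmode);

  return (v1 && v2
          && canonical_cselib_val (v1) == canonical_cselib_val (v2));
}
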
2124 /* Invalidate any entries in reg_values that overlap REGNO. This is called
2125 if REGNO is changing. MODE is the mode of the assignment to REGNO, which
2126 is used to determine how many hard registers are being changed. If MODE
2127 is VOIDmode, then only REGNO is being changed; this is used when
2128 invalidating call clobbered registers across a call. */
2129
2130 static void
2131 cselib_invalidate_regno (unsigned int regno, enum machine_mode mode)
2132 {
2133 unsigned int endregno;
2134 unsigned int i;
2135
2136 /* If we see pseudos after reload, something is _wrong_. */
2137 gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
2138 || reg_renumber[regno] < 0);
2139
2140 /* Determine the range of registers that must be invalidated. For
2141 pseudos, only REGNO is affected. For hard regs, we must take MODE
2142 into account, and we must also invalidate lower register numbers
2143 if they contain values that overlap REGNO. */
2144 if (regno < FIRST_PSEUDO_REGISTER)
2145 {
2146 gcc_assert (mode != VOIDmode);
2147
2148 if (regno < max_value_regs)
2149 i = 0;
2150 else
2151 i = regno - max_value_regs;
2152
2153 endregno = end_hard_regno (mode, regno);
2154 }
2155 else
2156 {
2157 i = regno;
2158 endregno = regno + 1;
2159 }
2160
2161 for (; i < endregno; i++)
2162 {
2163 struct elt_list **l = &REG_VALUES (i);
2164
2165 /* Go through all known values for this reg; if it overlaps the range
2166 we're invalidating, remove the value. */
2167 while (*l)
2168 {
2169 cselib_val *v = (*l)->elt;
2170 bool had_locs;
2171 rtx setting_insn;
2172 struct elt_loc_list **p;
2173 unsigned int this_last = i;
2174
2175 if (i < FIRST_PSEUDO_REGISTER && v != NULL)
2176 this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
2177
2178 if (this_last < regno || v == NULL
2179 || (v == cfa_base_preserved_val
2180 && i == cfa_base_preserved_regno))
2181 {
2182 l = &(*l)->next;
2183 continue;
2184 }
2185
2186 /* We have an overlap. */
2187 if (*l == REG_VALUES (i))
2188 {
2189 /* Maintain the invariant that the first entry of
2190 REG_VALUES, if present, must be the value used to set
2191 the register, or NULL. This is also nice because
2192 then we won't push the same regno onto used_regs
2193 multiple times. */
2194 (*l)->elt = NULL;
2195 l = &(*l)->next;
2196 }
2197 else
2198 unchain_one_elt_list (l);
2199
2200 v = canonical_cselib_val (v);
2201
2202 had_locs = v->locs != NULL;
2203 setting_insn = v->locs ? v->locs->setting_insn : NULL;
2204
2205 /* Now, we clear the mapping from value to reg. It must exist, so
2206 this code will crash intentionally if it doesn't. */
2207 for (p = &v->locs; ; p = &(*p)->next)
2208 {
2209 rtx x = (*p)->loc;
2210
2211 if (REG_P (x) && REGNO (x) == i)
2212 {
2213 unchain_one_elt_loc_list (p);
2214 break;
2215 }
2216 }
2217
2218 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2219 {
2220 if (setting_insn && DEBUG_INSN_P (setting_insn))
2221 n_useless_debug_values++;
2222 else
2223 n_useless_values++;
2224 }
2225 }
2226 }
2227 }
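
/* Example (illustrative): on a 32-bit target where (reg:DI 0) spans
   hard registers 0 and 1, invalidating hard register 1 must also drop
   values recorded for (reg:DI 0).  That is why the loop above starts
   at REGNO - MAX_VALUE_REGS rather than at REGNO, and recomputes each
   value's last hard register before deciding on an overlap.  */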
2228 \f
2229 /* Invalidate any locations in the table which are changed because of a
2230 store to MEM_RTX. If this is called because of a non-const call
2231 instruction, MEM_RTX is the callmem rtx, (mem:BLK (scratch)). */
2232
2233 static void
2234 cselib_invalidate_mem (rtx mem_rtx)
2235 {
2236 cselib_val **vp, *v, *next;
2237 int num_mems = 0;
2238 rtx mem_addr;
2239
2240 mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
2241 mem_rtx = canon_rtx (mem_rtx);
2242
2243 vp = &first_containing_mem;
2244 for (v = *vp; v != &dummy_val; v = next)
2245 {
2246 bool has_mem = false;
2247 struct elt_loc_list **p = &v->locs;
2248 bool had_locs = v->locs != NULL;
2249 rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;
2250
2251 while (*p)
2252 {
2253 rtx x = (*p)->loc;
2254 cselib_val *addr;
2255 struct elt_list **mem_chain;
2256
2257 /* MEMs may occur in locations only at the top level; below
2258 that every MEM or REG is substituted by its VALUE. */
2259 if (!MEM_P (x))
2260 {
2261 p = &(*p)->next;
2262 continue;
2263 }
2264 if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
2265 && ! canon_true_dependence (mem_rtx, GET_MODE (mem_rtx),
2266 mem_addr, x, NULL_RTX))
2267 {
2268 has_mem = true;
2269 num_mems++;
2270 p = &(*p)->next;
2271 continue;
2272 }
2273
2274 /* This one overlaps. */
2275 /* We must have a mapping from this MEM's address to the
2276 value (V). Remove that, too. */
2277 addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
2278 addr = canonical_cselib_val (addr);
2279 gcc_checking_assert (v == canonical_cselib_val (v));
2280 mem_chain = &addr->addr_list;
2281 for (;;)
2282 {
2283 cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);
2284
2285 if (canon == v)
2286 {
2287 unchain_one_elt_list (mem_chain);
2288 break;
2289 }
2290
2291 /* Record canonicalized elt. */
2292 (*mem_chain)->elt = canon;
2293
2294 mem_chain = &(*mem_chain)->next;
2295 }
2296
2297 unchain_one_elt_loc_list (p);
2298 }
2299
2300 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2301 {
2302 if (setting_insn && DEBUG_INSN_P (setting_insn))
2303 n_useless_debug_values++;
2304 else
2305 n_useless_values++;
2306 }
2307
2308 next = v->next_containing_mem;
2309 if (has_mem)
2310 {
2311 *vp = v;
2312 vp = &(*vp)->next_containing_mem;
2313 }
2314 else
2315 v->next_containing_mem = NULL;
2316 }
2317 *vp = &dummy_val;
2318 }
2319
2320 /* Invalidate DEST, which is being assigned to or clobbered. */
2321
2322 void
2323 cselib_invalidate_rtx (rtx dest)
2324 {
2325 while (GET_CODE (dest) == SUBREG
2326 || GET_CODE (dest) == ZERO_EXTRACT
2327 || GET_CODE (dest) == STRICT_LOW_PART)
2328 dest = XEXP (dest, 0);
2329
2330 if (REG_P (dest))
2331 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
2332 else if (MEM_P (dest))
2333 cselib_invalidate_mem (dest);
2334 }
2335
2336 /* A wrapper for cselib_invalidate_rtx to be called via note_stores. */
2337
2338 static void
2339 cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED,
2340 void *data ATTRIBUTE_UNUSED)
2341 {
2342 cselib_invalidate_rtx (dest);
2343 }
2344
2345 /* Record the result of a SET instruction. DEST is being set; the source
2346 contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT
2347 describes its address. */
2348
2349 static void
2350 cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
2351 {
2352 int dreg = REG_P (dest) ? (int) REGNO (dest) : -1;
2353
2354 if (src_elt == 0 || side_effects_p (dest))
2355 return;
2356
2357 if (dreg >= 0)
2358 {
2359 if (dreg < FIRST_PSEUDO_REGISTER)
2360 {
2361 unsigned int n = hard_regno_nregs[dreg][GET_MODE (dest)];
2362
2363 if (n > max_value_regs)
2364 max_value_regs = n;
2365 }
2366
2367 if (REG_VALUES (dreg) == 0)
2368 {
2369 used_regs[n_used_regs++] = dreg;
2370 REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
2371 }
2372 else
2373 {
2374 /* The register should have been invalidated. */
2375 gcc_assert (REG_VALUES (dreg)->elt == 0);
2376 REG_VALUES (dreg)->elt = src_elt;
2377 }
2378
2379 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
2380 n_useless_values--;
2381 new_elt_loc_list (src_elt, dest);
2382 }
2383 else if (MEM_P (dest) && dest_addr_elt != 0
2384 && cselib_record_memory)
2385 {
2386 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
2387 n_useless_values--;
2388 add_mem_for_addr (dest_addr_elt, src_elt, dest);
2389 }
2390 }
2391
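/* Example (illustrative): after processing
     (set (reg:SI 2) (reg:SI 1))
   SRC_ELT is the VALUE already associated with (reg:SI 1); the code
   above makes REG_VALUES (2) point at that same VALUE and adds
   (reg:SI 2) to its location list.  */
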
2392 /* Make ELT and X's VALUE equivalent to each other at INSN. */
2393
2394 void
2395 cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx insn)
2396 {
2397 cselib_val *nelt;
2398 rtx save_cselib_current_insn = cselib_current_insn;
2399
2400 gcc_checking_assert (elt);
2401 gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
2402 gcc_checking_assert (!side_effects_p (x));
2403
2404 cselib_current_insn = insn;
2405
2406 nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);
2407
2408 if (nelt != elt)
2409 {
2410 cselib_any_perm_equivs = true;
2411
2412 if (!PRESERVED_VALUE_P (nelt->val_rtx))
2413 cselib_preserve_value (nelt);
2414
2415 new_elt_loc_list (nelt, elt->val_rtx);
2416 }
2417
2418 cselib_current_insn = save_cselib_current_insn;
2419 }
2420
2421 /* Return TRUE if any permanent equivalences have been recorded since
2422 the table was last initialized. */
2423 bool
2424 cselib_have_permanent_equivalences (void)
2425 {
2426 return cselib_any_perm_equivs;
2427 }
2428
2429 /* There is no good way to determine how many elements there can be
2430 in a PARALLEL. Since stack space for the array is fairly cheap, use a really large number. */
2431 #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
2432
2433 struct cselib_record_autoinc_data
2434 {
2435 struct cselib_set *sets;
2436 int n_sets;
2437 };
2438
2439 /* Callback for for_each_inc_dec. Records in ARG the SETs implied by
2440 autoinc RTXs: SRC plus SRCOFF (if non-NULL) is stored in DEST. */
2441
2442 static int
2443 cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
2444 rtx dest, rtx src, rtx srcoff, void *arg)
2445 {
2446 struct cselib_record_autoinc_data *data;
2447 data = (struct cselib_record_autoinc_data *)arg;
2448
2449 data->sets[data->n_sets].dest = dest;
2450
2451 if (srcoff)
2452 data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
2453 else
2454 data->sets[data->n_sets].src = src;
2455
2456 data->n_sets++;
2457
2458 return -1;
2459 }
2460
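/* Example (illustrative): for an insn containing
     (mem:SI (post_inc:SI (reg:SI 1)))
   the callback above is invoked with DEST = SRC = (reg:SI 1) and
   SRCOFF = (const_int 4), so the implied set recorded is
   (set (reg:SI 1) (plus:SI (reg:SI 1) (const_int 4))).  */
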
2461 /* Record the effects of any sets and autoincs in INSN. */
2462 static void
2463 cselib_record_sets (rtx insn)
2464 {
2465 int n_sets = 0;
2466 int i;
2467 struct cselib_set sets[MAX_SETS];
2468 rtx body = PATTERN (insn);
2469 rtx cond = 0;
2470 int n_sets_before_autoinc;
2471 struct cselib_record_autoinc_data data;
2472
2474 if (GET_CODE (body) == COND_EXEC)
2475 {
2476 cond = COND_EXEC_TEST (body);
2477 body = COND_EXEC_CODE (body);
2478 }
2479
2480 /* Find all sets. */
2481 if (GET_CODE (body) == SET)
2482 {
2483 sets[0].src = SET_SRC (body);
2484 sets[0].dest = SET_DEST (body);
2485 n_sets = 1;
2486 }
2487 else if (GET_CODE (body) == PARALLEL)
2488 {
2489 /* Look through the PARALLEL and record the values being
2490 set, if possible. Also handle any CLOBBERs. */
2491 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
2492 {
2493 rtx x = XVECEXP (body, 0, i);
2494
2495 if (GET_CODE (x) == SET)
2496 {
2497 sets[n_sets].src = SET_SRC (x);
2498 sets[n_sets].dest = SET_DEST (x);
2499 n_sets++;
2500 }
2501 }
2502 }
2503
2504 if (n_sets == 1
2505 && MEM_P (sets[0].src)
2506 && !cselib_record_memory
2507 && MEM_READONLY_P (sets[0].src))
2508 {
2509 rtx note = find_reg_equal_equiv_note (insn);
2510
2511 if (note && CONSTANT_P (XEXP (note, 0)))
2512 sets[0].src = XEXP (note, 0);
2513 }
2514
2515 data.sets = sets;
2516 data.n_sets = n_sets_before_autoinc = n_sets;
2517 for_each_inc_dec (&insn, cselib_record_autoinc_cb, &data);
2518 n_sets = data.n_sets;
2519
2520 /* Look up the values that are read. Do this before invalidating the
2521 locations that are written. */
2522 for (i = 0; i < n_sets; i++)
2523 {
2524 rtx dest = sets[i].dest;
2525
2526 /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
2527 the low part after invalidating any knowledge about larger modes. */
2528 if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
2529 sets[i].dest = dest = XEXP (dest, 0);
2530
2531 /* We don't know how to record anything but REG or MEM. */
2532 if (REG_P (dest)
2533 || (MEM_P (dest) && cselib_record_memory))
2534 {
2535 rtx src = sets[i].src;
2536 if (cond)
2537 src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
2538 sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
2539 if (MEM_P (dest))
2540 {
2541 enum machine_mode address_mode = get_address_mode (dest);
2542
2543 sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
2544 address_mode, 1,
2545 GET_MODE (dest));
2546 }
2547 else
2548 sets[i].dest_addr_elt = 0;
2549 }
2550 }
2551
2552 if (cselib_record_sets_hook)
2553 cselib_record_sets_hook (insn, sets, n_sets);
2554
2555 /* Invalidate all locations written by this insn. Note that the elts we
2556 looked up in the previous loop aren't affected, just some of their
2557 locations may go away. */
2558 note_stores (body, cselib_invalidate_rtx_note_stores, NULL);
2559
2560 for (i = n_sets_before_autoinc; i < n_sets; i++)
2561 cselib_invalidate_rtx (sets[i].dest);
2562
2563 /* If this is an asm, look for duplicate sets. This can happen when the
2564 user uses the same location as an output multiple times. This is valid
2565 if the outputs are not actually used thereafter. Treat this case as
2566 if the value isn't actually set. We do this by smashing the destination
2567 to pc_rtx, so that we won't record the value later. */
2568 if (n_sets >= 2 && asm_noperands (body) >= 0)
2569 {
2570 for (i = 0; i < n_sets; i++)
2571 {
2572 rtx dest = sets[i].dest;
2573 if (REG_P (dest) || MEM_P (dest))
2574 {
2575 int j;
2576 for (j = i + 1; j < n_sets; j++)
2577 if (rtx_equal_p (dest, sets[j].dest))
2578 {
2579 sets[i].dest = pc_rtx;
2580 sets[j].dest = pc_rtx;
2581 }
2582 }
2583 }
2584 }
2585
2586 /* Now enter the equivalences in our tables. */
2587 for (i = 0; i < n_sets; i++)
2588 {
2589 rtx dest = sets[i].dest;
2590 if (REG_P (dest)
2591 || (MEM_P (dest) && cselib_record_memory))
2592 cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
2593 }
2594 }
2595
2596 /* Record the effects of INSN. */
2597
2598 void
2599 cselib_process_insn (rtx insn)
2600 {
2601 int i;
2602 rtx x;
2603
2604 cselib_current_insn = insn;
2605
2606 /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp. */
2607 if (LABEL_P (insn)
2608 || (CALL_P (insn)
2609 && find_reg_note (insn, REG_SETJMP, NULL))
2610 || (NONJUMP_INSN_P (insn)
2611 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2612 && MEM_VOLATILE_P (PATTERN (insn))))
2613 {
2614 cselib_reset_table (next_uid);
2615 cselib_current_insn = NULL_RTX;
2616 return;
2617 }
2618
2619 if (! INSN_P (insn))
2620 {
2621 cselib_current_insn = NULL_RTX;
2622 return;
2623 }
2624
2625 /* If this is a call instruction, forget anything stored in a
2626 call clobbered register, or, if this is not a const call, in
2627 memory. */
2628 if (CALL_P (insn))
2629 {
2630 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2631 if (call_used_regs[i]
2632 || (REG_VALUES (i) && REG_VALUES (i)->elt
2633 && HARD_REGNO_CALL_PART_CLOBBERED (i,
2634 GET_MODE (REG_VALUES (i)->elt->val_rtx))))
2635 cselib_invalidate_regno (i, reg_raw_mode[i]);
2636
2637 /* Since it is not clear how cselib is going to be used, be
2638 conservative here and treat looping pure or const functions
2639 as if they were regular functions. */
2640 if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
2641 || !(RTL_CONST_OR_PURE_CALL_P (insn)))
2642 cselib_invalidate_mem (callmem);
2643 }
2644
2645 cselib_record_sets (insn);
2646
2647 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
2648 after we have processed the insn. */
2649 if (CALL_P (insn))
2650 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
2651 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
2652 cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
2653
2654 cselib_current_insn = NULL_RTX;
2655
2656 if (n_useless_values > MAX_USELESS_VALUES
2657 /* remove_useless_values is linear in the hash table size. Avoid
2658 quadratic behavior for very large hashtables with very few
2659 useless elements. */
2660 && ((unsigned int)n_useless_values
2661 > (cselib_hash_table->n_elements
2662 - cselib_hash_table->n_deleted
2663 - n_debug_values) / 4))
2664 remove_useless_values ();
2665 }
2666
2667 /* Initialize cselib for one pass. The caller must also call
2668 init_alias_analysis. */
2669
2670 void
2671 cselib_init (int record_what)
2672 {
2673 elt_list_pool = create_alloc_pool ("elt_list",
2674 sizeof (struct elt_list), 10);
2675 elt_loc_list_pool = create_alloc_pool ("elt_loc_list",
2676 sizeof (struct elt_loc_list), 10);
2677 cselib_val_pool = create_alloc_pool ("cselib_val_list",
2678 sizeof (cselib_val), 10);
2679 value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
2680 cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
2681 cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
2682 cselib_any_perm_equivs = false;
2683
2684 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
2685 see canon_true_dependence. This is only created once. */
2686 if (! callmem)
2687 callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2688
2689 cselib_nregs = max_reg_num ();
2690
2691 /* We keep reg_values allocated across passes to avoid the expense of
2692 recreating it each time. Reallocate it, however, if it happens to be too large. */
2693 if (!reg_values || reg_values_size < cselib_nregs
2694 || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
2695 {
2696 free (reg_values);
2697 /* Leave some space for newly emitted instructions so we don't end
2698 up reallocating in between passes. */
2699 reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
2700 reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
2701 }
2702 used_regs = XNEWVEC (unsigned int, cselib_nregs);
2703 n_used_regs = 0;
2704 cselib_hash_table = htab_create (31, get_value_hash,
2705 entry_and_rtx_equal_p, NULL);
2706 next_uid = 1;
2707 }
2708
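/* Illustrative sketch with a hypothetical helper showing the life
   cycle this file expects from a client pass: initialize, feed the
   insns in order, then tear down.  As noted above, the caller must
   also call init_alias_analysis; cselib_process_insn itself skips
   non-insns and resets the table at labels and volatile asms.  */
static void ATTRIBUTE_UNUSED
cselib_scan_sequence_sketch (rtx first, rtx last)
{
  rtx insn;

  cselib_init (CSELIB_RECORD_MEMORY);
  for (insn = first; insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
    cselib_process_insn (insn);
  cselib_finish ();
}
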
2709 /* Called when the current user is done with cselib. */
2710
2711 void
2712 cselib_finish (void)
2713 {
2714 cselib_discard_hook = NULL;
2715 cselib_preserve_constants = false;
2716 cselib_any_perm_equivs = false;
2717 cfa_base_preserved_val = NULL;
2718 cfa_base_preserved_regno = INVALID_REGNUM;
2719 free_alloc_pool (elt_list_pool);
2720 free_alloc_pool (elt_loc_list_pool);
2721 free_alloc_pool (cselib_val_pool);
2722 free_alloc_pool (value_pool);
2723 cselib_clear_table ();
2724 htab_delete (cselib_hash_table);
2725 free (used_regs);
2726 used_regs = 0;
2727 cselib_hash_table = 0;
2728 n_useless_values = 0;
2729 n_useless_debug_values = 0;
2730 n_debug_values = 0;
2731 next_uid = 0;
2732 }
2733
2734 /* Dump the cselib_val *X to the FILE pointed to by INFO. */
2735
2736 static int
2737 dump_cselib_val (void **x, void *info)
2738 {
2739 cselib_val *v = (cselib_val *)*x;
2740 FILE *out = (FILE *)info;
2741 bool need_lf = true;
2742
2743 print_inline_rtx (out, v->val_rtx, 0);
2744
2745 if (v->locs)
2746 {
2747 struct elt_loc_list *l = v->locs;
2748 if (need_lf)
2749 {
2750 fputc ('\n', out);
2751 need_lf = false;
2752 }
2753 fputs (" locs:", out);
2754 do
2755 {
2756 if (l->setting_insn)
2757 fprintf (out, "\n from insn %i ",
2758 INSN_UID (l->setting_insn));
2759 else
2760 fprintf (out, "\n ");
2761 print_inline_rtx (out, l->loc, 4);
2762 }
2763 while ((l = l->next));
2764 fputc ('\n', out);
2765 }
2766 else
2767 {
2768 fputs (" no locs", out);
2769 need_lf = true;
2770 }
2771
2772 if (v->addr_list)
2773 {
2774 struct elt_list *e = v->addr_list;
2775 if (need_lf)
2776 {
2777 fputc ('\n', out);
2778 need_lf = false;
2779 }
2780 fputs (" addr list:", out);
2781 do
2782 {
2783 fputs ("\n ", out);
2784 print_inline_rtx (out, e->elt->val_rtx, 2);
2785 }
2786 while ((e = e->next));
2787 fputc ('\n', out);
2788 }
2789 else
2790 {
2791 fputs (" no addrs", out);
2792 need_lf = true;
2793 }
2794
2795 if (v->next_containing_mem == &dummy_val)
2796 fputs (" last mem\n", out);
2797 else if (v->next_containing_mem)
2798 {
2799 fputs (" next mem ", out);
2800 print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
2801 fputc ('\n', out);
2802 }
2803 else if (need_lf)
2804 fputc ('\n', out);
2805
2806 return 1;
2807 }
2808
2809 /* Dump to OUT everything in the CSELIB table. */
2810
2811 void
2812 dump_cselib_table (FILE *out)
2813 {
2814 fprintf (out, "cselib hash table:\n");
2815 htab_traverse (cselib_hash_table, dump_cselib_val, out);
2816 if (first_containing_mem != &dummy_val)
2817 {
2818 fputs ("first mem ", out);
2819 print_inline_rtx (out, first_containing_mem->val_rtx, 2);
2820 fputc ('\n', out);
2821 }
2822 fprintf (out, "next uid %i\n", next_uid);
2823 }
2824
2825 #include "gt-cselib.h"