re PR debug/43176 (var-tracking fails to notice a value change)
[gcc.git] / gcc / cselib.c
1 /* Common subexpression elimination library for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "flags.h"
32 #include "real.h"
33 #include "insn-config.h"
34 #include "recog.h"
35 #include "function.h"
36 #include "emit-rtl.h"
37 #include "toplev.h"
38 #include "output.h"
39 #include "ggc.h"
40 #include "hashtab.h"
41 #include "tree-pass.h"
42 #include "cselib.h"
43 #include "params.h"
44 #include "alloc-pool.h"
45 #include "target.h"
46
47 static bool cselib_record_memory;
48 static int entry_and_rtx_equal_p (const void *, const void *);
49 static hashval_t get_value_hash (const void *);
50 static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
51 static struct elt_loc_list *new_elt_loc_list (struct elt_loc_list *, rtx);
52 static void unchain_one_value (cselib_val *);
53 static void unchain_one_elt_list (struct elt_list **);
54 static void unchain_one_elt_loc_list (struct elt_loc_list **);
55 static int discard_useless_locs (void **, void *);
56 static int discard_useless_values (void **, void *);
57 static void remove_useless_values (void);
58 static unsigned int cselib_hash_rtx (rtx, int);
59 static cselib_val *new_cselib_val (unsigned int, enum machine_mode, rtx);
60 static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
61 static cselib_val *cselib_lookup_mem (rtx, int);
62 static void cselib_invalidate_regno (unsigned int, enum machine_mode);
63 static void cselib_invalidate_mem (rtx);
64 static void cselib_record_set (rtx, cselib_val *, cselib_val *);
65 static void cselib_record_sets (rtx);
66
67 struct expand_value_data
68 {
69 bitmap regs_active;
70 cselib_expand_callback callback;
71 void *callback_arg;
72 bool dummy;
73 };
74
75 static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);
76
77 /* There are three ways in which cselib can look up an rtx:
78 - for a REG, the reg_values table (which is indexed by regno) is used
79 - for a MEM, we recursively look up its address and then follow the
80 addr_list of that value
81 - for everything else, we compute a hash value and go through the hash
82 table. Since different rtx's can still have the same hash value,
83 this involves walking the table entries for a given value and comparing
84 the locations of the entries with the rtx we are looking up. */
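
/* Illustrative sketch (not part of cselib itself): all three cases are
   reached through the same entry point.  For some rtx X taken from an
   insn,

     cselib_val *v = cselib_lookup (x, GET_MODE (x), 0);

   returns the value X is known to hold, or NULL; cselib_lookup below
   dispatches on REG, MEM and the hash table as described above.  */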
85
86 /* A table that enables us to look up elts by their value. */
87 static htab_t cselib_hash_table;
88
89 /* This is a global so we don't have to pass it through every function.
90 It is used in new_elt_loc_list to set SETTING_INSN. */
91 static rtx cselib_current_insn;
92
93 /* The unique id that the next created value will take.  */
94 static unsigned int next_uid;
95
96 /* The number of registers we had when the varrays were last resized. */
97 static unsigned int cselib_nregs;
98
99 /* Count values without known locations. Whenever this grows too big, we
100 remove these useless values from the table. */
101 static int n_useless_values;
102
103 /* Number of useless values before we remove them from the hash table. */
104 #define MAX_USELESS_VALUES 32
105
106 /* This table maps from register number to values. It does not
107 contain pointers to cselib_val structures, but rather elt_lists.
108 The purpose is to be able to refer to the same register in
109 different modes. The first element of the list defines the mode in
110 which the register was set; if the mode is unknown or the value is
111 no longer valid in that mode, ELT will be NULL for the first
112 element. */
113 static struct elt_list **reg_values;
114 static unsigned int reg_values_size;
115 #define REG_VALUES(i) reg_values[i]
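
/* A sketch of walking REG_VALUES while honoring the invariant above;
   the first element is skipped when its ELT is null, since it then only
   records the (unknown or stale) mode of the set.  This mirrors the
   loops in cselib_lookup and cselib_subst_to_values below (REGNO and
   MODE are hypothetical here):

     struct elt_list *l = REG_VALUES (regno);
     if (l && l->elt == NULL)
       l = l->next;
     for (; l; l = l->next)
       if (GET_MODE (l->elt->val_rtx) == mode)
         return l->elt;
*/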
116
117 /* The largest number of hard regs used by any entry added to the
118 REG_VALUES table. Cleared on each cselib_clear_table() invocation. */
119 static unsigned int max_value_regs;
120
121 /* Here the set of indices I with REG_VALUES(I) != 0 is saved. This is used
122 in cselib_clear_table() for fast emptying. */
123 static unsigned int *used_regs;
124 static unsigned int n_used_regs;
125
126 /* We pass this to cselib_invalidate_mem to invalidate all of
127 memory for a non-const call instruction. */
128 static GTY(()) rtx callmem;
129
130 /* Set by discard_useless_locs if it deleted the last location of any
131 value. */
132 static int values_became_useless;
133
134 /* Used as stop element of the containing_mem list so we can check
135 presence in the list by checking the next pointer. */
136 static cselib_val dummy_val;
137
138 /* Used to list all values that contain a memory reference.
139 May or may not contain the useless values - the list is compacted
140 each time memory is invalidated. */
141 static cselib_val *first_containing_mem = &dummy_val;
142 static alloc_pool elt_loc_list_pool, elt_list_pool, cselib_val_pool, value_pool;
143
144 /* If nonnull, cselib will call this function before freeing useless
145 VALUEs. A VALUE is deemed useless if its "locs" field is null. */
146 void (*cselib_discard_hook) (cselib_val *);
147
148 /* If nonnull, cselib will call this function before recording sets or
149 even clobbering outputs of INSN.  All the recorded sets will be
150 represented in the array sets[n_sets].  Comparing a value's uid with
151 the result of cselib_get_next_uid sampled before the insn was
152 processed tells whether the value was introduced by this instruction.  */
153 void (*cselib_record_sets_hook) (rtx insn, struct cselib_set *sets,
154 int n_sets);
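
/* Sketch of installing the hook (hypothetical client code; in this
   tree, var-tracking is the user of this hook):

     static void
     my_record_sets (rtx insn, struct cselib_set *sets, int n_sets)
     {
       ... inspect SETS before cselib updates its tables ...
     }

     cselib_record_sets_hook = my_record_sets;
*/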
155
156 #define PRESERVED_VALUE_P(RTX) \
157 (RTL_FLAG_CHECK1("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
158 #define LONG_TERM_PRESERVED_VALUE_P(RTX) \
159 (RTL_FLAG_CHECK1("LONG_TERM_PRESERVED_VALUE_P", (RTX), VALUE)->in_struct)
160
161 \f
162
163 /* Allocate a struct elt_list and fill in its two elements with the
164 arguments. */
165
166 static inline struct elt_list *
167 new_elt_list (struct elt_list *next, cselib_val *elt)
168 {
169 struct elt_list *el;
170 el = (struct elt_list *) pool_alloc (elt_list_pool);
171 el->next = next;
172 el->elt = elt;
173 return el;
174 }
175
176 /* Allocate a struct elt_loc_list and fill in its two elements with the
177 arguments. */
178
179 static inline struct elt_loc_list *
180 new_elt_loc_list (struct elt_loc_list *next, rtx loc)
181 {
182 struct elt_loc_list *el;
183 el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
184 el->next = next;
185 el->loc = loc;
186 el->setting_insn = cselib_current_insn;
187 return el;
188 }
189
190 /* The elt_list at *PL is no longer needed. Unchain it and free its
191 storage. */
192
193 static inline void
194 unchain_one_elt_list (struct elt_list **pl)
195 {
196 struct elt_list *l = *pl;
197
198 *pl = l->next;
199 pool_free (elt_list_pool, l);
200 }
201
202 /* Likewise for elt_loc_lists. */
203
204 static void
205 unchain_one_elt_loc_list (struct elt_loc_list **pl)
206 {
207 struct elt_loc_list *l = *pl;
208
209 *pl = l->next;
210 pool_free (elt_loc_list_pool, l);
211 }
212
213 /* Likewise for cselib_vals. This also frees the addr_list associated with
214 V. */
215
216 static void
217 unchain_one_value (cselib_val *v)
218 {
219 while (v->addr_list)
220 unchain_one_elt_list (&v->addr_list);
221
222 pool_free (cselib_val_pool, v);
223 }
224
225 /* Remove all entries from the hash table. Also used during
226 initialization. */
227
228 void
229 cselib_clear_table (void)
230 {
231 cselib_reset_table (1);
232 }
233
234 /* Remove all entries from the hash table, arranging for the next
235 value to be numbered NUM. */
236
237 void
238 cselib_reset_table (unsigned int num)
239 {
240 unsigned int i;
241
242 for (i = 0; i < n_used_regs; i++)
243 REG_VALUES (used_regs[i]) = 0;
244
245 max_value_regs = 0;
246
247 n_used_regs = 0;
248
249 /* ??? Preserve constants? */
250 htab_empty (cselib_hash_table);
251
252 n_useless_values = 0;
253
254 next_uid = num;
255
256 first_containing_mem = &dummy_val;
257 }
258
259 /* Return the number of the next value that will be generated. */
260
261 unsigned int
262 cselib_get_next_uid (void)
263 {
264 return next_uid;
265 }
266
267 /* The equality test for our hash table. The first argument ENTRY is a table
268 element (i.e. a cselib_val), while the second arg X is an rtx. We know
269 that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
270 CONST of an appropriate mode. */
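
/* For example, a lookup of (const_int 8) in SImode reaches this
   function as (const:SI (const_int 8)) courtesy of wrap_constant;
   the unwrapping below strips the CONST again before the locations
   are compared.  */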
271
272 static int
273 entry_and_rtx_equal_p (const void *entry, const void *x_arg)
274 {
275 struct elt_loc_list *l;
276 const cselib_val *const v = (const cselib_val *) entry;
277 rtx x = CONST_CAST_RTX ((const_rtx)x_arg);
278 enum machine_mode mode = GET_MODE (x);
279
280 gcc_assert (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
281 && (mode != VOIDmode || GET_CODE (x) != CONST_DOUBLE));
282
283 if (mode != GET_MODE (v->val_rtx))
284 return 0;
285
286 /* Unwrap X if necessary. */
287 if (GET_CODE (x) == CONST
288 && (CONST_INT_P (XEXP (x, 0))
289 || GET_CODE (XEXP (x, 0)) == CONST_FIXED
290 || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
291 x = XEXP (x, 0);
292
293 /* We don't guarantee that distinct rtx's have different hash values,
294 so we need to do a comparison. */
295 for (l = v->locs; l; l = l->next)
296 if (rtx_equal_for_cselib_p (l->loc, x))
297 return 1;
298
299 return 0;
300 }
301
302 /* The hash function for our hash table. The value is always computed with
303 cselib_hash_rtx when adding an element; this function just extracts the
304 hash value from a cselib_val structure. */
305
306 static hashval_t
307 get_value_hash (const void *entry)
308 {
309 const cselib_val *const v = (const cselib_val *) entry;
310 return v->hash;
311 }
312
313 /* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
314 only return true for values which point to a cselib_val whose value
315 element has been set to zero, which implies the cselib_val will be
316 removed. */
317
318 int
319 references_value_p (const_rtx x, int only_useless)
320 {
321 const enum rtx_code code = GET_CODE (x);
322 const char *fmt = GET_RTX_FORMAT (code);
323 int i, j;
324
325 if (GET_CODE (x) == VALUE
326 && (! only_useless || CSELIB_VAL_PTR (x)->locs == 0))
327 return 1;
328
329 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
330 {
331 if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
332 return 1;
333 else if (fmt[i] == 'E')
334 for (j = 0; j < XVECLEN (x, i); j++)
335 if (references_value_p (XVECEXP (x, i, j), only_useless))
336 return 1;
337 }
338
339 return 0;
340 }
341
342 /* For all locations found in X, delete locations that reference useless
343 values (i.e. values without any location). Called through
344 htab_traverse. */
345
346 static int
347 discard_useless_locs (void **x, void *info ATTRIBUTE_UNUSED)
348 {
349 cselib_val *v = (cselib_val *)*x;
350 struct elt_loc_list **p = &v->locs;
351 int had_locs = v->locs != 0;
352
353 while (*p)
354 {
355 if (references_value_p ((*p)->loc, 1))
356 unchain_one_elt_loc_list (p);
357 else
358 p = &(*p)->next;
359 }
360
361 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
362 {
363 n_useless_values++;
364 values_became_useless = 1;
365 }
366 return 1;
367 }
368
369 /* If X is a value with no locations, remove it from the hashtable. */
370
371 static int
372 discard_useless_values (void **x, void *info ATTRIBUTE_UNUSED)
373 {
374 cselib_val *v = (cselib_val *)*x;
375
376 if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
377 {
378 if (cselib_discard_hook)
379 cselib_discard_hook (v);
380
381 CSELIB_VAL_PTR (v->val_rtx) = NULL;
382 htab_clear_slot (cselib_hash_table, x);
383 unchain_one_value (v);
384 n_useless_values--;
385 }
386
387 return 1;
388 }
389
390 /* Clean out useless values (i.e. those which no longer have locations
391 associated with them) from the hash table. */
392
393 static void
394 remove_useless_values (void)
395 {
396 cselib_val **p, *v;
397 /* First pass: eliminate locations that reference the value. That in
398 turn can make more values useless. */
399 do
400 {
401 values_became_useless = 0;
402 htab_traverse (cselib_hash_table, discard_useless_locs, 0);
403 }
404 while (values_became_useless);
405
406 /* Second pass: actually remove the values. */
407
408 p = &first_containing_mem;
409 for (v = *p; v != &dummy_val; v = v->next_containing_mem)
410 if (v->locs)
411 {
412 *p = v;
413 p = &(*p)->next_containing_mem;
414 }
415 *p = &dummy_val;
416
417 htab_traverse (cselib_hash_table, discard_useless_values, 0);
418
419 gcc_assert (!n_useless_values);
420 }
421
422 /* Arrange for a value to not be removed from the hash table even if
423 it becomes useless. */
424
425 void
426 cselib_preserve_value (cselib_val *v)
427 {
428 PRESERVED_VALUE_P (v->val_rtx) = 1;
429 }
430
431 /* Test whether a value is preserved. */
432
433 bool
434 cselib_preserved_value_p (cselib_val *v)
435 {
436 return PRESERVED_VALUE_P (v->val_rtx);
437 }
438
439 /* Mark preserved values as preserved for the long term. */
440
441 static int
442 cselib_preserve_definitely (void **slot, void *info ATTRIBUTE_UNUSED)
443 {
444 cselib_val *v = (cselib_val *)*slot;
445
446 if (PRESERVED_VALUE_P (v->val_rtx)
447 && !LONG_TERM_PRESERVED_VALUE_P (v->val_rtx))
448 LONG_TERM_PRESERVED_VALUE_P (v->val_rtx) = true;
449
450 return 1;
451 }
452
453 /* Clear the preserve marks for values not preserved for the long
454 term. */
455
456 static int
457 cselib_clear_preserve (void **slot, void *info ATTRIBUTE_UNUSED)
458 {
459 cselib_val *v = (cselib_val *)*slot;
460
461 if (PRESERVED_VALUE_P (v->val_rtx)
462 && !LONG_TERM_PRESERVED_VALUE_P (v->val_rtx))
463 {
464 PRESERVED_VALUE_P (v->val_rtx) = false;
465 if (!v->locs)
466 n_useless_values++;
467 }
468
469 return 1;
470 }
471
472 /* Clean all non-constant expressions in the hash table, but retain
473 their values. */
474
475 void
476 cselib_preserve_only_values (bool retain)
477 {
478 int i;
479
480 htab_traverse (cselib_hash_table,
481 retain ? cselib_preserve_definitely : cselib_clear_preserve,
482 NULL);
483
484 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
485 cselib_invalidate_regno (i, reg_raw_mode[i]);
486
487 cselib_invalidate_mem (callmem);
488
489 remove_useless_values ();
490
491 gcc_assert (first_containing_mem == &dummy_val);
492 }
493
494 /* Return the mode in which a register was last set. If X is not a
495 register, return its mode. If the mode in which the register was
496 set is not known, or the value was already clobbered, return
497 VOIDmode. */
498
499 enum machine_mode
500 cselib_reg_set_mode (const_rtx x)
501 {
502 if (!REG_P (x))
503 return GET_MODE (x);
504
505 if (REG_VALUES (REGNO (x)) == NULL
506 || REG_VALUES (REGNO (x))->elt == NULL)
507 return VOIDmode;
508
509 return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
510 }
511
512 /* Return nonzero if we can prove that X and Y contain the same value, taking
513 our gathered information into account. */
514
515 int
516 rtx_equal_for_cselib_p (rtx x, rtx y)
517 {
518 enum rtx_code code;
519 const char *fmt;
520 int i;
521
522 if (REG_P (x) || MEM_P (x))
523 {
524 cselib_val *e = cselib_lookup (x, GET_MODE (x), 0);
525
526 if (e)
527 x = e->val_rtx;
528 }
529
530 if (REG_P (y) || MEM_P (y))
531 {
532 cselib_val *e = cselib_lookup (y, GET_MODE (y), 0);
533
534 if (e)
535 y = e->val_rtx;
536 }
537
538 if (x == y)
539 return 1;
540
541 if (GET_CODE (x) == VALUE && GET_CODE (y) == VALUE)
542 return CSELIB_VAL_PTR (x) == CSELIB_VAL_PTR (y);
543
544 if (GET_CODE (x) == VALUE)
545 {
546 cselib_val *e = CSELIB_VAL_PTR (x);
547 struct elt_loc_list *l;
548
549 for (l = e->locs; l; l = l->next)
550 {
551 rtx t = l->loc;
552
553 /* Avoid infinite recursion. */
554 if (REG_P (t) || MEM_P (t))
555 continue;
556 else if (rtx_equal_for_cselib_p (t, y))
557 return 1;
558 }
559
560 return 0;
561 }
562
563 if (GET_CODE (y) == VALUE)
564 {
565 cselib_val *e = CSELIB_VAL_PTR (y);
566 struct elt_loc_list *l;
567
568 for (l = e->locs; l; l = l->next)
569 {
570 rtx t = l->loc;
571
572 if (REG_P (t) || MEM_P (t))
573 continue;
574 else if (rtx_equal_for_cselib_p (x, t))
575 return 1;
576 }
577
578 return 0;
579 }
580
581 if (GET_CODE (x) != GET_CODE (y) || GET_MODE (x) != GET_MODE (y))
582 return 0;
583
584 /* These won't be handled correctly by the code below. */
585 switch (GET_CODE (x))
586 {
587 case CONST_DOUBLE:
588 case CONST_FIXED:
589 case DEBUG_EXPR:
590 return 0;
591
592 case LABEL_REF:
593 return XEXP (x, 0) == XEXP (y, 0);
594
595 default:
596 break;
597 }
598
599 code = GET_CODE (x);
600 fmt = GET_RTX_FORMAT (code);
601
602 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
603 {
604 int j;
605
606 switch (fmt[i])
607 {
608 case 'w':
609 if (XWINT (x, i) != XWINT (y, i))
610 return 0;
611 break;
612
613 case 'n':
614 case 'i':
615 if (XINT (x, i) != XINT (y, i))
616 return 0;
617 break;
618
619 case 'V':
620 case 'E':
621 /* Two vectors must have the same length. */
622 if (XVECLEN (x, i) != XVECLEN (y, i))
623 return 0;
624
625 /* And the corresponding elements must match. */
626 for (j = 0; j < XVECLEN (x, i); j++)
627 if (! rtx_equal_for_cselib_p (XVECEXP (x, i, j),
628 XVECEXP (y, i, j)))
629 return 0;
630 break;
631
632 case 'e':
633 if (i == 1
634 && targetm.commutative_p (x, UNKNOWN)
635 && rtx_equal_for_cselib_p (XEXP (x, 1), XEXP (y, 0))
636 && rtx_equal_for_cselib_p (XEXP (x, 0), XEXP (y, 1)))
637 return 1;
638 if (! rtx_equal_for_cselib_p (XEXP (x, i), XEXP (y, i)))
639 return 0;
640 break;
641
642 case 'S':
643 case 's':
644 if (strcmp (XSTR (x, i), XSTR (y, i)))
645 return 0;
646 break;
647
648 case 'u':
649 /* These are just backpointers, so they don't matter. */
650 break;
651
652 case '0':
653 case 't':
654 break;
655
656 /* It is believed that rtx's at this level will never
657 contain anything but integers and other rtx's,
658 except for within LABEL_REFs and SYMBOL_REFs. */
659 default:
660 gcc_unreachable ();
661 }
662 }
663 return 1;
664 }
665
666 /* We need to pass down the mode of constants through the hash table
667 functions. For that purpose, wrap them in a CONST of the appropriate
668 mode. */
669 static rtx
670 wrap_constant (enum machine_mode mode, rtx x)
671 {
672 if (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
673 && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
674 return x;
675 gcc_assert (mode != VOIDmode);
676 return gen_rtx_CONST (mode, x);
677 }
678
679 /* Hash an rtx. Return 0 if we couldn't hash the rtx.
680 For registers and memory locations, we look up their cselib_val structure
681 and return its VALUE element.
682 Possible reasons for returning 0 are: the object is volatile, or we couldn't
683 find a register or memory location in the table and CREATE is zero. If
684 CREATE is nonzero, table elts are created for regs and mem.
685 N.B. this hash function returns the same hash value for RTXes that
686 differ only in the order of operands, thus it is suitable for comparisons
687 that take commutativity into account.
688 If we wanted to also support associative rules, we'd have to use a different
689 strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) .
690 We used to have a MODE argument for hashing for CONST_INTs, but that
691 didn't make sense, since it caused spurious hash differences between
692 (set (reg:SI 1) (const_int))
693 (plus:SI (reg:SI 2) (reg:SI 1))
694 and
695 (plus:SI (reg:SI 2) (const_int))
696 If the mode is important in any context, it must be checked specifically
697 in a comparison anyway, since relying on hash differences is unsafe. */
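
/* For example, since operand hashes are merely summed below,

     (plus:SI (reg:SI 1) (reg:SI 2))
     (plus:SI (reg:SI 2) (reg:SI 1))

   hash identically; entry_and_rtx_equal_p then relies on
   rtx_equal_for_cselib_p, which tries both operand orders for
   commutative codes, so both forms can share one table entry.  */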
698
699 static unsigned int
700 cselib_hash_rtx (rtx x, int create)
701 {
702 cselib_val *e;
703 int i, j;
704 enum rtx_code code;
705 const char *fmt;
706 unsigned int hash = 0;
707
708 code = GET_CODE (x);
709 hash += (unsigned) code + (unsigned) GET_MODE (x);
710
711 switch (code)
712 {
713 case MEM:
714 case REG:
715 e = cselib_lookup (x, GET_MODE (x), create);
716 if (! e)
717 return 0;
718
719 return e->hash;
720
721 case DEBUG_EXPR:
722 hash += ((unsigned) DEBUG_EXPR << 7)
723 + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
724 return hash ? hash : (unsigned int) DEBUG_EXPR;
725
726 case CONST_INT:
727 hash += ((unsigned) CONST_INT << 7) + INTVAL (x);
728 return hash ? hash : (unsigned int) CONST_INT;
729
730 case CONST_DOUBLE:
731 /* This is like the general case, except that it only counts
732 the integers representing the constant. */
733 hash += (unsigned) code + (unsigned) GET_MODE (x);
734 if (GET_MODE (x) != VOIDmode)
735 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
736 else
737 hash += ((unsigned) CONST_DOUBLE_LOW (x)
738 + (unsigned) CONST_DOUBLE_HIGH (x));
739 return hash ? hash : (unsigned int) CONST_DOUBLE;
740
741 case CONST_FIXED:
742 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
743 hash += fixed_hash (CONST_FIXED_VALUE (x));
744 return hash ? hash : (unsigned int) CONST_FIXED;
745
746 case CONST_VECTOR:
747 {
748 int units;
749 rtx elt;
750
751 units = CONST_VECTOR_NUNITS (x);
752
753 for (i = 0; i < units; ++i)
754 {
755 elt = CONST_VECTOR_ELT (x, i);
756 hash += cselib_hash_rtx (elt, 0);
757 }
758
759 return hash;
760 }
761
762 /* Assume there is only one rtx object for any given label. */
763 case LABEL_REF:
764 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
765 differences and differences between each stage's debugging dumps. */
766 hash += (((unsigned int) LABEL_REF << 7)
767 + CODE_LABEL_NUMBER (XEXP (x, 0)));
768 return hash ? hash : (unsigned int) LABEL_REF;
769
770 case SYMBOL_REF:
771 {
772 /* Don't hash on the symbol's address to avoid bootstrap differences.
773 Different hash values may cause expressions to be recorded in
774 different orders and thus different registers to be used in the
775 final assembler. This also avoids differences in the dump files
776 between various stages. */
777 unsigned int h = 0;
778 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
779
780 while (*p)
781 h += (h << 7) + *p++; /* ??? revisit */
782
783 hash += ((unsigned int) SYMBOL_REF << 7) + h;
784 return hash ? hash : (unsigned int) SYMBOL_REF;
785 }
786
787 case PRE_DEC:
788 case PRE_INC:
789 case POST_DEC:
790 case POST_INC:
791 case POST_MODIFY:
792 case PRE_MODIFY:
793 case PC:
794 case CC0:
795 case CALL:
796 case UNSPEC_VOLATILE:
797 return 0;
798
799 case ASM_OPERANDS:
800 if (MEM_VOLATILE_P (x))
801 return 0;
802
803 break;
804
805 default:
806 break;
807 }
808
809 i = GET_RTX_LENGTH (code) - 1;
810 fmt = GET_RTX_FORMAT (code);
811 for (; i >= 0; i--)
812 {
813 switch (fmt[i])
814 {
815 case 'e':
816 {
817 rtx tem = XEXP (x, i);
818 unsigned int tem_hash = cselib_hash_rtx (tem, create);
819
820 if (tem_hash == 0)
821 return 0;
822
823 hash += tem_hash;
824 }
825 break;
826 case 'E':
827 for (j = 0; j < XVECLEN (x, i); j++)
828 {
829 unsigned int tem_hash
830 = cselib_hash_rtx (XVECEXP (x, i, j), create);
831
832 if (tem_hash == 0)
833 return 0;
834
835 hash += tem_hash;
836 }
837 break;
838
839 case 's':
840 {
841 const unsigned char *p = (const unsigned char *) XSTR (x, i);
842
843 if (p)
844 while (*p)
845 hash += *p++;
846 break;
847 }
848
849 case 'i':
850 hash += XINT (x, i);
851 break;
852
853 case '0':
854 case 't':
855 /* unused */
856 break;
857
858 default:
859 gcc_unreachable ();
860 }
861 }
862
863 return hash ? hash : 1 + (unsigned int) GET_CODE (x);
864 }
865
866 /* Create a new value structure for VALUE and initialize it. The mode of the
867 value is MODE. */
868
869 static inline cselib_val *
870 new_cselib_val (unsigned int hash, enum machine_mode mode, rtx x)
871 {
872 cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool);
873
874 gcc_assert (hash);
875 gcc_assert (next_uid);
876
877 e->hash = hash;
878 e->uid = next_uid++;
879 /* We use an alloc pool to allocate this RTL construct because it
880 accounts for about 8% of the overall memory usage. We know
881 precisely when we can have VALUE RTXen (when cselib is active)
882 so we don't need to put them in garbage collected memory.
883 ??? Why should a VALUE be an RTX in the first place? */
884 e->val_rtx = (rtx) pool_alloc (value_pool);
885 memset (e->val_rtx, 0, RTX_HDR_SIZE);
886 PUT_CODE (e->val_rtx, VALUE);
887 PUT_MODE (e->val_rtx, mode);
888 CSELIB_VAL_PTR (e->val_rtx) = e;
889 e->addr_list = 0;
890 e->locs = 0;
891 e->next_containing_mem = 0;
892
893 if (dump_file && (dump_flags & TDF_DETAILS))
894 {
895 fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
896 if (flag_dump_noaddr || flag_dump_unnumbered)
897 fputs ("# ", dump_file);
898 else
899 fprintf (dump_file, "%p ", (void*)e);
900 print_rtl_single (dump_file, x);
901 fputc ('\n', dump_file);
902 }
903
904 return e;
905 }
906
907 /* ADDR_ELT is a value that is used as an address.  MEM_ELT is the value that
908 contains the data at this address. X is a MEM that represents the
909 value. Update the two value structures to represent this situation. */
910
911 static void
912 add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
913 {
914 struct elt_loc_list *l;
915
916 /* Avoid duplicates. */
917 for (l = mem_elt->locs; l; l = l->next)
918 if (MEM_P (l->loc)
919 && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
920 return;
921
922 addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
923 mem_elt->locs
924 = new_elt_loc_list (mem_elt->locs,
925 replace_equiv_address_nv (x, addr_elt->val_rtx));
926 if (mem_elt->next_containing_mem == NULL)
927 {
928 mem_elt->next_containing_mem = first_containing_mem;
929 first_containing_mem = mem_elt;
930 }
931 }
932
933 /* Subroutine of cselib_lookup. Return a value for X, which is a MEM rtx.
934 If CREATE, make a new one if we haven't seen it before. */
935
936 static cselib_val *
937 cselib_lookup_mem (rtx x, int create)
938 {
939 enum machine_mode mode = GET_MODE (x);
940 void **slot;
941 cselib_val *addr;
942 cselib_val *mem_elt;
943 struct elt_list *l;
944
945 if (MEM_VOLATILE_P (x) || mode == BLKmode
946 || !cselib_record_memory
947 || (FLOAT_MODE_P (mode) && flag_float_store))
948 return 0;
949
950 /* Look up the value for the address. */
951 addr = cselib_lookup (XEXP (x, 0), mode, create);
952 if (! addr)
953 return 0;
954
955 /* Find a value that describes a value of our mode at that address. */
956 for (l = addr->addr_list; l; l = l->next)
957 if (GET_MODE (l->elt->val_rtx) == mode)
958 return l->elt;
959
960 if (! create)
961 return 0;
962
963 mem_elt = new_cselib_val (next_uid, mode, x);
964 add_mem_for_addr (addr, mem_elt, x);
965 slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x),
966 mem_elt->hash, INSERT);
967 *slot = mem_elt;
968 return mem_elt;
969 }
970
971 /* Search through the possible substitutions in P.  We prefer a non-reg
972 substitution because this allows us to expand the tree further.  If
973 we find just a reg, take the lowest regno.  There may be several
974 non-reg results; we just take the first one because they will all
975 expand to the same place.  */
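
/* For instance, given a loc list containing

     (reg:SI 5), (plus:SI (value ...) (const_int 8)), (reg:SI 3)

   the PLUS is the first expansion attempted and is returned if it
   succeeds; only if every non-reg location fails is the lowest
   regno, (reg:SI 3), expanded instead.  (Illustrative only; real
   lists are built by new_elt_loc_list.)  */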
976
977 static rtx
978 expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
979 int max_depth)
980 {
981 rtx reg_result = NULL;
982 unsigned int regno = UINT_MAX;
983 struct elt_loc_list *p_in = p;
984
985 for (; p; p = p->next)
986 {
987 /* Avoid infinite recursion trying to expand a reg into
988 the same reg.  */
989 if ((REG_P (p->loc))
990 && (REGNO (p->loc) < regno)
991 && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
992 {
993 reg_result = p->loc;
994 regno = REGNO (p->loc);
995 }
996 /* Avoid infinite recursion and do not try to expand the
997 value. */
998 else if (GET_CODE (p->loc) == VALUE
999 && CSELIB_VAL_PTR (p->loc)->locs == p_in)
1000 continue;
1001 else if (!REG_P (p->loc))
1002 {
1003 rtx result, note;
1004 if (dump_file && (dump_flags & TDF_DETAILS))
1005 {
1006 print_inline_rtx (dump_file, p->loc, 0);
1007 fprintf (dump_file, "\n");
1008 }
1009 if (GET_CODE (p->loc) == LO_SUM
1010 && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
1011 && p->setting_insn
1012 && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
1013 && XEXP (note, 0) == XEXP (p->loc, 1))
1014 return XEXP (p->loc, 1);
1015 result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
1016 if (result)
1017 return result;
1018 }
1019
1020 }
1021
1022 if (regno != UINT_MAX)
1023 {
1024 rtx result;
1025 if (dump_file && (dump_flags & TDF_DETAILS))
1026 fprintf (dump_file, "r%d\n", regno);
1027
1028 result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
1029 if (result)
1030 return result;
1031 }
1032
1033 if (dump_file && (dump_flags & TDF_DETAILS))
1034 {
1035 if (reg_result)
1036 {
1037 print_inline_rtx (dump_file, reg_result, 0);
1038 fprintf (dump_file, "\n");
1039 }
1040 else
1041 fprintf (dump_file, "NULL\n");
1042 }
1043 return reg_result;
1044 }
1045
1046
1047 /* Forward substitute and expand an expression out to its roots.
1048 This is the opposite of common subexpression elimination.  Because
1049 local value numbering is such a weak optimization, the expanded
1050 expression is pretty much unique (not from a pointer-equality point
1051 of view but from a tree-shape point of view).
1052
1053 This function returns NULL if the expansion fails. The expansion
1054 will fail if there is no value number for one of the operands or if
1055 one of the operands has been overwritten between the current insn
1056 and the beginning of the basic block. For instance x has no
1057 expansion in:
1058
1059 r1 <- r1 + 3
1060 x <- r1 + 8
1061
1062 REGS_ACTIVE is a scratch bitmap that should be clear when passed in;
1063 it is clear again on return.  */
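
/* A minimal usage sketch (hypothetical caller, in the style of the
   dse.c client; the depth limit 5 is arbitrary):

     bitmap regs_active = BITMAP_ALLOC (NULL);
     rtx exp = cselib_expand_value_rtx (x, regs_active, 5);
     BITMAP_FREE (regs_active);
     if (exp)
       ... EXP is X rewritten, where possible, in terms of expressions
           that have been valid since the start of the basic block ...
*/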
1064
1065 rtx
1066 cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
1067 {
1068 struct expand_value_data evd;
1069
1070 evd.regs_active = regs_active;
1071 evd.callback = NULL;
1072 evd.callback_arg = NULL;
1073 evd.dummy = false;
1074
1075 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1076 }
1077
1078 /* Same as cselib_expand_value_rtx, but using a callback to try to
1079 resolve some expressions. The CB function should return ORIG if it
1080 can't or does not want to deal with a certain RTX. Any other
1081 return value, including NULL, will be used as the expansion for
1082 VALUE, without any further changes. */
1083
1084 rtx
1085 cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1086 cselib_expand_callback cb, void *data)
1087 {
1088 struct expand_value_data evd;
1089
1090 evd.regs_active = regs_active;
1091 evd.callback = cb;
1092 evd.callback_arg = data;
1093 evd.dummy = false;
1094
1095 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1096 }
1097
1098 /* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
1099 or simplified. Useful to find out whether cselib_expand_value_rtx_cb
1100 would return NULL or non-NULL, without allocating new rtx. */
1101
1102 bool
1103 cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1104 cselib_expand_callback cb, void *data)
1105 {
1106 struct expand_value_data evd;
1107
1108 evd.regs_active = regs_active;
1109 evd.callback = cb;
1110 evd.callback_arg = data;
1111 evd.dummy = true;
1112
1113 return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
1114 }
1115
1116 /* Internal implementation of cselib_expand_value_rtx and
1117 cselib_expand_value_rtx_cb. */
1118
1119 static rtx
1120 cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
1121 int max_depth)
1122 {
1123 rtx copy, scopy;
1124 int i, j;
1125 RTX_CODE code;
1126 const char *format_ptr;
1127 enum machine_mode mode;
1128
1129 code = GET_CODE (orig);
1130
1131 /* In the context of dse, if we end up expanding into a huge tree, we
1132 will not have a useful address, so we might as well just give up
1133 quickly.  */
1134 if (max_depth <= 0)
1135 return NULL;
1136
1137 switch (code)
1138 {
1139 case REG:
1140 {
1141 struct elt_list *l = REG_VALUES (REGNO (orig));
1142
1143 if (l && l->elt == NULL)
1144 l = l->next;
1145 for (; l; l = l->next)
1146 if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
1147 {
1148 rtx result;
1149 int regno = REGNO (orig);
1150
1151 /* The only thing that we are not willing to do (this
1152 is a requirement of dse; if other potential uses
1153 need this function, we should add a parm to control
1154 it) is that we will not substitute the
1155 STACK_POINTER_REGNUM, FRAME_POINTER or the
1156 HARD_FRAME_POINTER.
1157 
1158 Such expansions confuse the code that notices that
1159 stores into the frame go dead at the end of the
1160 function and that the frame is not affected by calls
1161 to subroutines.  If you allow the
1162 STACK_POINTER_REGNUM substitution, then dse will
1163 think that parameter pushing also goes dead, which is
1164 wrong.  If you allow the FRAME_POINTER or the
1165 HARD_FRAME_POINTER, then you lose the opportunity to
1166 make the frame assumptions.  */
1167 if (regno == STACK_POINTER_REGNUM
1168 || regno == FRAME_POINTER_REGNUM
1169 || regno == HARD_FRAME_POINTER_REGNUM)
1170 return orig;
1171
1172 bitmap_set_bit (evd->regs_active, regno);
1173
1174 if (dump_file && (dump_flags & TDF_DETAILS))
1175 fprintf (dump_file, "expanding: r%d into: ", regno);
1176
1177 result = expand_loc (l->elt->locs, evd, max_depth);
1178 bitmap_clear_bit (evd->regs_active, regno);
1179
1180 if (result)
1181 return result;
1182 else
1183 return orig;
1184 }
1185 }
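
      /* FALLTHROUGH - a reg with no known value in the right mode
         expands to itself via the return below.  */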
1186
1187 case CONST_INT:
1188 case CONST_DOUBLE:
1189 case CONST_VECTOR:
1190 case SYMBOL_REF:
1191 case CODE_LABEL:
1192 case PC:
1193 case CC0:
1194 case SCRATCH:
1195 /* SCRATCHes must be shared because they represent distinct values.  */
1196 return orig;
1197 case CLOBBER:
1198 if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
1199 return orig;
1200 break;
1201
1202 case CONST:
1203 if (shared_const_p (orig))
1204 return orig;
1205 break;
1206
1207 case SUBREG:
1208 {
1209 rtx subreg;
1210
1211 if (evd->callback)
1212 {
1213 subreg = evd->callback (orig, evd->regs_active, max_depth,
1214 evd->callback_arg);
1215 if (subreg != orig)
1216 return subreg;
1217 }
1218
1219 subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
1220 max_depth - 1);
1221 if (!subreg)
1222 return NULL;
1223 scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
1224 GET_MODE (SUBREG_REG (orig)),
1225 SUBREG_BYTE (orig));
1226 if (scopy == NULL
1227 || (GET_CODE (scopy) == SUBREG
1228 && !REG_P (SUBREG_REG (scopy))
1229 && !MEM_P (SUBREG_REG (scopy))))
1230 return NULL;
1231
1232 return scopy;
1233 }
1234
1235 case VALUE:
1236 {
1237 rtx result;
1238
1239 if (dump_file && (dump_flags & TDF_DETAILS))
1240 {
1241 fputs ("\nexpanding ", dump_file);
1242 print_rtl_single (dump_file, orig);
1243 fputs (" into...", dump_file);
1244 }
1245
1246 if (evd->callback)
1247 {
1248 result = evd->callback (orig, evd->regs_active, max_depth,
1249 evd->callback_arg);
1250
1251 if (result != orig)
1252 return result;
1253 }
1254
1255 result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
1256 return result;
1257 }
1258
1259 case DEBUG_EXPR:
1260 if (evd->callback)
1261 return evd->callback (orig, evd->regs_active, max_depth,
1262 evd->callback_arg);
1263 return orig;
1264
1265 default:
1266 break;
1267 }
1268
1269 /* Copy the various flags, fields, and other information. We assume
1270 that all fields need copying, and then clear the fields that should
1271 not be copied. That is the sensible default behavior, and forces
1272 us to explicitly document why we are *not* copying a flag. */
1273 if (evd->dummy)
1274 copy = NULL;
1275 else
1276 copy = shallow_copy_rtx (orig);
1277
1278 format_ptr = GET_RTX_FORMAT (code);
1279
1280 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1281 switch (*format_ptr++)
1282 {
1283 case 'e':
1284 if (XEXP (orig, i) != NULL)
1285 {
1286 rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
1287 max_depth - 1);
1288 if (!result)
1289 return NULL;
1290 if (copy)
1291 XEXP (copy, i) = result;
1292 }
1293 break;
1294
1295 case 'E':
1296 case 'V':
1297 if (XVEC (orig, i) != NULL)
1298 {
1299 if (copy)
1300 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
1301 for (j = 0; j < XVECLEN (orig, i); j++)
1302 {
1303 rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
1304 evd, max_depth - 1);
1305 if (!result)
1306 return NULL;
1307 if (copy)
1308 XVECEXP (copy, i, j) = result;
1309 }
1310 }
1311 break;
1312
1313 case 't':
1314 case 'w':
1315 case 'i':
1316 case 's':
1317 case 'S':
1318 case 'T':
1319 case 'u':
1320 case 'B':
1321 case '0':
1322 /* These are left unchanged. */
1323 break;
1324
1325 default:
1326 gcc_unreachable ();
1327 }
1328
1329 if (evd->dummy)
1330 return orig;
1331
1332 mode = GET_MODE (copy);
1333 /* If an operand has been simplified into CONST_INT, which doesn't
1334 have a mode and the mode isn't derivable from whole rtx's mode,
1335 try simplify_*_operation first with mode from original's operand
1336 and as a fallback wrap CONST_INT into gen_rtx_CONST. */
1337 scopy = copy;
1338 switch (GET_RTX_CLASS (code))
1339 {
1340 case RTX_UNARY:
1341 if (CONST_INT_P (XEXP (copy, 0))
1342 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1343 {
1344 scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
1345 GET_MODE (XEXP (orig, 0)));
1346 if (scopy)
1347 return scopy;
1348 }
1349 break;
1350 case RTX_COMM_ARITH:
1351 case RTX_BIN_ARITH:
1352 /* These expressions can derive operand modes from the whole rtx's mode. */
1353 break;
1354 case RTX_TERNARY:
1355 case RTX_BITFIELD_OPS:
1356 if (CONST_INT_P (XEXP (copy, 0))
1357 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1358 {
1359 scopy = simplify_ternary_operation (code, mode,
1360 GET_MODE (XEXP (orig, 0)),
1361 XEXP (copy, 0), XEXP (copy, 1),
1362 XEXP (copy, 2));
1363 if (scopy)
1364 return scopy;
1365 }
1366 break;
1367 case RTX_COMPARE:
1368 case RTX_COMM_COMPARE:
1369 if (CONST_INT_P (XEXP (copy, 0))
1370 && GET_MODE (XEXP (copy, 1)) == VOIDmode
1371 && (GET_MODE (XEXP (orig, 0)) != VOIDmode
1372 || GET_MODE (XEXP (orig, 1)) != VOIDmode))
1373 {
1374 scopy = simplify_relational_operation (code, mode,
1375 (GET_MODE (XEXP (orig, 0))
1376 != VOIDmode)
1377 ? GET_MODE (XEXP (orig, 0))
1378 : GET_MODE (XEXP (orig, 1)),
1379 XEXP (copy, 0),
1380 XEXP (copy, 1));
1381 if (scopy)
1382 return scopy;
1383 }
1384 break;
1385 default:
1386 break;
1387 }
1388 scopy = simplify_rtx (copy);
1389 if (scopy)
1390 return scopy;
1391 return copy;
1392 }
1393
1394 /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
1395 with VALUE expressions. This way, it becomes independent of changes
1396 to registers and memory.
1397 X isn't actually modified; if modifications are needed, new rtl is
1398 allocated. However, the return value can share rtl with X. */
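
/* For example, with values written as value:N (the uids are
   arbitrary), the SET_SRC of

     (set (reg:SI 2) (plus:SI (reg:SI 1) (mem:SI (reg:SI 3))))

   substitutes into something like

     (plus:SI value:12 value:15)

   which stays meaningful even after reg 1 or the memory slot is
   overwritten.  */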
1399
1400 rtx
1401 cselib_subst_to_values (rtx x)
1402 {
1403 enum rtx_code code = GET_CODE (x);
1404 const char *fmt = GET_RTX_FORMAT (code);
1405 cselib_val *e;
1406 struct elt_list *l;
1407 rtx copy = x;
1408 int i;
1409
1410 switch (code)
1411 {
1412 case REG:
1413 l = REG_VALUES (REGNO (x));
1414 if (l && l->elt == NULL)
1415 l = l->next;
1416 for (; l; l = l->next)
1417 if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
1418 return l->elt->val_rtx;
1419
1420 gcc_unreachable ();
1421
1422 case MEM:
1423 e = cselib_lookup_mem (x, 0);
1424 if (! e)
1425 {
1426 /* This happens for autoincrements. Assign a value that doesn't
1427 match any other. */
1428 e = new_cselib_val (next_uid, GET_MODE (x), x);
1429 }
1430 return e->val_rtx;
1431
1432 case CONST_DOUBLE:
1433 case CONST_VECTOR:
1434 case CONST_INT:
1435 case CONST_FIXED:
1436 return x;
1437
1438 case POST_INC:
1439 case PRE_INC:
1440 case POST_DEC:
1441 case PRE_DEC:
1442 case POST_MODIFY:
1443 case PRE_MODIFY:
1444 e = new_cselib_val (next_uid, GET_MODE (x), x);
1445 return e->val_rtx;
1446
1447 default:
1448 break;
1449 }
1450
1451 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1452 {
1453 if (fmt[i] == 'e')
1454 {
1455 rtx t = cselib_subst_to_values (XEXP (x, i));
1456
1457 if (t != XEXP (x, i))
1458 {
1459 if (x == copy)
1460 copy = shallow_copy_rtx (x);
1461 XEXP (copy, i) = t;
1462 }
1463 }
1464 else if (fmt[i] == 'E')
1465 {
1466 int j;
1467
1468 for (j = 0; j < XVECLEN (x, i); j++)
1469 {
1470 rtx t = cselib_subst_to_values (XVECEXP (x, i, j));
1471
1472 if (t != XVECEXP (x, i, j))
1473 {
1474 if (XVEC (x, i) == XVEC (copy, i))
1475 {
1476 if (x == copy)
1477 copy = shallow_copy_rtx (x);
1478 XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
1479 }
1480 XVECEXP (copy, i, j) = t;
1481 }
1482 }
1483 }
1484 }
1485
1486 return copy;
1487 }
1488
1489 /* Log a lookup of X to the cselib table along with the result RET. */
1490
1491 static cselib_val *
1492 cselib_log_lookup (rtx x, cselib_val *ret)
1493 {
1494 if (dump_file && (dump_flags & TDF_DETAILS))
1495 {
1496 fputs ("cselib lookup ", dump_file);
1497 print_inline_rtx (dump_file, x, 2);
1498 fprintf (dump_file, " => %u:%u\n",
1499 ret ? ret->uid : 0,
1500 ret ? ret->hash : 0);
1501 }
1502
1503 return ret;
1504 }
1505
1506 /* Look up the rtl expression X in our tables and return the value it has.
1507 If CREATE is zero, we return NULL if we don't know the value. Otherwise,
1508 we create a new one if possible, using mode MODE if X doesn't have a mode
1509 (i.e. because it's a constant). */
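
/* Usage sketch (hypothetical caller): a pure query passes CREATE == 0
   and must check for NULL,

     cselib_val *v = cselib_lookup (x, GET_MODE (x), 0);

   whereas cselib_record_sets below passes CREATE == 1 so that a fresh
   value is created for a source seen for the first time.  */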
1510
1511 cselib_val *
1512 cselib_lookup (rtx x, enum machine_mode mode, int create)
1513 {
1514 void **slot;
1515 cselib_val *e;
1516 unsigned int hashval;
1517
1518 if (GET_MODE (x) != VOIDmode)
1519 mode = GET_MODE (x);
1520
1521 if (GET_CODE (x) == VALUE)
1522 return CSELIB_VAL_PTR (x);
1523
1524 if (REG_P (x))
1525 {
1526 struct elt_list *l;
1527 unsigned int i = REGNO (x);
1528
1529 l = REG_VALUES (i);
1530 if (l && l->elt == NULL)
1531 l = l->next;
1532 for (; l; l = l->next)
1533 if (mode == GET_MODE (l->elt->val_rtx))
1534 return cselib_log_lookup (x, l->elt);
1535
1536 if (! create)
1537 return cselib_log_lookup (x, 0);
1538
1539 if (i < FIRST_PSEUDO_REGISTER)
1540 {
1541 unsigned int n = hard_regno_nregs[i][mode];
1542
1543 if (n > max_value_regs)
1544 max_value_regs = n;
1545 }
1546
1547 e = new_cselib_val (next_uid, GET_MODE (x), x);
1548 e->locs = new_elt_loc_list (e->locs, x);
1549 if (REG_VALUES (i) == 0)
1550 {
1551 /* Maintain the invariant that the first entry of
1552 REG_VALUES, if present, must be the value used to set the
1553 register, or NULL. */
1554 used_regs[n_used_regs++] = i;
1555 REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
1556 }
1557 REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
1558 slot = htab_find_slot_with_hash (cselib_hash_table, x, e->hash, INSERT);
1559 *slot = e;
1560 return cselib_log_lookup (x, e);
1561 }
1562
1563 if (MEM_P (x))
1564 return cselib_log_lookup (x, cselib_lookup_mem (x, create));
1565
1566 hashval = cselib_hash_rtx (x, create);
1567 /* Can't even create if hashing is not possible. */
1568 if (! hashval)
1569 return cselib_log_lookup (x, 0);
1570
1571 slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x),
1572 hashval, create ? INSERT : NO_INSERT);
1573 if (slot == 0)
1574 return cselib_log_lookup (x, 0);
1575
1576 e = (cselib_val *) *slot;
1577 if (e)
1578 return cselib_log_lookup (x, e);
1579
1580 e = new_cselib_val (hashval, mode, x);
1581
1582 /* We have to fill the slot before calling cselib_subst_to_values:
1583 the hash table is inconsistent until we do so, and
1584 cselib_subst_to_values will need to do lookups. */
1585 *slot = (void *) e;
1586 e->locs = new_elt_loc_list (e->locs, cselib_subst_to_values (x));
1587 return cselib_log_lookup (x, e);
1588 }
1589
1590 /* Invalidate any entries in reg_values that overlap REGNO. This is called
1591 if REGNO is changing. MODE is the mode of the assignment to REGNO, which
1592 is used to determine how many hard registers are being changed. If MODE
1593 is VOIDmode, then only REGNO is being changed; this is used when
1594 invalidating call clobbered registers across a call. */
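
/* E.g., on a target where DImode occupies two hard registers, a store
   to (reg:DI 2) must invalidate values recorded for regs 2 and 3, and
   also any value recorded earlier for (reg:DI 1), since that one
   extends into reg 2.  max_value_regs bounds how far below REGNO the
   scan must start.  */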
1595
1596 static void
1597 cselib_invalidate_regno (unsigned int regno, enum machine_mode mode)
1598 {
1599 unsigned int endregno;
1600 unsigned int i;
1601
1602 /* If we see pseudos after reload, something is _wrong_. */
1603 gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
1604 || reg_renumber[regno] < 0);
1605
1606 /* Determine the range of registers that must be invalidated. For
1607 pseudos, only REGNO is affected. For hard regs, we must take MODE
1608 into account, and we must also invalidate lower register numbers
1609 if they contain values that overlap REGNO. */
1610 if (regno < FIRST_PSEUDO_REGISTER)
1611 {
1612 gcc_assert (mode != VOIDmode);
1613
1614 if (regno < max_value_regs)
1615 i = 0;
1616 else
1617 i = regno - max_value_regs;
1618
1619 endregno = end_hard_regno (mode, regno);
1620 }
1621 else
1622 {
1623 i = regno;
1624 endregno = regno + 1;
1625 }
1626
1627 for (; i < endregno; i++)
1628 {
1629 struct elt_list **l = &REG_VALUES (i);
1630
1631 /* Go through all known values for this reg; if it overlaps the range
1632 we're invalidating, remove the value. */
1633 while (*l)
1634 {
1635 cselib_val *v = (*l)->elt;
1636 struct elt_loc_list **p;
1637 unsigned int this_last = i;
1638
1639 if (i < FIRST_PSEUDO_REGISTER && v != NULL)
1640 this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
1641
1642 if (this_last < regno || v == NULL)
1643 {
1644 l = &(*l)->next;
1645 continue;
1646 }
1647
1648 /* We have an overlap. */
1649 if (*l == REG_VALUES (i))
1650 {
1651 /* Maintain the invariant that the first entry of
1652 REG_VALUES, if present, must be the value used to set
1653 the register, or NULL. This is also nice because
1654 then we won't push the same regno onto user_regs
1655 multiple times. */
1656 (*l)->elt = NULL;
1657 l = &(*l)->next;
1658 }
1659 else
1660 unchain_one_elt_list (l);
1661
1662 /* Now, we clear the mapping from value to reg. It must exist, so
1663 this code will crash intentionally if it doesn't. */
1664 for (p = &v->locs; ; p = &(*p)->next)
1665 {
1666 rtx x = (*p)->loc;
1667
1668 if (REG_P (x) && REGNO (x) == i)
1669 {
1670 unchain_one_elt_loc_list (p);
1671 break;
1672 }
1673 }
1674 if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
1675 n_useless_values++;
1676 }
1677 }
1678 }
1679 \f
1680 /* Return 1 if X has a value that can vary even between two
1681 executions of the program. 0 means X can be compared reliably
1682 against certain constants or near-constants. */
1683
1684 static bool
1685 cselib_rtx_varies_p (const_rtx x ATTRIBUTE_UNUSED, bool from_alias ATTRIBUTE_UNUSED)
1686 {
1687 /* We actually don't need to verify very hard. This is because
1688 if X has actually changed, we invalidate the memory anyway,
1689 so assume that all common memory addresses are
1690 invariant. */
1691 return 0;
1692 }
1693
1694 /* Invalidate any locations in the table which are changed because of a
1695 store to MEM_RTX. If this is called because of a non-const call
1696 instruction, MEM_RTX is callmem, i.e. (mem:BLK (scratch)).  */
1697
1698 static void
1699 cselib_invalidate_mem (rtx mem_rtx)
1700 {
1701 cselib_val **vp, *v, *next;
1702 int num_mems = 0;
1703 rtx mem_addr;
1704
1705 mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
1706 mem_rtx = canon_rtx (mem_rtx);
1707
1708 vp = &first_containing_mem;
1709 for (v = *vp; v != &dummy_val; v = next)
1710 {
1711 bool has_mem = false;
1712 struct elt_loc_list **p = &v->locs;
1713 int had_locs = v->locs != 0;
1714
1715 while (*p)
1716 {
1717 rtx x = (*p)->loc;
1718 cselib_val *addr;
1719 struct elt_list **mem_chain;
1720
1721 /* MEMs may occur in locations only at the top level; below
1722 that every MEM or REG is substituted by its VALUE. */
1723 if (!MEM_P (x))
1724 {
1725 p = &(*p)->next;
1726 continue;
1727 }
1728 if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
1729 && ! canon_true_dependence (mem_rtx, GET_MODE (mem_rtx), mem_addr,
1730 x, NULL_RTX, cselib_rtx_varies_p))
1731 {
1732 has_mem = true;
1733 num_mems++;
1734 p = &(*p)->next;
1735 continue;
1736 }
1737
1738 /* This one overlaps. */
1739 /* We must have a mapping from this MEM's address to the
1740 value (E). Remove that, too. */
1741 addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0);
1742 mem_chain = &addr->addr_list;
1743 for (;;)
1744 {
1745 if ((*mem_chain)->elt == v)
1746 {
1747 unchain_one_elt_list (mem_chain);
1748 break;
1749 }
1750
1751 mem_chain = &(*mem_chain)->next;
1752 }
1753
1754 unchain_one_elt_loc_list (p);
1755 }
1756
1757 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
1758 n_useless_values++;
1759
1760 next = v->next_containing_mem;
1761 if (has_mem)
1762 {
1763 *vp = v;
1764 vp = &(*vp)->next_containing_mem;
1765 }
1766 else
1767 v->next_containing_mem = NULL;
1768 }
1769 *vp = &dummy_val;
1770 }
1771
1772 /* Invalidate DEST, which is being assigned to or clobbered. */
1773
1774 void
1775 cselib_invalidate_rtx (rtx dest)
1776 {
1777 while (GET_CODE (dest) == SUBREG
1778 || GET_CODE (dest) == ZERO_EXTRACT
1779 || GET_CODE (dest) == STRICT_LOW_PART)
1780 dest = XEXP (dest, 0);
1781
1782 if (REG_P (dest))
1783 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
1784 else if (MEM_P (dest))
1785 cselib_invalidate_mem (dest);
1786
1787 /* Some machines don't define AUTO_INC_DEC, but they still use push
1788 instructions. We need to catch that case here in order to
1789 invalidate the stack pointer correctly. Note that invalidating
1790 the stack pointer is different from invalidating DEST. */
1791 if (push_operand (dest, GET_MODE (dest)))
1792 cselib_invalidate_rtx (stack_pointer_rtx);
1793 }
1794
1795 /* A wrapper for cselib_invalidate_rtx to be called via note_stores. */
1796
1797 static void
1798 cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED,
1799 void *data ATTRIBUTE_UNUSED)
1800 {
1801 cselib_invalidate_rtx (dest);
1802 }
1803
1804 /* Record the result of a SET instruction. DEST is being set; the source
1805 contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT
1806 describes its address. */
1807
1808 static void
1809 cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
1810 {
1811 int dreg = REG_P (dest) ? (int) REGNO (dest) : -1;
1812
1813 if (src_elt == 0 || side_effects_p (dest))
1814 return;
1815
1816 if (dreg >= 0)
1817 {
1818 if (dreg < FIRST_PSEUDO_REGISTER)
1819 {
1820 unsigned int n = hard_regno_nregs[dreg][GET_MODE (dest)];
1821
1822 if (n > max_value_regs)
1823 max_value_regs = n;
1824 }
1825
1826 if (REG_VALUES (dreg) == 0)
1827 {
1828 used_regs[n_used_regs++] = dreg;
1829 REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
1830 }
1831 else
1832 {
1833 /* The register should have been invalidated. */
1834 gcc_assert (REG_VALUES (dreg)->elt == 0);
1835 REG_VALUES (dreg)->elt = src_elt;
1836 }
1837
1838 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
1839 n_useless_values--;
1840 src_elt->locs = new_elt_loc_list (src_elt->locs, dest);
1841 }
1842 else if (MEM_P (dest) && dest_addr_elt != 0
1843 && cselib_record_memory)
1844 {
1845 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
1846 n_useless_values--;
1847 add_mem_for_addr (dest_addr_elt, src_elt, dest);
1848 }
1849 }
1850
1851 /* There is no good way to determine how many elements there can be
1852 in a PARALLEL. Since it's fairly cheap, use a really large number. */
1853 #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
1854
1855 /* Record the effects of any sets in INSN. */
1856 static void
1857 cselib_record_sets (rtx insn)
1858 {
1859 int n_sets = 0;
1860 int i;
1861 struct cselib_set sets[MAX_SETS];
1862 rtx body = PATTERN (insn);
1863 rtx cond = 0;
1864
1866 if (GET_CODE (body) == COND_EXEC)
1867 {
1868 cond = COND_EXEC_TEST (body);
1869 body = COND_EXEC_CODE (body);
1870 }
1871
1872 /* Find all sets. */
1873 if (GET_CODE (body) == SET)
1874 {
1875 sets[0].src = SET_SRC (body);
1876 sets[0].dest = SET_DEST (body);
1877 n_sets = 1;
1878 }
1879 else if (GET_CODE (body) == PARALLEL)
1880 {
1881 /* Look through the PARALLEL and record the values being
1882 set, if possible. Also handle any CLOBBERs. */
1883 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
1884 {
1885 rtx x = XVECEXP (body, 0, i);
1886
1887 if (GET_CODE (x) == SET)
1888 {
1889 sets[n_sets].src = SET_SRC (x);
1890 sets[n_sets].dest = SET_DEST (x);
1891 n_sets++;
1892 }
1893 }
1894 }
1895
1896 if (n_sets == 1
1897 && MEM_P (sets[0].src)
1898 && !cselib_record_memory
1899 && MEM_READONLY_P (sets[0].src))
1900 {
1901 rtx note = find_reg_equal_equiv_note (insn);
1902
1903 if (note && CONSTANT_P (XEXP (note, 0)))
1904 sets[0].src = XEXP (note, 0);
1905 }
1906
1907 /* Look up the values that are read. Do this before invalidating the
1908 locations that are written. */
1909 for (i = 0; i < n_sets; i++)
1910 {
1911 rtx dest = sets[i].dest;
1912
1913 /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
1914 the low part after invalidating any knowledge about larger modes. */
1915 if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
1916 sets[i].dest = dest = XEXP (dest, 0);
1917
1918 /* We don't know how to record anything but REG or MEM. */
1919 if (REG_P (dest)
1920 || (MEM_P (dest) && cselib_record_memory))
1921 {
1922 rtx src = sets[i].src;
1923 if (cond)
1924 src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
1925 sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1);
1926 if (MEM_P (dest))
1927 {
1928 enum machine_mode address_mode
1929 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (dest));
1930
1931 sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
1932 address_mode, 1);
1933 }
1934 else
1935 sets[i].dest_addr_elt = 0;
1936 }
1937 }
1938
1939 if (cselib_record_sets_hook)
1940 cselib_record_sets_hook (insn, sets, n_sets);
1941
1942 /* Invalidate all locations written by this insn. Note that the elts we
1943 looked up in the previous loop aren't affected, just some of their
1944 locations may go away. */
1945 note_stores (body, cselib_invalidate_rtx_note_stores, NULL);
1946
1947 /* If this is an asm, look for duplicate sets. This can happen when the
1948 user uses the same value as an output multiple times. This is valid
1949 if the outputs are not actually used thereafter. Treat this case as
1950 if the value isn't actually set. We do this by smashing the destination
1951 to pc_rtx, so that we won't record the value later. */
1952 if (n_sets >= 2 && asm_noperands (body) >= 0)
1953 {
1954 for (i = 0; i < n_sets; i++)
1955 {
1956 rtx dest = sets[i].dest;
1957 if (REG_P (dest) || MEM_P (dest))
1958 {
1959 int j;
1960 for (j = i + 1; j < n_sets; j++)
1961 if (rtx_equal_p (dest, sets[j].dest))
1962 {
1963 sets[i].dest = pc_rtx;
1964 sets[j].dest = pc_rtx;
1965 }
1966 }
1967 }
1968 }
1969
1970 /* Now enter the equivalences in our tables. */
1971 for (i = 0; i < n_sets; i++)
1972 {
1973 rtx dest = sets[i].dest;
1974 if (REG_P (dest)
1975 || (MEM_P (dest) && cselib_record_memory))
1976 cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
1977 }
1978 }
1979
1980 /* Record the effects of INSN. */
1981
1982 void
1983 cselib_process_insn (rtx insn)
1984 {
1985 int i;
1986 rtx x;
1987
1988 cselib_current_insn = insn;
1989
1990 /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp. */
1991 if (LABEL_P (insn)
1992 || (CALL_P (insn)
1993 && find_reg_note (insn, REG_SETJMP, NULL))
1994 || (NONJUMP_INSN_P (insn)
1995 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
1996 && MEM_VOLATILE_P (PATTERN (insn))))
1997 {
1998 cselib_reset_table (next_uid);
1999 return;
2000 }
2001
2002 if (! INSN_P (insn))
2003 {
2004 cselib_current_insn = 0;
2005 return;
2006 }
2007
2008 /* If this is a call instruction, forget anything stored in a
2009 call clobbered register, or, if this is not a const call, in
2010 memory. */
2011 if (CALL_P (insn))
2012 {
2013 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2014 if (call_used_regs[i]
2015 || (REG_VALUES (i) && REG_VALUES (i)->elt
2016 && HARD_REGNO_CALL_PART_CLOBBERED (i,
2017 GET_MODE (REG_VALUES (i)->elt->val_rtx))))
2018 cselib_invalidate_regno (i, reg_raw_mode[i]);
2019
2020 /* Since it is not clear how cselib is going to be used, be
2021 conservative here and treat looping pure or const functions
2022 as if they were regular functions. */
2023 if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
2024 || !(RTL_CONST_OR_PURE_CALL_P (insn)))
2025 cselib_invalidate_mem (callmem);
2026 }
2027
2028 cselib_record_sets (insn);
2029
2030 #ifdef AUTO_INC_DEC
2031 /* Clobber any registers which appear in REG_INC notes. We
2032 could keep track of the changes to their values, but it is
2033 unlikely to help. */
2034 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
2035 if (REG_NOTE_KIND (x) == REG_INC)
2036 cselib_invalidate_rtx (XEXP (x, 0));
2037 #endif
2038
2039 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
2040 after we have processed the insn. */
2041 if (CALL_P (insn))
2042 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
2043 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
2044 cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
2045
2046 cselib_current_insn = 0;
2047
2048 if (n_useless_values > MAX_USELESS_VALUES
2049 /* remove_useless_values is linear in the hash table size. Avoid
2050 quadratic behavior for very large hashtables with very few
2051 useless elements. */
2052 && (unsigned int)n_useless_values > cselib_hash_table->n_elements / 4)
2053 remove_useless_values ();
2054 }
2055
2056 /* Initialize cselib for one pass. The caller must also call
2057 init_alias_analysis. */
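
/* A typical driver loop (hypothetical; compare the in-tree users such
   as postreload.c and var-tracking.c).  Passing true instead of false
   also records memory equivalences:

     basic_block bb;
     rtx insn;

     init_alias_analysis ();
     cselib_init (false);
     FOR_EACH_BB (bb)
       FOR_BB_INSNS (bb, insn)
         cselib_process_insn (insn);
     cselib_finish ();
     end_alias_analysis ();
*/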
2058
2059 void
2060 cselib_init (bool record_memory)
2061 {
2062 elt_list_pool = create_alloc_pool ("elt_list",
2063 sizeof (struct elt_list), 10);
2064 elt_loc_list_pool = create_alloc_pool ("elt_loc_list",
2065 sizeof (struct elt_loc_list), 10);
2066 cselib_val_pool = create_alloc_pool ("cselib_val_list",
2067 sizeof (cselib_val), 10);
2068 value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
2069 cselib_record_memory = record_memory;
2070
2071 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
2072 see canon_true_dependence. This is only created once. */
2073 if (! callmem)
2074 callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2075
2076 cselib_nregs = max_reg_num ();
2077
2078 /* We preserve reg_values between passes to avoid reallocating and
2079 clearing the whole array each time; reallocate it only if it is too small or far too large.  */
2080 if (!reg_values || reg_values_size < cselib_nregs
2081 || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
2082 {
2083 if (reg_values)
2084 free (reg_values);
2085 /* Leave some space for newly emitted instructions so we don't end up
2086 reallocating in between passes.  */
2087 reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
2088 reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
2089 }
2090 used_regs = XNEWVEC (unsigned int, cselib_nregs);
2091 n_used_regs = 0;
2092 cselib_hash_table = htab_create (31, get_value_hash,
2093 entry_and_rtx_equal_p, NULL);
2094 next_uid = 1;
2095 }
2096
2097 /* Called when the current user is done with cselib. */
2098
2099 void
2100 cselib_finish (void)
2101 {
2102 cselib_discard_hook = NULL;
2103 free_alloc_pool (elt_list_pool);
2104 free_alloc_pool (elt_loc_list_pool);
2105 free_alloc_pool (cselib_val_pool);
2106 free_alloc_pool (value_pool);
2107 cselib_clear_table ();
2108 htab_delete (cselib_hash_table);
2109 free (used_regs);
2110 used_regs = 0;
2111 cselib_hash_table = 0;
2112 n_useless_values = 0;
2113 next_uid = 0;
2114 }
2115
2116 /* Dump the cselib_val *X to the FILE passed in INFO.  Called via htab_traverse.  */
2117
2118 static int
2119 dump_cselib_val (void **x, void *info)
2120 {
2121 cselib_val *v = (cselib_val *)*x;
2122 FILE *out = (FILE *)info;
2123 bool need_lf = true;
2124
2125 print_inline_rtx (out, v->val_rtx, 0);
2126
2127 if (v->locs)
2128 {
2129 struct elt_loc_list *l = v->locs;
2130 if (need_lf)
2131 {
2132 fputc ('\n', out);
2133 need_lf = false;
2134 }
2135 fputs (" locs:", out);
2136 do
2137 {
2138 fprintf (out, "\n from insn %i ",
2139 INSN_UID (l->setting_insn));
2140 print_inline_rtx (out, l->loc, 4);
2141 }
2142 while ((l = l->next));
2143 fputc ('\n', out);
2144 }
2145 else
2146 {
2147 fputs (" no locs", out);
2148 need_lf = true;
2149 }
2150
2151 if (v->addr_list)
2152 {
2153 struct elt_list *e = v->addr_list;
2154 if (need_lf)
2155 {
2156 fputc ('\n', out);
2157 need_lf = false;
2158 }
2159 fputs (" addr list:", out);
2160 do
2161 {
2162 fputs ("\n ", out);
2163 print_inline_rtx (out, e->elt->val_rtx, 2);
2164 }
2165 while ((e = e->next));
2166 fputc ('\n', out);
2167 }
2168 else
2169 {
2170 fputs (" no addrs", out);
2171 need_lf = true;
2172 }
2173
2174 if (v->next_containing_mem == &dummy_val)
2175 fputs (" last mem\n", out);
2176 else if (v->next_containing_mem)
2177 {
2178 fputs (" next mem ", out);
2179 print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
2180 fputc ('\n', out);
2181 }
2182 else if (need_lf)
2183 fputc ('\n', out);
2184
2185 return 1;
2186 }
2187
2188 /* Dump to OUT everything in the CSELIB table. */
2189
2190 void
2191 dump_cselib_table (FILE *out)
2192 {
2193 fprintf (out, "cselib hash table:\n");
2194 htab_traverse (cselib_hash_table, dump_cselib_val, out);
2195 if (first_containing_mem != &dummy_val)
2196 {
2197 fputs ("first mem ", out);
2198 print_inline_rtx (out, first_containing_mem->val_rtx, 2);
2199 fputc ('\n', out);
2200 }
2201 fprintf (out, "next uid %i\n", next_uid);
2202 }
2203
2204 #include "gt-cselib.h"