1 /* Common subexpression elimination library for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "flags.h"
32 #include "insn-config.h"
33 #include "recog.h"
34 #include "function.h"
35 #include "emit-rtl.h"
36 #include "diagnostic-core.h"
37 #include "output.h"
38 #include "ggc.h"
39 #include "hashtab.h"
40 #include "tree-pass.h"
41 #include "cselib.h"
42 #include "params.h"
43 #include "alloc-pool.h"
44 #include "target.h"
45 #include "bitmap.h"
46
47 static bool cselib_record_memory;
48 static bool cselib_preserve_constants;
49 static int entry_and_rtx_equal_p (const void *, const void *);
50 static hashval_t get_value_hash (const void *);
51 static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
52 static struct elt_loc_list *new_elt_loc_list (struct elt_loc_list *, rtx);
53 static void unchain_one_value (cselib_val *);
54 static void unchain_one_elt_list (struct elt_list **);
55 static void unchain_one_elt_loc_list (struct elt_loc_list **);
56 static int discard_useless_locs (void **, void *);
57 static int discard_useless_values (void **, void *);
58 static void remove_useless_values (void);
59 static unsigned int cselib_hash_rtx (rtx, int);
60 static cselib_val *new_cselib_val (unsigned int, enum machine_mode, rtx);
61 static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
62 static cselib_val *cselib_lookup_mem (rtx, int);
63 static void cselib_invalidate_regno (unsigned int, enum machine_mode);
64 static void cselib_invalidate_mem (rtx);
65 static void cselib_record_set (rtx, cselib_val *, cselib_val *);
66 static void cselib_record_sets (rtx);
67
68 struct expand_value_data
69 {
70 bitmap regs_active;
71 cselib_expand_callback callback;
72 void *callback_arg;
73 bool dummy;
74 };
75
76 static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);
77
78 /* There are three ways in which cselib can look up an rtx:
79 - for a REG, the reg_values table (which is indexed by regno) is used
80 - for a MEM, we recursively look up its address and then follow the
81 addr_list of that value
82 - for everything else, we compute a hash value and go through the hash
83 table. Since different rtx's can still have the same hash value,
84 this involves walking the table entries for a given value and comparing
85 the locations of the entries with the rtx we are looking up. */
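
/* A minimal usage sketch (not built into cselib; the function name
   example_show_value is hypothetical) of the lookup interface that
   sits on top of the three paths described above:  */
#if 0
static void
example_show_value (rtx x)
{
  /* cselib_lookup hides the dispatch: REG goes through reg_values,
     MEM through the address's addr_list, anything else through the
     hash table.  Passing CREATE == 0 only queries.  */
  cselib_val *v = cselib_lookup (x, GET_MODE (x), 0);

  if (v)
    /* Equivalent rtxes share a single cselib_val, so val_rtx pointers
       can be compared directly.  */
    fprintf (stderr, "value %u:%u\n", v->uid, v->hash);
}
#endif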
86
87 /* A table that enables us to look up elts by their value. */
88 static htab_t cselib_hash_table;
89
90 /* This is a global so we don't have to pass this through every function.
91 It is used in new_elt_loc_list to set SETTING_INSN. */
92 static rtx cselib_current_insn;
93
95 /* The unique id that the next newly created value will take. */
95 static unsigned int next_uid;
96
97 /* The number of registers we had when the varrays were last resized. */
98 static unsigned int cselib_nregs;
99
100 /* Count values without known locations, or with only locations that
101 wouldn't have been known except for debug insns. Whenever this
102 grows too big, we remove these useless values from the table.
103
104 Counting values with only debug values is a bit tricky. We don't
105 want to increment n_useless_values when we create a value for a
106 debug insn, for this would get n_useless_values out of sync, but we
107 want to increment it if all locs in the list that were ever referenced
108 in nondebug insns are removed from the list.
109
110 In the general case, once we do that, we'd have to stop accepting
111 nondebug expressions in the loc list, to avoid having two values
112 equivalent that, without debug insns, would have been made into
113 separate values. However, because debug insns never introduce
114 equivalences themselves (no assignments), the only means for
115 growing loc lists is through nondebug assignments. If the locs
116 also happen to be referenced in debug insns, it will work just fine.
117
118 A consequence of this is that there's at most one debug-only loc in
119 each loc list. If we keep it in the first entry, testing whether
120 we have a debug-only loc list takes O(1).
121
122 Furthermore, since any additional entry in a loc list containing a
123 debug loc would have to come from an assignment (nondebug) that
124 references both the initial debug loc and the newly-equivalent loc,
125 the initial debug loc would be promoted to a nondebug loc, and the
126 loc list would not contain debug locs any more.
127
128 So the only case we have to be careful with in order to keep
129 n_useless_values in sync between debug and nondebug compilations is
130 to avoid incrementing n_useless_values when removing the single loc
131 from a value that turns out to not appear outside debug values. We
132 increment n_useless_debug_values instead, and leave such values
133 alone until, for other reasons, we garbage-collect useless
134 values. */
135 static int n_useless_values;
136 static int n_useless_debug_values;
137
138 /* Count values whose locs have been taken exclusively from debug
139 insns for the entire life of the value. */
140 static int n_debug_values;
141
142 /* Number of useless values before we remove them from the hash table. */
143 #define MAX_USELESS_VALUES 32
144
145 /* This table maps from register number to values. It does not
146 contain pointers to cselib_val structures, but rather elt_lists.
147 The purpose is to be able to refer to the same register in
148 different modes. The first element of the list defines the mode in
149 which the register was set; if the mode is unknown or the value is
150 no longer valid in that mode, ELT will be NULL for the first
151 element. */
152 static struct elt_list **reg_values;
153 static unsigned int reg_values_size;
154 #define REG_VALUES(i) reg_values[i]
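
/* A sketch (not built; example_reg_value_in_mode is hypothetical) of
   walking REG_VALUES as just described: skip a NULL first element,
   then scan for an entry of the wanted mode.  The same loop shape
   appears below in cselib_lookup_1 and cselib_subst_to_values.  */
#if 0
static cselib_val *
example_reg_value_in_mode (unsigned int regno, enum machine_mode mode)
{
  struct elt_list *l = REG_VALUES (regno);

  /* A NULL first element means the mode of the setting store is
     unknown or the value is no longer valid in it.  */
  if (l && l->elt == NULL)
    l = l->next;
  for (; l; l = l->next)
    if (GET_MODE (l->elt->val_rtx) == mode)
      return l->elt;
  return NULL;
}
#endif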
155
156 /* The largest number of hard regs used by any entry added to the
157 REG_VALUES table. Cleared on each cselib_clear_table() invocation. */
158 static unsigned int max_value_regs;
159
160 /* Here the set of indices I with REG_VALUES(I) != 0 is saved. This is used
161 in cselib_clear_table() for fast emptying. */
162 static unsigned int *used_regs;
163 static unsigned int n_used_regs;
164
165 /* We pass this to cselib_invalidate_mem to invalidate all of
166 memory for a non-const call instruction. */
167 static GTY(()) rtx callmem;
168
169 /* Set by discard_useless_locs if it deleted the last location of any
170 value. */
171 static int values_became_useless;
172
173 /* Used as stop element of the containing_mem list so we can check
174 presence in the list by checking the next pointer. */
175 static cselib_val dummy_val;
176
177 /* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
178 that is constant through the whole function and should never be
179 eliminated. */
180 static cselib_val *cfa_base_preserved_val;
181 static unsigned int cfa_base_preserved_regno;
182
183 /* Used to list all values that contain a memory reference.
184 May or may not contain the useless values - the list is compacted
185 each time memory is invalidated. */
186 static cselib_val *first_containing_mem = &dummy_val;
187 static alloc_pool elt_loc_list_pool, elt_list_pool, cselib_val_pool, value_pool;
188
189 /* If nonnull, cselib will call this function before freeing useless
190 VALUEs. A VALUE is deemed useless if its "locs" field is null. */
191 void (*cselib_discard_hook) (cselib_val *);
192
193 /* If nonnull, cselib will call this function before recording sets or
194 even clobbering outputs of INSN. All the recorded sets will be
195 represented in the array sets[n_sets]. new_val_min can be used to
196 tell whether values present in sets are introduced by this
197 instruction. */
198 void (*cselib_record_sets_hook) (rtx insn, struct cselib_set *sets,
199 int n_sets);
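
/* An illustrative hook (not built; example_record_sets_hook is
   hypothetical) matching the signature above.  By the time the hook
   runs, cselib_record_sets has already filled in src_elt for each
   recordable set.  */
#if 0
static void
example_record_sets_hook (rtx insn, struct cselib_set *sets, int n_sets)
{
  int i;

  for (i = 0; i < n_sets; i++)
    if (sets[i].src_elt && dump_file)
      fprintf (dump_file, "insn %d: set %d has value %u\n",
	       INSN_UID (insn), i, sets[i].src_elt->uid);
}
#endif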
200
201 #define PRESERVED_VALUE_P(RTX) \
202 (RTL_FLAG_CHECK1("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
203
204 \f
205
206 /* Allocate a struct elt_list and fill in its two elements with the
207 arguments. */
208
209 static inline struct elt_list *
210 new_elt_list (struct elt_list *next, cselib_val *elt)
211 {
212 struct elt_list *el;
213 el = (struct elt_list *) pool_alloc (elt_list_pool);
214 el->next = next;
215 el->elt = elt;
216 return el;
217 }
218
219 /* Allocate a struct elt_loc_list and fill in its two elements with the
220 arguments. */
221
222 static inline struct elt_loc_list *
223 new_elt_loc_list (struct elt_loc_list *next, rtx loc)
224 {
225 struct elt_loc_list *el;
226 el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
227 el->next = next;
228 el->loc = loc;
229 el->setting_insn = cselib_current_insn;
230 gcc_assert (!next || !next->setting_insn
231 || !DEBUG_INSN_P (next->setting_insn));
232
233 /* If we're creating the first loc in a debug insn context, we've
234 just created a debug value. Count it. */
235 if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
236 n_debug_values++;
237
238 return el;
239 }
240
241 /* Promote loc L to a nondebug cselib_current_insn if L is marked as
242 originating from a debug insn, maintaining the debug values
243 count. */
244
245 static inline void
246 promote_debug_loc (struct elt_loc_list *l)
247 {
248 if (l->setting_insn && DEBUG_INSN_P (l->setting_insn)
249 && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
250 {
251 n_debug_values--;
252 l->setting_insn = cselib_current_insn;
253 gcc_assert (!l->next);
254 }
255 }
256
257 /* The elt_list at *PL is no longer needed. Unchain it and free its
258 storage. */
259
260 static inline void
261 unchain_one_elt_list (struct elt_list **pl)
262 {
263 struct elt_list *l = *pl;
264
265 *pl = l->next;
266 pool_free (elt_list_pool, l);
267 }
268
269 /* Likewise for elt_loc_lists. */
270
271 static void
272 unchain_one_elt_loc_list (struct elt_loc_list **pl)
273 {
274 struct elt_loc_list *l = *pl;
275
276 *pl = l->next;
277 pool_free (elt_loc_list_pool, l);
278 }
279
280 /* Likewise for cselib_vals. This also frees the addr_list associated with
281 V. */
282
283 static void
284 unchain_one_value (cselib_val *v)
285 {
286 while (v->addr_list)
287 unchain_one_elt_list (&v->addr_list);
288
289 pool_free (cselib_val_pool, v);
290 }
291
292 /* Remove all entries from the hash table. Also used during
293 initialization. */
294
295 void
296 cselib_clear_table (void)
297 {
298 cselib_reset_table (1);
299 }
300
301 /* Remove from hash table all VALUEs except constants. */
302
303 static int
304 preserve_only_constants (void **x, void *info ATTRIBUTE_UNUSED)
305 {
306 cselib_val *v = (cselib_val *)*x;
307
308 if (v->locs != NULL
309 && v->locs->next == NULL)
310 {
311 if (CONSTANT_P (v->locs->loc)
312 && (GET_CODE (v->locs->loc) != CONST
313 || !references_value_p (v->locs->loc, 0)))
314 return 1;
315 if (cfa_base_preserved_val)
316 {
317 if (v == cfa_base_preserved_val)
318 return 1;
319 if (GET_CODE (v->locs->loc) == PLUS
320 && CONST_INT_P (XEXP (v->locs->loc, 1))
321 && XEXP (v->locs->loc, 0) == cfa_base_preserved_val->val_rtx)
322 return 1;
323 }
324 }
325
326 htab_clear_slot (cselib_hash_table, x);
327 return 1;
328 }
329
330 /* Remove all entries from the hash table, arranging for the next
331 value to be numbered NUM. */
332
333 void
334 cselib_reset_table (unsigned int num)
335 {
336 unsigned int i;
337
338 max_value_regs = 0;
339
340 if (cfa_base_preserved_val)
341 {
342 unsigned int regno = cfa_base_preserved_regno;
343 unsigned int new_used_regs = 0;
344 for (i = 0; i < n_used_regs; i++)
345 if (used_regs[i] == regno)
346 {
347 new_used_regs = 1;
348 continue;
349 }
350 else
351 REG_VALUES (used_regs[i]) = 0;
352 gcc_assert (new_used_regs == 1);
353 n_used_regs = new_used_regs;
354 used_regs[0] = regno;
355 max_value_regs
356 = hard_regno_nregs[regno][GET_MODE (cfa_base_preserved_val->locs->loc)];
357 }
358 else
359 {
360 for (i = 0; i < n_used_regs; i++)
361 REG_VALUES (used_regs[i]) = 0;
362 n_used_regs = 0;
363 }
364
365 if (cselib_preserve_constants)
366 htab_traverse (cselib_hash_table, preserve_only_constants, NULL);
367 else
368 htab_empty (cselib_hash_table);
369
370 n_useless_values = 0;
371 n_useless_debug_values = 0;
372 n_debug_values = 0;
373
374 next_uid = num;
375
376 first_containing_mem = &dummy_val;
377 }
378
379 /* Return the number of the next value that will be generated. */
380
381 unsigned int
382 cselib_get_next_uid (void)
383 {
384 return next_uid;
385 }
386
387 /* The equality test for our hash table. The first argument ENTRY is a table
388 element (i.e. a cselib_val), while the second arg X is an rtx. We know
389 that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
390 CONST of an appropriate mode. */
391
392 static int
393 entry_and_rtx_equal_p (const void *entry, const void *x_arg)
394 {
395 struct elt_loc_list *l;
396 const cselib_val *const v = (const cselib_val *) entry;
397 rtx x = CONST_CAST_RTX ((const_rtx)x_arg);
398 enum machine_mode mode = GET_MODE (x);
399
400 gcc_assert (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
401 && (mode != VOIDmode || GET_CODE (x) != CONST_DOUBLE));
402
403 if (mode != GET_MODE (v->val_rtx))
404 return 0;
405
406 /* Unwrap X if necessary. */
407 if (GET_CODE (x) == CONST
408 && (CONST_INT_P (XEXP (x, 0))
409 || GET_CODE (XEXP (x, 0)) == CONST_FIXED
410 || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
411 x = XEXP (x, 0);
412
413 /* We don't guarantee that distinct rtx's have different hash values,
414 so we need to do a comparison. */
415 for (l = v->locs; l; l = l->next)
416 if (rtx_equal_for_cselib_p (l->loc, x))
417 {
418 promote_debug_loc (l);
419 return 1;
420 }
421
422 return 0;
423 }
424
425 /* The hash function for our hash table. The value is always computed with
426 cselib_hash_rtx when adding an element; this function just extracts the
427 hash value from a cselib_val structure. */
428
429 static hashval_t
430 get_value_hash (const void *entry)
431 {
432 const cselib_val *const v = (const cselib_val *) entry;
433 return v->hash;
434 }
435
436 /* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
437 only return true for values which point to a cselib_val whose value
438 element has been set to zero, which implies the cselib_val will be
439 removed. */
440
441 int
442 references_value_p (const_rtx x, int only_useless)
443 {
444 const enum rtx_code code = GET_CODE (x);
445 const char *fmt = GET_RTX_FORMAT (code);
446 int i, j;
447
448 if (GET_CODE (x) == VALUE
449 && (! only_useless || CSELIB_VAL_PTR (x)->locs == 0))
450 return 1;
451
452 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
453 {
454 if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
455 return 1;
456 else if (fmt[i] == 'E')
457 for (j = 0; j < XVECLEN (x, i); j++)
458 if (references_value_p (XVECEXP (x, i, j), only_useless))
459 return 1;
460 }
461
462 return 0;
463 }
464
465 /* For all locations found in X, delete locations that reference useless
466 values (i.e. values without any location). Called through
467 htab_traverse. */
468
469 static int
470 discard_useless_locs (void **x, void *info ATTRIBUTE_UNUSED)
471 {
472 cselib_val *v = (cselib_val *)*x;
473 struct elt_loc_list **p = &v->locs;
474 bool had_locs = v->locs != NULL;
475 rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;
476
477 while (*p)
478 {
479 if (references_value_p ((*p)->loc, 1))
480 unchain_one_elt_loc_list (p);
481 else
482 p = &(*p)->next;
483 }
484
485 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
486 {
487 if (setting_insn && DEBUG_INSN_P (setting_insn))
488 n_useless_debug_values++;
489 else
490 n_useless_values++;
491 values_became_useless = 1;
492 }
493 return 1;
494 }
495
496 /* If X is a value with no locations, remove it from the hashtable. */
497
498 static int
499 discard_useless_values (void **x, void *info ATTRIBUTE_UNUSED)
500 {
501 cselib_val *v = (cselib_val *)*x;
502
503 if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
504 {
505 if (cselib_discard_hook)
506 cselib_discard_hook (v);
507
508 CSELIB_VAL_PTR (v->val_rtx) = NULL;
509 htab_clear_slot (cselib_hash_table, x);
510 unchain_one_value (v);
511 n_useless_values--;
512 }
513
514 return 1;
515 }
516
517 /* Clean out useless values (i.e. those which no longer have locations
518 associated with them) from the hash table. */
519
520 static void
521 remove_useless_values (void)
522 {
523 cselib_val **p, *v;
524
525 /* First pass: eliminate locations that reference useless values.
526 That in turn can make more values useless. */
527 do
528 {
529 values_became_useless = 0;
530 htab_traverse (cselib_hash_table, discard_useless_locs, 0);
531 }
532 while (values_became_useless);
533
534 /* Second pass: actually remove the values. */
535
536 p = &first_containing_mem;
537 for (v = *p; v != &dummy_val; v = v->next_containing_mem)
538 if (v->locs)
539 {
540 *p = v;
541 p = &(*p)->next_containing_mem;
542 }
543 *p = &dummy_val;
544
545 n_useless_values += n_useless_debug_values;
546 n_debug_values -= n_useless_debug_values;
547 n_useless_debug_values = 0;
548
549 htab_traverse (cselib_hash_table, discard_useless_values, 0);
550
551 gcc_assert (!n_useless_values);
552 }
553
554 /* Arrange for a value to not be removed from the hash table even if
555 it becomes useless. */
556
557 void
558 cselib_preserve_value (cselib_val *v)
559 {
560 PRESERVED_VALUE_P (v->val_rtx) = 1;
561 }
562
563 /* Test whether a value is preserved. */
564
565 bool
566 cselib_preserved_value_p (cselib_val *v)
567 {
568 return PRESERVED_VALUE_P (v->val_rtx);
569 }
570
571 /* Arrange for a REG value to be assumed constant through the whole function,
572 never invalidated and preserved across cselib_reset_table calls. */
573
574 void
575 cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
576 {
577 if (cselib_preserve_constants
578 && v->locs
579 && REG_P (v->locs->loc))
580 {
581 cfa_base_preserved_val = v;
582 cfa_base_preserved_regno = regno;
583 }
584 }
585
586 /* Clean all non-constant expressions in the hash table, but retain
587 their values. */
588
589 void
590 cselib_preserve_only_values (void)
591 {
592 int i;
593
594 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
595 cselib_invalidate_regno (i, reg_raw_mode[i]);
596
597 cselib_invalidate_mem (callmem);
598
599 remove_useless_values ();
600
601 gcc_assert (first_containing_mem == &dummy_val);
602 }
603
604 /* Return the mode in which a register was last set. If X is not a
605 register, return its mode. If the mode in which the register was
606 set is not known, or the value was already clobbered, return
607 VOIDmode. */
608
609 enum machine_mode
610 cselib_reg_set_mode (const_rtx x)
611 {
612 if (!REG_P (x))
613 return GET_MODE (x);
614
615 if (REG_VALUES (REGNO (x)) == NULL
616 || REG_VALUES (REGNO (x))->elt == NULL)
617 return VOIDmode;
618
619 return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
620 }
621
622 /* Return nonzero if we can prove that X and Y contain the same value, taking
623 our gathered information into account. */
624
625 int
626 rtx_equal_for_cselib_p (rtx x, rtx y)
627 {
628 enum rtx_code code;
629 const char *fmt;
630 int i;
631
632 if (REG_P (x) || MEM_P (x))
633 {
634 cselib_val *e = cselib_lookup (x, GET_MODE (x), 0);
635
636 if (e)
637 x = e->val_rtx;
638 }
639
640 if (REG_P (y) || MEM_P (y))
641 {
642 cselib_val *e = cselib_lookup (y, GET_MODE (y), 0);
643
644 if (e)
645 y = e->val_rtx;
646 }
647
648 if (x == y)
649 return 1;
650
651 if (GET_CODE (x) == VALUE && GET_CODE (y) == VALUE)
652 return CSELIB_VAL_PTR (x) == CSELIB_VAL_PTR (y);
653
654 if (GET_CODE (x) == VALUE)
655 {
656 cselib_val *e = CSELIB_VAL_PTR (x);
657 struct elt_loc_list *l;
658
659 for (l = e->locs; l; l = l->next)
660 {
661 rtx t = l->loc;
662
663 /* Avoid infinite recursion. */
664 if (REG_P (t) || MEM_P (t))
665 continue;
666 else if (rtx_equal_for_cselib_p (t, y))
667 return 1;
668 }
669
670 return 0;
671 }
672
673 if (GET_CODE (y) == VALUE)
674 {
675 cselib_val *e = CSELIB_VAL_PTR (y);
676 struct elt_loc_list *l;
677
678 for (l = e->locs; l; l = l->next)
679 {
680 rtx t = l->loc;
681
682 if (REG_P (t) || MEM_P (t))
683 continue;
684 else if (rtx_equal_for_cselib_p (x, t))
685 return 1;
686 }
687
688 return 0;
689 }
690
691 if (GET_CODE (x) != GET_CODE (y) || GET_MODE (x) != GET_MODE (y))
692 return 0;
693
694 /* These won't be handled correctly by the code below. */
695 switch (GET_CODE (x))
696 {
697 case CONST_DOUBLE:
698 case CONST_FIXED:
699 case DEBUG_EXPR:
700 return 0;
701
702 case DEBUG_IMPLICIT_PTR:
703 return DEBUG_IMPLICIT_PTR_DECL (x)
704 == DEBUG_IMPLICIT_PTR_DECL (y);
705
706 case LABEL_REF:
707 return XEXP (x, 0) == XEXP (y, 0);
708
709 default:
710 break;
711 }
712
713 code = GET_CODE (x);
714 fmt = GET_RTX_FORMAT (code);
715
716 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
717 {
718 int j;
719
720 switch (fmt[i])
721 {
722 case 'w':
723 if (XWINT (x, i) != XWINT (y, i))
724 return 0;
725 break;
726
727 case 'n':
728 case 'i':
729 if (XINT (x, i) != XINT (y, i))
730 return 0;
731 break;
732
733 case 'V':
734 case 'E':
735 /* Two vectors must have the same length. */
736 if (XVECLEN (x, i) != XVECLEN (y, i))
737 return 0;
738
739 /* And the corresponding elements must match. */
740 for (j = 0; j < XVECLEN (x, i); j++)
741 if (! rtx_equal_for_cselib_p (XVECEXP (x, i, j),
742 XVECEXP (y, i, j)))
743 return 0;
744 break;
745
746 case 'e':
747 if (i == 1
748 && targetm.commutative_p (x, UNKNOWN)
749 && rtx_equal_for_cselib_p (XEXP (x, 1), XEXP (y, 0))
750 && rtx_equal_for_cselib_p (XEXP (x, 0), XEXP (y, 1)))
751 return 1;
752 if (! rtx_equal_for_cselib_p (XEXP (x, i), XEXP (y, i)))
753 return 0;
754 break;
755
756 case 'S':
757 case 's':
758 if (strcmp (XSTR (x, i), XSTR (y, i)))
759 return 0;
760 break;
761
762 case 'u':
763 /* These are just backpointers, so they don't matter. */
764 break;
765
766 case '0':
767 case 't':
768 break;
769
770 /* It is believed that rtx's at this level will never
771 contain anything but integers and other rtx's,
772 except for within LABEL_REFs and SYMBOL_REFs. */
773 default:
774 gcc_unreachable ();
775 }
776 }
777 return 1;
778 }
779
780 /* We need to pass down the mode of constants through the hash table
781 functions. For that purpose, wrap them in a CONST of the appropriate
782 mode. */
783 static rtx
784 wrap_constant (enum machine_mode mode, rtx x)
785 {
786 if (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
787 && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
788 return x;
789 gcc_assert (mode != VOIDmode);
790 return gen_rtx_CONST (mode, x);
791 }
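
/* For instance (a sketch, not compiled): a CONST_INT carries no mode
   of its own, so it must be wrapped before hashing lest integers
   intended for different modes collide in the table.  */
#if 0
  rtx wrapped = wrap_constant (SImode, GEN_INT (42));
  /* wrapped is (const:SI (const_int 42)); constants that already
     carry a mode, such as a non-VOIDmode CONST_DOUBLE, are returned
     unchanged.  */
#endif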
792
793 /* Hash an rtx. Return 0 if we couldn't hash the rtx.
794 For registers and memory locations, we look up their cselib_val structure
795 and return its VALUE element.
796 Possible reasons for returning 0 are: the object is volatile, or we couldn't
797 find a register or memory location in the table and CREATE is zero. If
798 CREATE is nonzero, table elts are created for regs and mem.
799 N.B. this hash function returns the same hash value for RTXes that
800 differ only in the order of operands, thus it is suitable for comparisons
801 that take commutativity into account.
802 If we wanted to also support associative rules, we'd have to use a different
803 strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1).
804 We used to have a MODE argument for hashing for CONST_INTs, but that
805 didn't make sense, since it caused spurious hash differences between
806 (set (reg:SI 1) (const_int))
807 (plus:SI (reg:SI 2) (reg:SI 1))
808 and
809 (plus:SI (reg:SI 2) (const_int))
810 If the mode is important in any context, it must be checked specifically
811 in a comparison anyway, since relying on hash differences is unsafe. */
812
813 static unsigned int
814 cselib_hash_rtx (rtx x, int create)
815 {
816 cselib_val *e;
817 int i, j;
818 enum rtx_code code;
819 const char *fmt;
820 unsigned int hash = 0;
821
822 code = GET_CODE (x);
823 hash += (unsigned) code + (unsigned) GET_MODE (x);
824
825 switch (code)
826 {
827 case MEM:
828 case REG:
829 e = cselib_lookup (x, GET_MODE (x), create);
830 if (! e)
831 return 0;
832
833 return e->hash;
834
835 case DEBUG_EXPR:
836 hash += ((unsigned) DEBUG_EXPR << 7)
837 + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
838 return hash ? hash : (unsigned int) DEBUG_EXPR;
839
840 case DEBUG_IMPLICIT_PTR:
841 hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
842 + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
843 return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;
844
845 case CONST_INT:
846 hash += ((unsigned) CONST_INT << 7) + INTVAL (x);
847 return hash ? hash : (unsigned int) CONST_INT;
848
849 case CONST_DOUBLE:
850 /* This is like the general case, except that it only counts
851 the integers representing the constant. */
852 hash += (unsigned) code + (unsigned) GET_MODE (x);
853 if (GET_MODE (x) != VOIDmode)
854 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
855 else
856 hash += ((unsigned) CONST_DOUBLE_LOW (x)
857 + (unsigned) CONST_DOUBLE_HIGH (x));
858 return hash ? hash : (unsigned int) CONST_DOUBLE;
859
860 case CONST_FIXED:
861 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
862 hash += fixed_hash (CONST_FIXED_VALUE (x));
863 return hash ? hash : (unsigned int) CONST_FIXED;
864
865 case CONST_VECTOR:
866 {
867 int units;
868 rtx elt;
869
870 units = CONST_VECTOR_NUNITS (x);
871
872 for (i = 0; i < units; ++i)
873 {
874 elt = CONST_VECTOR_ELT (x, i);
875 hash += cselib_hash_rtx (elt, 0);
876 }
877
878 return hash;
879 }
880
881 /* Assume there is only one rtx object for any given label. */
882 case LABEL_REF:
883 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
884 differences and differences between each stage's debugging dumps. */
885 hash += (((unsigned int) LABEL_REF << 7)
886 + CODE_LABEL_NUMBER (XEXP (x, 0)));
887 return hash ? hash : (unsigned int) LABEL_REF;
888
889 case SYMBOL_REF:
890 {
891 /* Don't hash on the symbol's address to avoid bootstrap differences.
892 Different hash values may cause expressions to be recorded in
893 different orders and thus different registers to be used in the
894 final assembler. This also avoids differences in the dump files
895 between various stages. */
896 unsigned int h = 0;
897 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
898
899 while (*p)
900 h += (h << 7) + *p++; /* ??? revisit */
901
902 hash += ((unsigned int) SYMBOL_REF << 7) + h;
903 return hash ? hash : (unsigned int) SYMBOL_REF;
904 }
905
906 case PRE_DEC:
907 case PRE_INC:
908 case POST_DEC:
909 case POST_INC:
910 case POST_MODIFY:
911 case PRE_MODIFY:
912 case PC:
913 case CC0:
914 case CALL:
915 case UNSPEC_VOLATILE:
916 return 0;
917
918 case ASM_OPERANDS:
919 if (MEM_VOLATILE_P (x))
920 return 0;
921
922 break;
923
924 default:
925 break;
926 }
927
928 i = GET_RTX_LENGTH (code) - 1;
929 fmt = GET_RTX_FORMAT (code);
930 for (; i >= 0; i--)
931 {
932 switch (fmt[i])
933 {
934 case 'e':
935 {
936 rtx tem = XEXP (x, i);
937 unsigned int tem_hash = cselib_hash_rtx (tem, create);
938
939 if (tem_hash == 0)
940 return 0;
941
942 hash += tem_hash;
943 }
944 break;
945 case 'E':
946 for (j = 0; j < XVECLEN (x, i); j++)
947 {
948 unsigned int tem_hash
949 = cselib_hash_rtx (XVECEXP (x, i, j), create);
950
951 if (tem_hash == 0)
952 return 0;
953
954 hash += tem_hash;
955 }
956 break;
957
958 case 's':
959 {
960 const unsigned char *p = (const unsigned char *) XSTR (x, i);
961
962 if (p)
963 while (*p)
964 hash += *p++;
965 break;
966 }
967
968 case 'i':
969 hash += XINT (x, i);
970 break;
971
972 case '0':
973 case 't':
974 /* unused */
975 break;
976
977 default:
978 gcc_unreachable ();
979 }
980 }
981
982 return hash ? hash : 1 + (unsigned int) GET_CODE (x);
983 }
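
/* An illustrative property check (not built; example_commutative_hash
   is hypothetical): since operand hashes are summed, commuted
   operands of the same expression hash identically, as the comment
   before cselib_hash_rtx promises.  */
#if 0
static void
example_commutative_hash (rtx a, rtx b)
{
  rtx x = gen_rtx_PLUS (SImode, a, b);
  rtx y = gen_rtx_PLUS (SImode, b, a);

  /* Either both hash to the same value or both fail with 0.  */
  gcc_assert (cselib_hash_rtx (x, 0) == cselib_hash_rtx (y, 0));
}
#endif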
984
985 /* Create a new value structure for VALUE and initialize it. The mode of the
986 value is MODE. */
987
988 static inline cselib_val *
989 new_cselib_val (unsigned int hash, enum machine_mode mode, rtx x)
990 {
991 cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool);
992
993 gcc_assert (hash);
994 gcc_assert (next_uid);
995
996 e->hash = hash;
997 e->uid = next_uid++;
998 /* We use an alloc pool to allocate this RTL construct because it
999 accounts for about 8% of the overall memory usage. We know
1000 precisely when we can have VALUE RTXen (when cselib is active)
1001 so we don't need to put them in garbage collected memory.
1002 ??? Why should a VALUE be an RTX in the first place? */
1003 e->val_rtx = (rtx) pool_alloc (value_pool);
1004 memset (e->val_rtx, 0, RTX_HDR_SIZE);
1005 PUT_CODE (e->val_rtx, VALUE);
1006 PUT_MODE (e->val_rtx, mode);
1007 CSELIB_VAL_PTR (e->val_rtx) = e;
1008 e->addr_list = 0;
1009 e->locs = 0;
1010 e->next_containing_mem = 0;
1011
1012 if (dump_file && (dump_flags & TDF_DETAILS))
1013 {
1014 fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
1015 if (flag_dump_noaddr || flag_dump_unnumbered)
1016 fputs ("# ", dump_file);
1017 else
1018 fprintf (dump_file, "%p ", (void*)e);
1019 print_rtl_single (dump_file, x);
1020 fputc ('\n', dump_file);
1021 }
1022
1023 return e;
1024 }
1025
1026 /* ADDR_ELT is a value that is used as address. MEM_ELT is the value that
1027 contains the data at this address. X is a MEM that represents the
1028 value. Update the two value structures to represent this situation. */
1029
1030 static void
1031 add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
1032 {
1033 struct elt_loc_list *l;
1034
1035 /* Avoid duplicates. */
1036 for (l = mem_elt->locs; l; l = l->next)
1037 if (MEM_P (l->loc)
1038 && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
1039 {
1040 promote_debug_loc (l);
1041 return;
1042 }
1043
1044 addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
1045 mem_elt->locs
1046 = new_elt_loc_list (mem_elt->locs,
1047 replace_equiv_address_nv (x, addr_elt->val_rtx));
1048 if (mem_elt->next_containing_mem == NULL)
1049 {
1050 mem_elt->next_containing_mem = first_containing_mem;
1051 first_containing_mem = mem_elt;
1052 }
1053 }
1054
1055 /* Subroutine of cselib_lookup. Return a value for X, which is a MEM rtx.
1056 If CREATE, make a new one if we haven't seen it before. */
1057
1058 static cselib_val *
1059 cselib_lookup_mem (rtx x, int create)
1060 {
1061 enum machine_mode mode = GET_MODE (x);
1062 void **slot;
1063 cselib_val *addr;
1064 cselib_val *mem_elt;
1065 struct elt_list *l;
1066
1067 if (MEM_VOLATILE_P (x) || mode == BLKmode
1068 || !cselib_record_memory
1069 || (FLOAT_MODE_P (mode) && flag_float_store))
1070 return 0;
1071
1072 /* Look up the value for the address. */
1073 addr = cselib_lookup (XEXP (x, 0), mode, create);
1074 if (! addr)
1075 return 0;
1076
1077 /* Find a value that describes a value of our mode at that address. */
1078 for (l = addr->addr_list; l; l = l->next)
1079 if (GET_MODE (l->elt->val_rtx) == mode)
1080 {
1081 promote_debug_loc (l->elt->locs);
1082 return l->elt;
1083 }
1084
1085 if (! create)
1086 return 0;
1087
1088 mem_elt = new_cselib_val (next_uid, mode, x);
1089 add_mem_for_addr (addr, mem_elt, x);
1090 slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x),
1091 mem_elt->hash, INSERT);
1092 *slot = mem_elt;
1093 return mem_elt;
1094 }
1095
1096 /* Search through the possible substitutions in P. We prefer a non-reg
1097 substitution because this allows us to expand the tree further. If
1098 we find just a reg, take the lowest regno. There may be several
1099 non-reg results; we just take the first one because they will all
1100 expand to the same place. */
1101
1102 static rtx
1103 expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
1104 int max_depth)
1105 {
1106 rtx reg_result = NULL;
1107 unsigned int regno = UINT_MAX;
1108 struct elt_loc_list *p_in = p;
1109
1110 for (; p; p = p->next)
1111 {
1112 /* Avoid infinite recursion trying to expand a reg into
1113 the same reg. */
1114 if ((REG_P (p->loc))
1115 && (REGNO (p->loc) < regno)
1116 && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
1117 {
1118 reg_result = p->loc;
1119 regno = REGNO (p->loc);
1120 }
1121 /* Avoid infinite recursion and do not try to expand the
1122 value. */
1123 else if (GET_CODE (p->loc) == VALUE
1124 && CSELIB_VAL_PTR (p->loc)->locs == p_in)
1125 continue;
1126 else if (!REG_P (p->loc))
1127 {
1128 rtx result, note;
1129 if (dump_file && (dump_flags & TDF_DETAILS))
1130 {
1131 print_inline_rtx (dump_file, p->loc, 0);
1132 fprintf (dump_file, "\n");
1133 }
1134 if (GET_CODE (p->loc) == LO_SUM
1135 && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
1136 && p->setting_insn
1137 && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
1138 && XEXP (note, 0) == XEXP (p->loc, 1))
1139 return XEXP (p->loc, 1);
1140 result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
1141 if (result)
1142 return result;
1143 }
1144
1145 }
1146
1147 if (regno != UINT_MAX)
1148 {
1149 rtx result;
1150 if (dump_file && (dump_flags & TDF_DETAILS))
1151 fprintf (dump_file, "r%d\n", regno);
1152
1153 result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
1154 if (result)
1155 return result;
1156 }
1157
1158 if (dump_file && (dump_flags & TDF_DETAILS))
1159 {
1160 if (reg_result)
1161 {
1162 print_inline_rtx (dump_file, reg_result, 0);
1163 fprintf (dump_file, "\n");
1164 }
1165 else
1166 fprintf (dump_file, "NULL\n");
1167 }
1168 return reg_result;
1169 }
1170
1171
1172 /* Forward substitute and expand an expression out to its roots.
1173 This is the opposite of common subexpression elimination. Because
1174 local value numbering is such a weak optimization, the expanded
1175 expression is pretty much unique (not from a pointer-equality point
1176 of view but from a tree-shape point of view).
1177
1178 This function returns NULL if the expansion fails. The expansion
1179 will fail if there is no value number for one of the operands or if
1180 one of the operands has been overwritten between the current insn
1181 and the beginning of the basic block. For instance x has no
1182 expansion in:
1183
1184 r1 <- r1 + 3
1185 x <- r1 + 8
1186
1187 REGS_ACTIVE is a scratch bitmap that should be clear when passed in.
1188 It is clear on return. */
1189
1190 rtx
1191 cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
1192 {
1193 struct expand_value_data evd;
1194
1195 evd.regs_active = regs_active;
1196 evd.callback = NULL;
1197 evd.callback_arg = NULL;
1198 evd.dummy = false;
1199
1200 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1201 }
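
/* A usage sketch (not built; example_expand_address and the depth
   limit of 5 are arbitrary choices of this example).  Callers such as
   dse hand in a scratch bitmap that is empty on entry and empty again
   on return.  */
#if 0
static rtx
example_expand_address (rtx addr)
{
  bitmap regs_active = BITMAP_ALLOC (NULL);
  /* The depth limit bounds the walk so a huge expansion gives up
     early.  */
  rtx exp = cselib_expand_value_rtx (addr, regs_active, 5);

  BITMAP_FREE (regs_active);
  return exp;	/* NULL if the expansion failed.  */
}
#endif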
1202
1203 /* Same as cselib_expand_value_rtx, but using a callback to try to
1204 resolve some expressions. The CB function should return ORIG if it
1205 can't or does not want to deal with a certain RTX. Any other
1206 return value, including NULL, will be used as the expansion for
1207 VALUE, without any further changes. */
1208
1209 rtx
1210 cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1211 cselib_expand_callback cb, void *data)
1212 {
1213 struct expand_value_data evd;
1214
1215 evd.regs_active = regs_active;
1216 evd.callback = cb;
1217 evd.callback_arg = data;
1218 evd.dummy = false;
1219
1220 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1221 }
1222
1223 /* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
1224 or simplified. Useful to find out whether cselib_expand_value_rtx_cb
1225 would return NULL or non-NULL, without allocating new rtx. */
1226
1227 bool
1228 cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1229 cselib_expand_callback cb, void *data)
1230 {
1231 struct expand_value_data evd;
1232
1233 evd.regs_active = regs_active;
1234 evd.callback = cb;
1235 evd.callback_arg = data;
1236 evd.dummy = true;
1237
1238 return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
1239 }
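
/* A hypothetical callback (not built) for the _cb variants above.
   Returning ORIG declines to handle the rtx; any other return value,
   NULL included, is used as the expansion as-is.  */
#if 0
static rtx
example_expand_cb (rtx orig, bitmap regs_active ATTRIBUTE_UNUSED,
		   int max_depth ATTRIBUTE_UNUSED,
		   void *data ATTRIBUTE_UNUSED)
{
  if (GET_CODE (orig) == DEBUG_EXPR)
    return NULL;	/* Treat debug expressions as unexpandable.  */
  return orig;		/* Let cselib handle everything else.  */
}
#endif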
1240
1241 /* Internal implementation of cselib_expand_value_rtx and
1242 cselib_expand_value_rtx_cb. */
1243
1244 static rtx
1245 cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
1246 int max_depth)
1247 {
1248 rtx copy, scopy;
1249 int i, j;
1250 RTX_CODE code;
1251 const char *format_ptr;
1252 enum machine_mode mode;
1253
1254 code = GET_CODE (orig);
1255
1256 /* For the context of dse, if we end up expanding into a huge tree, we
1257 will not have a useful address, so we might as well just give up
1258 quickly. */
1259 if (max_depth <= 0)
1260 return NULL;
1261
1262 switch (code)
1263 {
1264 case REG:
1265 {
1266 struct elt_list *l = REG_VALUES (REGNO (orig));
1267
1268 if (l && l->elt == NULL)
1269 l = l->next;
1270 for (; l; l = l->next)
1271 if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
1272 {
1273 rtx result;
1274 int regno = REGNO (orig);
1275
1276 /* The only thing that we are not willing to do (this
1277 is a requirement of dse, and if other potential uses
1278 need this function we should add a parm to control
1279 it) is that we will not substitute the
1280 STACK_POINTER_REGNUM, FRAME_POINTER or the
1281 HARD_FRAME_POINTER.
1282
1283 These expansions confuse the code that notices that
1284 stores into the frame go dead at the end of the
1285 function and that the frame is not affected by calls
1286 to subroutines. If you allow the
1287 STACK_POINTER_REGNUM substitution, then dse will
1288 think that parameter pushing also goes dead which is
1289 wrong. If you allow the FRAME_POINTER or the
1290 HARD_FRAME_POINTER then you lose the opportunity to
1291 make the frame assumptions. */
1292 if (regno == STACK_POINTER_REGNUM
1293 || regno == FRAME_POINTER_REGNUM
1294 || regno == HARD_FRAME_POINTER_REGNUM)
1295 return orig;
1296
1297 bitmap_set_bit (evd->regs_active, regno);
1298
1299 if (dump_file && (dump_flags & TDF_DETAILS))
1300 fprintf (dump_file, "expanding: r%d into: ", regno);
1301
1302 result = expand_loc (l->elt->locs, evd, max_depth);
1303 bitmap_clear_bit (evd->regs_active, regno);
1304
1305 if (result)
1306 return result;
1307 else
1308 return orig;
1309 }
1310 }
1311
1312 case CONST_INT:
1313 case CONST_DOUBLE:
1314 case CONST_VECTOR:
1315 case SYMBOL_REF:
1316 case CODE_LABEL:
1317 case PC:
1318 case CC0:
1319 case SCRATCH:
1320 /* SCRATCH must be shared because each instance represents a distinct value. */
1321 return orig;
1322 case CLOBBER:
1323 if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
1324 return orig;
1325 break;
1326
1327 case CONST:
1328 if (shared_const_p (orig))
1329 return orig;
1330 break;
1331
1332 case SUBREG:
1333 {
1334 rtx subreg;
1335
1336 if (evd->callback)
1337 {
1338 subreg = evd->callback (orig, evd->regs_active, max_depth,
1339 evd->callback_arg);
1340 if (subreg != orig)
1341 return subreg;
1342 }
1343
1344 subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
1345 max_depth - 1);
1346 if (!subreg)
1347 return NULL;
1348 scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
1349 GET_MODE (SUBREG_REG (orig)),
1350 SUBREG_BYTE (orig));
1351 if (scopy == NULL
1352 || (GET_CODE (scopy) == SUBREG
1353 && !REG_P (SUBREG_REG (scopy))
1354 && !MEM_P (SUBREG_REG (scopy))))
1355 return NULL;
1356
1357 return scopy;
1358 }
1359
1360 case VALUE:
1361 {
1362 rtx result;
1363
1364 if (dump_file && (dump_flags & TDF_DETAILS))
1365 {
1366 fputs ("\nexpanding ", dump_file);
1367 print_rtl_single (dump_file, orig);
1368 fputs (" into...", dump_file);
1369 }
1370
1371 if (evd->callback)
1372 {
1373 result = evd->callback (orig, evd->regs_active, max_depth,
1374 evd->callback_arg);
1375
1376 if (result != orig)
1377 return result;
1378 }
1379
1380 result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
1381 return result;
1382 }
1383
1384 case DEBUG_EXPR:
1385 if (evd->callback)
1386 return evd->callback (orig, evd->regs_active, max_depth,
1387 evd->callback_arg);
1388 return orig;
1389
1390 default:
1391 break;
1392 }
1393
1394 /* Copy the various flags, fields, and other information. We assume
1395 that all fields need copying, and then clear the fields that should
1396 not be copied. That is the sensible default behavior, and forces
1397 us to explicitly document why we are *not* copying a flag. */
1398 if (evd->dummy)
1399 copy = NULL;
1400 else
1401 copy = shallow_copy_rtx (orig);
1402
1403 format_ptr = GET_RTX_FORMAT (code);
1404
1405 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1406 switch (*format_ptr++)
1407 {
1408 case 'e':
1409 if (XEXP (orig, i) != NULL)
1410 {
1411 rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
1412 max_depth - 1);
1413 if (!result)
1414 return NULL;
1415 if (copy)
1416 XEXP (copy, i) = result;
1417 }
1418 break;
1419
1420 case 'E':
1421 case 'V':
1422 if (XVEC (orig, i) != NULL)
1423 {
1424 if (copy)
1425 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
1426 for (j = 0; j < XVECLEN (orig, i); j++)
1427 {
1428 rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
1429 evd, max_depth - 1);
1430 if (!result)
1431 return NULL;
1432 if (copy)
1433 XVECEXP (copy, i, j) = result;
1434 }
1435 }
1436 break;
1437
1438 case 't':
1439 case 'w':
1440 case 'i':
1441 case 's':
1442 case 'S':
1443 case 'T':
1444 case 'u':
1445 case 'B':
1446 case '0':
1447 /* These are left unchanged. */
1448 break;
1449
1450 default:
1451 gcc_unreachable ();
1452 }
1453
1454 if (evd->dummy)
1455 return orig;
1456
1457 mode = GET_MODE (copy);
1458 /* If an operand has been simplified into CONST_INT, which doesn't
1459 have a mode and the mode isn't derivable from the whole rtx's mode,
1460 try simplify_*_operation first with the mode of the original's
1461 operand and as a fallback wrap the CONST_INT into gen_rtx_CONST. */
1462 scopy = copy;
1463 switch (GET_RTX_CLASS (code))
1464 {
1465 case RTX_UNARY:
1466 if (CONST_INT_P (XEXP (copy, 0))
1467 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1468 {
1469 scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
1470 GET_MODE (XEXP (orig, 0)));
1471 if (scopy)
1472 return scopy;
1473 }
1474 break;
1475 case RTX_COMM_ARITH:
1476 case RTX_BIN_ARITH:
1477 /* These expressions can derive operand modes from the whole rtx's mode. */
1478 break;
1479 case RTX_TERNARY:
1480 case RTX_BITFIELD_OPS:
1481 if (CONST_INT_P (XEXP (copy, 0))
1482 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1483 {
1484 scopy = simplify_ternary_operation (code, mode,
1485 GET_MODE (XEXP (orig, 0)),
1486 XEXP (copy, 0), XEXP (copy, 1),
1487 XEXP (copy, 2));
1488 if (scopy)
1489 return scopy;
1490 }
1491 break;
1492 case RTX_COMPARE:
1493 case RTX_COMM_COMPARE:
1494 if (CONST_INT_P (XEXP (copy, 0))
1495 && GET_MODE (XEXP (copy, 1)) == VOIDmode
1496 && (GET_MODE (XEXP (orig, 0)) != VOIDmode
1497 || GET_MODE (XEXP (orig, 1)) != VOIDmode))
1498 {
1499 scopy = simplify_relational_operation (code, mode,
1500 (GET_MODE (XEXP (orig, 0))
1501 != VOIDmode)
1502 ? GET_MODE (XEXP (orig, 0))
1503 : GET_MODE (XEXP (orig, 1)),
1504 XEXP (copy, 0),
1505 XEXP (copy, 1));
1506 if (scopy)
1507 return scopy;
1508 }
1509 break;
1510 default:
1511 break;
1512 }
1513 scopy = simplify_rtx (copy);
1514 if (scopy)
1515 return scopy;
1516 return copy;
1517 }
1518
1519 /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
1520 with VALUE expressions. This way, it becomes independent of changes
1521 to registers and memory.
1522 X isn't actually modified; if modifications are needed, new rtl is
1523 allocated. However, the return value can share rtl with X. */
1524
1525 rtx
1526 cselib_subst_to_values (rtx x)
1527 {
1528 enum rtx_code code = GET_CODE (x);
1529 const char *fmt = GET_RTX_FORMAT (code);
1530 cselib_val *e;
1531 struct elt_list *l;
1532 rtx copy = x;
1533 int i;
1534
1535 switch (code)
1536 {
1537 case REG:
1538 l = REG_VALUES (REGNO (x));
1539 if (l && l->elt == NULL)
1540 l = l->next;
1541 for (; l; l = l->next)
1542 if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
1543 return l->elt->val_rtx;
1544
1545 gcc_unreachable ();
1546
1547 case MEM:
1548 e = cselib_lookup_mem (x, 0);
1549 if (! e)
1550 {
1551 /* This happens for autoincrements. Assign a value that doesn't
1552 match any other. */
1553 e = new_cselib_val (next_uid, GET_MODE (x), x);
1554 }
1555 return e->val_rtx;
1556
1557 case CONST_DOUBLE:
1558 case CONST_VECTOR:
1559 case CONST_INT:
1560 case CONST_FIXED:
1561 return x;
1562
1563 case POST_INC:
1564 case PRE_INC:
1565 case POST_DEC:
1566 case PRE_DEC:
1567 case POST_MODIFY:
1568 case PRE_MODIFY:
1569 e = new_cselib_val (next_uid, GET_MODE (x), x);
1570 return e->val_rtx;
1571
1572 default:
1573 break;
1574 }
1575
1576 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1577 {
1578 if (fmt[i] == 'e')
1579 {
1580 rtx t = cselib_subst_to_values (XEXP (x, i));
1581
1582 if (t != XEXP (x, i))
1583 {
1584 if (x == copy)
1585 copy = shallow_copy_rtx (x);
1586 XEXP (copy, i) = t;
1587 }
1588 }
1589 else if (fmt[i] == 'E')
1590 {
1591 int j;
1592
1593 for (j = 0; j < XVECLEN (x, i); j++)
1594 {
1595 rtx t = cselib_subst_to_values (XVECEXP (x, i, j));
1596
1597 if (t != XVECEXP (x, i, j))
1598 {
1599 if (XVEC (x, i) == XVEC (copy, i))
1600 {
1601 if (x == copy)
1602 copy = shallow_copy_rtx (x);
1603 XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
1604 }
1605 XVECEXP (copy, i, j) = t;
1606 }
1607 }
1608 }
1609 }
1610
1611 return copy;
1612 }
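
/* A sketch (not built; example_subst is hypothetical) of what the
   substitution yields: given (plus:SI (reg:SI 1) (const_int 8)) where
   reg 1 has a known value V, the result is
   (plus:SI (value:SI V) (const_int 8)), independent of later changes
   to reg 1.  */
#if 0
static rtx
example_subst (rtx addr)
{
  /* ADDR itself is never modified; a shallow copy is made only where
     a subexpression actually changes.  */
  return cselib_subst_to_values (addr);
}
#endif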
1613
1614 /* Look up the rtl expression X in our tables and return the value it has.
1615 If CREATE is zero, we return NULL if we don't know the value. Otherwise,
1616 we create a new one if possible, using mode MODE if X doesn't have a mode
1617 (i.e. because it's a constant). */
1618
1619 static cselib_val *
1620 cselib_lookup_1 (rtx x, enum machine_mode mode, int create)
1621 {
1622 void **slot;
1623 cselib_val *e;
1624 unsigned int hashval;
1625
1626 if (GET_MODE (x) != VOIDmode)
1627 mode = GET_MODE (x);
1628
1629 if (GET_CODE (x) == VALUE)
1630 return CSELIB_VAL_PTR (x);
1631
1632 if (REG_P (x))
1633 {
1634 struct elt_list *l;
1635 unsigned int i = REGNO (x);
1636
1637 l = REG_VALUES (i);
1638 if (l && l->elt == NULL)
1639 l = l->next;
1640 for (; l; l = l->next)
1641 if (mode == GET_MODE (l->elt->val_rtx))
1642 {
1643 promote_debug_loc (l->elt->locs);
1644 return l->elt;
1645 }
1646
1647 if (! create)
1648 return 0;
1649
1650 if (i < FIRST_PSEUDO_REGISTER)
1651 {
1652 unsigned int n = hard_regno_nregs[i][mode];
1653
1654 if (n > max_value_regs)
1655 max_value_regs = n;
1656 }
1657
1658 e = new_cselib_val (next_uid, GET_MODE (x), x);
1659 e->locs = new_elt_loc_list (e->locs, x);
1660 if (REG_VALUES (i) == 0)
1661 {
1662 /* Maintain the invariant that the first entry of
1663 REG_VALUES, if present, must be the value used to set the
1664 register, or NULL. */
1665 used_regs[n_used_regs++] = i;
1666 REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
1667 }
1668 REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
1669 slot = htab_find_slot_with_hash (cselib_hash_table, x, e->hash, INSERT);
1670 *slot = e;
1671 return e;
1672 }
1673
1674 if (MEM_P (x))
1675 return cselib_lookup_mem (x, create);
1676
1677 hashval = cselib_hash_rtx (x, create);
1678 /* Can't even create if hashing is not possible. */
1679 if (! hashval)
1680 return 0;
1681
1682 slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x),
1683 hashval, create ? INSERT : NO_INSERT);
1684 if (slot == 0)
1685 return 0;
1686
1687 e = (cselib_val *) *slot;
1688 if (e)
1689 return e;
1690
1691 e = new_cselib_val (hashval, mode, x);
1692
1693 /* We have to fill the slot before calling cselib_subst_to_values:
1694 the hash table is inconsistent until we do so, and
1695 cselib_subst_to_values will need to do lookups. */
1696 *slot = (void *) e;
1697 e->locs = new_elt_loc_list (e->locs, cselib_subst_to_values (x));
1698 return e;
1699 }
1700
1701 /* Wrapper for cselib_lookup, that indicates X is in INSN. */
1702
1703 cselib_val *
1704 cselib_lookup_from_insn (rtx x, enum machine_mode mode,
1705 int create, rtx insn)
1706 {
1707 cselib_val *ret;
1708
1709 gcc_assert (!cselib_current_insn);
1710 cselib_current_insn = insn;
1711
1712 ret = cselib_lookup (x, mode, create);
1713
1714 cselib_current_insn = NULL;
1715
1716 return ret;
1717 }
1718
1719 /* Wrapper for cselib_lookup_1, that logs the lookup result and
1720 maintains invariants related with debug insns. */
1721
1722 cselib_val *
1723 cselib_lookup (rtx x, enum machine_mode mode, int create)
1724 {
1725 cselib_val *ret = cselib_lookup_1 (x, mode, create);
1726
1727 /* ??? Should we return NULL if we're not to create an entry, the
1728 found loc is a debug loc and cselib_current_insn is not DEBUG?
1729 If so, we should also avoid converting val to non-DEBUG; it is
1730 probably easiest to set cselib_current_insn to NULL before the call
1731 above. */
1732
1733 if (dump_file && (dump_flags & TDF_DETAILS))
1734 {
1735 fputs ("cselib lookup ", dump_file);
1736 print_inline_rtx (dump_file, x, 2);
1737 fprintf (dump_file, " => %u:%u\n",
1738 ret ? ret->uid : 0,
1739 ret ? ret->hash : 0);
1740 }
1741
1742 return ret;
1743 }
1744
1745 /* Invalidate any entries in reg_values that overlap REGNO. This is called
1746 if REGNO is changing. MODE is the mode of the assignment to REGNO, which
1747 is used to determine how many hard registers are being changed. If MODE
1748 is VOIDmode, then only REGNO is being changed; this is used when
1749 invalidating call clobbered registers across a call. */
1750
1751 static void
1752 cselib_invalidate_regno (unsigned int regno, enum machine_mode mode)
1753 {
1754 unsigned int endregno;
1755 unsigned int i;
1756
1757 /* If we see pseudos after reload, something is _wrong_. */
1758 gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
1759 || reg_renumber[regno] < 0);
1760
1761 /* Determine the range of registers that must be invalidated. For
1762 pseudos, only REGNO is affected. For hard regs, we must take MODE
1763 into account, and we must also invalidate lower register numbers
1764 if they contain values that overlap REGNO. */
1765 if (regno < FIRST_PSEUDO_REGISTER)
1766 {
1767 gcc_assert (mode != VOIDmode);
1768
1769 if (regno < max_value_regs)
1770 i = 0;
1771 else
1772 i = regno - max_value_regs;
1773
1774 endregno = end_hard_regno (mode, regno);
1775 }
1776 else
1777 {
1778 i = regno;
1779 endregno = regno + 1;
1780 }
1781
1782 for (; i < endregno; i++)
1783 {
1784 struct elt_list **l = &REG_VALUES (i);
1785
1786 /* Go through all known values for this reg; if it overlaps the range
1787 we're invalidating, remove the value. */
1788 while (*l)
1789 {
1790 cselib_val *v = (*l)->elt;
1791 bool had_locs;
1792 rtx setting_insn;
1793 struct elt_loc_list **p;
1794 unsigned int this_last = i;
1795
1796 if (i < FIRST_PSEUDO_REGISTER && v != NULL)
1797 this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
1798
1799 if (this_last < regno || v == NULL
1800 || (v == cfa_base_preserved_val
1801 && i == cfa_base_preserved_regno))
1802 {
1803 l = &(*l)->next;
1804 continue;
1805 }
1806
1807 /* We have an overlap. */
1808 if (*l == REG_VALUES (i))
1809 {
1810 /* Maintain the invariant that the first entry of
1811 REG_VALUES, if present, must be the value used to set
1812 the register, or NULL. This is also nice because
1813 then we won't push the same regno onto used_regs
1814 multiple times. */
1815 (*l)->elt = NULL;
1816 l = &(*l)->next;
1817 }
1818 else
1819 unchain_one_elt_list (l);
1820
1821 had_locs = v->locs != NULL;
1822 setting_insn = v->locs ? v->locs->setting_insn : NULL;
1823
1824 /* Now, we clear the mapping from value to reg. It must exist, so
1825 this code will crash intentionally if it doesn't. */
1826 for (p = &v->locs; ; p = &(*p)->next)
1827 {
1828 rtx x = (*p)->loc;
1829
1830 if (REG_P (x) && REGNO (x) == i)
1831 {
1832 unchain_one_elt_loc_list (p);
1833 break;
1834 }
1835 }
1836
1837 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
1838 {
1839 if (setting_insn && DEBUG_INSN_P (setting_insn))
1840 n_useless_debug_values++;
1841 else
1842 n_useless_values++;
1843 }
1844 }
1845 }
1846 }
1847 \f
1848 /* Return 1 if X has a value that can vary even between two
1849 executions of the program. 0 means X can be compared reliably
1850 against certain constants or near-constants. */
1851
1852 static bool
1853 cselib_rtx_varies_p (const_rtx x ATTRIBUTE_UNUSED, bool from_alias ATTRIBUTE_UNUSED)
1854 {
1855 /* We actually don't need to verify very hard. This is because
1856 if X has actually changed, we invalidate the memory anyway,
1857 so assume that all common memory addresses are
1858 invariant. */
1859 return 0;
1860 }
1861
1862 /* Invalidate any locations in the table which are changed because of a
1863 store to MEM_RTX. If this is called because of a non-const call
1864 instruction, MEM_RTX is (mem:BLK const0_rtx). */
1865
1866 static void
1867 cselib_invalidate_mem (rtx mem_rtx)
1868 {
1869 cselib_val **vp, *v, *next;
1870 int num_mems = 0;
1871 rtx mem_addr;
1872
1873 mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
1874 mem_rtx = canon_rtx (mem_rtx);
1875
1876 vp = &first_containing_mem;
1877 for (v = *vp; v != &dummy_val; v = next)
1878 {
1879 bool has_mem = false;
1880 struct elt_loc_list **p = &v->locs;
1881 bool had_locs = v->locs != NULL;
1882 rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;
1883
1884 while (*p)
1885 {
1886 rtx x = (*p)->loc;
1887 cselib_val *addr;
1888 struct elt_list **mem_chain;
1889
1890 /* MEMs may occur in locations only at the top level; below
1891 that every MEM or REG is substituted by its VALUE. */
1892 if (!MEM_P (x))
1893 {
1894 p = &(*p)->next;
1895 continue;
1896 }
1897 if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
1898 && ! canon_true_dependence (mem_rtx, GET_MODE (mem_rtx), mem_addr,
1899 x, NULL_RTX, cselib_rtx_varies_p))
1900 {
1901 has_mem = true;
1902 num_mems++;
1903 p = &(*p)->next;
1904 continue;
1905 }
1906
1907 /* This one overlaps. */
1908 /* We must have a mapping from this MEM's address to the
1909 value (E). Remove that, too. */
1910 addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0);
1911 mem_chain = &addr->addr_list;
1912 for (;;)
1913 {
1914 if ((*mem_chain)->elt == v)
1915 {
1916 unchain_one_elt_list (mem_chain);
1917 break;
1918 }
1919
1920 mem_chain = &(*mem_chain)->next;
1921 }
1922
1923 unchain_one_elt_loc_list (p);
1924 }
1925
1926 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
1927 {
1928 if (setting_insn && DEBUG_INSN_P (setting_insn))
1929 n_useless_debug_values++;
1930 else
1931 n_useless_values++;
1932 }
1933
1934 next = v->next_containing_mem;
1935 if (has_mem)
1936 {
1937 *vp = v;
1938 vp = &(*vp)->next_containing_mem;
1939 }
1940 else
1941 v->next_containing_mem = NULL;
1942 }
1943 *vp = &dummy_val;
1944 }
1945
1946 /* Invalidate DEST, which is being assigned to or clobbered. */
1947
1948 void
1949 cselib_invalidate_rtx (rtx dest)
1950 {
1951 while (GET_CODE (dest) == SUBREG
1952 || GET_CODE (dest) == ZERO_EXTRACT
1953 || GET_CODE (dest) == STRICT_LOW_PART)
1954 dest = XEXP (dest, 0);
1955
1956 if (REG_P (dest))
1957 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
1958 else if (MEM_P (dest))
1959 cselib_invalidate_mem (dest);
1960
1961 /* Some machines don't define AUTO_INC_DEC, but they still use push
1962 instructions. We need to catch that case here in order to
1963 invalidate the stack pointer correctly. Note that invalidating
1964 the stack pointer is different from invalidating DEST. */
1965 if (push_operand (dest, GET_MODE (dest)))
1966 cselib_invalidate_rtx (stack_pointer_rtx);
1967 }

/* A wrapper for cselib_invalidate_rtx to be called via note_stores.  */

static void
cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED,
                                   void *data ATTRIBUTE_UNUSED)
{
  cselib_invalidate_rtx (dest);
}

/* Record the result of a SET instruction.  DEST is being set; the source
   contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
   describes its address.  */

static void
cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
{
  int dreg = REG_P (dest) ? (int) REGNO (dest) : -1;

  if (src_elt == 0 || side_effects_p (dest))
    return;

  if (dreg >= 0)
    {
      if (dreg < FIRST_PSEUDO_REGISTER)
        {
          unsigned int n = hard_regno_nregs[dreg][GET_MODE (dest)];

          if (n > max_value_regs)
            max_value_regs = n;
        }

      if (REG_VALUES (dreg) == 0)
        {
          used_regs[n_used_regs++] = dreg;
          REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
        }
      else
        {
          /* The register should have been invalidated.  */
          gcc_assert (REG_VALUES (dreg)->elt == 0);
          REG_VALUES (dreg)->elt = src_elt;
        }

      if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
        n_useless_values--;
      src_elt->locs = new_elt_loc_list (src_elt->locs, dest);
    }
  else if (MEM_P (dest) && dest_addr_elt != 0
           && cselib_record_memory)
    {
      if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
        n_useless_values--;
      add_mem_for_addr (dest_addr_elt, src_elt, dest);
    }
}
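
/* Illustrative example (assumed, not from the original sources): for

     (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))

   SRC_ELT is the VALUE computed for the PLUS; the code above pushes
   SRC_ELT onto REG_VALUES (100) and adds (reg:SI 100) to SRC_ELT's
   location list, so later lookups of either expression meet in the
   same VALUE.  */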

/* There is no good way to determine how many elements there can be
   in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)

/* Record the effects of any sets in INSN.  */
static void
cselib_record_sets (rtx insn)
{
  int n_sets = 0;
  int i;
  struct cselib_set sets[MAX_SETS];
  rtx body = PATTERN (insn);
  rtx cond = 0;

  if (GET_CODE (body) == COND_EXEC)
    {
      cond = COND_EXEC_TEST (body);
      body = COND_EXEC_CODE (body);
    }

  /* Find all sets.  */
  if (GET_CODE (body) == SET)
    {
      sets[0].src = SET_SRC (body);
      sets[0].dest = SET_DEST (body);
      n_sets = 1;
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      /* Look through the PARALLEL and record the values being
         set, if possible.  Also handle any CLOBBERs.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
        {
          rtx x = XVECEXP (body, 0, i);

          if (GET_CODE (x) == SET)
            {
              sets[n_sets].src = SET_SRC (x);
              sets[n_sets].dest = SET_DEST (x);
              n_sets++;
            }
        }
    }
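
  /* For illustration (an assumed example): a PARALLEL such as

       (parallel [(set (reg:SI 1) (reg:SI 2))
                  (set (reg:SI 3) (reg:SI 4))
                  (clobber (reg:CC flags))])

     yields n_sets == 2 here; the CLOBBER carries no value and is
     handled by the note_stores invalidation below.  */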

  if (n_sets == 1
      && MEM_P (sets[0].src)
      && !cselib_record_memory
      && MEM_READONLY_P (sets[0].src))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
        sets[0].src = XEXP (note, 0);
    }
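
  /* E.g. (illustrative only, not from the original sources): for a
     constant-pool load such as

       (set (reg:SI 60) (mem/u:SI (symbol_ref:SI "*.LC0")))

     with a (REG_EQUAL (const_int 42)) note, we record the constant from
     the note instead, since read-only MEMs are not being recorded.  */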

  /* Look up the values that are read.  Do this before invalidating the
     locations that are written.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;

      /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
         the low part after invalidating any knowledge about larger modes.  */
      if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
        sets[i].dest = dest = XEXP (dest, 0);

      /* We don't know how to record anything but REG or MEM.  */
      if (REG_P (dest)
          || (MEM_P (dest) && cselib_record_memory))
        {
          rtx src = sets[i].src;
          if (cond)
            src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
          sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1);
          if (MEM_P (dest))
            {
              enum machine_mode address_mode
                = targetm.addr_space.address_mode (MEM_ADDR_SPACE (dest));

              sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
                                                     address_mode, 1);
            }
          else
            sets[i].dest_addr_elt = 0;
        }
    }

  if (cselib_record_sets_hook)
    cselib_record_sets_hook (insn, sets, n_sets);

  /* Invalidate all locations written by this insn.  Note that the elts we
     looked up in the previous loop aren't affected, just some of their
     locations may go away.  */
  note_stores (body, cselib_invalidate_rtx_note_stores, NULL);

  /* If this is an asm, look for duplicate sets.  This can happen when the
     user uses the same value as an output multiple times.  This is valid
     if the outputs are not actually used thereafter.  Treat this case as
     if the value isn't actually set.  We do this by smashing the
     destination to pc_rtx, so that we won't record the value later.  */
  if (n_sets >= 2 && asm_noperands (body) >= 0)
    {
      for (i = 0; i < n_sets; i++)
        {
          rtx dest = sets[i].dest;
          if (REG_P (dest) || MEM_P (dest))
            {
              int j;
              for (j = i + 1; j < n_sets; j++)
                if (rtx_equal_p (dest, sets[j].dest))
                  {
                    sets[i].dest = pc_rtx;
                    sets[j].dest = pc_rtx;
                  }
            }
        }
    }
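
  /* For instance (illustrative): an asm like

       asm ("" : "=r" (x), "=r" (x));

     sets the same pseudo twice; both destinations are smashed to pc_rtx
     above so that no bogus equivalence is recorded for either set.  */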

  /* Now enter the equivalences in our tables.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      if (REG_P (dest)
          || (MEM_P (dest) && cselib_record_memory))
        cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
    }
}

/* Record the effects of INSN.  */

void
cselib_process_insn (rtx insn)
{
  int i;
  rtx x;

  cselib_current_insn = insn;

  /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp.  */
  if (LABEL_P (insn)
      || (CALL_P (insn)
          && find_reg_note (insn, REG_SETJMP, NULL))
      || (NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
          && MEM_VOLATILE_P (PATTERN (insn))))
    {
      cselib_reset_table (next_uid);
      cselib_current_insn = NULL_RTX;
      return;
    }

  if (! INSN_P (insn))
    {
      cselib_current_insn = NULL_RTX;
      return;
    }

  /* If this is a call instruction, forget anything stored in a
     call clobbered register, or, if this is not a const call, in
     memory.  */
  if (CALL_P (insn))
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (call_used_regs[i]
            || (REG_VALUES (i) && REG_VALUES (i)->elt
                && HARD_REGNO_CALL_PART_CLOBBERED (i,
                      GET_MODE (REG_VALUES (i)->elt->val_rtx))))
          cselib_invalidate_regno (i, reg_raw_mode[i]);

      /* Since it is not clear how cselib is going to be used, be
         conservative here and treat looping pure or const functions
         as if they were regular functions.  */
      if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
          || !(RTL_CONST_OR_PURE_CALL_P (insn)))
        cselib_invalidate_mem (callmem);
    }
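
  /* Illustrative note (an assumed x86 example, not from the original
     sources): a non-const, non-pure call would invalidate the call-used
     registers such as eax, ecx and edx here, and wipe all recorded
     memory locations via callmem, which conflicts with every MEM.  */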

  cselib_record_sets (insn);

#ifdef AUTO_INC_DEC
  /* Clobber any registers which appear in REG_INC notes.  We
     could keep track of the changes to their values, but it is
     unlikely to help.  */
  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
    if (REG_NOTE_KIND (x) == REG_INC)
      cselib_invalidate_rtx (XEXP (x, 0));
#endif

  /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
     after we have processed the insn.  */
  if (CALL_P (insn))
    for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
        cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));

  cselib_current_insn = NULL_RTX;

  if (n_useless_values > MAX_USELESS_VALUES
      /* remove_useless_values is linear in the hash table size.  Avoid
         quadratic behavior for very large hash tables with very few
         useless elements.  */
      && ((unsigned int) n_useless_values
          > (cselib_hash_table->n_elements
             - cselib_hash_table->n_deleted
             - n_debug_values) / 4))
    remove_useless_values ();
}

/* Initialize cselib for one pass.  The caller must also call
   init_alias_analysis.  */

void
cselib_init (int record_what)
{
  elt_list_pool = create_alloc_pool ("elt_list",
                                     sizeof (struct elt_list), 10);
  elt_loc_list_pool = create_alloc_pool ("elt_loc_list",
                                         sizeof (struct elt_loc_list), 10);
  cselib_val_pool = create_alloc_pool ("cselib_val_list",
                                       sizeof (cselib_val), 10);
  value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
  cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
  cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
     see canon_true_dependence.  This is only created once.  */
  if (! callmem)
    callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));

  cselib_nregs = max_reg_num ();

  /* We preserve reg_values across passes to avoid having to reallocate
     and clear the whole table each time.  Reallocate it, however, if it
     happens to be too small or far too large.  */
  if (!reg_values || reg_values_size < cselib_nregs
      || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
    {
      if (reg_values)
        free (reg_values);
      /* Leave some space for insns emitted later, so we don't end up
         reallocating in between passes.  */
      reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
      reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
    }
  used_regs = XNEWVEC (unsigned int, cselib_nregs);
  n_used_regs = 0;
  cselib_hash_table = htab_create (31, get_value_hash,
                                   entry_and_rtx_equal_p, NULL);
  next_uid = 1;
}

/* Called when the current user is done with cselib.  */

void
cselib_finish (void)
{
  cselib_discard_hook = NULL;
  cselib_preserve_constants = false;
  cfa_base_preserved_val = NULL;
  cfa_base_preserved_regno = INVALID_REGNUM;
  free_alloc_pool (elt_list_pool);
  free_alloc_pool (elt_loc_list_pool);
  free_alloc_pool (cselib_val_pool);
  free_alloc_pool (value_pool);
  cselib_clear_table ();
  htab_delete (cselib_hash_table);
  free (used_regs);
  used_regs = 0;
  cselib_hash_table = 0;
  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;
  next_uid = 0;
}
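
/* A minimal sketch of a cselib client (for illustration only; real
   users such as var-tracking.c and sched-deps.c differ in detail):

     cselib_init (CSELIB_RECORD_MEMORY);
     init_alias_analysis ();
     FOR_EACH_BB (bb)
       FOR_BB_INSNS (bb, insn)
         cselib_process_insn (insn);
     end_alias_analysis ();
     cselib_finish ();

   cselib_process_insn copes with labels and non-insns itself, so the
   loop need not filter them.  */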

/* Dump the cselib_val *X to FILE *INFO.  */

static int
dump_cselib_val (void **x, void *info)
{
  cselib_val *v = (cselib_val *) *x;
  FILE *out = (FILE *) info;
  bool need_lf = true;

  print_inline_rtx (out, v->val_rtx, 0);

  if (v->locs)
    {
      struct elt_loc_list *l = v->locs;
      if (need_lf)
        {
          fputc ('\n', out);
          need_lf = false;
        }
      fputs (" locs:", out);
      do
        {
          fprintf (out, "\n  from insn %i ",
                   INSN_UID (l->setting_insn));
          print_inline_rtx (out, l->loc, 4);
        }
      while ((l = l->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no locs", out);
      need_lf = true;
    }

  if (v->addr_list)
    {
      struct elt_list *e = v->addr_list;
      if (need_lf)
        {
          fputc ('\n', out);
          need_lf = false;
        }
      fputs (" addr list:", out);
      do
        {
          fputs ("\n  ", out);
          print_inline_rtx (out, e->elt->val_rtx, 2);
        }
      while ((e = e->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no addrs", out);
      need_lf = true;
    }

  if (v->next_containing_mem == &dummy_val)
    fputs (" last mem\n", out);
  else if (v->next_containing_mem)
    {
      fputs (" next mem ", out);
      print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  else if (need_lf)
    fputc ('\n', out);

  return 1;
}

/* Dump to OUT everything in the CSELIB table.  */

void
dump_cselib_table (FILE *out)
{
  fprintf (out, "cselib hash table:\n");
  htab_traverse (cselib_hash_table, dump_cselib_val, out);
  if (first_containing_mem != &dummy_val)
    {
      fputs ("first mem ", out);
      print_inline_rtx (out, first_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  fprintf (out, "next uid %i\n", next_uid);
}

#include "gt-cselib.h"