1 /* Common subexpression elimination library for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "flags.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "function.h"
34 #include "expr.h"
35 #include "toplev.h"
36 #include "output.h"
37 #include "ggc.h"
38 #include "obstack.h"
39 #include "hashtab.h"
40 #include "cselib.h"
41
42 static int entry_and_rtx_equal_p PARAMS ((const void *, const void *));
43 static unsigned int get_value_hash PARAMS ((const void *));
44 static struct elt_list *new_elt_list PARAMS ((struct elt_list *,
45 cselib_val *));
46 static struct elt_loc_list *new_elt_loc_list PARAMS ((struct elt_loc_list *,
47 rtx));
48 static void unchain_one_value PARAMS ((cselib_val *));
49 static void unchain_one_elt_list PARAMS ((struct elt_list **));
50 static void unchain_one_elt_loc_list PARAMS ((struct elt_loc_list **));
51 static void clear_table PARAMS ((int));
52 static int discard_useless_locs PARAMS ((void **, void *));
53 static int discard_useless_values PARAMS ((void **, void *));
54 static void remove_useless_values PARAMS ((void));
55 static rtx wrap_constant PARAMS ((enum machine_mode, rtx));
56 static unsigned int hash_rtx PARAMS ((rtx, enum machine_mode, int));
57 static cselib_val *new_cselib_val PARAMS ((unsigned int,
58 enum machine_mode));
59 static void add_mem_for_addr PARAMS ((cselib_val *, cselib_val *,
60 rtx));
61 static cselib_val *cselib_lookup_mem PARAMS ((rtx, int));
62 static void cselib_invalidate_regno PARAMS ((unsigned int,
63 enum machine_mode));
64 static int cselib_mem_conflict_p PARAMS ((rtx, rtx));
65 static int cselib_invalidate_mem_1 PARAMS ((void **, void *));
66 static void cselib_invalidate_mem PARAMS ((rtx));
67 static void cselib_invalidate_rtx PARAMS ((rtx, rtx, void *));
68 static void cselib_record_set PARAMS ((rtx, cselib_val *,
69 cselib_val *));
70 static void cselib_record_sets PARAMS ((rtx));
71
72 /* There are three ways in which cselib can look up an rtx:
73 - for a REG, the reg_values table (which is indexed by regno) is used
74 - for a MEM, we recursively look up its address and then follow the
75 addr_list of that value
76 - for everything else, we compute a hash value and go through the hash
77 table. Since different rtx's can still have the same hash value,
78 this involves walking the table entries for a given value and comparing
79 the locations of the entries with the rtx we are looking up. */
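/* As an illustrative sketch only (op1 and op2 stand for arbitrary rtxen a
   client might be examining), a pass that wants to know whether two operands
   currently hold the same value would do something like:

     cselib_val *v1 = cselib_lookup (op1, GET_MODE (op1), 0);
     cselib_val *v2 = cselib_lookup (op2, GET_MODE (op2), 0);
     if (v1 != 0 && v1 == v2)
       ... op1 and op2 are known to be equivalent ...

   or simply call rtx_equal_for_cselib_p (op1, op2), which performs the
   lookups itself.  */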
80
81 /* A table that enables us to look up elts by their value. */
82 static htab_t hash_table;
83
84 /* This is a global so we don't have to pass this through every function.
85 It is used in new_elt_loc_list to set SETTING_INSN. */
86 static rtx cselib_current_insn;
87
88 /* Every new unknown value gets a unique number. */
89 static unsigned int next_unknown_value;
90
91 /* The number of registers we had when the varrays were last resized. */
92 static unsigned int cselib_nregs;
93
94 /* Count values without known locations. Whenever this grows too big, we
95 remove these useless values from the table. */
96 static int n_useless_values;
97
98 /* Number of useless values before we remove them from the hash table. */
99 #define MAX_USELESS_VALUES 32
100
101 /* This table maps from register number to values. It does not contain
102 pointers to cselib_val structures, but rather elt_lists. The purpose is
103 to be able to refer to the same register in different modes. */
104 static varray_type reg_values;
105 #define REG_VALUES(I) VARRAY_ELT_LIST (reg_values, (I))
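/* Purely as an illustration: if pseudo 100 has been looked up both as
   (reg:SI 100) and as (reg:HI 100), REG_VALUES (100) is a two-element
   elt_list whose entries point to an SImode and an HImode cselib_val;
   cselib_lookup walks this list and picks the element whose mode matches
   the mode being asked for.  */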
106
107 /* The set of indices I for which REG_VALUES(I) != 0 is saved here. It is
108 used in clear_table() to empty the table quickly. */
109 static varray_type used_regs;
110
111 /* We pass this to cselib_invalidate_mem to invalidate all of
112 memory for a non-const call instruction. */
113 static rtx callmem;
114
115 /* Memory for our structures is allocated from this obstack. */
116 static struct obstack cselib_obstack;
117
118 /* Used to quickly free all memory. */
119 static char *cselib_startobj;
120
121 /* Caches for unused structures. */
122 static cselib_val *empty_vals;
123 static struct elt_list *empty_elt_lists;
124 static struct elt_loc_list *empty_elt_loc_lists;
125
126 /* Set by discard_useless_locs if it deleted the last location of any
127 value. */
128 static int values_became_useless;
129 \f
130
131 /* Allocate a struct elt_list and fill in its two elements with the
132 arguments. */
133
134 static struct elt_list *
135 new_elt_list (next, elt)
136 struct elt_list *next;
137 cselib_val *elt;
138 {
139 struct elt_list *el = empty_elt_lists;
140
141 if (el)
142 empty_elt_lists = el->next;
143 else
144 el = (struct elt_list *) obstack_alloc (&cselib_obstack,
145 sizeof (struct elt_list));
146 el->next = next;
147 el->elt = elt;
148 return el;
149 }
150
151 /* Allocate a struct elt_loc_list and fill in its two elements with the
152 arguments. */
153
154 static struct elt_loc_list *
155 new_elt_loc_list (next, loc)
156 struct elt_loc_list *next;
157 rtx loc;
158 {
159 struct elt_loc_list *el = empty_elt_loc_lists;
160
161 if (el)
162 empty_elt_loc_lists = el->next;
163 else
164 el = (struct elt_loc_list *) obstack_alloc (&cselib_obstack,
165 sizeof (struct elt_loc_list));
166 el->next = next;
167 el->loc = loc;
168 el->setting_insn = cselib_current_insn;
169 return el;
170 }
171
172 /* The elt_list at *PL is no longer needed. Unchain it and free its
173 storage. */
174
175 static void
176 unchain_one_elt_list (pl)
177 struct elt_list **pl;
178 {
179 struct elt_list *l = *pl;
180
181 *pl = l->next;
182 l->next = empty_elt_lists;
183 empty_elt_lists = l;
184 }
185
186 /* Likewise for elt_loc_lists. */
187
188 static void
189 unchain_one_elt_loc_list (pl)
190 struct elt_loc_list **pl;
191 {
192 struct elt_loc_list *l = *pl;
193
194 *pl = l->next;
195 l->next = empty_elt_loc_lists;
196 empty_elt_loc_lists = l;
197 }
198
199 /* Likewise for cselib_vals. This also frees the addr_list associated with
200 V. */
201
202 static void
203 unchain_one_value (v)
204 cselib_val *v;
205 {
206 while (v->addr_list)
207 unchain_one_elt_list (&v->addr_list);
208
209 v->u.next_free = empty_vals;
210 empty_vals = v;
211 }
212
213 /* Remove all entries from the hash table and the register value lists. Also
214 used during initialization. If CLEAR_ALL isn't set, only clear the register
215 entries which are known to have been used. */
216
217 static void
218 clear_table (clear_all)
219 int clear_all;
220 {
221 unsigned int i;
222
223 if (clear_all)
224 for (i = 0; i < cselib_nregs; i++)
225 REG_VALUES (i) = 0;
226 else
227 for (i = 0; i < VARRAY_ACTIVE_SIZE (used_regs); i++)
228 REG_VALUES (VARRAY_UINT (used_regs, i)) = 0;
229
230 VARRAY_POP_ALL (used_regs);
231
232 htab_empty (hash_table);
233 obstack_free (&cselib_obstack, cselib_startobj);
234
235 empty_vals = 0;
236 empty_elt_lists = 0;
237 empty_elt_loc_lists = 0;
238 n_useless_values = 0;
239
240 next_unknown_value = 0;
241 }
242
243 /* The equality test for our hash table. The first argument ENTRY is a table
244 element (i.e. a cselib_val), while the second arg X is an rtx. We know
245 that all callers of htab_find_slot_with_hash will wrap CONST_INTs (and
246 VOIDmode CONST_DOUBLEs) into a CONST of an appropriate mode. */
247
248 static int
249 entry_and_rtx_equal_p (entry, x_arg)
250 const void *entry, *x_arg;
251 {
252 struct elt_loc_list *l;
253 const cselib_val *v = (const cselib_val *) entry;
254 rtx x = (rtx) x_arg;
255 enum machine_mode mode = GET_MODE (x);
256
257 if (GET_CODE (x) == CONST_INT
258 || (mode == VOIDmode && GET_CODE (x) == CONST_DOUBLE))
259 abort ();
260 if (mode != GET_MODE (v->u.val_rtx))
261 return 0;
262
263 /* Unwrap X if necessary. */
264 if (GET_CODE (x) == CONST
265 && (GET_CODE (XEXP (x, 0)) == CONST_INT
266 || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
267 x = XEXP (x, 0);
268
269 /* We don't guarantee that distinct rtx's have different hash values,
270 so we need to do a comparison. */
271 for (l = v->locs; l; l = l->next)
272 if (rtx_equal_for_cselib_p (l->loc, x))
273 return 1;
274
275 return 0;
276 }
277
278 /* The hash function for our hash table. The value is always computed with
279 hash_rtx when adding an element; this function just extracts the hash
280 value from a cselib_val structure. */
281
282 static unsigned int
283 get_value_hash (entry)
284 const void *entry;
285 {
286 const cselib_val *v = (const cselib_val *) entry;
287 return v->value;
288 }
289
290 /* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
291 only return true for VALUEs whose cselib_val has no locations left
292 (i.e. its locs field is zero), which implies the cselib_val will be
293 removed. */
294
295 int
296 references_value_p (x, only_useless)
297 rtx x;
298 int only_useless;
299 {
300 enum rtx_code code = GET_CODE (x);
301 const char *fmt = GET_RTX_FORMAT (code);
302 int i, j;
303
304 if (GET_CODE (x) == VALUE
305 && (! only_useless || CSELIB_VAL_PTR (x)->locs == 0))
306 return 1;
307
308 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
309 {
310 if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
311 return 1;
312 else if (fmt[i] == 'E')
313 for (j = 0; j < XVECLEN (x, i); j++)
314 if (references_value_p (XVECEXP (x, i, j), only_useless))
315 return 1;
316 }
317
318 return 0;
319 }
320
321 /* For the value found in *X, delete any of its locations that reference
322 useless values (i.e. values without any location). Called through
323 htab_traverse. */
324
325 static int
326 discard_useless_locs (x, info)
327 void **x;
328 void *info ATTRIBUTE_UNUSED;
329 {
330 cselib_val *v = (cselib_val *)*x;
331 struct elt_loc_list **p = &v->locs;
332 int had_locs = v->locs != 0;
333
334 while (*p)
335 {
336 if (references_value_p ((*p)->loc, 1))
337 unchain_one_elt_loc_list (p);
338 else
339 p = &(*p)->next;
340 }
341
342 if (had_locs && v->locs == 0)
343 {
344 n_useless_values++;
345 values_became_useless = 1;
346 }
347 return 1;
348 }
349
350 /* If X is a value with no locations, remove it from the hashtable. */
351
352 static int
353 discard_useless_values (x, info)
354 void **x;
355 void *info ATTRIBUTE_UNUSED;
356 {
357 cselib_val *v = (cselib_val *)*x;
358
359 if (v->locs == 0)
360 {
361 htab_clear_slot (hash_table, x);
362 unchain_one_value (v);
363 n_useless_values--;
364 }
365
366 return 1;
367 }
368
369 /* Clean out useless values (i.e. those which no longer have locations
370 associated with them) from the hash table. */
371
372 static void
373 remove_useless_values ()
374 {
375 /* First pass: eliminate locations that reference useless values. That in
376 turn can make more values useless. */
377 do
378 {
379 values_became_useless = 0;
380 htab_traverse (hash_table, discard_useless_locs, 0);
381 }
382 while (values_became_useless);
383
384 /* Second pass: actually remove the values. */
385 htab_traverse (hash_table, discard_useless_values, 0);
386
387 if (n_useless_values != 0)
388 abort ();
389 }
390
391 /* Return nonzero if we can prove that X and Y contain the same value, taking
392 our gathered information into account. */
393
394 int
395 rtx_equal_for_cselib_p (x, y)
396 rtx x, y;
397 {
398 enum rtx_code code;
399 const char *fmt;
400 int i;
401
402 if (GET_CODE (x) == REG || GET_CODE (x) == MEM)
403 {
404 cselib_val *e = cselib_lookup (x, GET_MODE (x), 0);
405
406 if (e)
407 x = e->u.val_rtx;
408 }
409
410 if (GET_CODE (y) == REG || GET_CODE (y) == MEM)
411 {
412 cselib_val *e = cselib_lookup (y, GET_MODE (y), 0);
413
414 if (e)
415 y = e->u.val_rtx;
416 }
417
418 if (x == y)
419 return 1;
420
421 if (GET_CODE (x) == VALUE && GET_CODE (y) == VALUE)
422 return CSELIB_VAL_PTR (x) == CSELIB_VAL_PTR (y);
423
424 if (GET_CODE (x) == VALUE)
425 {
426 cselib_val *e = CSELIB_VAL_PTR (x);
427 struct elt_loc_list *l;
428
429 for (l = e->locs; l; l = l->next)
430 {
431 rtx t = l->loc;
432
433 /* Avoid infinite recursion. */
434 if (GET_CODE (t) == REG || GET_CODE (t) == MEM)
435 continue;
436 else if (rtx_equal_for_cselib_p (t, y))
437 return 1;
438 }
439
440 return 0;
441 }
442
443 if (GET_CODE (y) == VALUE)
444 {
445 cselib_val *e = CSELIB_VAL_PTR (y);
446 struct elt_loc_list *l;
447
448 for (l = e->locs; l; l = l->next)
449 {
450 rtx t = l->loc;
451
452 if (GET_CODE (t) == REG || GET_CODE (t) == MEM)
453 continue;
454 else if (rtx_equal_for_cselib_p (x, t))
455 return 1;
456 }
457
458 return 0;
459 }
460
461 if (GET_CODE (x) != GET_CODE (y) || GET_MODE (x) != GET_MODE (y))
462 return 0;
463
464 /* This won't be handled correctly by the code below. */
465 if (GET_CODE (x) == LABEL_REF)
466 return XEXP (x, 0) == XEXP (y, 0);
467
468 code = GET_CODE (x);
469 fmt = GET_RTX_FORMAT (code);
470
471 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
472 {
473 int j;
474
475 switch (fmt[i])
476 {
477 case 'w':
478 if (XWINT (x, i) != XWINT (y, i))
479 return 0;
480 break;
481
482 case 'n':
483 case 'i':
484 if (XINT (x, i) != XINT (y, i))
485 return 0;
486 break;
487
488 case 'V':
489 case 'E':
490 /* Two vectors must have the same length. */
491 if (XVECLEN (x, i) != XVECLEN (y, i))
492 return 0;
493
494 /* And the corresponding elements must match. */
495 for (j = 0; j < XVECLEN (x, i); j++)
496 if (! rtx_equal_for_cselib_p (XVECEXP (x, i, j),
497 XVECEXP (y, i, j)))
498 return 0;
499 break;
500
501 case 'e':
502 if (! rtx_equal_for_cselib_p (XEXP (x, i), XEXP (y, i)))
503 return 0;
504 break;
505
506 case 'S':
507 case 's':
508 if (strcmp (XSTR (x, i), XSTR (y, i)))
509 return 0;
510 break;
511
512 case 'u':
513 /* These are just backpointers, so they don't matter. */
514 break;
515
516 case '0':
517 case 't':
518 break;
519
520 /* It is believed that rtx's at this level will never
521 contain anything but integers and other rtx's,
522 except for within LABEL_REFs and SYMBOL_REFs. */
523 default:
524 abort ();
525 }
526 }
527 return 1;
528 }
529
530 /* We need to pass down the mode of constants through the hash table
531 functions. For that purpose, wrap them in a CONST of the appropriate
532 mode. */
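/* For example (illustrative only), wrap_constant (SImode, GEN_INT (4))
   yields (const:SI (const_int 4)); anything that already carries a mode of
   its own is returned unchanged.  */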
533 static rtx
534 wrap_constant (mode, x)
535 enum machine_mode mode;
536 rtx x;
537 {
538 if (GET_CODE (x) != CONST_INT
539 && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
540 return x;
541 if (mode == VOIDmode)
542 abort ();
543 return gen_rtx_CONST (mode, x);
544 }
545
546 /* Hash an rtx. Return 0 if we couldn't hash the rtx.
547 For registers and memory locations, we look up their cselib_val structure
548 and return its VALUE element.
549 Possible reasons for returning 0 are: the object is volatile, or we couldn't
550 find a register or memory location in the table and CREATE is zero. If
551 CREATE is nonzero, table elts are created for regs and mem.
552 MODE is used in hashing for CONST_INTs only;
553 otherwise the mode of X is used. */
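/* (Illustrative note: a CONST_INT carries no mode of its own, so the same
   (const_int 4) looked up as an SImode quantity and as a DImode quantity is
   kept distinct by folding MODE into the hash here and by having callers
   wrap such constants with wrap_constant before consulting the hash
   table.)  */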
554
555 static unsigned int
556 hash_rtx (x, mode, create)
557 rtx x;
558 enum machine_mode mode;
559 int create;
560 {
561 cselib_val *e;
562 int i, j;
563 enum rtx_code code;
564 const char *fmt;
565 unsigned int hash = 0;
566
567 code = GET_CODE (x);
568 hash += (unsigned) code + (unsigned) GET_MODE (x);
569
570 switch (code)
571 {
572 case MEM:
573 case REG:
574 e = cselib_lookup (x, GET_MODE (x), create);
575 if (! e)
576 return 0;
577
578 return e->value;
579
580 case CONST_INT:
581 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + INTVAL (x);
582 return hash ? hash : (unsigned int) CONST_INT;
583
584 case CONST_DOUBLE:
585 /* This is like the general case, except that it only counts
586 the integers representing the constant. */
587 hash += (unsigned) code + (unsigned) GET_MODE (x);
588 if (GET_MODE (x) != VOIDmode)
589 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
590 hash += XWINT (x, i);
591 else
592 hash += ((unsigned) CONST_DOUBLE_LOW (x)
593 + (unsigned) CONST_DOUBLE_HIGH (x));
594 return hash ? hash : (unsigned int) CONST_DOUBLE;
595
596 /* Assume there is only one rtx object for any given label. */
597 case LABEL_REF:
598 hash
599 += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
600 return hash ? hash : (unsigned int) LABEL_REF;
601
602 case SYMBOL_REF:
603 hash
604 += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
605 return hash ? hash : (unsigned int) SYMBOL_REF;
606
607 case PRE_DEC:
608 case PRE_INC:
609 case POST_DEC:
610 case POST_INC:
611 case POST_MODIFY:
612 case PRE_MODIFY:
613 case PC:
614 case CC0:
615 case CALL:
616 case UNSPEC_VOLATILE:
617 return 0;
618
619 case ASM_OPERANDS:
620 if (MEM_VOLATILE_P (x))
621 return 0;
622
623 break;
624
625 default:
626 break;
627 }
628
629 i = GET_RTX_LENGTH (code) - 1;
630 fmt = GET_RTX_FORMAT (code);
631 for (; i >= 0; i--)
632 {
633 if (fmt[i] == 'e')
634 {
635 rtx tem = XEXP (x, i);
636 unsigned int tem_hash = hash_rtx (tem, 0, create);
637
638 if (tem_hash == 0)
639 return 0;
640
641 hash += tem_hash;
642 }
643 else if (fmt[i] == 'E')
644 for (j = 0; j < XVECLEN (x, i); j++)
645 {
646 unsigned int tem_hash = hash_rtx (XVECEXP (x, i, j), 0, create);
647
648 if (tem_hash == 0)
649 return 0;
650
651 hash += tem_hash;
652 }
653 else if (fmt[i] == 's')
654 {
655 const unsigned char *p = (const unsigned char *) XSTR (x, i);
656
657 if (p)
658 while (*p)
659 hash += *p++;
660 }
661 else if (fmt[i] == 'i')
662 hash += XINT (x, i);
663 else if (fmt[i] == '0' || fmt[i] == 't')
664 /* unused */;
665 else
666 abort ();
667 }
668
669 return hash ? hash : 1 + (unsigned int) GET_CODE (x);
670 }
671
672 /* Create a new value structure for VALUE and initialize it. The mode of the
673 value is MODE. */
674
675 static cselib_val *
676 new_cselib_val (value, mode)
677 unsigned int value;
678 enum machine_mode mode;
679 {
680 cselib_val *e = empty_vals;
681
682 if (e)
683 empty_vals = e->u.next_free;
684 else
685 e = (cselib_val *) obstack_alloc (&cselib_obstack, sizeof (cselib_val));
686
687 if (value == 0)
688 abort ();
689
690 e->value = value;
691 e->u.val_rtx = gen_rtx_VALUE (mode);
692 CSELIB_VAL_PTR (e->u.val_rtx) = e;
693 e->addr_list = 0;
694 e->locs = 0;
695 return e;
696 }
697
698 /* ADDR_ELT is a value that is used as an address. MEM_ELT is the value that
699 contains the data at this address. X is a MEM that represents the
700 value. Update the two value structures to represent this situation. */
701
702 static void
703 add_mem_for_addr (addr_elt, mem_elt, x)
704 cselib_val *addr_elt, *mem_elt;
705 rtx x;
706 {
707 struct elt_loc_list *l;
708
709 /* Avoid duplicates. */
710 for (l = mem_elt->locs; l; l = l->next)
711 if (GET_CODE (l->loc) == MEM
712 && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
713 return;
714
715 addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
716 mem_elt->locs
717 = new_elt_loc_list (mem_elt->locs,
718 replace_equiv_address_nv (x, addr_elt->u.val_rtx));
719 }
720
721 /* Subroutine of cselib_lookup. Return a value for X, which is a MEM rtx.
722 If CREATE, make a new one if we haven't seen it before. */
723
724 static cselib_val *
725 cselib_lookup_mem (x, create)
726 rtx x;
727 int create;
728 {
729 enum machine_mode mode = GET_MODE (x);
730 void **slot;
731 cselib_val *addr;
732 cselib_val *mem_elt;
733 struct elt_list *l;
734
735 if (MEM_VOLATILE_P (x) || mode == BLKmode
736 || (FLOAT_MODE_P (mode) && flag_float_store))
737 return 0;
738
739 /* Look up the value for the address. */
740 addr = cselib_lookup (XEXP (x, 0), mode, create);
741 if (! addr)
742 return 0;
743
744 /* Find a value that describes a value of our mode at that address. */
745 for (l = addr->addr_list; l; l = l->next)
746 if (GET_MODE (l->elt->u.val_rtx) == mode)
747 return l->elt;
748
749 if (! create)
750 return 0;
751
752 mem_elt = new_cselib_val (++next_unknown_value, mode);
753 add_mem_for_addr (addr, mem_elt, x);
754 slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x),
755 mem_elt->value, INSERT);
756 *slot = mem_elt;
757 return mem_elt;
758 }
759
760 /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
761 with VALUE expressions. This way, the result becomes independent of changes
762 to registers and memory.
763 X isn't actually modified; if modifications are needed, new rtl is
764 allocated. However, the return value can share rtl with X. */
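/* As a rough illustration: given (plus:SI (reg:SI 1) (mem:SI (reg:SI 2))),
   the result is (plus:SI (value:SI ...) (value:SI ...)), where the VALUE
   rtxen refer to the cselib_vals currently associated with the register
   and with the memory location.  */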
765
766 rtx
767 cselib_subst_to_values (x)
768 rtx x;
769 {
770 enum rtx_code code = GET_CODE (x);
771 const char *fmt = GET_RTX_FORMAT (code);
772 cselib_val *e;
773 struct elt_list *l;
774 rtx copy = x;
775 int i;
776
777 switch (code)
778 {
779 case REG:
780 for (l = REG_VALUES (REGNO (x)); l; l = l->next)
781 if (GET_MODE (l->elt->u.val_rtx) == GET_MODE (x))
782 return l->elt->u.val_rtx;
783
784 abort ();
785
786 case MEM:
787 e = cselib_lookup_mem (x, 0);
788 if (! e)
789 {
790 /* This happens for autoincrements. Assign a value that doesn't
791 match any other. */
792 e = new_cselib_val (++next_unknown_value, GET_MODE (x));
793 }
794 return e->u.val_rtx;
795
796 case CONST_DOUBLE:
797 case CONST_INT:
798 return x;
799
800 case POST_INC:
801 case PRE_INC:
802 case POST_DEC:
803 case PRE_DEC:
804 case POST_MODIFY:
805 case PRE_MODIFY:
806 e = new_cselib_val (++next_unknown_value, GET_MODE (x));
807 return e->u.val_rtx;
808
809 default:
810 break;
811 }
812
813 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
814 {
815 if (fmt[i] == 'e')
816 {
817 rtx t = cselib_subst_to_values (XEXP (x, i));
818
819 if (t != XEXP (x, i) && x == copy)
820 copy = shallow_copy_rtx (x);
821
822 XEXP (copy, i) = t;
823 }
824 else if (fmt[i] == 'E')
825 {
826 int j, k;
827
828 for (j = 0; j < XVECLEN (x, i); j++)
829 {
830 rtx t = cselib_subst_to_values (XVECEXP (x, i, j));
831
832 if (t != XVECEXP (x, i, j) && XVEC (x, i) == XVEC (copy, i))
833 {
834 if (x == copy)
835 copy = shallow_copy_rtx (x);
836
837 XVEC (copy, i) = rtvec_alloc (XVECLEN (x, i));
838 for (k = 0; k < j; k++)
839 XVECEXP (copy, i, k) = XVECEXP (x, i, k);
840 }
841
842 XVECEXP (copy, i, j) = t;
843 }
844 }
845 }
846
847 return copy;
848 }
849
850 /* Look up the rtl expression X in our tables and return the value it has.
851 If CREATE is zero, we return NULL if we don't know the value. Otherwise,
852 we create a new one if possible, using mode MODE if X doesn't have a mode
853 (i.e. because it's a constant). */
854
855 cselib_val *
856 cselib_lookup (x, mode, create)
857 rtx x;
858 enum machine_mode mode;
859 int create;
860 {
861 void **slot;
862 cselib_val *e;
863 unsigned int hashval;
864
865 if (GET_MODE (x) != VOIDmode)
866 mode = GET_MODE (x);
867
868 if (GET_CODE (x) == VALUE)
869 return CSELIB_VAL_PTR (x);
870
871 if (GET_CODE (x) == REG)
872 {
873 struct elt_list *l;
874 unsigned int i = REGNO (x);
875
876 for (l = REG_VALUES (i); l; l = l->next)
877 if (mode == GET_MODE (l->elt->u.val_rtx))
878 return l->elt;
879
880 if (! create)
881 return 0;
882
883 e = new_cselib_val (++next_unknown_value, GET_MODE (x));
884 e->locs = new_elt_loc_list (e->locs, x);
885 if (REG_VALUES (i) == 0)
886 VARRAY_PUSH_UINT (used_regs, i);
887 REG_VALUES (i) = new_elt_list (REG_VALUES (i), e);
888 slot = htab_find_slot_with_hash (hash_table, x, e->value, INSERT);
889 *slot = e;
890 return e;
891 }
892
893 if (GET_CODE (x) == MEM)
894 return cselib_lookup_mem (x, create);
895
896 hashval = hash_rtx (x, mode, create);
897 /* Can't even create if hashing is not possible. */
898 if (! hashval)
899 return 0;
900
901 slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x),
902 hashval, create ? INSERT : NO_INSERT);
903 if (slot == 0)
904 return 0;
905
906 e = (cselib_val *) *slot;
907 if (e)
908 return e;
909
910 e = new_cselib_val (hashval, mode);
911
912 /* We have to fill the slot before calling cselib_subst_to_values:
913 the hash table is inconsistent until we do so, and
914 cselib_subst_to_values will need to do lookups. */
915 *slot = (void *) e;
916 e->locs = new_elt_loc_list (e->locs, cselib_subst_to_values (x));
917 return e;
918 }
919
920 /* Invalidate any entries in reg_values that overlap REGNO. This is called
921 if REGNO is changing. MODE is the mode of the assignment to REGNO, which
922 is used to determine how many hard registers are being changed. If MODE
923 is VOIDmode, then only REGNO is being changed; this is used when
924 invalidating call clobbered registers across a call. */
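/* A concrete illustration of the overlap handling below (assuming a target
   where a DImode value occupies two hard registers): if register 1 is being
   assigned while register 0 currently holds a DImode value spanning
   registers 0 and 1, that DImode value must be invalidated as well, even
   though its register number is lower than the one being changed.  */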
925
926 static void
927 cselib_invalidate_regno (regno, mode)
928 unsigned int regno;
929 enum machine_mode mode;
930 {
931 unsigned int endregno;
932 unsigned int i;
933
934 /* If we see pseudos after reload, something is _wrong_. */
935 if (reload_completed && regno >= FIRST_PSEUDO_REGISTER
936 && reg_renumber[regno] >= 0)
937 abort ();
938
939 /* Determine the range of registers that must be invalidated. For
940 pseudos, only REGNO is affected. For hard regs, we must take MODE
941 into account, and we must also invalidate lower register numbers
942 if they contain values that overlap REGNO. */
943 endregno = regno + 1;
944 if (regno < FIRST_PSEUDO_REGISTER && mode != VOIDmode)
945 endregno = regno + HARD_REGNO_NREGS (regno, mode);
946
947 for (i = 0; i < endregno; i++)
948 {
949 struct elt_list **l = &REG_VALUES (i);
950
951 /* Go through all known values for this reg; if it overlaps the range
952 we're invalidating, remove the value. */
953 while (*l)
954 {
955 cselib_val *v = (*l)->elt;
956 struct elt_loc_list **p;
957 unsigned int this_last = i;
958
959 if (i < FIRST_PSEUDO_REGISTER)
960 this_last += HARD_REGNO_NREGS (i, GET_MODE (v->u.val_rtx)) - 1;
961
962 if (this_last < regno)
963 {
964 l = &(*l)->next;
965 continue;
966 }
967
968 /* We have an overlap. */
969 unchain_one_elt_list (l);
970
971 /* Now, we clear the mapping from value to reg. It must exist, so
972 this code will crash intentionally if it doesn't. */
973 for (p = &v->locs; ; p = &(*p)->next)
974 {
975 rtx x = (*p)->loc;
976
977 if (GET_CODE (x) == REG && REGNO (x) == i)
978 {
979 unchain_one_elt_loc_list (p);
980 break;
981 }
982 }
983 if (v->locs == 0)
984 n_useless_values++;
985 }
986 }
987 }
988
989 /* The memory at address MEM_BASE is being changed.
990 Return whether this change will invalidate VAL. */
991
992 static int
993 cselib_mem_conflict_p (mem_base, val)
994 rtx mem_base;
995 rtx val;
996 {
997 enum rtx_code code;
998 const char *fmt;
999 int i, j;
1000
1001 code = GET_CODE (val);
1002 switch (code)
1003 {
1004 /* Get rid of a few simple cases quickly. */
1005 case REG:
1006 case PC:
1007 case CC0:
1008 case SCRATCH:
1009 case CONST:
1010 case CONST_INT:
1011 case CONST_DOUBLE:
1012 case SYMBOL_REF:
1013 case LABEL_REF:
1014 return 0;
1015
1016 case MEM:
1017 if (GET_MODE (mem_base) == BLKmode
1018 || GET_MODE (val) == BLKmode
1019 || anti_dependence (val, mem_base))
1020 return 1;
1021
1022 /* The address may contain nested MEMs. */
1023 break;
1024
1025 default:
1026 break;
1027 }
1028
1029 fmt = GET_RTX_FORMAT (code);
1030 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1031 {
1032 if (fmt[i] == 'e')
1033 {
1034 if (cselib_mem_conflict_p (mem_base, XEXP (val, i)))
1035 return 1;
1036 }
1037 else if (fmt[i] == 'E')
1038 for (j = 0; j < XVECLEN (val, i); j++)
1039 if (cselib_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
1040 return 1;
1041 }
1042
1043 return 0;
1044 }
1045
1046 /* For the value found in SLOT, walk its locations and remove any that overlap
1047 INFO (which is a MEM rtx). */
1048
1049 static int
1050 cselib_invalidate_mem_1 (slot, info)
1051 void **slot;
1052 void *info;
1053 {
1054 cselib_val *v = (cselib_val *) *slot;
1055 rtx mem_rtx = (rtx) info;
1056 struct elt_loc_list **p = &v->locs;
1057 int had_locs = v->locs != 0;
1058
1059 while (*p)
1060 {
1061 rtx x = (*p)->loc;
1062 cselib_val *addr;
1063 struct elt_list **mem_chain;
1064
1065 /* MEMs may occur in locations only at the top level; below
1066 that every MEM or REG is substituted by its VALUE. */
1067 if (GET_CODE (x) != MEM
1068 || ! cselib_mem_conflict_p (mem_rtx, x))
1069 {
1070 p = &(*p)->next;
1071 continue;
1072 }
1073
1074 /* This one overlaps. */
1075 /* We must have a mapping from this MEM's address to the
1076 value (E). Remove that, too. */
1077 addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0);
1078 mem_chain = &addr->addr_list;
1079 for (;;)
1080 {
1081 if ((*mem_chain)->elt == v)
1082 {
1083 unchain_one_elt_list (mem_chain);
1084 break;
1085 }
1086
1087 mem_chain = &(*mem_chain)->next;
1088 }
1089
1090 unchain_one_elt_loc_list (p);
1091 }
1092
1093 if (had_locs && v->locs == 0)
1094 n_useless_values++;
1095
1096 return 1;
1097 }
1098
1099 /* Invalidate any locations in the table which are changed because of a
1100 store to MEM_RTX. If this is called because of a non-const call
1101 instruction, MEM_RTX is (mem:BLK const0_rtx). */
1102
1103 static void
1104 cselib_invalidate_mem (mem_rtx)
1105 rtx mem_rtx;
1106 {
1107 htab_traverse (hash_table, cselib_invalidate_mem_1, mem_rtx);
1108 }
1109
1110 /* Invalidate DEST, which is being assigned to or clobbered. The second and
1111 the third parameter exist so that this function can be passed to
1112 note_stores; they are ignored. */
1113
1114 static void
1115 cselib_invalidate_rtx (dest, ignore, data)
1116 rtx dest;
1117 rtx ignore ATTRIBUTE_UNUSED;
1118 void *data ATTRIBUTE_UNUSED;
1119 {
1120 while (GET_CODE (dest) == STRICT_LOW_PART || GET_CODE (dest) == SIGN_EXTRACT
1121 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SUBREG)
1122 dest = XEXP (dest, 0);
1123
1124 if (GET_CODE (dest) == REG)
1125 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
1126 else if (GET_CODE (dest) == MEM)
1127 cselib_invalidate_mem (dest);
1128
1129 /* Some machines don't define AUTO_INC_DEC, but they still use push
1130 instructions. We need to catch that case here in order to
1131 invalidate the stack pointer correctly. Note that invalidating
1132 the stack pointer is different from invalidating DEST. */
1133 if (push_operand (dest, GET_MODE (dest)))
1134 cselib_invalidate_rtx (stack_pointer_rtx, NULL_RTX, NULL);
1135 }
1136
1137 /* Record the result of a SET instruction. DEST is being set; the source
1138 contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT
1139 describes its address. */
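/* For example (illustrative only): for (set (reg:SI 5) (mem:SI (reg:SI 6))),
   SRC_ELT describes the value loaded from memory; (reg:SI 5) is recorded as
   a new location of that value and REG_VALUES (5) gains an entry pointing
   to it.  DEST_ADDR_ELT is unused in that case, since DEST is not a MEM.  */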
1140
1141 static void
1142 cselib_record_set (dest, src_elt, dest_addr_elt)
1143 rtx dest;
1144 cselib_val *src_elt, *dest_addr_elt;
1145 {
1146 int dreg = GET_CODE (dest) == REG ? (int) REGNO (dest) : -1;
1147
1148 if (src_elt == 0 || side_effects_p (dest))
1149 return;
1150
1151 if (dreg >= 0)
1152 {
1153 if (REG_VALUES (dreg) == 0)
1154 VARRAY_PUSH_UINT (used_regs, dreg);
1155
1156 REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
1157 if (src_elt->locs == 0)
1158 n_useless_values--;
1159 src_elt->locs = new_elt_loc_list (src_elt->locs, dest);
1160 }
1161 else if (GET_CODE (dest) == MEM && dest_addr_elt != 0)
1162 {
1163 if (src_elt->locs == 0)
1164 n_useless_values--;
1165 add_mem_for_addr (dest_addr_elt, src_elt, dest);
1166 }
1167 }
1168
1169 /* Describe a single set that is part of an insn. */
1170 struct set
1171 {
1172 rtx src;
1173 rtx dest;
1174 cselib_val *src_elt;
1175 cselib_val *dest_addr_elt;
1176 };
1177
1178 /* There is no good way to determine how many elements there can be
1179 in a PARALLEL. Since it's fairly cheap, use a really large number. */
1180 #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
1181
1182 /* Record the effects of any sets in INSN. */
1183 static void
1184 cselib_record_sets (insn)
1185 rtx insn;
1186 {
1187 int n_sets = 0;
1188 int i;
1189 struct set sets[MAX_SETS];
1190 rtx body = PATTERN (insn);
1191 rtx cond = 0;
1192
1193
1194 if (GET_CODE (body) == COND_EXEC)
1195 {
1196 cond = COND_EXEC_TEST (body);
1197 body = COND_EXEC_CODE (body);
1198 }
1199
1200 /* Find all sets. */
1201 if (GET_CODE (body) == SET)
1202 {
1203 sets[0].src = SET_SRC (body);
1204 sets[0].dest = SET_DEST (body);
1205 n_sets = 1;
1206 }
1207 else if (GET_CODE (body) == PARALLEL)
1208 {
1209 /* Look through the PARALLEL and record the values being
1210 set, if possible. Also handle any CLOBBERs. */
1211 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
1212 {
1213 rtx x = XVECEXP (body, 0, i);
1214
1215 if (GET_CODE (x) == SET)
1216 {
1217 sets[n_sets].src = SET_SRC (x);
1218 sets[n_sets].dest = SET_DEST (x);
1219 n_sets++;
1220 }
1221 }
1222 }
1223
1224 /* Look up the values that are read. Do this before invalidating the
1225 locations that are written. */
1226 for (i = 0; i < n_sets; i++)
1227 {
1228 rtx dest = sets[i].dest;
1229
1230 /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
1231 the low part after invalidating any knowledge about larger modes. */
1232 if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
1233 sets[i].dest = dest = XEXP (dest, 0);
1234
1235 /* We don't know how to record anything but REG or MEM. */
1236 if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM)
1237 {
1238 rtx src = sets[i].src;
1239 if (cond)
1240 src = gen_rtx_IF_THEN_ELSE (GET_MODE (src), cond, src, dest);
1241 sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1);
1242 if (GET_CODE (dest) == MEM)
1243 sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0), Pmode, 1);
1244 else
1245 sets[i].dest_addr_elt = 0;
1246 }
1247 }
1248
1249 /* Invalidate all locations written by this insn. Note that the elts we
1250 looked up in the previous loop aren't affected, just some of their
1251 locations may go away. */
1252 note_stores (body, cselib_invalidate_rtx, NULL);
1253
1254 /* Now enter the equivalences in our tables. */
1255 for (i = 0; i < n_sets; i++)
1256 {
1257 rtx dest = sets[i].dest;
1258 if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM)
1259 cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
1260 }
1261 }
1262
1263 /* Record the effects of INSN. */
1264
1265 void
1266 cselib_process_insn (insn)
1267 rtx insn;
1268 {
1269 int i;
1270 rtx x;
1271
1272 cselib_current_insn = insn;
1273
1274 /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp. */
1275 if (GET_CODE (insn) == CODE_LABEL
1276 || (GET_CODE (insn) == CALL_INSN
1277 && find_reg_note (insn, REG_SETJMP, NULL))
1278 || (GET_CODE (insn) == INSN
1279 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
1280 && MEM_VOLATILE_P (PATTERN (insn))))
1281 {
1282 clear_table (0);
1283 return;
1284 }
1285
1286 if (! INSN_P (insn))
1287 {
1288 cselib_current_insn = 0;
1289 return;
1290 }
1291
1292 /* If this is a call instruction, forget anything stored in a
1293 call clobbered register, or, if this is not a const call, in
1294 memory. */
1295 if (GET_CODE (insn) == CALL_INSN)
1296 {
1297 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1298 if (call_used_regs[i])
1299 cselib_invalidate_regno (i, VOIDmode);
1300
1301 if (! CONST_OR_PURE_CALL_P (insn))
1302 cselib_invalidate_mem (callmem);
1303 }
1304
1305 cselib_record_sets (insn);
1306
1307 #ifdef AUTO_INC_DEC
1308 /* Clobber any registers which appear in REG_INC notes. We
1309 could keep track of the changes to their values, but it is
1310 unlikely to help. */
1311 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
1312 if (REG_NOTE_KIND (x) == REG_INC)
1313 cselib_invalidate_rtx (XEXP (x, 0), NULL_RTX, NULL);
1314 #endif
1315
1316 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
1317 after we have processed the insn. */
1318 if (GET_CODE (insn) == CALL_INSN)
1319 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
1320 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
1321 cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX, NULL);
1322
1323 cselib_current_insn = 0;
1324
1325 if (n_useless_values > MAX_USELESS_VALUES)
1326 remove_useless_values ();
1327 }
1328
1329 /* Make sure our varrays are big enough. Not called from any cselib routines;
1330 it must be called by the user if new registers have been allocated. */
1331
1332 void
1333 cselib_update_varray_sizes ()
1334 {
1335 unsigned int nregs = max_reg_num ();
1336
1337 if (nregs == cselib_nregs)
1338 return;
1339
1340 cselib_nregs = nregs;
1341 VARRAY_GROW (reg_values, nregs);
1342 VARRAY_GROW (used_regs, nregs);
1343 }
1344
1345 /* Initialize cselib for one pass. The caller must also call
1346 init_alias_analysis. */
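/* A minimal sketch of a typical caller (illustrative only; the exact driver
   varies from pass to pass):

     init_alias_analysis ();
     cselib_init ();
     for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
       cselib_process_insn (insn);
     cselib_finish ();
     end_alias_analysis ();

   cselib_update_varray_sizes must also be called if the pass creates new
   pseudo registers while cselib is active.  */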
1347
1348 void
1349 cselib_init ()
1350 {
1351 /* These are only created once. */
1352 if (! callmem)
1353 {
1354 gcc_obstack_init (&cselib_obstack);
1355 cselib_startobj = obstack_alloc (&cselib_obstack, 0);
1356
1357 callmem = gen_rtx_MEM (BLKmode, const0_rtx);
1358 ggc_add_rtx_root (&callmem, 1);
1359 }
1360
1361 cselib_nregs = max_reg_num ();
1362 VARRAY_ELT_LIST_INIT (reg_values, cselib_nregs, "reg_values");
1363 VARRAY_UINT_INIT (used_regs, cselib_nregs, "used_regs");
1364 hash_table = htab_create (31, get_value_hash, entry_and_rtx_equal_p, NULL);
1365 clear_table (1);
1366 }
1367
1368 /* Called when the current user is done with cselib. */
1369
1370 void
1371 cselib_finish ()
1372 {
1373 clear_table (0);
1374 VARRAY_FREE (reg_values);
1375 VARRAY_FREE (used_regs);
1376 htab_delete (hash_table);
1377 }