Change use to type-based pool allocator in regcprop.c.
1 /* Copy propagation on hard registers for the GNU compiler.
2 Copyright (C) 2000-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "tm_p.h"
26 #include "insn-config.h"
27 #include "regs.h"
28 #include "addresses.h"
29 #include "hard-reg-set.h"
30 #include "predict.h"
31 #include "vec.h"
32 #include "hashtab.h"
33 #include "hash-set.h"
34 #include "machmode.h"
35 #include "input.h"
36 #include "function.h"
37 #include "dominance.h"
38 #include "cfg.h"
39 #include "basic-block.h"
40 #include "reload.h"
41 #include "recog.h"
42 #include "flags.h"
43 #include "diagnostic-core.h"
44 #include "obstack.h"
45 #include "tree-pass.h"
46 #include "df.h"
47 #include "rtl-iter.h"
48
49 /* The following code does forward propagation of hard register copies.
50 The object is to eliminate as many dependencies as possible, so that
51 we have the most scheduling freedom. As a side effect, we also clean
52 up some silly register allocation decisions made by reload. This
53 code may be obsoleted by a new register allocator. */
54
55 /* DEBUG_INSNs aren't changed right away, as doing so might extend the
56 lifetime of a register and get the DEBUG_INSN subsequently reset.
57 So they are queued instead, and updated only when the register is
58 used in some subsequent real insn before it is set. */
59 struct queued_debug_insn_change
60 {
61 struct queued_debug_insn_change *next;
62 rtx_insn *insn;
63 rtx *loc;
64 rtx new_rtx;
65
66 /* Pool allocation new operator. */
67 inline void *operator new (size_t)
68 {
69 return pool.allocate ();
70 }
71
72 /* Delete operator utilizing pool allocation. */
73 inline void operator delete (void *ptr)
74 {
75 pool.remove ((queued_debug_insn_change *) ptr);
76 }
77
78 /* Memory allocation pool. */
79 static pool_allocator<queued_debug_insn_change> pool;
80 };
81
82 /* For each register, we have a list of registers that contain the same
83 value. The OLDEST_REGNO field points to the head of the list, and
84 the NEXT_REGNO field runs through the list. The MODE field indicates
85 what mode the data is known to be in; this field is VOIDmode when the
86 register is not known to contain valid data. */
87
88 struct value_data_entry
89 {
90 machine_mode mode;
91 unsigned int oldest_regno;
92 unsigned int next_regno;
93 struct queued_debug_insn_change *debug_insn_changes;
94 };
95
96 struct value_data
97 {
98 struct value_data_entry e[FIRST_PSEUDO_REGISTER];
99 unsigned int max_value_regs;
100 unsigned int n_debug_insn_changes;
101 };
102
103 pool_allocator<queued_debug_insn_change> queued_debug_insn_change::pool
104 ("debug insn changes pool", 256);
105
106 static bool skip_debug_insn_p;
107
108 static void kill_value_one_regno (unsigned, struct value_data *);
109 static void kill_value_regno (unsigned, unsigned, struct value_data *);
110 static void kill_value (const_rtx, struct value_data *);
111 static void set_value_regno (unsigned, machine_mode, struct value_data *);
112 static void init_value_data (struct value_data *);
113 static void kill_clobbered_value (rtx, const_rtx, void *);
114 static void kill_set_value (rtx, const_rtx, void *);
115 static void copy_value (rtx, rtx, struct value_data *);
116 static bool mode_change_ok (machine_mode, machine_mode,
117 unsigned int);
118 static rtx maybe_mode_change (machine_mode, machine_mode,
119 machine_mode, unsigned int, unsigned int);
120 static rtx find_oldest_value_reg (enum reg_class, rtx, struct value_data *);
121 static bool replace_oldest_value_reg (rtx *, enum reg_class, rtx_insn *,
122 struct value_data *);
123 static bool replace_oldest_value_addr (rtx *, enum reg_class,
124 machine_mode, addr_space_t,
125 rtx_insn *, struct value_data *);
126 static bool replace_oldest_value_mem (rtx, rtx_insn *, struct value_data *);
127 static bool copyprop_hardreg_forward_1 (basic_block, struct value_data *);
128 extern void debug_value_data (struct value_data *);
129 #ifdef ENABLE_CHECKING
130 static void validate_value_data (struct value_data *);
131 #endif
132
133 /* Free all queued updates for DEBUG_INSNs that change some reg to
134 register REGNO. */
135
136 static void
137 free_debug_insn_changes (struct value_data *vd, unsigned int regno)
138 {
139 struct queued_debug_insn_change *cur, *next;
140 for (cur = vd->e[regno].debug_insn_changes; cur; cur = next)
141 {
142 next = cur->next;
143 --vd->n_debug_insn_changes;
144 delete cur;
145 }
146 vd->e[regno].debug_insn_changes = NULL;
147 }
148
149 /* Kill register REGNO. This involves removing it from any value
150 lists, and resetting the value mode to VOIDmode. This is only a
151 helper function; it does not handle any hard registers overlapping
152 with REGNO. */
153
154 static void
155 kill_value_one_regno (unsigned int regno, struct value_data *vd)
156 {
157 unsigned int i, next;
158
159 if (vd->e[regno].oldest_regno != regno)
160 {
161 for (i = vd->e[regno].oldest_regno;
162 vd->e[i].next_regno != regno;
163 i = vd->e[i].next_regno)
164 continue;
165 vd->e[i].next_regno = vd->e[regno].next_regno;
166 }
167 else if ((next = vd->e[regno].next_regno) != INVALID_REGNUM)
168 {
169 for (i = next; i != INVALID_REGNUM; i = vd->e[i].next_regno)
170 vd->e[i].oldest_regno = next;
171 }
172
173 vd->e[regno].mode = VOIDmode;
174 vd->e[regno].oldest_regno = regno;
175 vd->e[regno].next_regno = INVALID_REGNUM;
176 if (vd->e[regno].debug_insn_changes)
177 free_debug_insn_changes (vd, regno);
178
179 #ifdef ENABLE_CHECKING
180 validate_value_data (vd);
181 #endif
182 }
183
184 /* Kill the value in register REGNO for NREGS, and any other registers
185 whose values overlap. */
186
187 static void
188 kill_value_regno (unsigned int regno, unsigned int nregs,
189 struct value_data *vd)
190 {
191 unsigned int j;
192
193 /* Kill the value we're told to kill. */
194 for (j = 0; j < nregs; ++j)
195 kill_value_one_regno (regno + j, vd);
196
197 /* Kill everything that overlapped what we're told to kill. */
198 if (regno < vd->max_value_regs)
199 j = 0;
200 else
201 j = regno - vd->max_value_regs;
202 for (; j < regno; ++j)
203 {
204 unsigned int i, n;
205 if (vd->e[j].mode == VOIDmode)
206 continue;
207 n = hard_regno_nregs[j][vd->e[j].mode];
208 if (j + n > regno)
209 for (i = 0; i < n; ++i)
210 kill_value_one_regno (j + i, vd);
211 }
212 }
213
214 /* Kill X. This is a convenience function wrapping kill_value_regno
215 so that we take into account the mode the register is in. */
216
217 static void
218 kill_value (const_rtx x, struct value_data *vd)
219 {
220 if (GET_CODE (x) == SUBREG)
221 {
222 rtx tmp = simplify_subreg (GET_MODE (x), SUBREG_REG (x),
223 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
224 x = tmp ? tmp : SUBREG_REG (x);
225 }
226 if (REG_P (x))
227 kill_value_regno (REGNO (x), REG_NREGS (x), vd);
228 }
229
230 /* Remember that REGNO is valid in MODE. */
231
232 static void
233 set_value_regno (unsigned int regno, machine_mode mode,
234 struct value_data *vd)
235 {
236 unsigned int nregs;
237
238 vd->e[regno].mode = mode;
239
240 nregs = hard_regno_nregs[regno][mode];
241 if (nregs > vd->max_value_regs)
242 vd->max_value_regs = nregs;
243 }
244
245 /* Initialize VD such that there are no known relationships between regs. */
246
247 static void
248 init_value_data (struct value_data *vd)
249 {
250 int i;
251 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
252 {
253 vd->e[i].mode = VOIDmode;
254 vd->e[i].oldest_regno = i;
255 vd->e[i].next_regno = INVALID_REGNUM;
256 vd->e[i].debug_insn_changes = NULL;
257 }
258 vd->max_value_regs = 0;
259 vd->n_debug_insn_changes = 0;
260 }
261
262 /* Called through note_stores. If X is clobbered, kill its value. */
263
264 static void
265 kill_clobbered_value (rtx x, const_rtx set, void *data)
266 {
267 struct value_data *const vd = (struct value_data *) data;
268 if (GET_CODE (set) == CLOBBER)
269 kill_value (x, vd);
270 }
271
272 /* A structure passed as data to kill_set_value through note_stores. */
273 struct kill_set_value_data
274 {
275 struct value_data *vd;
276 rtx ignore_set_reg;
277 };
278
279 /* Called through note_stores. If X is set, not clobbered, kill its
280 current value and install it as the root of its own value list. */
281
282 static void
283 kill_set_value (rtx x, const_rtx set, void *data)
284 {
285 struct kill_set_value_data *ksvd = (struct kill_set_value_data *) data;
286 if (rtx_equal_p (x, ksvd->ignore_set_reg))
287 return;
288 if (GET_CODE (set) != CLOBBER)
289 {
290 kill_value (x, ksvd->vd);
291 if (REG_P (x))
292 set_value_regno (REGNO (x), GET_MODE (x), ksvd->vd);
293 }
294 }
295
296 /* Kill any register used in INSN as the base of an auto-increment
297 expression, and install that register as the root of its own value list. */
298
299 static void
300 kill_autoinc_value (rtx_insn *insn, struct value_data *vd)
301 {
302 subrtx_iterator::array_type array;
303 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
304 {
305 const_rtx x = *iter;
306 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
307 {
308 x = XEXP (x, 0);
309 kill_value (x, vd);
310 set_value_regno (REGNO (x), GET_MODE (x), vd);
311 iter.skip_subrtxes ();
312 }
313 }
314 }
315
316 /* Assert that SRC has been copied to DEST. Adjust the data structures
317 to reflect that SRC contains an older copy of the shared value. */
318
319 static void
320 copy_value (rtx dest, rtx src, struct value_data *vd)
321 {
322 unsigned int dr = REGNO (dest);
323 unsigned int sr = REGNO (src);
324 unsigned int dn, sn;
325 unsigned int i;
326
327 /* ??? At present, it's possible to see noop sets. It'd be nice if
328 this were cleaned up beforehand... */
329 if (sr == dr)
330 return;
331
332 /* Do not propagate copies to the stack pointer, as that can leave
333 memory accesses with no scheduling dependency on the stack update. */
334 if (dr == STACK_POINTER_REGNUM)
335 return;
336
337 /* Likewise with the frame pointer, if we're using one. */
338 if (frame_pointer_needed && dr == HARD_FRAME_POINTER_REGNUM)
339 return;
340
341 /* Do not propagate copies to fixed or global registers: patterns
342 may rely on seeing a particular fixed register, and users may
343 expect the chosen global register in an asm. */
344 if (fixed_regs[dr] || global_regs[dr])
345 return;
346
347 /* If SRC and DEST overlap, don't record anything. */
348 dn = REG_NREGS (dest);
349 sn = REG_NREGS (src);
350 if ((dr > sr && dr < sr + sn)
351 || (sr > dr && sr < dr + dn))
352 return;
353
354 /* If SRC had no assigned mode (i.e. we didn't know it was live)
355 assign it now and assume the value came from an input argument
356 or somesuch. */
357 if (vd->e[sr].mode == VOIDmode)
358 set_value_regno (sr, vd->e[dr].mode, vd);
359
360 /* If we are narrowing the input to a smaller number of hard regs,
361 and it is in big endian, we are really extracting a high part.
362 Since we generally associate a low part of a value with the value itself,
363 we must not do the same for the high part.
364 Note we can still get low parts for the same mode combination through
365 a two-step copy involving differently sized hard regs.
366 Assume hard regs fr* are 32 bits each, while r* are 64 bits each:
367 (set (reg:DI r0) (reg:DI fr0))
368 (set (reg:SI fr2) (reg:SI r0))
369 loads the low part of (reg:DI fr0) - i.e. fr1 - into fr2, while:
370 (set (reg:SI fr2) (reg:SI fr0))
371 loads the high part of (reg:DI fr0) into fr2.
372
373 We can't properly represent the latter case in our tables, so don't
374 record anything then. */
375 else if (sn < (unsigned int) hard_regno_nregs[sr][vd->e[sr].mode]
376 && (GET_MODE_SIZE (vd->e[sr].mode) > UNITS_PER_WORD
377 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
378 return;
379
380 /* If SRC had been assigned a mode narrower than the copy, we can't
381 link DEST into the chain, because not all of the pieces of the
382 copy came from oldest_regno. */
383 else if (sn > (unsigned int) hard_regno_nregs[sr][vd->e[sr].mode])
384 return;
385
386 /* Link DR at the end of the value chain used by SR. */
387
388 vd->e[dr].oldest_regno = vd->e[sr].oldest_regno;
389
390 for (i = sr; vd->e[i].next_regno != INVALID_REGNUM; i = vd->e[i].next_regno)
391 continue;
392 vd->e[i].next_regno = dr;
393
394 #ifdef ENABLE_CHECKING
395 validate_value_data (vd);
396 #endif
397 }
398
399 /* Return true if a mode change from ORIG to NEW is allowed for REGNO. */
400
401 static bool
402 mode_change_ok (machine_mode orig_mode, machine_mode new_mode,
403 unsigned int regno ATTRIBUTE_UNUSED)
404 {
405 if (GET_MODE_SIZE (orig_mode) < GET_MODE_SIZE (new_mode))
406 return false;
407
408 #ifdef CANNOT_CHANGE_MODE_CLASS
409 return !REG_CANNOT_CHANGE_MODE_P (regno, orig_mode, new_mode);
410 #endif
411
412 return true;
413 }
414
415 /* Register REGNO was originally set in ORIG_MODE. It - or a copy of it -
416 was copied in COPY_MODE to COPY_REGNO, and then COPY_REGNO was accessed
417 in NEW_MODE.
418 Return a NEW_MODE rtx for REGNO if that's OK, otherwise return NULL_RTX. */
419
420 static rtx
421 maybe_mode_change (machine_mode orig_mode, machine_mode copy_mode,
422 machine_mode new_mode, unsigned int regno,
423 unsigned int copy_regno ATTRIBUTE_UNUSED)
424 {
425 if (GET_MODE_SIZE (copy_mode) < GET_MODE_SIZE (orig_mode)
426 && GET_MODE_SIZE (copy_mode) < GET_MODE_SIZE (new_mode))
427 return NULL_RTX;
428
429 if (orig_mode == new_mode)
430 return gen_raw_REG (new_mode, regno);
431 else if (mode_change_ok (orig_mode, new_mode, regno))
432 {
433 int copy_nregs = hard_regno_nregs[copy_regno][copy_mode];
434 int use_nregs = hard_regno_nregs[copy_regno][new_mode];
435 int copy_offset
436 = GET_MODE_SIZE (copy_mode) / copy_nregs * (copy_nregs - use_nregs);
437 int offset
438 = GET_MODE_SIZE (orig_mode) - GET_MODE_SIZE (new_mode) - copy_offset;
439 int byteoffset = offset % UNITS_PER_WORD;
440 int wordoffset = offset - byteoffset;
441
442 offset = ((WORDS_BIG_ENDIAN ? wordoffset : 0)
443 + (BYTES_BIG_ENDIAN ? byteoffset : 0));
444 regno += subreg_regno_offset (regno, orig_mode, offset, new_mode);
445 if (HARD_REGNO_MODE_OK (regno, new_mode))
446 return gen_raw_REG (new_mode, regno);
447 }
448 return NULL_RTX;
449 }
450
451 /* Find the oldest copy of the value contained in REGNO that is in
452 register class CL and has mode MODE. If found, return an rtx
453 of that oldest register, otherwise return NULL. */
454
455 static rtx
456 find_oldest_value_reg (enum reg_class cl, rtx reg, struct value_data *vd)
457 {
458 unsigned int regno = REGNO (reg);
459 machine_mode mode = GET_MODE (reg);
460 unsigned int i;
461
462 /* If we are accessing REG in some mode other than the one we set it in,
463 make sure that the replacement is valid. In particular, consider
464 (set (reg:DI r11) (...))
465 (set (reg:SI r9) (reg:SI r11))
466 (set (reg:SI r10) (...))
467 (set (...) (reg:DI r9))
468 Replacing r9 with r11 is invalid. */
469 if (mode != vd->e[regno].mode)
470 {
471 if (hard_regno_nregs[regno][mode]
472 > hard_regno_nregs[regno][vd->e[regno].mode])
473 return NULL_RTX;
474 }
475
476 for (i = vd->e[regno].oldest_regno; i != regno; i = vd->e[i].next_regno)
477 {
478 machine_mode oldmode = vd->e[i].mode;
479 rtx new_rtx;
480
481 if (!in_hard_reg_set_p (reg_class_contents[cl], mode, i))
482 continue;
483
484 new_rtx = maybe_mode_change (oldmode, vd->e[regno].mode, mode, i, regno);
485 if (new_rtx)
486 {
487 ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (reg);
488 REG_ATTRS (new_rtx) = REG_ATTRS (reg);
489 REG_POINTER (new_rtx) = REG_POINTER (reg);
490 return new_rtx;
491 }
492 }
493
494 return NULL_RTX;
495 }
496
497 /* If possible, replace the register at *LOC with the oldest register
498 in register class CL. Return true if successfully replaced. */
499
500 static bool
501 replace_oldest_value_reg (rtx *loc, enum reg_class cl, rtx_insn *insn,
502 struct value_data *vd)
503 {
504 rtx new_rtx = find_oldest_value_reg (cl, *loc, vd);
505 if (new_rtx && (!DEBUG_INSN_P (insn) || !skip_debug_insn_p))
506 {
507 if (DEBUG_INSN_P (insn))
508 {
509 struct queued_debug_insn_change *change;
510
511 if (dump_file)
512 fprintf (dump_file, "debug_insn %u: queued replacing reg %u with %u\n",
513 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
514
515 change = new queued_debug_insn_change;
516 change->next = vd->e[REGNO (new_rtx)].debug_insn_changes;
517 change->insn = insn;
518 change->loc = loc;
519 change->new_rtx = new_rtx;
520 vd->e[REGNO (new_rtx)].debug_insn_changes = change;
521 ++vd->n_debug_insn_changes;
522 return true;
523 }
524 if (dump_file)
525 fprintf (dump_file, "insn %u: replaced reg %u with %u\n",
526 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
527
528 validate_change (insn, loc, new_rtx, 1);
529 return true;
530 }
531 return false;
532 }
533
534 /* Similar to replace_oldest_value_reg, but *LOC contains an address.
535 Adapted from find_reloads_address_1. CL is INDEX_REG_CLASS or
536 BASE_REG_CLASS depending on how the register is being considered. */
537
538 static bool
539 replace_oldest_value_addr (rtx *loc, enum reg_class cl,
540 machine_mode mode, addr_space_t as,
541 rtx_insn *insn, struct value_data *vd)
542 {
543 rtx x = *loc;
544 RTX_CODE code = GET_CODE (x);
545 const char *fmt;
546 int i, j;
547 bool changed = false;
548
549 switch (code)
550 {
551 case PLUS:
552 if (DEBUG_INSN_P (insn))
553 break;
554
555 {
556 rtx orig_op0 = XEXP (x, 0);
557 rtx orig_op1 = XEXP (x, 1);
558 RTX_CODE code0 = GET_CODE (orig_op0);
559 RTX_CODE code1 = GET_CODE (orig_op1);
560 rtx op0 = orig_op0;
561 rtx op1 = orig_op1;
562 rtx *locI = NULL;
563 rtx *locB = NULL;
564 enum rtx_code index_code = SCRATCH;
565
566 if (GET_CODE (op0) == SUBREG)
567 {
568 op0 = SUBREG_REG (op0);
569 code0 = GET_CODE (op0);
570 }
571
572 if (GET_CODE (op1) == SUBREG)
573 {
574 op1 = SUBREG_REG (op1);
575 code1 = GET_CODE (op1);
576 }
577
578 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
579 || code0 == ZERO_EXTEND || code1 == MEM)
580 {
581 locI = &XEXP (x, 0);
582 locB = &XEXP (x, 1);
583 index_code = GET_CODE (*locI);
584 }
585 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
586 || code1 == ZERO_EXTEND || code0 == MEM)
587 {
588 locI = &XEXP (x, 1);
589 locB = &XEXP (x, 0);
590 index_code = GET_CODE (*locI);
591 }
592 else if (code0 == CONST_INT || code0 == CONST
593 || code0 == SYMBOL_REF || code0 == LABEL_REF)
594 {
595 locB = &XEXP (x, 1);
596 index_code = GET_CODE (XEXP (x, 0));
597 }
598 else if (code1 == CONST_INT || code1 == CONST
599 || code1 == SYMBOL_REF || code1 == LABEL_REF)
600 {
601 locB = &XEXP (x, 0);
602 index_code = GET_CODE (XEXP (x, 1));
603 }
604 else if (code0 == REG && code1 == REG)
605 {
606 int index_op;
607 unsigned regno0 = REGNO (op0), regno1 = REGNO (op1);
608
609 if (REGNO_OK_FOR_INDEX_P (regno1)
610 && regno_ok_for_base_p (regno0, mode, as, PLUS, REG))
611 index_op = 1;
612 else if (REGNO_OK_FOR_INDEX_P (regno0)
613 && regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
614 index_op = 0;
615 else if (regno_ok_for_base_p (regno0, mode, as, PLUS, REG)
616 || REGNO_OK_FOR_INDEX_P (regno1))
617 index_op = 1;
618 else if (regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
619 index_op = 0;
620 else
621 index_op = 1;
622
623 locI = &XEXP (x, index_op);
624 locB = &XEXP (x, !index_op);
625 index_code = GET_CODE (*locI);
626 }
627 else if (code0 == REG)
628 {
629 locI = &XEXP (x, 0);
630 locB = &XEXP (x, 1);
631 index_code = GET_CODE (*locI);
632 }
633 else if (code1 == REG)
634 {
635 locI = &XEXP (x, 1);
636 locB = &XEXP (x, 0);
637 index_code = GET_CODE (*locI);
638 }
639
640 if (locI)
641 changed |= replace_oldest_value_addr (locI, INDEX_REG_CLASS,
642 mode, as, insn, vd);
643 if (locB)
644 changed |= replace_oldest_value_addr (locB,
645 base_reg_class (mode, as, PLUS,
646 index_code),
647 mode, as, insn, vd);
648 return changed;
649 }
650
651 case POST_INC:
652 case POST_DEC:
653 case POST_MODIFY:
654 case PRE_INC:
655 case PRE_DEC:
656 case PRE_MODIFY:
657 return false;
658
659 case MEM:
660 return replace_oldest_value_mem (x, insn, vd);
661
662 case REG:
663 return replace_oldest_value_reg (loc, cl, insn, vd);
664
665 default:
666 break;
667 }
668
669 fmt = GET_RTX_FORMAT (code);
670 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
671 {
672 if (fmt[i] == 'e')
673 changed |= replace_oldest_value_addr (&XEXP (x, i), cl, mode, as,
674 insn, vd);
675 else if (fmt[i] == 'E')
676 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
677 changed |= replace_oldest_value_addr (&XVECEXP (x, i, j), cl,
678 mode, as, insn, vd);
679 }
680
681 return changed;
682 }
683
684 /* Similar to replace_oldest_value_reg, but X contains a memory. */
685
686 static bool
687 replace_oldest_value_mem (rtx x, rtx_insn *insn, struct value_data *vd)
688 {
689 enum reg_class cl;
690
691 if (DEBUG_INSN_P (insn))
692 cl = ALL_REGS;
693 else
694 cl = base_reg_class (GET_MODE (x), MEM_ADDR_SPACE (x), MEM, SCRATCH);
695
696 return replace_oldest_value_addr (&XEXP (x, 0), cl,
697 GET_MODE (x), MEM_ADDR_SPACE (x),
698 insn, vd);
699 }
700
701 /* Apply all queued updates for DEBUG_INSNs that change some reg to
702 register REGNO. */
703
704 static void
705 apply_debug_insn_changes (struct value_data *vd, unsigned int regno)
706 {
707 struct queued_debug_insn_change *change;
708 rtx_insn *last_insn = vd->e[regno].debug_insn_changes->insn;
709
710 for (change = vd->e[regno].debug_insn_changes;
711 change;
712 change = change->next)
713 {
714 if (last_insn != change->insn)
715 {
716 apply_change_group ();
717 last_insn = change->insn;
718 }
719 validate_change (change->insn, change->loc, change->new_rtx, 1);
720 }
721 apply_change_group ();
722 }
723
724 /* Called via note_uses. For each register used in a real insn,
725 apply any queued DEBUG_INSN changes that replace some register
726 with that used register. */
727
728 static void
729 cprop_find_used_regs (rtx *loc, void *data)
730 {
731 struct value_data *const vd = (struct value_data *) data;
732 subrtx_iterator::array_type array;
733 FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
734 {
735 const_rtx x = *iter;
736 if (REG_P (x))
737 {
738 unsigned int regno = REGNO (x);
739 if (vd->e[regno].debug_insn_changes)
740 {
741 apply_debug_insn_changes (vd, regno);
742 free_debug_insn_changes (vd, regno);
743 }
744 }
745 }
746 }
747
748 /* Apply clobbers of INSN in PATTERN and C_I_F_U to value_data VD. */
749
750 static void
751 kill_clobbered_values (rtx_insn *insn, struct value_data *vd)
752 {
753 note_stores (PATTERN (insn), kill_clobbered_value, vd);
754
755 if (CALL_P (insn))
756 {
757 rtx exp;
758
759 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
760 {
761 rtx x = XEXP (exp, 0);
762 if (GET_CODE (x) == CLOBBER)
763 kill_value (SET_DEST (x), vd);
764 }
765 }
766 }
767
768 /* Perform the forward copy propagation on basic block BB. */
769
770 static bool
771 copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
772 {
773 bool anything_changed = false;
774 rtx_insn *insn;
775
776 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
777 {
778 int n_ops, i, predicated;
779 bool is_asm, any_replacements;
780 rtx set;
781 rtx link;
782 bool replaced[MAX_RECOG_OPERANDS];
783 bool changed = false;
784 struct kill_set_value_data ksvd;
785
786 if (!NONDEBUG_INSN_P (insn))
787 {
788 if (DEBUG_INSN_P (insn))
789 {
790 rtx loc = INSN_VAR_LOCATION_LOC (insn);
791 if (!VAR_LOC_UNKNOWN_P (loc))
792 replace_oldest_value_addr (&INSN_VAR_LOCATION_LOC (insn),
793 ALL_REGS, GET_MODE (loc),
794 ADDR_SPACE_GENERIC, insn, vd);
795 }
796
797 if (insn == BB_END (bb))
798 break;
799 else
800 continue;
801 }
802
803 set = single_set (insn);
804 extract_constrain_insn (insn);
805 preprocess_constraints (insn);
806 const operand_alternative *op_alt = which_op_alt ();
807 n_ops = recog_data.n_operands;
808 is_asm = asm_noperands (PATTERN (insn)) >= 0;
809
810 /* Simplify the code below by promoting OP_OUT to OP_INOUT
811 in predicated instructions. */
812
813 predicated = GET_CODE (PATTERN (insn)) == COND_EXEC;
814 for (i = 0; i < n_ops; ++i)
815 {
816 int matches = op_alt[i].matches;
817 if (matches >= 0 || op_alt[i].matched >= 0
818 || (predicated && recog_data.operand_type[i] == OP_OUT))
819 recog_data.operand_type[i] = OP_INOUT;
820 }
821
822 /* Apply changes to earlier DEBUG_INSNs if possible. */
823 if (vd->n_debug_insn_changes)
824 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
825
826 /* For each earlyclobber operand, zap the value data. */
827 for (i = 0; i < n_ops; i++)
828 if (op_alt[i].earlyclobber)
829 kill_value (recog_data.operand[i], vd);
830
831 /* Within asms, a clobber cannot overlap inputs or outputs.
832 I wouldn't think this were true for regular insns, but
833 scan_rtx treats them like that... */
834 kill_clobbered_values (insn, vd);
835
836 /* Kill all auto-incremented values. */
837 /* ??? REG_INC is useless, since stack pushes aren't done that way. */
838 kill_autoinc_value (insn, vd);
839
840 /* Kill all early-clobbered operands. */
841 for (i = 0; i < n_ops; i++)
842 if (op_alt[i].earlyclobber)
843 kill_value (recog_data.operand[i], vd);
844
845 /* If we have dead sets in the insn, then we need to treat them just
846 as we would clobbers. */
847 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
848 {
849 if (REG_NOTE_KIND (link) == REG_UNUSED)
850 {
851 kill_value (XEXP (link, 0), vd);
852 /* Furthermore, if the insn looked like a single-set,
853 but the dead store kills the source value of that
854 set, then we can no longer use the plain move
855 special case below. */
856 if (set
857 && reg_overlap_mentioned_p (XEXP (link, 0), SET_SRC (set)))
858 set = NULL;
859 }
860 }
861
862 /* Special-case plain move instructions, since we may well
863 be able to do the move from a different register class. */
864 if (set && REG_P (SET_SRC (set)))
865 {
866 rtx src = SET_SRC (set);
867 unsigned int regno = REGNO (src);
868 machine_mode mode = GET_MODE (src);
869 unsigned int i;
870 rtx new_rtx;
871
872 /* If we are accessing SRC in some mode other than what we
873 set it in, make sure that the replacement is valid. */
874 if (mode != vd->e[regno].mode)
875 {
876 if (hard_regno_nregs[regno][mode]
877 > hard_regno_nregs[regno][vd->e[regno].mode])
878 goto no_move_special_case;
879
880 /* Likewise, if we are narrowing on a big-endian target, the
881 transformation is also invalid. */
882 if (hard_regno_nregs[regno][mode]
883 < hard_regno_nregs[regno][vd->e[regno].mode]
884 && (GET_MODE_SIZE (vd->e[regno].mode) > UNITS_PER_WORD
885 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
886 goto no_move_special_case;
887 }
888
889 /* If the destination is also a register, try to find a source
890 register in the same class. */
891 if (REG_P (SET_DEST (set)))
892 {
893 new_rtx = find_oldest_value_reg (REGNO_REG_CLASS (regno), src, vd);
894 if (new_rtx && validate_change (insn, &SET_SRC (set), new_rtx, 0))
895 {
896 if (dump_file)
897 fprintf (dump_file,
898 "insn %u: replaced reg %u with %u\n",
899 INSN_UID (insn), regno, REGNO (new_rtx));
900 changed = true;
901 goto did_replacement;
902 }
903 /* We need to re-extract as validate_change clobbers
904 recog_data. */
905 extract_constrain_insn (insn);
906 preprocess_constraints (insn);
907 }
908
909 /* Otherwise, try each register in the value chain and see if the replacement is valid. */
910 for (i = vd->e[regno].oldest_regno; i != regno;
911 i = vd->e[i].next_regno)
912 {
913 new_rtx = maybe_mode_change (vd->e[i].mode, vd->e[regno].mode,
914 mode, i, regno);
915 if (new_rtx != NULL_RTX)
916 {
917 if (validate_change (insn, &SET_SRC (set), new_rtx, 0))
918 {
919 ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (src);
920 REG_ATTRS (new_rtx) = REG_ATTRS (src);
921 REG_POINTER (new_rtx) = REG_POINTER (src);
922 if (dump_file)
923 fprintf (dump_file,
924 "insn %u: replaced reg %u with %u\n",
925 INSN_UID (insn), regno, REGNO (new_rtx));
926 changed = true;
927 goto did_replacement;
928 }
929 /* We need to re-extract as validate_change clobbers
930 recog_data. */
931 extract_constrain_insn (insn);
932 preprocess_constraints (insn);
933 }
934 }
935 }
936 no_move_special_case:
937
938 any_replacements = false;
939
940 /* For each input operand, replace a hard register with the
941 eldest live copy that's in an appropriate register class. */
942 for (i = 0; i < n_ops; i++)
943 {
944 replaced[i] = false;
945
946 /* Don't scan match_operand here, since we've no reg class
947 information to pass down. Any operands that we could
948 substitute in will be represented elsewhere. */
949 if (recog_data.constraints[i][0] == '\0')
950 continue;
951
952 /* Don't replace in asms intentionally referencing hard regs. */
953 if (is_asm && REG_P (recog_data.operand[i])
954 && (REGNO (recog_data.operand[i])
955 == ORIGINAL_REGNO (recog_data.operand[i])))
956 continue;
957
958 if (recog_data.operand_type[i] == OP_IN)
959 {
960 if (op_alt[i].is_address)
961 replaced[i]
962 = replace_oldest_value_addr (recog_data.operand_loc[i],
963 alternative_class (op_alt, i),
964 VOIDmode, ADDR_SPACE_GENERIC,
965 insn, vd);
966 else if (REG_P (recog_data.operand[i]))
967 replaced[i]
968 = replace_oldest_value_reg (recog_data.operand_loc[i],
969 alternative_class (op_alt, i),
970 insn, vd);
971 else if (MEM_P (recog_data.operand[i]))
972 replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
973 insn, vd);
974 }
975 else if (MEM_P (recog_data.operand[i]))
976 replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
977 insn, vd);
978
979 /* If we performed any replacement, update match_dups. */
980 if (replaced[i])
981 {
982 int j;
983 rtx new_rtx;
984
985 new_rtx = *recog_data.operand_loc[i];
986 recog_data.operand[i] = new_rtx;
987 for (j = 0; j < recog_data.n_dups; j++)
988 if (recog_data.dup_num[j] == i)
989 validate_unshare_change (insn, recog_data.dup_loc[j], new_rtx, 1);
990
991 any_replacements = true;
992 }
993 }
994
995 if (any_replacements)
996 {
997 if (! apply_change_group ())
998 {
999 for (i = 0; i < n_ops; i++)
1000 if (replaced[i])
1001 {
1002 rtx old = *recog_data.operand_loc[i];
1003 recog_data.operand[i] = old;
1004 }
1005
1006 if (dump_file)
1007 fprintf (dump_file,
1008 "insn %u: reg replacements not verified\n",
1009 INSN_UID (insn));
1010 }
1011 else
1012 changed = true;
1013 }
1014
1015 did_replacement:
1016 if (changed)
1017 {
1018 anything_changed = true;
1019
1020 /* If something changed, perhaps further changes to earlier
1021 DEBUG_INSNs can be applied. */
1022 if (vd->n_debug_insn_changes)
1023 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
1024 }
1025
1026 ksvd.vd = vd;
1027 ksvd.ignore_set_reg = NULL_RTX;
1028
1029 /* Clobber call-clobbered registers. */
1030 if (CALL_P (insn))
1031 {
1032 unsigned int set_regno = INVALID_REGNUM;
1033 unsigned int set_nregs = 0;
1034 unsigned int regno;
1035 rtx exp;
1036 HARD_REG_SET regs_invalidated_by_this_call;
1037
1038 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
1039 {
1040 rtx x = XEXP (exp, 0);
1041 if (GET_CODE (x) == SET)
1042 {
1043 rtx dest = SET_DEST (x);
1044 kill_value (dest, vd);
1045 set_value_regno (REGNO (dest), GET_MODE (dest), vd);
1046 copy_value (dest, SET_SRC (x), vd);
1047 ksvd.ignore_set_reg = dest;
1048 set_regno = REGNO (dest);
1049 set_nregs = REG_NREGS (dest);
1050 break;
1051 }
1052 }
1053
1054 get_call_reg_set_usage (insn,
1055 &regs_invalidated_by_this_call,
1056 regs_invalidated_by_call);
1057 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1058 if ((TEST_HARD_REG_BIT (regs_invalidated_by_this_call, regno)
1059 || HARD_REGNO_CALL_PART_CLOBBERED (regno, vd->e[regno].mode))
1060 && (regno < set_regno || regno >= set_regno + set_nregs))
1061 kill_value_regno (regno, 1, vd);
1062
1063 /* If SET was seen in CALL_INSN_FUNCTION_USAGE, and SET_SRC
1064 of the SET isn't in regs_invalidated_by_call hard reg set,
1065 but instead among CLOBBERs on the CALL_INSN, we could wrongly
1066 assume the value in it is still live. */
1067 if (ksvd.ignore_set_reg)
1068 kill_clobbered_values (insn, vd);
1069 }
1070
1071 bool copy_p = (set
1072 && REG_P (SET_DEST (set))
1073 && REG_P (SET_SRC (set)));
1074 bool noop_p = (copy_p
1075 && rtx_equal_p (SET_DEST (set), SET_SRC (set)));
1076
1077 if (!noop_p)
1078 {
1079 /* Notice stores. */
1080 note_stores (PATTERN (insn), kill_set_value, &ksvd);
1081
1082 /* Notice copies. */
1083 if (copy_p)
1084 copy_value (SET_DEST (set), SET_SRC (set), vd);
1085 }
1086
1087 if (insn == BB_END (bb))
1088 break;
1089 }
1090
1091 return anything_changed;
1092 }
1093
1094 /* Dump the value chain data to stderr. */
1095
1096 DEBUG_FUNCTION void
1097 debug_value_data (struct value_data *vd)
1098 {
1099 HARD_REG_SET set;
1100 unsigned int i, j;
1101
1102 CLEAR_HARD_REG_SET (set);
1103
1104 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1105 if (vd->e[i].oldest_regno == i)
1106 {
1107 if (vd->e[i].mode == VOIDmode)
1108 {
1109 if (vd->e[i].next_regno != INVALID_REGNUM)
1110 fprintf (stderr, "[%u] Bad next_regno for empty chain (%u)\n",
1111 i, vd->e[i].next_regno);
1112 continue;
1113 }
1114
1115 SET_HARD_REG_BIT (set, i);
1116 fprintf (stderr, "[%u %s] ", i, GET_MODE_NAME (vd->e[i].mode));
1117
1118 for (j = vd->e[i].next_regno;
1119 j != INVALID_REGNUM;
1120 j = vd->e[j].next_regno)
1121 {
1122 if (TEST_HARD_REG_BIT (set, j))
1123 {
1124 fprintf (stderr, "[%u] Loop in regno chain\n", j);
1125 return;
1126 }
1127
1128 if (vd->e[j].oldest_regno != i)
1129 {
1130 fprintf (stderr, "[%u] Bad oldest_regno (%u)\n",
1131 j, vd->e[j].oldest_regno);
1132 return;
1133 }
1134 SET_HARD_REG_BIT (set, j);
1135 fprintf (stderr, "[%u %s] ", j, GET_MODE_NAME (vd->e[j].mode));
1136 }
1137 fputc ('\n', stderr);
1138 }
1139
1140 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1141 if (! TEST_HARD_REG_BIT (set, i)
1142 && (vd->e[i].mode != VOIDmode
1143 || vd->e[i].oldest_regno != i
1144 || vd->e[i].next_regno != INVALID_REGNUM))
1145 fprintf (stderr, "[%u] Non-empty reg in chain (%s %u %i)\n",
1146 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1147 vd->e[i].next_regno);
1148 }
1149
1150 /* Do copyprop_hardreg_forward_1 for a single basic block BB.
1151 DEBUG_INSNs are skipped, since we do not want to involve the DF-related
1152 machinery the way pass_cprop_hardreg::execute does.
1153
1154 NOTE: Currently this is only used for shrink-wrapping. It could be
1155 extended to handle DEBUG_INSNs for other uses. */
1156
1157 void
1158 copyprop_hardreg_forward_bb_without_debug_insn (basic_block bb)
1159 {
1160 struct value_data *vd;
1161 vd = XNEWVEC (struct value_data, 1);
1162 init_value_data (vd);
1163
1164 skip_debug_insn_p = true;
1165 copyprop_hardreg_forward_1 (bb, vd);
1166 free (vd);
1167 skip_debug_insn_p = false;
1168 }
1169
1170 #ifdef ENABLE_CHECKING
1171 static void
1172 validate_value_data (struct value_data *vd)
1173 {
1174 HARD_REG_SET set;
1175 unsigned int i, j;
1176
1177 CLEAR_HARD_REG_SET (set);
1178
1179 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1180 if (vd->e[i].oldest_regno == i)
1181 {
1182 if (vd->e[i].mode == VOIDmode)
1183 {
1184 if (vd->e[i].next_regno != INVALID_REGNUM)
1185 internal_error ("validate_value_data: [%u] Bad next_regno for empty chain (%u)",
1186 i, vd->e[i].next_regno);
1187 continue;
1188 }
1189
1190 SET_HARD_REG_BIT (set, i);
1191
1192 for (j = vd->e[i].next_regno;
1193 j != INVALID_REGNUM;
1194 j = vd->e[j].next_regno)
1195 {
1196 if (TEST_HARD_REG_BIT (set, j))
1197 internal_error ("validate_value_data: Loop in regno chain (%u)",
1198 j);
1199 if (vd->e[j].oldest_regno != i)
1200 internal_error ("validate_value_data: [%u] Bad oldest_regno (%u)",
1201 j, vd->e[j].oldest_regno);
1202
1203 SET_HARD_REG_BIT (set, j);
1204 }
1205 }
1206
1207 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1208 if (! TEST_HARD_REG_BIT (set, i)
1209 && (vd->e[i].mode != VOIDmode
1210 || vd->e[i].oldest_regno != i
1211 || vd->e[i].next_regno != INVALID_REGNUM))
1212 internal_error ("validate_value_data: [%u] Non-empty reg in chain (%s %u %i)",
1213 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1214 vd->e[i].next_regno);
1215 }
1216 #endif
1217 \f
1218 namespace {
1219
1220 const pass_data pass_data_cprop_hardreg =
1221 {
1222 RTL_PASS, /* type */
1223 "cprop_hardreg", /* name */
1224 OPTGROUP_NONE, /* optinfo_flags */
1225 TV_CPROP_REGISTERS, /* tv_id */
1226 0, /* properties_required */
1227 0, /* properties_provided */
1228 0, /* properties_destroyed */
1229 0, /* todo_flags_start */
1230 TODO_df_finish, /* todo_flags_finish */
1231 };
1232
1233 class pass_cprop_hardreg : public rtl_opt_pass
1234 {
1235 public:
1236 pass_cprop_hardreg (gcc::context *ctxt)
1237 : rtl_opt_pass (pass_data_cprop_hardreg, ctxt)
1238 {}
1239
1240 /* opt_pass methods: */
1241 virtual bool gate (function *)
1242 {
1243 return (optimize > 0 && (flag_cprop_registers));
1244 }
1245
1246 virtual unsigned int execute (function *);
1247
1248 }; // class pass_cprop_hardreg
1249
1250 unsigned int
1251 pass_cprop_hardreg::execute (function *fun)
1252 {
1253 struct value_data *all_vd;
1254 basic_block bb;
1255 sbitmap visited;
1256 bool analyze_called = false;
1257
1258 all_vd = XNEWVEC (struct value_data, last_basic_block_for_fn (fun));
1259
1260 visited = sbitmap_alloc (last_basic_block_for_fn (fun));
1261 bitmap_clear (visited);
1262
1263 FOR_EACH_BB_FN (bb, fun)
1264 {
1265 bitmap_set_bit (visited, bb->index);
1266
1267 /* If a block has a single predecessor, that we've already
1268 processed, begin with the value data that was live at
1269 the end of the predecessor block. */
1270 /* ??? Ought to use more intelligent queuing of blocks. */
1271 if (single_pred_p (bb)
1272 && bitmap_bit_p (visited, single_pred (bb)->index)
1273 && ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
1274 {
1275 all_vd[bb->index] = all_vd[single_pred (bb)->index];
1276 if (all_vd[bb->index].n_debug_insn_changes)
1277 {
1278 unsigned int regno;
1279
1280 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1281 {
1282 if (all_vd[bb->index].e[regno].debug_insn_changes)
1283 {
1284 all_vd[bb->index].e[regno].debug_insn_changes = NULL;
1285 if (--all_vd[bb->index].n_debug_insn_changes == 0)
1286 break;
1287 }
1288 }
1289 }
1290 }
1291 else
1292 init_value_data (all_vd + bb->index);
1293
1294 copyprop_hardreg_forward_1 (bb, all_vd + bb->index);
1295 }
1296
1297 if (MAY_HAVE_DEBUG_INSNS)
1298 {
1299 FOR_EACH_BB_FN (bb, fun)
1300 if (bitmap_bit_p (visited, bb->index)
1301 && all_vd[bb->index].n_debug_insn_changes)
1302 {
1303 unsigned int regno;
1304 bitmap live;
1305
1306 if (!analyze_called)
1307 {
1308 df_analyze ();
1309 analyze_called = true;
1310 }
1311 live = df_get_live_out (bb);
1312 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1313 if (all_vd[bb->index].e[regno].debug_insn_changes)
1314 {
1315 if (REGNO_REG_SET_P (live, regno))
1316 apply_debug_insn_changes (all_vd + bb->index, regno);
1317 if (all_vd[bb->index].n_debug_insn_changes == 0)
1318 break;
1319 }
1320 }
1321
1322 queued_debug_insn_change::pool.release ();
1323 }
1324
1325 sbitmap_free (visited);
1326 free (all_vd);
1327 return 0;
1328 }
1329
1330 } // anon namespace
1331
1332 rtl_opt_pass *
1333 make_pass_cprop_hardreg (gcc::context *ctxt)
1334 {
1335 return new pass_cprop_hardreg (ctxt);
1336 }