1 /* Copy propagation on hard registers for the GNU compiler.
2 Copyright (C) 2000-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "tm_p.h"
26 #include "insn-config.h"
27 #include "regs.h"
28 #include "addresses.h"
29 #include "hard-reg-set.h"
30 #include "predict.h"
31 #include "vec.h"
32 #include "hashtab.h"
33 #include "hash-set.h"
34 #include "input.h"
35 #include "function.h"
36 #include "dominance.h"
37 #include "cfg.h"
38 #include "basic-block.h"
39 #include "reload.h"
40 #include "recog.h"
41 #include "flags.h"
42 #include "diagnostic-core.h"
43 #include "obstack.h"
44 #include "tree-pass.h"
45 #include "df.h"
46 #include "rtl-iter.h"
47
48 /* The following code does forward propagation of hard register copies.
49 The object is to eliminate as many dependencies as possible, so that
50 we have the most scheduling freedom. As a side effect, we also clean
51 up some silly register allocation decisions made by reload. This
52 code may be obsoleted by a new register allocator. */
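
/* As an illustrative example (not from the original sources), given a
   hard-register copy followed by a use of the destination:

     (set (reg:SI 1) (reg:SI 0))
     (set (mem:SI (reg:SI 2)) (reg:SI 1))

   the pass rewrites the second insn to read the older register:

     (set (mem:SI (reg:SI 2)) (reg:SI 0))

   so the store no longer depends on the copy insn.  */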
53
54 /* DEBUG_INSNs aren't changed right away, as doing so might extend the
55 lifetime of a register and get the DEBUG_INSN subsequently reset.
56 So they are queued instead, and updated only when the register is
57 used in some subsequent real insn before it is set. */
58 struct queued_debug_insn_change
59 {
60 struct queued_debug_insn_change *next;
61 rtx_insn *insn;
62 rtx *loc;
63 rtx new_rtx;
64
65 /* Pool allocation new operator. */
66 inline void *operator new (size_t)
67 {
68 return pool.allocate ();
69 }
70
71 /* Delete operator utilizing pool allocation. */
72 inline void operator delete (void *ptr)
73 {
74 pool.remove ((queued_debug_insn_change *) ptr);
75 }
76
77 /* Memory allocation pool. */
78 static pool_allocator<queued_debug_insn_change> pool;
79 };
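
/* Usage sketch (illustrative): `new queued_debug_insn_change' allocates
   from the shared pool defined below, and `delete' returns an object to
   it; the whole pool is released at once at the end of
   pass_cprop_hardreg::execute via queued_debug_insn_change::pool.release ().  */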
80
81 /* For each register, we have a list of registers that contain the same
82 value. The OLDEST_REGNO field points to the head of the list, and
83 the NEXT_REGNO field runs through the list. The MODE field indicates
84 what mode the data is known to be in; this field is VOIDmode when the
85 register is not known to contain valid data. */
86
87 struct value_data_entry
88 {
89 machine_mode mode;
90 unsigned int oldest_regno;
91 unsigned int next_regno;
92 struct queued_debug_insn_change *debug_insn_changes;
93 };
94
95 struct value_data
96 {
97 struct value_data_entry e[FIRST_PSEUDO_REGISTER];
98 unsigned int max_value_regs;
99 unsigned int n_debug_insn_changes;
100 };
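
/* Illustrative example of the chain encoding: after processing

     (set (reg:SI 1) (reg:SI 0))
     (set (reg:SI 2) (reg:SI 1))

   registers 0, 1 and 2 form a single chain with e[0].next_regno == 1,
   e[1].next_regno == 2, and oldest_regno == 0 in all three entries.  */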
101
102 pool_allocator<queued_debug_insn_change> queued_debug_insn_change::pool
103 ("debug insn changes pool", 256);
104
105 static bool skip_debug_insn_p;
106
107 static void kill_value_one_regno (unsigned, struct value_data *);
108 static void kill_value_regno (unsigned, unsigned, struct value_data *);
109 static void kill_value (const_rtx, struct value_data *);
110 static void set_value_regno (unsigned, machine_mode, struct value_data *);
111 static void init_value_data (struct value_data *);
112 static void kill_clobbered_value (rtx, const_rtx, void *);
113 static void kill_set_value (rtx, const_rtx, void *);
114 static void copy_value (rtx, rtx, struct value_data *);
115 static bool mode_change_ok (machine_mode, machine_mode,
116 unsigned int);
117 static rtx maybe_mode_change (machine_mode, machine_mode,
118 machine_mode, unsigned int, unsigned int);
119 static rtx find_oldest_value_reg (enum reg_class, rtx, struct value_data *);
120 static bool replace_oldest_value_reg (rtx *, enum reg_class, rtx_insn *,
121 struct value_data *);
122 static bool replace_oldest_value_addr (rtx *, enum reg_class,
123 machine_mode, addr_space_t,
124 rtx_insn *, struct value_data *);
125 static bool replace_oldest_value_mem (rtx, rtx_insn *, struct value_data *);
126 static bool copyprop_hardreg_forward_1 (basic_block, struct value_data *);
127 extern void debug_value_data (struct value_data *);
128 #ifdef ENABLE_CHECKING
129 static void validate_value_data (struct value_data *);
130 #endif
131
132 /* Free all queued updates for DEBUG_INSNs that change some reg to
133 register REGNO. */
134
135 static void
136 free_debug_insn_changes (struct value_data *vd, unsigned int regno)
137 {
138 struct queued_debug_insn_change *cur, *next;
139 for (cur = vd->e[regno].debug_insn_changes; cur; cur = next)
140 {
141 next = cur->next;
142 --vd->n_debug_insn_changes;
143 delete cur;
144 }
145 vd->e[regno].debug_insn_changes = NULL;
146 }
147
148 /* Kill register REGNO. This involves removing it from any value
149 lists, and resetting the value mode to VOIDmode. This is only a
150 helper function; it does not handle any hard registers overlapping
151 with REGNO. */
152
153 static void
154 kill_value_one_regno (unsigned int regno, struct value_data *vd)
155 {
156 unsigned int i, next;
157
158 if (vd->e[regno].oldest_regno != regno)
159 {
160 for (i = vd->e[regno].oldest_regno;
161 vd->e[i].next_regno != regno;
162 i = vd->e[i].next_regno)
163 continue;
164 vd->e[i].next_regno = vd->e[regno].next_regno;
165 }
166 else if ((next = vd->e[regno].next_regno) != INVALID_REGNUM)
167 {
168 for (i = next; i != INVALID_REGNUM; i = vd->e[i].next_regno)
169 vd->e[i].oldest_regno = next;
170 }
171
172 vd->e[regno].mode = VOIDmode;
173 vd->e[regno].oldest_regno = regno;
174 vd->e[regno].next_regno = INVALID_REGNUM;
175 if (vd->e[regno].debug_insn_changes)
176 free_debug_insn_changes (vd, regno);
177
178 #ifdef ENABLE_CHECKING
179 validate_value_data (vd);
180 #endif
181 }
182
183 /* Kill the value in register REGNO for NREGS, and any other registers
184 whose values overlap. */
185
186 static void
187 kill_value_regno (unsigned int regno, unsigned int nregs,
188 struct value_data *vd)
189 {
190 unsigned int j;
191
192 /* Kill the value we're told to kill. */
193 for (j = 0; j < nregs; ++j)
194 kill_value_one_regno (regno + j, vd);
195
196 /* Kill everything that overlapped what we're told to kill. */
197 if (regno < vd->max_value_regs)
198 j = 0;
199 else
200 j = regno - vd->max_value_regs;
201 for (; j < regno; ++j)
202 {
203 unsigned int i, n;
204 if (vd->e[j].mode == VOIDmode)
205 continue;
206 n = hard_regno_nregs[j][vd->e[j].mode];
207 if (j + n > regno)
208 for (i = 0; i < n; ++i)
209 kill_value_one_regno (j + i, vd);
210 }
211 }
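
/* Illustrative example (assuming a target where DImode spans two hard
   registers): if register 0 holds a DImode value covering registers 0
   and 1, then kill_value_regno (1, 1, vd) must also kill register 0,
   since part of its recorded value dies; the backward scan above finds
   such overlapping registers using max_value_regs.  */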
212
213 /* Kill X.  This is a convenience function wrapping kill_value_regno
214 so that we take into account the mode the register is in. */
215
216 static void
217 kill_value (const_rtx x, struct value_data *vd)
218 {
219 if (GET_CODE (x) == SUBREG)
220 {
221 rtx tmp = simplify_subreg (GET_MODE (x), SUBREG_REG (x),
222 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
223 x = tmp ? tmp : SUBREG_REG (x);
224 }
225 if (REG_P (x))
226 kill_value_regno (REGNO (x), REG_NREGS (x), vd);
227 }
228
229 /* Remember that REGNO is valid in MODE. */
230
231 static void
232 set_value_regno (unsigned int regno, machine_mode mode,
233 struct value_data *vd)
234 {
235 unsigned int nregs;
236
237 vd->e[regno].mode = mode;
238
239 nregs = hard_regno_nregs[regno][mode];
240 if (nregs > vd->max_value_regs)
241 vd->max_value_regs = nregs;
242 }
243
244 /* Initialize VD such that there are no known relationships between regs. */
245
246 static void
247 init_value_data (struct value_data *vd)
248 {
249 int i;
250 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
251 {
252 vd->e[i].mode = VOIDmode;
253 vd->e[i].oldest_regno = i;
254 vd->e[i].next_regno = INVALID_REGNUM;
255 vd->e[i].debug_insn_changes = NULL;
256 }
257 vd->max_value_regs = 0;
258 vd->n_debug_insn_changes = 0;
259 }
260
261 /* Called through note_stores. If X is clobbered, kill its value. */
262
263 static void
264 kill_clobbered_value (rtx x, const_rtx set, void *data)
265 {
266 struct value_data *const vd = (struct value_data *) data;
267 if (GET_CODE (set) == CLOBBER)
268 kill_value (x, vd);
269 }
270
271 /* A structure passed as data to kill_set_value through note_stores. */
272 struct kill_set_value_data
273 {
274 struct value_data *vd;
275 rtx ignore_set_reg;
276 };
277
278 /* Called through note_stores. If X is set, not clobbered, kill its
279 current value and install it as the root of its own value list. */
280
281 static void
282 kill_set_value (rtx x, const_rtx set, void *data)
283 {
284 struct kill_set_value_data *ksvd = (struct kill_set_value_data *) data;
285 if (rtx_equal_p (x, ksvd->ignore_set_reg))
286 return;
287 if (GET_CODE (set) != CLOBBER)
288 {
289 kill_value (x, ksvd->vd);
290 if (REG_P (x))
291 set_value_regno (REGNO (x), GET_MODE (x), ksvd->vd);
292 }
293 }
294
295 /* Kill any register used in X as the base of an auto-increment expression,
296 and install that register as the root of its own value list. */
297
298 static void
299 kill_autoinc_value (rtx_insn *insn, struct value_data *vd)
300 {
301 subrtx_iterator::array_type array;
302 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
303 {
304 const_rtx x = *iter;
305 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
306 {
307 x = XEXP (x, 0);
308 kill_value (x, vd);
309 set_value_regno (REGNO (x), GET_MODE (x), vd);
310 iter.skip_subrtxes ();
311 }
312 }
313 }
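
/* Illustrative example: for an insn containing

     (set (reg:SI 3) (mem:SI (post_inc:SI (reg:SI 2))))

   the post_inc modifies register 2, so its old value is killed and the
   register becomes the root of a fresh value of its own.  */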
314
315 /* Assert that SRC has been copied to DEST. Adjust the data structures
316 to reflect that SRC contains an older copy of the shared value. */
317
318 static void
319 copy_value (rtx dest, rtx src, struct value_data *vd)
320 {
321 unsigned int dr = REGNO (dest);
322 unsigned int sr = REGNO (src);
323 unsigned int dn, sn;
324 unsigned int i;
325
326 /* ??? At present, it's possible to see noop sets. It'd be nice if
327 this were cleaned up beforehand... */
328 if (sr == dr)
329 return;
330
331 /* Do not propagate copies to the stack pointer, as that can leave
332 memory accesses with no scheduling dependency on the stack update. */
333 if (dr == STACK_POINTER_REGNUM)
334 return;
335
336 /* Likewise with the frame pointer, if we're using one. */
337 if (frame_pointer_needed && dr == HARD_FRAME_POINTER_REGNUM)
338 return;
339
340 /* Do not propagate copies to fixed or global registers: patterns
341 may rely on seeing a particular fixed register, and users may
342 expect the chosen global register in an asm. */
343 if (fixed_regs[dr] || global_regs[dr])
344 return;
345
346 /* If SRC and DEST overlap, don't record anything. */
347 dn = REG_NREGS (dest);
348 sn = REG_NREGS (src);
349 if ((dr > sr && dr < sr + sn)
350 || (sr > dr && sr < dr + dn))
351 return;
352
353 /* If SRC had no assigned mode (i.e. we didn't know it was live)
354 assign it now and assume the value came from an input argument
355 or somesuch. */
356 if (vd->e[sr].mode == VOIDmode)
357 set_value_regno (sr, vd->e[dr].mode, vd);
358
359 /* If we are narrowing the input to a smaller number of hard regs,
360 and it is in big endian, we are really extracting a high part.
361 Since we generally associate a low part of a value with the value itself,
362 we must not do the same for the high part.
363 Note we can still get low parts for the same mode combination through
364 a two-step copy involving differently sized hard regs.
365 Assume hard regs fr* are 32 bits each, while r* are 64 bits each:
366 (set (reg:DI r0) (reg:DI fr0))
367 (set (reg:SI fr2) (reg:SI r0))
368 loads the low part of (reg:DI fr0) - i.e. fr1 - into fr2, while:
369 (set (reg:SI fr2) (reg:SI fr0))
370 loads the high part of (reg:DI fr0) into fr2.
371
372 We can't properly represent the latter case in our tables, so don't
373 record anything then. */
374 else if (sn < (unsigned int) hard_regno_nregs[sr][vd->e[sr].mode]
375 && (GET_MODE_SIZE (vd->e[sr].mode) > UNITS_PER_WORD
376 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
377 return;
378
379 /* If SRC had been assigned a mode narrower than the copy, we can't
380 link DEST into the chain, because not all of the pieces of the
381 copy came from oldest_regno. */
382 else if (sn > (unsigned int) hard_regno_nregs[sr][vd->e[sr].mode])
383 return;
384
385 /* Link DR at the end of the value chain used by SR. */
386
387 vd->e[dr].oldest_regno = vd->e[sr].oldest_regno;
388
389 for (i = sr; vd->e[i].next_regno != INVALID_REGNUM; i = vd->e[i].next_regno)
390 continue;
391 vd->e[i].next_regno = dr;
392
393 #ifdef ENABLE_CHECKING
394 validate_value_data (vd);
395 #endif
396 }
397
398 /* Return true if a mode change from ORIG to NEW is allowed for REGNO. */
399
400 static bool
401 mode_change_ok (machine_mode orig_mode, machine_mode new_mode,
402 unsigned int regno ATTRIBUTE_UNUSED)
403 {
404 if (GET_MODE_SIZE (orig_mode) < GET_MODE_SIZE (new_mode))
405 return false;
406
407 #ifdef CANNOT_CHANGE_MODE_CLASS
408 return !REG_CANNOT_CHANGE_MODE_P (regno, orig_mode, new_mode);
409 #endif
410
411 return true;
412 }
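
/* Illustrative example: accessing a DImode value in SImode is a
   narrowing change and passes the size check above, whereas accessing
   an SImode value in DImode would widen it and is rejected.  Targets
   that define CANNOT_CHANGE_MODE_CLASS can veto further combinations
   through REG_CANNOT_CHANGE_MODE_P.  */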
413
414 /* Register REGNO was originally set in ORIG_MODE. It - or a copy of it -
415 was copied in COPY_MODE to COPY_REGNO, and then COPY_REGNO was accessed
416 in NEW_MODE.
417 Return a NEW_MODE rtx for REGNO if that's OK, otherwise return NULL_RTX. */
418
419 static rtx
420 maybe_mode_change (machine_mode orig_mode, machine_mode copy_mode,
421 machine_mode new_mode, unsigned int regno,
422 unsigned int copy_regno ATTRIBUTE_UNUSED)
423 {
424 if (GET_MODE_SIZE (copy_mode) < GET_MODE_SIZE (orig_mode)
425 && GET_MODE_SIZE (copy_mode) < GET_MODE_SIZE (new_mode))
426 return NULL_RTX;
427
428 if (orig_mode == new_mode)
429 return gen_raw_REG (new_mode, regno);
430 else if (mode_change_ok (orig_mode, new_mode, regno))
431 {
432 int copy_nregs = hard_regno_nregs[copy_regno][copy_mode];
433 int use_nregs = hard_regno_nregs[copy_regno][new_mode];
434 int copy_offset
435 = GET_MODE_SIZE (copy_mode) / copy_nregs * (copy_nregs - use_nregs);
436 int offset
437 = GET_MODE_SIZE (orig_mode) - GET_MODE_SIZE (new_mode) - copy_offset;
438 int byteoffset = offset % UNITS_PER_WORD;
439 int wordoffset = offset - byteoffset;
440
441 offset = ((WORDS_BIG_ENDIAN ? wordoffset : 0)
442 + (BYTES_BIG_ENDIAN ? byteoffset : 0));
443 regno += subreg_regno_offset (regno, orig_mode, offset, new_mode);
444 if (HARD_REGNO_MODE_OK (regno, new_mode))
445 return gen_raw_REG (new_mode, regno);
446 }
447 return NULL_RTX;
448 }
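
/* Worked example (illustrative, assuming 4-byte SImode hard registers,
   UNITS_PER_WORD == 4 and a little-endian target): with ORIG_MODE and
   COPY_MODE both DImode and NEW_MODE SImode, copy_nregs == 2 and
   use_nregs == 1, so copy_offset == 4 and offset == 8 - 4 - 4 == 0;
   subreg_regno_offset then adds 0 to REGNO and a fresh (reg:SI REGNO)
   referring to the low word of the original value is returned.  */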
449
450 /* Find the oldest copy of the value contained in REGNO that is in
451 register class CL and has mode MODE. If found, return an rtx
452 of that oldest register, otherwise return NULL. */
453
454 static rtx
455 find_oldest_value_reg (enum reg_class cl, rtx reg, struct value_data *vd)
456 {
457 unsigned int regno = REGNO (reg);
458 machine_mode mode = GET_MODE (reg);
459 unsigned int i;
460
461 /* If we are accessing REG in some mode other than what we set it in,
462 make sure that the replacement is valid. In particular, consider
463 (set (reg:DI r11) (...))
464 (set (reg:SI r9) (reg:SI r11))
465 (set (reg:SI r10) (...))
466 (set (...) (reg:DI r9))
467 Replacing r9 with r11 is invalid. */
468 if (mode != vd->e[regno].mode)
469 {
470 if (hard_regno_nregs[regno][mode]
471 > hard_regno_nregs[regno][vd->e[regno].mode])
472 return NULL_RTX;
473 }
474
475 for (i = vd->e[regno].oldest_regno; i != regno; i = vd->e[i].next_regno)
476 {
477 machine_mode oldmode = vd->e[i].mode;
478 rtx new_rtx;
479
480 if (!in_hard_reg_set_p (reg_class_contents[cl], mode, i))
481 continue;
482
483 new_rtx = maybe_mode_change (oldmode, vd->e[regno].mode, mode, i, regno);
484 if (new_rtx)
485 {
486 ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (reg);
487 REG_ATTRS (new_rtx) = REG_ATTRS (reg);
488 REG_POINTER (new_rtx) = REG_POINTER (reg);
489 return new_rtx;
490 }
491 }
492
493 return NULL_RTX;
494 }
495
496 /* If possible, replace the register at *LOC with the oldest register
497 in register class CL. Return true if successfully replaced. */
498
499 static bool
500 replace_oldest_value_reg (rtx *loc, enum reg_class cl, rtx_insn *insn,
501 struct value_data *vd)
502 {
503 rtx new_rtx = find_oldest_value_reg (cl, *loc, vd);
504 if (new_rtx && (!DEBUG_INSN_P (insn) || !skip_debug_insn_p))
505 {
506 if (DEBUG_INSN_P (insn))
507 {
508 struct queued_debug_insn_change *change;
509
510 if (dump_file)
511 fprintf (dump_file, "debug_insn %u: queued replacing reg %u with %u\n",
512 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
513
514 change = new queued_debug_insn_change;
515 change->next = vd->e[REGNO (new_rtx)].debug_insn_changes;
516 change->insn = insn;
517 change->loc = loc;
518 change->new_rtx = new_rtx;
519 vd->e[REGNO (new_rtx)].debug_insn_changes = change;
520 ++vd->n_debug_insn_changes;
521 return true;
522 }
523 if (dump_file)
524 fprintf (dump_file, "insn %u: replaced reg %u with %u\n",
525 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
526
527 validate_change (insn, loc, new_rtx, 1);
528 return true;
529 }
530 return false;
531 }
532
533 /* Similar to replace_oldest_value_reg, but *LOC contains an address.
534 Adapted from find_reloads_address_1. CL is INDEX_REG_CLASS or
535 BASE_REG_CLASS depending on how the register is being considered. */
536
537 static bool
538 replace_oldest_value_addr (rtx *loc, enum reg_class cl,
539 machine_mode mode, addr_space_t as,
540 rtx_insn *insn, struct value_data *vd)
541 {
542 rtx x = *loc;
543 RTX_CODE code = GET_CODE (x);
544 const char *fmt;
545 int i, j;
546 bool changed = false;
547
548 switch (code)
549 {
550 case PLUS:
551 if (DEBUG_INSN_P (insn))
552 break;
553
554 {
555 rtx orig_op0 = XEXP (x, 0);
556 rtx orig_op1 = XEXP (x, 1);
557 RTX_CODE code0 = GET_CODE (orig_op0);
558 RTX_CODE code1 = GET_CODE (orig_op1);
559 rtx op0 = orig_op0;
560 rtx op1 = orig_op1;
561 rtx *locI = NULL;
562 rtx *locB = NULL;
563 enum rtx_code index_code = SCRATCH;
564
565 if (GET_CODE (op0) == SUBREG)
566 {
567 op0 = SUBREG_REG (op0);
568 code0 = GET_CODE (op0);
569 }
570
571 if (GET_CODE (op1) == SUBREG)
572 {
573 op1 = SUBREG_REG (op1);
574 code1 = GET_CODE (op1);
575 }
576
577 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
578 || code0 == ZERO_EXTEND || code1 == MEM)
579 {
580 locI = &XEXP (x, 0);
581 locB = &XEXP (x, 1);
582 index_code = GET_CODE (*locI);
583 }
584 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
585 || code1 == ZERO_EXTEND || code0 == MEM)
586 {
587 locI = &XEXP (x, 1);
588 locB = &XEXP (x, 0);
589 index_code = GET_CODE (*locI);
590 }
591 else if (code0 == CONST_INT || code0 == CONST
592 || code0 == SYMBOL_REF || code0 == LABEL_REF)
593 {
594 locB = &XEXP (x, 1);
595 index_code = GET_CODE (XEXP (x, 0));
596 }
597 else if (code1 == CONST_INT || code1 == CONST
598 || code1 == SYMBOL_REF || code1 == LABEL_REF)
599 {
600 locB = &XEXP (x, 0);
601 index_code = GET_CODE (XEXP (x, 1));
602 }
603 else if (code0 == REG && code1 == REG)
604 {
605 int index_op;
606 unsigned regno0 = REGNO (op0), regno1 = REGNO (op1);
607
608 if (REGNO_OK_FOR_INDEX_P (regno1)
609 && regno_ok_for_base_p (regno0, mode, as, PLUS, REG))
610 index_op = 1;
611 else if (REGNO_OK_FOR_INDEX_P (regno0)
612 && regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
613 index_op = 0;
614 else if (regno_ok_for_base_p (regno0, mode, as, PLUS, REG)
615 || REGNO_OK_FOR_INDEX_P (regno1))
616 index_op = 1;
617 else if (regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
618 index_op = 0;
619 else
620 index_op = 1;
621
622 locI = &XEXP (x, index_op);
623 locB = &XEXP (x, !index_op);
624 index_code = GET_CODE (*locI);
625 }
626 else if (code0 == REG)
627 {
628 locI = &XEXP (x, 0);
629 locB = &XEXP (x, 1);
630 index_code = GET_CODE (*locI);
631 }
632 else if (code1 == REG)
633 {
634 locI = &XEXP (x, 1);
635 locB = &XEXP (x, 0);
636 index_code = GET_CODE (*locI);
637 }
638
639 if (locI)
640 changed |= replace_oldest_value_addr (locI, INDEX_REG_CLASS,
641 mode, as, insn, vd);
642 if (locB)
643 changed |= replace_oldest_value_addr (locB,
644 base_reg_class (mode, as, PLUS,
645 index_code),
646 mode, as, insn, vd);
647 return changed;
648 }
649
650 case POST_INC:
651 case POST_DEC:
652 case POST_MODIFY:
653 case PRE_INC:
654 case PRE_DEC:
655 case PRE_MODIFY:
656 return false;
657
658 case MEM:
659 return replace_oldest_value_mem (x, insn, vd);
660
661 case REG:
662 return replace_oldest_value_reg (loc, cl, insn, vd);
663
664 default:
665 break;
666 }
667
668 fmt = GET_RTX_FORMAT (code);
669 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
670 {
671 if (fmt[i] == 'e')
672 changed |= replace_oldest_value_addr (&XEXP (x, i), cl, mode, as,
673 insn, vd);
674 else if (fmt[i] == 'E')
675 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
676 changed |= replace_oldest_value_addr (&XVECEXP (x, i, j), cl,
677 mode, as, insn, vd);
678 }
679
680 return changed;
681 }
682
683 /* Similar to replace_oldest_value_reg, but X contains a memory. */
684
685 static bool
686 replace_oldest_value_mem (rtx x, rtx_insn *insn, struct value_data *vd)
687 {
688 enum reg_class cl;
689
690 if (DEBUG_INSN_P (insn))
691 cl = ALL_REGS;
692 else
693 cl = base_reg_class (GET_MODE (x), MEM_ADDR_SPACE (x), MEM, SCRATCH);
694
695 return replace_oldest_value_addr (&XEXP (x, 0), cl,
696 GET_MODE (x), MEM_ADDR_SPACE (x),
697 insn, vd);
698 }
699
700 /* Apply all queued updates for DEBUG_INSNs that change some reg to
701 register REGNO. */
702
703 static void
704 apply_debug_insn_changes (struct value_data *vd, unsigned int regno)
705 {
706 struct queued_debug_insn_change *change;
707 rtx_insn *last_insn = vd->e[regno].debug_insn_changes->insn;
708
709 for (change = vd->e[regno].debug_insn_changes;
710 change;
711 change = change->next)
712 {
713 if (last_insn != change->insn)
714 {
715 apply_change_group ();
716 last_insn = change->insn;
717 }
718 validate_change (change->insn, change->loc, change->new_rtx, 1);
719 }
720 apply_change_group ();
721 }
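
/* Note (illustrative): the changes are queued with the in-group form of
   validate_change and committed with apply_change_group at each insn
   boundary, so all replacements within one DEBUG_INSN are validated
   together.  */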
722
723 /* Called via note_uses.  For each register used in a real insn, apply
724 any queued DEBUG_INSN changes that replace some register with that
725 used register. */
726
727 static void
728 cprop_find_used_regs (rtx *loc, void *data)
729 {
730 struct value_data *const vd = (struct value_data *) data;
731 subrtx_iterator::array_type array;
732 FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
733 {
734 const_rtx x = *iter;
735 if (REG_P (x))
736 {
737 unsigned int regno = REGNO (x);
738 if (vd->e[regno].debug_insn_changes)
739 {
740 apply_debug_insn_changes (vd, regno);
741 free_debug_insn_changes (vd, regno);
742 }
743 }
744 }
745 }
746
747 /* Apply clobbers of INSN in PATTERN and C_I_F_U to value_data VD. */
748
749 static void
750 kill_clobbered_values (rtx_insn *insn, struct value_data *vd)
751 {
752 note_stores (PATTERN (insn), kill_clobbered_value, vd);
753
754 if (CALL_P (insn))
755 {
756 rtx exp;
757
758 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
759 {
760 rtx x = XEXP (exp, 0);
761 if (GET_CODE (x) == CLOBBER)
762 kill_value (SET_DEST (x), vd);
763 }
764 }
765 }
766
767 /* Perform the forward copy propagation on basic block BB. */
768
769 static bool
770 copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
771 {
772 bool anything_changed = false;
773 rtx_insn *insn;
774
775 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
776 {
777 int n_ops, i, predicated;
778 bool is_asm, any_replacements;
779 rtx set;
780 rtx link;
781 bool replaced[MAX_RECOG_OPERANDS];
782 bool changed = false;
783 struct kill_set_value_data ksvd;
784
785 if (!NONDEBUG_INSN_P (insn))
786 {
787 if (DEBUG_INSN_P (insn))
788 {
789 rtx loc = INSN_VAR_LOCATION_LOC (insn);
790 if (!VAR_LOC_UNKNOWN_P (loc))
791 replace_oldest_value_addr (&INSN_VAR_LOCATION_LOC (insn),
792 ALL_REGS, GET_MODE (loc),
793 ADDR_SPACE_GENERIC, insn, vd);
794 }
795
796 if (insn == BB_END (bb))
797 break;
798 else
799 continue;
800 }
801
802 set = single_set (insn);
803 extract_constrain_insn (insn);
804 preprocess_constraints (insn);
805 const operand_alternative *op_alt = which_op_alt ();
806 n_ops = recog_data.n_operands;
807 is_asm = asm_noperands (PATTERN (insn)) >= 0;
808
809 /* Simplify the code below by promoting OP_OUT to OP_INOUT
810 in predicated instructions. */
811
812 predicated = GET_CODE (PATTERN (insn)) == COND_EXEC;
813 for (i = 0; i < n_ops; ++i)
814 {
815 int matches = op_alt[i].matches;
816 if (matches >= 0 || op_alt[i].matched >= 0
817 || (predicated && recog_data.operand_type[i] == OP_OUT))
818 recog_data.operand_type[i] = OP_INOUT;
819 }
820
821 /* Apply changes to earlier DEBUG_INSNs if possible. */
822 if (vd->n_debug_insn_changes)
823 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
824
825 /* For each earlyclobber operand, zap the value data. */
826 for (i = 0; i < n_ops; i++)
827 if (op_alt[i].earlyclobber)
828 kill_value (recog_data.operand[i], vd);
829
830 /* Within asms, a clobber cannot overlap inputs or outputs.
831 I wouldn't think this were true for regular insns, but
832 scan_rtx treats them like that... */
833 kill_clobbered_values (insn, vd);
834
835 /* Kill all auto-incremented values. */
836 /* ??? REG_INC is useless, since stack pushes aren't done that way. */
837 kill_autoinc_value (insn, vd);
838
839 /* Kill all early-clobbered operands. */
840 for (i = 0; i < n_ops; i++)
841 if (op_alt[i].earlyclobber)
842 kill_value (recog_data.operand[i], vd);
843
844 /* If we have dead sets in the insn, then we need to note these as
845 if they were clobbers. */
846 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
847 {
848 if (REG_NOTE_KIND (link) == REG_UNUSED)
849 {
850 kill_value (XEXP (link, 0), vd);
851 /* Furthermore, if the insn looked like a single-set,
852 but the dead store kills the source value of that
853 set, then we can no longer use the plain move
854 special case below. */
855 if (set
856 && reg_overlap_mentioned_p (XEXP (link, 0), SET_SRC (set)))
857 set = NULL;
858 }
859 }
860
861 /* Special-case plain move instructions, since we may well
862 be able to do the move from a different register class. */
863 if (set && REG_P (SET_SRC (set)))
864 {
865 rtx src = SET_SRC (set);
866 unsigned int regno = REGNO (src);
867 machine_mode mode = GET_MODE (src);
868 unsigned int i;
869 rtx new_rtx;
870
871 /* If we are accessing SRC in some mode other than what we
872 set it in, make sure that the replacement is valid. */
873 if (mode != vd->e[regno].mode)
874 {
875 if (hard_regno_nregs[regno][mode]
876 > hard_regno_nregs[regno][vd->e[regno].mode])
877 goto no_move_special_case;
878
879 /* Likewise, if we are narrowing on a big-endian target, the
880 transformation is also invalid. */
881 if (hard_regno_nregs[regno][mode]
882 < hard_regno_nregs[regno][vd->e[regno].mode]
883 && (GET_MODE_SIZE (vd->e[regno].mode) > UNITS_PER_WORD
884 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
885 goto no_move_special_case;
886 }
887
888 /* If the destination is also a register, try to find a source
889 register in the same class. */
890 if (REG_P (SET_DEST (set)))
891 {
892 new_rtx = find_oldest_value_reg (REGNO_REG_CLASS (regno), src, vd);
893 if (new_rtx && validate_change (insn, &SET_SRC (set), new_rtx, 0))
894 {
895 if (dump_file)
896 fprintf (dump_file,
897 "insn %u: replaced reg %u with %u\n",
898 INSN_UID (insn), regno, REGNO (new_rtx));
899 changed = true;
900 goto did_replacement;
901 }
902 /* We need to re-extract as validate_change clobbers
903 recog_data. */
904 extract_constrain_insn (insn);
905 preprocess_constraints (insn);
906 }
907
908 /* Otherwise, try each register in the value chain for a valid replacement. */
909 for (i = vd->e[regno].oldest_regno; i != regno;
910 i = vd->e[i].next_regno)
911 {
912 new_rtx = maybe_mode_change (vd->e[i].mode, vd->e[regno].mode,
913 mode, i, regno);
914 if (new_rtx != NULL_RTX)
915 {
916 if (validate_change (insn, &SET_SRC (set), new_rtx, 0))
917 {
918 ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (src);
919 REG_ATTRS (new_rtx) = REG_ATTRS (src);
920 REG_POINTER (new_rtx) = REG_POINTER (src);
921 if (dump_file)
922 fprintf (dump_file,
923 "insn %u: replaced reg %u with %u\n",
924 INSN_UID (insn), regno, REGNO (new_rtx));
925 changed = true;
926 goto did_replacement;
927 }
928 /* We need to re-extract as validate_change clobbers
929 recog_data. */
930 extract_constrain_insn (insn);
931 preprocess_constraints (insn);
932 }
933 }
934 }
935 no_move_special_case:
936
937 any_replacements = false;
938
939 /* For each input operand, replace a hard register with the
940 eldest live copy that's in an appropriate register class. */
941 for (i = 0; i < n_ops; i++)
942 {
943 replaced[i] = false;
944
945 /* Don't scan match_operand here, since we've no reg class
946 information to pass down. Any operands that we could
947 substitute in will be represented elsewhere. */
948 if (recog_data.constraints[i][0] == '\0')
949 continue;
950
951 /* Don't replace in asms intentionally referencing hard regs. */
952 if (is_asm && REG_P (recog_data.operand[i])
953 && (REGNO (recog_data.operand[i])
954 == ORIGINAL_REGNO (recog_data.operand[i])))
955 continue;
956
957 if (recog_data.operand_type[i] == OP_IN)
958 {
959 if (op_alt[i].is_address)
960 replaced[i]
961 = replace_oldest_value_addr (recog_data.operand_loc[i],
962 alternative_class (op_alt, i),
963 VOIDmode, ADDR_SPACE_GENERIC,
964 insn, vd);
965 else if (REG_P (recog_data.operand[i]))
966 replaced[i]
967 = replace_oldest_value_reg (recog_data.operand_loc[i],
968 alternative_class (op_alt, i),
969 insn, vd);
970 else if (MEM_P (recog_data.operand[i]))
971 replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
972 insn, vd);
973 }
974 else if (MEM_P (recog_data.operand[i]))
975 replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
976 insn, vd);
977
978 /* If we performed any replacement, update match_dups. */
979 if (replaced[i])
980 {
981 int j;
982 rtx new_rtx;
983
984 new_rtx = *recog_data.operand_loc[i];
985 recog_data.operand[i] = new_rtx;
986 for (j = 0; j < recog_data.n_dups; j++)
987 if (recog_data.dup_num[j] == i)
988 validate_unshare_change (insn, recog_data.dup_loc[j], new_rtx, 1);
989
990 any_replacements = true;
991 }
992 }
993
994 if (any_replacements)
995 {
996 if (! apply_change_group ())
997 {
998 for (i = 0; i < n_ops; i++)
999 if (replaced[i])
1000 {
1001 rtx old = *recog_data.operand_loc[i];
1002 recog_data.operand[i] = old;
1003 }
1004
1005 if (dump_file)
1006 fprintf (dump_file,
1007 "insn %u: reg replacements not verified\n",
1008 INSN_UID (insn));
1009 }
1010 else
1011 changed = true;
1012 }
1013
1014 did_replacement:
1015 if (changed)
1016 {
1017 anything_changed = true;
1018
1019 /* If something changed, perhaps further changes to earlier
1020 DEBUG_INSNs can be applied. */
1021 if (vd->n_debug_insn_changes)
1022 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
1023 }
1024
1025 ksvd.vd = vd;
1026 ksvd.ignore_set_reg = NULL_RTX;
1027
1028 /* Clobber call-clobbered registers. */
1029 if (CALL_P (insn))
1030 {
1031 unsigned int set_regno = INVALID_REGNUM;
1032 unsigned int set_nregs = 0;
1033 unsigned int regno;
1034 rtx exp;
1035 HARD_REG_SET regs_invalidated_by_this_call;
1036
1037 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
1038 {
1039 rtx x = XEXP (exp, 0);
1040 if (GET_CODE (x) == SET)
1041 {
1042 rtx dest = SET_DEST (x);
1043 kill_value (dest, vd);
1044 set_value_regno (REGNO (dest), GET_MODE (dest), vd);
1045 copy_value (dest, SET_SRC (x), vd);
1046 ksvd.ignore_set_reg = dest;
1047 set_regno = REGNO (dest);
1048 set_nregs = REG_NREGS (dest);
1049 break;
1050 }
1051 }
1052
1053 get_call_reg_set_usage (insn,
1054 &regs_invalidated_by_this_call,
1055 regs_invalidated_by_call);
1056 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1057 if ((TEST_HARD_REG_BIT (regs_invalidated_by_this_call, regno)
1058 || HARD_REGNO_CALL_PART_CLOBBERED (regno, vd->e[regno].mode))
1059 && (regno < set_regno || regno >= set_regno + set_nregs))
1060 kill_value_regno (regno, 1, vd);
1061
1062 /* If SET was seen in CALL_INSN_FUNCTION_USAGE, and SET_SRC
1063 of the SET isn't in regs_invalidated_by_call hard reg set,
1064 but instead among CLOBBERs on the CALL_INSN, we could wrongly
1065 assume the value in it is still live. */
1066 if (ksvd.ignore_set_reg)
1067 kill_clobbered_values (insn, vd);
1068 }
1069
1070 bool copy_p = (set
1071 && REG_P (SET_DEST (set))
1072 && REG_P (SET_SRC (set)));
1073 bool noop_p = (copy_p
1074 && rtx_equal_p (SET_DEST (set), SET_SRC (set)));
1075
1076 if (!noop_p)
1077 {
1078 /* Notice stores. */
1079 note_stores (PATTERN (insn), kill_set_value, &ksvd);
1080
1081 /* Notice copies. */
1082 if (copy_p)
1083 copy_value (SET_DEST (set), SET_SRC (set), vd);
1084 }
1085
1086 if (insn == BB_END (bb))
1087 break;
1088 }
1089
1090 return anything_changed;
1091 }
1092
1093 /* Dump the value chain data to stderr. */
1094
1095 DEBUG_FUNCTION void
1096 debug_value_data (struct value_data *vd)
1097 {
1098 HARD_REG_SET set;
1099 unsigned int i, j;
1100
1101 CLEAR_HARD_REG_SET (set);
1102
1103 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1104 if (vd->e[i].oldest_regno == i)
1105 {
1106 if (vd->e[i].mode == VOIDmode)
1107 {
1108 if (vd->e[i].next_regno != INVALID_REGNUM)
1109 fprintf (stderr, "[%u] Bad next_regno for empty chain (%u)\n",
1110 i, vd->e[i].next_regno);
1111 continue;
1112 }
1113
1114 SET_HARD_REG_BIT (set, i);
1115 fprintf (stderr, "[%u %s] ", i, GET_MODE_NAME (vd->e[i].mode));
1116
1117 for (j = vd->e[i].next_regno;
1118 j != INVALID_REGNUM;
1119 j = vd->e[j].next_regno)
1120 {
1121 if (TEST_HARD_REG_BIT (set, j))
1122 {
1123 fprintf (stderr, "[%u] Loop in regno chain\n", j);
1124 return;
1125 }
1126
1127 if (vd->e[j].oldest_regno != i)
1128 {
1129 fprintf (stderr, "[%u] Bad oldest_regno (%u)\n",
1130 j, vd->e[j].oldest_regno);
1131 return;
1132 }
1133 SET_HARD_REG_BIT (set, j);
1134 fprintf (stderr, "[%u %s] ", j, GET_MODE_NAME (vd->e[j].mode));
1135 }
1136 fputc ('\n', stderr);
1137 }
1138
1139 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1140 if (! TEST_HARD_REG_BIT (set, i)
1141 && (vd->e[i].mode != VOIDmode
1142 || vd->e[i].oldest_regno != i
1143 || vd->e[i].next_regno != INVALID_REGNUM))
1144 fprintf (stderr, "[%u] Non-empty reg in chain (%s %u %i)\n",
1145 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1146 vd->e[i].next_regno);
1147 }
1148
1149 /* Do copyprop_hardreg_forward_1 for a single basic block BB.
1150 DEBUG_INSNs are skipped, since we do not want to involve the DF-related
1151 machinery the way pass_cprop_hardreg::execute does.
1152
1153 NOTE: Currently this is used only for shrink-wrap.  Maybe extend it
1154 to handle DEBUG_INSNs for other uses. */
1155
1156 void
1157 copyprop_hardreg_forward_bb_without_debug_insn (basic_block bb)
1158 {
1159 struct value_data *vd;
1160 vd = XNEWVEC (struct value_data, 1);
1161 init_value_data (vd);
1162
1163 skip_debug_insn_p = true;
1164 copyprop_hardreg_forward_1 (bb, vd);
1165 free (vd);
1166 skip_debug_insn_p = false;
1167 }
1168
1169 #ifdef ENABLE_CHECKING
1170 static void
1171 validate_value_data (struct value_data *vd)
1172 {
1173 HARD_REG_SET set;
1174 unsigned int i, j;
1175
1176 CLEAR_HARD_REG_SET (set);
1177
1178 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1179 if (vd->e[i].oldest_regno == i)
1180 {
1181 if (vd->e[i].mode == VOIDmode)
1182 {
1183 if (vd->e[i].next_regno != INVALID_REGNUM)
1184 internal_error ("validate_value_data: [%u] Bad next_regno for empty chain (%u)",
1185 i, vd->e[i].next_regno);
1186 continue;
1187 }
1188
1189 SET_HARD_REG_BIT (set, i);
1190
1191 for (j = vd->e[i].next_regno;
1192 j != INVALID_REGNUM;
1193 j = vd->e[j].next_regno)
1194 {
1195 if (TEST_HARD_REG_BIT (set, j))
1196 internal_error ("validate_value_data: Loop in regno chain (%u)",
1197 j);
1198 if (vd->e[j].oldest_regno != i)
1199 internal_error ("validate_value_data: [%u] Bad oldest_regno (%u)",
1200 j, vd->e[j].oldest_regno);
1201
1202 SET_HARD_REG_BIT (set, j);
1203 }
1204 }
1205
1206 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1207 if (! TEST_HARD_REG_BIT (set, i)
1208 && (vd->e[i].mode != VOIDmode
1209 || vd->e[i].oldest_regno != i
1210 || vd->e[i].next_regno != INVALID_REGNUM))
1211 internal_error ("validate_value_data: [%u] Non-empty reg in chain (%s %u %i)",
1212 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1213 vd->e[i].next_regno);
1214 }
1215 #endif
1216 \f
1217 namespace {
1218
1219 const pass_data pass_data_cprop_hardreg =
1220 {
1221 RTL_PASS, /* type */
1222 "cprop_hardreg", /* name */
1223 OPTGROUP_NONE, /* optinfo_flags */
1224 TV_CPROP_REGISTERS, /* tv_id */
1225 0, /* properties_required */
1226 0, /* properties_provided */
1227 0, /* properties_destroyed */
1228 0, /* todo_flags_start */
1229 TODO_df_finish, /* todo_flags_finish */
1230 };
1231
1232 class pass_cprop_hardreg : public rtl_opt_pass
1233 {
1234 public:
1235 pass_cprop_hardreg (gcc::context *ctxt)
1236 : rtl_opt_pass (pass_data_cprop_hardreg, ctxt)
1237 {}
1238
1239 /* opt_pass methods: */
1240 virtual bool gate (function *)
1241 {
1242 return (optimize > 0 && (flag_cprop_registers));
1243 }
1244
1245 virtual unsigned int execute (function *);
1246
1247 }; // class pass_cprop_hardreg
1248
1249 unsigned int
1250 pass_cprop_hardreg::execute (function *fun)
1251 {
1252 struct value_data *all_vd;
1253 basic_block bb;
1254 sbitmap visited;
1255 bool analyze_called = false;
1256
1257 all_vd = XNEWVEC (struct value_data, last_basic_block_for_fn (fun));
1258
1259 visited = sbitmap_alloc (last_basic_block_for_fn (fun));
1260 bitmap_clear (visited);
1261
1262 FOR_EACH_BB_FN (bb, fun)
1263 {
1264 bitmap_set_bit (visited, bb->index);
1265
1266 /* If a block has a single predecessor that we've already
1267 processed, begin with the value data that was live at
1268 the end of the predecessor block. */
1269 /* ??? Ought to use more intelligent queuing of blocks. */
1270 if (single_pred_p (bb)
1271 && bitmap_bit_p (visited, single_pred (bb)->index)
1272 && ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
1273 {
1274 all_vd[bb->index] = all_vd[single_pred (bb)->index];
1275 if (all_vd[bb->index].n_debug_insn_changes)
1276 {
1277 unsigned int regno;
1278
1279 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1280 {
1281 if (all_vd[bb->index].e[regno].debug_insn_changes)
1282 {
1283 all_vd[bb->index].e[regno].debug_insn_changes = NULL;
1284 if (--all_vd[bb->index].n_debug_insn_changes == 0)
1285 break;
1286 }
1287 }
1288 }
1289 }
1290 else
1291 init_value_data (all_vd + bb->index);
1292
1293 copyprop_hardreg_forward_1 (bb, all_vd + bb->index);
1294 }
1295
1296 if (MAY_HAVE_DEBUG_INSNS)
1297 {
1298 FOR_EACH_BB_FN (bb, fun)
1299 if (bitmap_bit_p (visited, bb->index)
1300 && all_vd[bb->index].n_debug_insn_changes)
1301 {
1302 unsigned int regno;
1303 bitmap live;
1304
1305 if (!analyze_called)
1306 {
1307 df_analyze ();
1308 analyze_called = true;
1309 }
1310 live = df_get_live_out (bb);
1311 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1312 if (all_vd[bb->index].e[regno].debug_insn_changes)
1313 {
1314 if (REGNO_REG_SET_P (live, regno))
1315 apply_debug_insn_changes (all_vd + bb->index, regno);
1316 if (all_vd[bb->index].n_debug_insn_changes == 0)
1317 break;
1318 }
1319 }
1320
1321 queued_debug_insn_change::pool.release ();
1322 }
1323
1324 sbitmap_free (visited);
1325 free (all_vd);
1326 return 0;
1327 }
1328
1329 } // anon namespace
1330
1331 rtl_opt_pass *
1332 make_pass_cprop_hardreg (gcc::context *ctxt)
1333 {
1334 return new pass_cprop_hardreg (ctxt);
1335 }