gcc/regcprop.c (gcc.git)
1 /* Copy propagation on hard registers for the GNU compiler.
2 Copyright (C) 2000-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "tm_p.h"
26 #include "insn-config.h"
27 #include "regs.h"
28 #include "addresses.h"
29 #include "hard-reg-set.h"
30 #include "predict.h"
31 #include "function.h"
32 #include "dominance.h"
33 #include "cfg.h"
34 #include "basic-block.h"
35 #include "reload.h"
36 #include "recog.h"
37 #include "flags.h"
38 #include "diagnostic-core.h"
39 #include "obstack.h"
40 #include "tree-pass.h"
41 #include "df.h"
42 #include "rtl-iter.h"
43 #include "emit-rtl.h"
44
45 /* The following code does forward propagation of hard register copies.
46 The object is to eliminate as many dependencies as possible, so that
47 we have the most scheduling freedom. As a side effect, we also clean
48 up some silly register allocation decisions made by reload. This
49 code may be obsoleted by a new register allocator. */
50
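/* As an illustration (register numbers are arbitrary), given a copy
   followed by a use of the copied register:
     (set (reg:SI 1) (reg:SI 0))
     (set (reg:SI 2) (plus:SI (reg:SI 1) (const_int 4)))
   the second insn can be rewritten to read (reg:SI 0) directly, which
   removes its dependency on the first insn and may let later passes
   delete the copy as dead.  */
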
51 /* DEBUG_INSNs aren't changed right away, as doing so might extend the
52 lifetime of a register and get the DEBUG_INSN subsequently reset.
53 So they are queued instead, and updated only when the register is
54 used in some subsequent real insn before it is set. */
55 struct queued_debug_insn_change
56 {
57 struct queued_debug_insn_change *next;
58 rtx_insn *insn;
59 rtx *loc;
60 rtx new_rtx;
61
62 /* Pool allocation new operator. */
63 inline void *operator new (size_t)
64 {
65 return pool.allocate ();
66 }
67
68 /* Delete operator utilizing pool allocation. */
69 inline void operator delete (void *ptr)
70 {
71 pool.remove ((queued_debug_insn_change *) ptr);
72 }
73
74 /* Memory allocation pool. */
75 static pool_allocator<queued_debug_insn_change> pool;
76 };
77
78 /* For each register, we have a list of registers that contain the same
79 value. The OLDEST_REGNO field points to the head of the list, and
80 the NEXT_REGNO field runs through the list. The MODE field indicates
81 what mode the data is known to be in; this field is VOIDmode when the
82 register is not known to contain valid data. */
83
84 struct value_data_entry
85 {
86 machine_mode mode;
87 unsigned int oldest_regno;
88 unsigned int next_regno;
89 struct queued_debug_insn_change *debug_insn_changes;
90 };
91
92 struct value_data
93 {
94 struct value_data_entry e[FIRST_PSEUDO_REGISTER];
95 unsigned int max_value_regs;
96 unsigned int n_debug_insn_changes;
97 };
98
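/* As an illustration (register numbers are arbitrary), after processing
     (set (reg:SI 1) (reg:SI 0))
     (set (reg:SI 2) (reg:SI 0))
   registers 0, 1 and 2 all have OLDEST_REGNO == 0, and the NEXT_REGNO
   links form the chain 0 -> 1 -> 2, terminated by INVALID_REGNUM.  */
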
99 pool_allocator<queued_debug_insn_change> queued_debug_insn_change::pool
100 ("debug insn changes pool", 256);
101
102 static bool skip_debug_insn_p;
103
104 static void kill_value_one_regno (unsigned, struct value_data *);
105 static void kill_value_regno (unsigned, unsigned, struct value_data *);
106 static void kill_value (const_rtx, struct value_data *);
107 static void set_value_regno (unsigned, machine_mode, struct value_data *);
108 static void init_value_data (struct value_data *);
109 static void kill_clobbered_value (rtx, const_rtx, void *);
110 static void kill_set_value (rtx, const_rtx, void *);
111 static void copy_value (rtx, rtx, struct value_data *);
112 static bool mode_change_ok (machine_mode, machine_mode,
113 unsigned int);
114 static rtx maybe_mode_change (machine_mode, machine_mode,
115 machine_mode, unsigned int, unsigned int);
116 static rtx find_oldest_value_reg (enum reg_class, rtx, struct value_data *);
117 static bool replace_oldest_value_reg (rtx *, enum reg_class, rtx_insn *,
118 struct value_data *);
119 static bool replace_oldest_value_addr (rtx *, enum reg_class,
120 machine_mode, addr_space_t,
121 rtx_insn *, struct value_data *);
122 static bool replace_oldest_value_mem (rtx, rtx_insn *, struct value_data *);
123 static bool copyprop_hardreg_forward_1 (basic_block, struct value_data *);
124 extern void debug_value_data (struct value_data *);
125 #ifdef ENABLE_CHECKING
126 static void validate_value_data (struct value_data *);
127 #endif
128
129 /* Free all queued updates for DEBUG_INSNs that change some reg to
130 register REGNO. */
131
132 static void
133 free_debug_insn_changes (struct value_data *vd, unsigned int regno)
134 {
135 struct queued_debug_insn_change *cur, *next;
136 for (cur = vd->e[regno].debug_insn_changes; cur; cur = next)
137 {
138 next = cur->next;
139 --vd->n_debug_insn_changes;
140 delete cur;
141 }
142 vd->e[regno].debug_insn_changes = NULL;
143 }
144
145 /* Kill register REGNO. This involves removing it from any value
146 lists, and resetting the value mode to VOIDmode. This is only a
147 helper function; it does not handle any hard registers overlapping
148 with REGNO. */
149
150 static void
151 kill_value_one_regno (unsigned int regno, struct value_data *vd)
152 {
153 unsigned int i, next;
154
155 if (vd->e[regno].oldest_regno != regno)
156 {
157 for (i = vd->e[regno].oldest_regno;
158 vd->e[i].next_regno != regno;
159 i = vd->e[i].next_regno)
160 continue;
161 vd->e[i].next_regno = vd->e[regno].next_regno;
162 }
163 else if ((next = vd->e[regno].next_regno) != INVALID_REGNUM)
164 {
165 for (i = next; i != INVALID_REGNUM; i = vd->e[i].next_regno)
166 vd->e[i].oldest_regno = next;
167 }
168
169 vd->e[regno].mode = VOIDmode;
170 vd->e[regno].oldest_regno = regno;
171 vd->e[regno].next_regno = INVALID_REGNUM;
172 if (vd->e[regno].debug_insn_changes)
173 free_debug_insn_changes (vd, regno);
174
175 #ifdef ENABLE_CHECKING
176 validate_value_data (vd);
177 #endif
178 }
179
180 /* Kill the value in register REGNO for NREGS, and any other registers
181 whose values overlap. */
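/* For example, on a target where a DImode value recorded in register 10
   also occupies register 11, killing register 11 alone must invalidate
   register 10's entry as well, since part of its recorded value is gone.  */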
182
183 static void
184 kill_value_regno (unsigned int regno, unsigned int nregs,
185 struct value_data *vd)
186 {
187 unsigned int j;
188
189 /* Kill the value we're told to kill. */
190 for (j = 0; j < nregs; ++j)
191 kill_value_one_regno (regno + j, vd);
192
193 /* Kill everything that overlapped what we're told to kill. */
194 if (regno < vd->max_value_regs)
195 j = 0;
196 else
197 j = regno - vd->max_value_regs;
198 for (; j < regno; ++j)
199 {
200 unsigned int i, n;
201 if (vd->e[j].mode == VOIDmode)
202 continue;
203 n = hard_regno_nregs[j][vd->e[j].mode];
204 if (j + n > regno)
205 for (i = 0; i < n; ++i)
206 kill_value_one_regno (j + i, vd);
207 }
208 }
209
210 /* Kill X. This is a convenience function wrapping kill_value_regno
211 so that we mind the mode the register is in. */
212
213 static void
214 kill_value (const_rtx x, struct value_data *vd)
215 {
216 if (GET_CODE (x) == SUBREG)
217 {
218 rtx tmp = simplify_subreg (GET_MODE (x), SUBREG_REG (x),
219 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
220 x = tmp ? tmp : SUBREG_REG (x);
221 }
222 if (REG_P (x))
223 kill_value_regno (REGNO (x), REG_NREGS (x), vd);
224 }
225
226 /* Remember that REGNO is valid in MODE. */
227
228 static void
229 set_value_regno (unsigned int regno, machine_mode mode,
230 struct value_data *vd)
231 {
232 unsigned int nregs;
233
234 vd->e[regno].mode = mode;
235
236 nregs = hard_regno_nregs[regno][mode];
237 if (nregs > vd->max_value_regs)
238 vd->max_value_regs = nregs;
239 }
240
241 /* Initialize VD such that there are no known relationships between regs. */
242
243 static void
244 init_value_data (struct value_data *vd)
245 {
246 int i;
247 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
248 {
249 vd->e[i].mode = VOIDmode;
250 vd->e[i].oldest_regno = i;
251 vd->e[i].next_regno = INVALID_REGNUM;
252 vd->e[i].debug_insn_changes = NULL;
253 }
254 vd->max_value_regs = 0;
255 vd->n_debug_insn_changes = 0;
256 }
257
258 /* Called through note_stores. If X is clobbered, kill its value. */
259
260 static void
261 kill_clobbered_value (rtx x, const_rtx set, void *data)
262 {
263 struct value_data *const vd = (struct value_data *) data;
264 if (GET_CODE (set) == CLOBBER)
265 kill_value (x, vd);
266 }
267
268 /* A structure passed as data to kill_set_value through note_stores. */
269 struct kill_set_value_data
270 {
271 struct value_data *vd;
272 rtx ignore_set_reg;
273 };
274
275 /* Called through note_stores. If X is set, not clobbered, kill its
276 current value and install it as the root of its own value list. */
277
278 static void
279 kill_set_value (rtx x, const_rtx set, void *data)
280 {
281 struct kill_set_value_data *ksvd = (struct kill_set_value_data *) data;
282 if (rtx_equal_p (x, ksvd->ignore_set_reg))
283 return;
284 if (GET_CODE (set) != CLOBBER)
285 {
286 kill_value (x, ksvd->vd);
287 if (REG_P (x))
288 set_value_regno (REGNO (x), GET_MODE (x), ksvd->vd);
289 }
290 }
291
292 /* Kill any register used in X as the base of an auto-increment expression,
293 and install that register as the root of its own value list. */
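/* For example, in (set (reg:SI 2) (mem:SI (post_inc:SI (reg:SI 3))))
   the auto-increment modifies register 3, so any value previously
   recorded for it must be discarded (register numbers are arbitrary).  */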
294
295 static void
296 kill_autoinc_value (rtx_insn *insn, struct value_data *vd)
297 {
298 subrtx_iterator::array_type array;
299 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
300 {
301 const_rtx x = *iter;
302 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
303 {
304 x = XEXP (x, 0);
305 kill_value (x, vd);
306 set_value_regno (REGNO (x), GET_MODE (x), vd);
307 iter.skip_subrtxes ();
308 }
309 }
310 }
311
312 /* Assert that SRC has been copied to DEST. Adjust the data structures
313 to reflect that SRC contains an older copy of the shared value. */
314
315 static void
316 copy_value (rtx dest, rtx src, struct value_data *vd)
317 {
318 unsigned int dr = REGNO (dest);
319 unsigned int sr = REGNO (src);
320 unsigned int dn, sn;
321 unsigned int i;
322
323 /* ??? At present, it's possible to see noop sets. It'd be nice if
324 this were cleaned up beforehand... */
325 if (sr == dr)
326 return;
327
328 /* Do not propagate copies to the stack pointer, as that can leave
329 memory accesses with no scheduling dependency on the stack update. */
330 if (dr == STACK_POINTER_REGNUM)
331 return;
332
333 /* Likewise with the frame pointer, if we're using one. */
334 if (frame_pointer_needed && dr == HARD_FRAME_POINTER_REGNUM)
335 return;
336
337 /* Do not propagate copies to fixed or global registers: patterns
338 may rely on seeing a particular fixed register, and users may
339 expect the chosen global register in an asm. */
340 if (fixed_regs[dr] || global_regs[dr])
341 return;
342
343 /* If SRC and DEST overlap, don't record anything. */
344 dn = REG_NREGS (dest);
345 sn = REG_NREGS (src);
346 if ((dr > sr && dr < sr + sn)
347 || (sr > dr && sr < dr + dn))
348 return;
349
350 /* If SRC had no assigned mode (i.e. we didn't know it was live)
351 assign it now and assume the value came from an input argument
352 or somesuch. */
353 if (vd->e[sr].mode == VOIDmode)
354 set_value_regno (sr, vd->e[dr].mode, vd);
355
356 /* If we are narrowing the input to a smaller number of hard regs,
357 and the target is big endian, we are really extracting a high part.
358 Since we generally associate a low part of a value with the value itself,
359 we must not do the same for the high part.
360 Note we can still get low parts for the same mode combination through
361 a two-step copy involving differently sized hard regs.
362 Assume hard regs fr* are 32 bits each, while r* are 64 bits each:
363 (set (reg:DI r0) (reg:DI fr0))
364 (set (reg:SI fr2) (reg:SI r0))
365 loads the low part of (reg:DI fr0) - i.e. fr1 - into fr2, while:
366 (set (reg:SI fr2) (reg:SI fr0))
367 loads the high part of (reg:DI fr0) into fr2.
368
369 We can't properly represent the latter case in our tables, so don't
370 record anything then. */
371 else if (sn < (unsigned int) hard_regno_nregs[sr][vd->e[sr].mode]
372 && (GET_MODE_SIZE (vd->e[sr].mode) > UNITS_PER_WORD
373 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
374 return;
375
376 /* If SRC had been assigned a mode narrower than the copy, we can't
377 link DEST into the chain, because not all of the pieces of the
378 copy came from oldest_regno. */
379 else if (sn > (unsigned int) hard_regno_nregs[sr][vd->e[sr].mode])
380 return;
381
382 /* Link DR at the end of the value chain used by SR. */
383
384 vd->e[dr].oldest_regno = vd->e[sr].oldest_regno;
385
386 for (i = sr; vd->e[i].next_regno != INVALID_REGNUM; i = vd->e[i].next_regno)
387 continue;
388 vd->e[i].next_regno = dr;
389
390 #ifdef ENABLE_CHECKING
391 validate_value_data (vd);
392 #endif
393 }
394
395 /* Return true if a mode change from ORIG to NEW is allowed for REGNO. */
396
397 static bool
398 mode_change_ok (machine_mode orig_mode, machine_mode new_mode,
399 unsigned int regno ATTRIBUTE_UNUSED)
400 {
401 if (GET_MODE_SIZE (orig_mode) < GET_MODE_SIZE (new_mode))
402 return false;
403
404 #ifdef CANNOT_CHANGE_MODE_CLASS
405 return !REG_CANNOT_CHANGE_MODE_P (regno, orig_mode, new_mode);
406 #endif
407
408 return true;
409 }
410
411 /* Register REGNO was originally set in ORIG_MODE. It - or a copy of it -
412 was copied in COPY_MODE to COPY_REGNO, and then COPY_REGNO was accessed
413 in NEW_MODE.
414 Return a NEW_MODE rtx for REGNO if that's OK, otherwise return NULL_RTX. */
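/* For example, if REGNO was set in DImode, copied in DImode to
   COPY_REGNO, and COPY_REGNO is then read in SImode, this returns an
   SImode reference to the hard register of REGNO that holds the
   corresponding part of the value, provided such a mode change is
   valid for that register.  */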
415
416 static rtx
417 maybe_mode_change (machine_mode orig_mode, machine_mode copy_mode,
418 machine_mode new_mode, unsigned int regno,
419 unsigned int copy_regno ATTRIBUTE_UNUSED)
420 {
421 if (GET_MODE_SIZE (copy_mode) < GET_MODE_SIZE (orig_mode)
422 && GET_MODE_SIZE (copy_mode) < GET_MODE_SIZE (new_mode))
423 return NULL_RTX;
424
425 if (orig_mode == new_mode)
426 return gen_raw_REG (new_mode, regno);
427 else if (mode_change_ok (orig_mode, new_mode, regno))
428 {
429 int copy_nregs = hard_regno_nregs[copy_regno][copy_mode];
430 int use_nregs = hard_regno_nregs[copy_regno][new_mode];
431 int copy_offset
432 = GET_MODE_SIZE (copy_mode) / copy_nregs * (copy_nregs - use_nregs);
433 int offset
434 = GET_MODE_SIZE (orig_mode) - GET_MODE_SIZE (new_mode) - copy_offset;
435 int byteoffset = offset % UNITS_PER_WORD;
436 int wordoffset = offset - byteoffset;
437
438 offset = ((WORDS_BIG_ENDIAN ? wordoffset : 0)
439 + (BYTES_BIG_ENDIAN ? byteoffset : 0));
440 regno += subreg_regno_offset (regno, orig_mode, offset, new_mode);
441 if (HARD_REGNO_MODE_OK (regno, new_mode))
442 return gen_raw_REG (new_mode, regno);
443 }
444 return NULL_RTX;
445 }
446
447 /* Find the oldest copy of the value contained in REGNO that is in
448 register class CL and has mode MODE. If found, return an rtx
449 of that oldest register, otherwise return NULL. */
450
451 static rtx
452 find_oldest_value_reg (enum reg_class cl, rtx reg, struct value_data *vd)
453 {
454 unsigned int regno = REGNO (reg);
455 machine_mode mode = GET_MODE (reg);
456 unsigned int i;
457
458 /* If we are accessing REG in some mode other than what we set it in,
459 make sure that the replacement is valid. In particular, consider
460 (set (reg:DI r11) (...))
461 (set (reg:SI r9) (reg:SI r11))
462 (set (reg:SI r10) (...))
463 (set (...) (reg:DI r9))
464 Replacing r9 with r11 is invalid. */
465 if (mode != vd->e[regno].mode)
466 {
467 if (hard_regno_nregs[regno][mode]
468 > hard_regno_nregs[regno][vd->e[regno].mode])
469 return NULL_RTX;
470 }
471
472 for (i = vd->e[regno].oldest_regno; i != regno; i = vd->e[i].next_regno)
473 {
474 machine_mode oldmode = vd->e[i].mode;
475 rtx new_rtx;
476
477 if (!in_hard_reg_set_p (reg_class_contents[cl], mode, i))
478 continue;
479
480 new_rtx = maybe_mode_change (oldmode, vd->e[regno].mode, mode, i, regno);
481 if (new_rtx)
482 {
483 ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (reg);
484 REG_ATTRS (new_rtx) = REG_ATTRS (reg);
485 REG_POINTER (new_rtx) = REG_POINTER (reg);
486 return new_rtx;
487 }
488 }
489
490 return NULL_RTX;
491 }
492
493 /* If possible, replace the register at *LOC with the oldest register
494 in register class CL. Return true if successfully replaced. */
495
496 static bool
497 replace_oldest_value_reg (rtx *loc, enum reg_class cl, rtx_insn *insn,
498 struct value_data *vd)
499 {
500 rtx new_rtx = find_oldest_value_reg (cl, *loc, vd);
501 if (new_rtx && (!DEBUG_INSN_P (insn) || !skip_debug_insn_p))
502 {
503 if (DEBUG_INSN_P (insn))
504 {
505 struct queued_debug_insn_change *change;
506
507 if (dump_file)
508 fprintf (dump_file, "debug_insn %u: queued replacing reg %u with %u\n",
509 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
510
511 change = new queued_debug_insn_change;
512 change->next = vd->e[REGNO (new_rtx)].debug_insn_changes;
513 change->insn = insn;
514 change->loc = loc;
515 change->new_rtx = new_rtx;
516 vd->e[REGNO (new_rtx)].debug_insn_changes = change;
517 ++vd->n_debug_insn_changes;
518 return true;
519 }
520 if (dump_file)
521 fprintf (dump_file, "insn %u: replaced reg %u with %u\n",
522 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
523
524 validate_change (insn, loc, new_rtx, 1);
525 return true;
526 }
527 return false;
528 }
529
530 /* Similar to replace_oldest_value_reg, but *LOC contains an address.
531 Adapted from find_reloads_address_1. CL is INDEX_REG_CLASS or
532 BASE_REG_CLASS depending on how the register is being considered. */
533
534 static bool
535 replace_oldest_value_addr (rtx *loc, enum reg_class cl,
536 machine_mode mode, addr_space_t as,
537 rtx_insn *insn, struct value_data *vd)
538 {
539 rtx x = *loc;
540 RTX_CODE code = GET_CODE (x);
541 const char *fmt;
542 int i, j;
543 bool changed = false;
544
545 switch (code)
546 {
547 case PLUS:
548 if (DEBUG_INSN_P (insn))
549 break;
550
551 {
552 rtx orig_op0 = XEXP (x, 0);
553 rtx orig_op1 = XEXP (x, 1);
554 RTX_CODE code0 = GET_CODE (orig_op0);
555 RTX_CODE code1 = GET_CODE (orig_op1);
556 rtx op0 = orig_op0;
557 rtx op1 = orig_op1;
558 rtx *locI = NULL;
559 rtx *locB = NULL;
560 enum rtx_code index_code = SCRATCH;
561
562 if (GET_CODE (op0) == SUBREG)
563 {
564 op0 = SUBREG_REG (op0);
565 code0 = GET_CODE (op0);
566 }
567
568 if (GET_CODE (op1) == SUBREG)
569 {
570 op1 = SUBREG_REG (op1);
571 code1 = GET_CODE (op1);
572 }
573
574 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
575 || code0 == ZERO_EXTEND || code1 == MEM)
576 {
577 locI = &XEXP (x, 0);
578 locB = &XEXP (x, 1);
579 index_code = GET_CODE (*locI);
580 }
581 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
582 || code1 == ZERO_EXTEND || code0 == MEM)
583 {
584 locI = &XEXP (x, 1);
585 locB = &XEXP (x, 0);
586 index_code = GET_CODE (*locI);
587 }
588 else if (code0 == CONST_INT || code0 == CONST
589 || code0 == SYMBOL_REF || code0 == LABEL_REF)
590 {
591 locB = &XEXP (x, 1);
592 index_code = GET_CODE (XEXP (x, 0));
593 }
594 else if (code1 == CONST_INT || code1 == CONST
595 || code1 == SYMBOL_REF || code1 == LABEL_REF)
596 {
597 locB = &XEXP (x, 0);
598 index_code = GET_CODE (XEXP (x, 1));
599 }
600 else if (code0 == REG && code1 == REG)
601 {
602 int index_op;
603 unsigned regno0 = REGNO (op0), regno1 = REGNO (op1);
604
605 if (REGNO_OK_FOR_INDEX_P (regno1)
606 && regno_ok_for_base_p (regno0, mode, as, PLUS, REG))
607 index_op = 1;
608 else if (REGNO_OK_FOR_INDEX_P (regno0)
609 && regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
610 index_op = 0;
611 else if (regno_ok_for_base_p (regno0, mode, as, PLUS, REG)
612 || REGNO_OK_FOR_INDEX_P (regno1))
613 index_op = 1;
614 else if (regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
615 index_op = 0;
616 else
617 index_op = 1;
618
619 locI = &XEXP (x, index_op);
620 locB = &XEXP (x, !index_op);
621 index_code = GET_CODE (*locI);
622 }
623 else if (code0 == REG)
624 {
625 locI = &XEXP (x, 0);
626 locB = &XEXP (x, 1);
627 index_code = GET_CODE (*locI);
628 }
629 else if (code1 == REG)
630 {
631 locI = &XEXP (x, 1);
632 locB = &XEXP (x, 0);
633 index_code = GET_CODE (*locI);
634 }
635
636 if (locI)
637 changed |= replace_oldest_value_addr (locI, INDEX_REG_CLASS,
638 mode, as, insn, vd);
639 if (locB)
640 changed |= replace_oldest_value_addr (locB,
641 base_reg_class (mode, as, PLUS,
642 index_code),
643 mode, as, insn, vd);
644 return changed;
645 }
646
647 case POST_INC:
648 case POST_DEC:
649 case POST_MODIFY:
650 case PRE_INC:
651 case PRE_DEC:
652 case PRE_MODIFY:
653 return false;
654
655 case MEM:
656 return replace_oldest_value_mem (x, insn, vd);
657
658 case REG:
659 return replace_oldest_value_reg (loc, cl, insn, vd);
660
661 default:
662 break;
663 }
664
665 fmt = GET_RTX_FORMAT (code);
666 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
667 {
668 if (fmt[i] == 'e')
669 changed |= replace_oldest_value_addr (&XEXP (x, i), cl, mode, as,
670 insn, vd);
671 else if (fmt[i] == 'E')
672 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
673 changed |= replace_oldest_value_addr (&XVECEXP (x, i, j), cl,
674 mode, as, insn, vd);
675 }
676
677 return changed;
678 }
679
680 /* Similar to replace_oldest_value_reg, but X contains a memory. */
681
682 static bool
683 replace_oldest_value_mem (rtx x, rtx_insn *insn, struct value_data *vd)
684 {
685 enum reg_class cl;
686
687 if (DEBUG_INSN_P (insn))
688 cl = ALL_REGS;
689 else
690 cl = base_reg_class (GET_MODE (x), MEM_ADDR_SPACE (x), MEM, SCRATCH);
691
692 return replace_oldest_value_addr (&XEXP (x, 0), cl,
693 GET_MODE (x), MEM_ADDR_SPACE (x),
694 insn, vd);
695 }
696
697 /* Apply all queued updates for DEBUG_INSNs that change some reg to
698 register REGNO. */
699
700 static void
701 apply_debug_insn_changes (struct value_data *vd, unsigned int regno)
702 {
703 struct queued_debug_insn_change *change;
704 rtx_insn *last_insn = vd->e[regno].debug_insn_changes->insn;
705
706 for (change = vd->e[regno].debug_insn_changes;
707 change;
708 change = change->next)
709 {
710 if (last_insn != change->insn)
711 {
712 apply_change_group ();
713 last_insn = change->insn;
714 }
715 validate_change (change->insn, change->loc, change->new_rtx, 1);
716 }
717 apply_change_group ();
718 }
719
720 /* Called via note_uses. For each register used in a real insn,
721 apply any queued DEBUG_INSN changes that replace some register
722 with that used register. */
723
724 static void
725 cprop_find_used_regs (rtx *loc, void *data)
726 {
727 struct value_data *const vd = (struct value_data *) data;
728 subrtx_iterator::array_type array;
729 FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
730 {
731 const_rtx x = *iter;
732 if (REG_P (x))
733 {
734 unsigned int regno = REGNO (x);
735 if (vd->e[regno].debug_insn_changes)
736 {
737 apply_debug_insn_changes (vd, regno);
738 free_debug_insn_changes (vd, regno);
739 }
740 }
741 }
742 }
743
744 /* Apply clobbers of INSN in PATTERN and CALL_INSN_FUNCTION_USAGE to value data VD. */
745
746 static void
747 kill_clobbered_values (rtx_insn *insn, struct value_data *vd)
748 {
749 note_stores (PATTERN (insn), kill_clobbered_value, vd);
750
751 if (CALL_P (insn))
752 {
753 rtx exp;
754
755 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
756 {
757 rtx x = XEXP (exp, 0);
758 if (GET_CODE (x) == CLOBBER)
759 kill_value (SET_DEST (x), vd);
760 }
761 }
762 }
763
764 /* Perform the forward copy propagation on basic block BB. */
765
766 static bool
767 copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
768 {
769 bool anything_changed = false;
770 rtx_insn *insn;
771
772 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
773 {
774 int n_ops, i, predicated;
775 bool is_asm, any_replacements;
776 rtx set;
777 rtx link;
778 bool replaced[MAX_RECOG_OPERANDS];
779 bool changed = false;
780 struct kill_set_value_data ksvd;
781
782 if (!NONDEBUG_INSN_P (insn))
783 {
784 if (DEBUG_INSN_P (insn))
785 {
786 rtx loc = INSN_VAR_LOCATION_LOC (insn);
787 if (!VAR_LOC_UNKNOWN_P (loc))
788 replace_oldest_value_addr (&INSN_VAR_LOCATION_LOC (insn),
789 ALL_REGS, GET_MODE (loc),
790 ADDR_SPACE_GENERIC, insn, vd);
791 }
792
793 if (insn == BB_END (bb))
794 break;
795 else
796 continue;
797 }
798
799 set = single_set (insn);
800 extract_constrain_insn (insn);
801 preprocess_constraints (insn);
802 const operand_alternative *op_alt = which_op_alt ();
803 n_ops = recog_data.n_operands;
804 is_asm = asm_noperands (PATTERN (insn)) >= 0;
805
806 /* Simplify the code below by promoting OP_OUT to OP_INOUT
807 in predicated instructions. */
808
809 predicated = GET_CODE (PATTERN (insn)) == COND_EXEC;
810 for (i = 0; i < n_ops; ++i)
811 {
812 int matches = op_alt[i].matches;
813 if (matches >= 0 || op_alt[i].matched >= 0
814 || (predicated && recog_data.operand_type[i] == OP_OUT))
815 recog_data.operand_type[i] = OP_INOUT;
816 }
817
818 /* Apply changes to earlier DEBUG_INSNs if possible. */
819 if (vd->n_debug_insn_changes)
820 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
821
822 /* For each earlyclobber operand, zap the value data. */
823 for (i = 0; i < n_ops; i++)
824 if (op_alt[i].earlyclobber)
825 kill_value (recog_data.operand[i], vd);
826
827 /* Within asms, a clobber cannot overlap inputs or outputs.
828 I wouldn't think this were true for regular insns, but
829 scan_rtx treats them like that... */
830 kill_clobbered_values (insn, vd);
831
832 /* Kill all auto-incremented values. */
833 /* ??? REG_INC is useless, since stack pushes aren't done that way. */
834 kill_autoinc_value (insn, vd);
835
836 /* Kill all early-clobbered operands. */
837 for (i = 0; i < n_ops; i++)
838 if (op_alt[i].earlyclobber)
839 kill_value (recog_data.operand[i], vd);
840
841 /* If we have dead sets in the insn, then we need to note these as we
842 would clobbers. */
843 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
844 {
845 if (REG_NOTE_KIND (link) == REG_UNUSED)
846 {
847 kill_value (XEXP (link, 0), vd);
848 /* Furthermore, if the insn looked like a single-set,
849 but the dead store kills the source value of that
850 set, then we can no longer use the plain move
851 special case below. */
852 if (set
853 && reg_overlap_mentioned_p (XEXP (link, 0), SET_SRC (set)))
854 set = NULL;
855 }
856 }
857
858 /* Special-case plain move instructions, since we may well
859 be able to do the move from a different register class. */
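/* For instance, the source of (set (reg:SI 1) (reg:SI 3)) may be
   replaceable by an older copy in a class that the operand's constraint
   would not normally allow; validate_change below verifies that the
   resulting insn still matches a move pattern (register numbers are
   arbitrary).  */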
860 if (set && REG_P (SET_SRC (set)))
861 {
862 rtx src = SET_SRC (set);
863 unsigned int regno = REGNO (src);
864 machine_mode mode = GET_MODE (src);
865 unsigned int i;
866 rtx new_rtx;
867
868 /* If we are accessing SRC in some mode other than what we
869 set it in, make sure that the replacement is valid. */
870 if (mode != vd->e[regno].mode)
871 {
872 if (hard_regno_nregs[regno][mode]
873 > hard_regno_nregs[regno][vd->e[regno].mode])
874 goto no_move_special_case;
875
876 /* Likewise, if we are narrowing on a big-endian target, the
877 transformation is also invalid. */
878 if (hard_regno_nregs[regno][mode]
879 < hard_regno_nregs[regno][vd->e[regno].mode]
880 && (GET_MODE_SIZE (vd->e[regno].mode) > UNITS_PER_WORD
881 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
882 goto no_move_special_case;
883 }
884
885 /* If the destination is also a register, try to find a source
886 register in the same class. */
887 if (REG_P (SET_DEST (set)))
888 {
889 new_rtx = find_oldest_value_reg (REGNO_REG_CLASS (regno), src, vd);
890 if (new_rtx && validate_change (insn, &SET_SRC (set), new_rtx, 0))
891 {
892 if (dump_file)
893 fprintf (dump_file,
894 "insn %u: replaced reg %u with %u\n",
895 INSN_UID (insn), regno, REGNO (new_rtx));
896 changed = true;
897 goto did_replacement;
898 }
899 /* We need to re-extract as validate_change clobbers
900 recog_data. */
901 extract_constrain_insn (insn);
902 preprocess_constraints (insn);
903 }
904
905 /* Otherwise, try each register in the value chain and see whether the replacement is valid. */
906 for (i = vd->e[regno].oldest_regno; i != regno;
907 i = vd->e[i].next_regno)
908 {
909 new_rtx = maybe_mode_change (vd->e[i].mode, vd->e[regno].mode,
910 mode, i, regno);
911 if (new_rtx != NULL_RTX)
912 {
913 if (validate_change (insn, &SET_SRC (set), new_rtx, 0))
914 {
915 ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (src);
916 REG_ATTRS (new_rtx) = REG_ATTRS (src);
917 REG_POINTER (new_rtx) = REG_POINTER (src);
918 if (dump_file)
919 fprintf (dump_file,
920 "insn %u: replaced reg %u with %u\n",
921 INSN_UID (insn), regno, REGNO (new_rtx));
922 changed = true;
923 goto did_replacement;
924 }
925 /* We need to re-extract as validate_change clobbers
926 recog_data. */
927 extract_constrain_insn (insn);
928 preprocess_constraints (insn);
929 }
930 }
931 }
932 no_move_special_case:
933
934 any_replacements = false;
935
936 /* For each input operand, replace a hard register with the
937 eldest live copy that's in an appropriate register class. */
938 for (i = 0; i < n_ops; i++)
939 {
940 replaced[i] = false;
941
942 /* Don't scan match_operand here, since we've no reg class
943 information to pass down. Any operands that we could
944 substitute in will be represented elsewhere. */
945 if (recog_data.constraints[i][0] == '\0')
946 continue;
947
948 /* Don't replace in asms intentionally referencing hard regs. */
949 if (is_asm && REG_P (recog_data.operand[i])
950 && (REGNO (recog_data.operand[i])
951 == ORIGINAL_REGNO (recog_data.operand[i])))
952 continue;
953
954 if (recog_data.operand_type[i] == OP_IN)
955 {
956 if (op_alt[i].is_address)
957 replaced[i]
958 = replace_oldest_value_addr (recog_data.operand_loc[i],
959 alternative_class (op_alt, i),
960 VOIDmode, ADDR_SPACE_GENERIC,
961 insn, vd);
962 else if (REG_P (recog_data.operand[i]))
963 replaced[i]
964 = replace_oldest_value_reg (recog_data.operand_loc[i],
965 alternative_class (op_alt, i),
966 insn, vd);
967 else if (MEM_P (recog_data.operand[i]))
968 replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
969 insn, vd);
970 }
971 else if (MEM_P (recog_data.operand[i]))
972 replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
973 insn, vd);
974
975 /* If we performed any replacement, update match_dups. */
976 if (replaced[i])
977 {
978 int j;
979 rtx new_rtx;
980
981 new_rtx = *recog_data.operand_loc[i];
982 recog_data.operand[i] = new_rtx;
983 for (j = 0; j < recog_data.n_dups; j++)
984 if (recog_data.dup_num[j] == i)
985 validate_unshare_change (insn, recog_data.dup_loc[j], new_rtx, 1);
986
987 any_replacements = true;
988 }
989 }
990
991 if (any_replacements)
992 {
993 if (! apply_change_group ())
994 {
995 for (i = 0; i < n_ops; i++)
996 if (replaced[i])
997 {
998 rtx old = *recog_data.operand_loc[i];
999 recog_data.operand[i] = old;
1000 }
1001
1002 if (dump_file)
1003 fprintf (dump_file,
1004 "insn %u: reg replacements not verified\n",
1005 INSN_UID (insn));
1006 }
1007 else
1008 changed = true;
1009 }
1010
1011 did_replacement:
1012 if (changed)
1013 {
1014 anything_changed = true;
1015
1016 /* If something changed, perhaps further changes to earlier
1017 DEBUG_INSNs can be applied. */
1018 if (vd->n_debug_insn_changes)
1019 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
1020 }
1021
1022 ksvd.vd = vd;
1023 ksvd.ignore_set_reg = NULL_RTX;
1024
1025 /* Clobber call-clobbered registers. */
1026 if (CALL_P (insn))
1027 {
1028 unsigned int set_regno = INVALID_REGNUM;
1029 unsigned int set_nregs = 0;
1030 unsigned int regno;
1031 rtx exp;
1032 HARD_REG_SET regs_invalidated_by_this_call;
1033
1034 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
1035 {
1036 rtx x = XEXP (exp, 0);
1037 if (GET_CODE (x) == SET)
1038 {
1039 rtx dest = SET_DEST (x);
1040 kill_value (dest, vd);
1041 set_value_regno (REGNO (dest), GET_MODE (dest), vd);
1042 copy_value (dest, SET_SRC (x), vd);
1043 ksvd.ignore_set_reg = dest;
1044 set_regno = REGNO (dest);
1045 set_nregs = REG_NREGS (dest);
1046 break;
1047 }
1048 }
1049
1050 get_call_reg_set_usage (insn,
1051 &regs_invalidated_by_this_call,
1052 regs_invalidated_by_call);
1053 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1054 if ((TEST_HARD_REG_BIT (regs_invalidated_by_this_call, regno)
1055 || HARD_REGNO_CALL_PART_CLOBBERED (regno, vd->e[regno].mode))
1056 && (regno < set_regno || regno >= set_regno + set_nregs))
1057 kill_value_regno (regno, 1, vd);
1058
1059 /* If SET was seen in CALL_INSN_FUNCTION_USAGE, and SET_SRC
1060 of the SET isn't in regs_invalidated_by_call hard reg set,
1061 but instead among CLOBBERs on the CALL_INSN, we could wrongly
1062 assume the value in SET_SRC is still live. */
1063 if (ksvd.ignore_set_reg)
1064 kill_clobbered_values (insn, vd);
1065 }
1066
1067 bool copy_p = (set
1068 && REG_P (SET_DEST (set))
1069 && REG_P (SET_SRC (set)));
1070 bool noop_p = (copy_p
1071 && rtx_equal_p (SET_DEST (set), SET_SRC (set)));
1072
1073 if (!noop_p)
1074 {
1075 /* Notice stores. */
1076 note_stores (PATTERN (insn), kill_set_value, &ksvd);
1077
1078 /* Notice copies. */
1079 if (copy_p)
1080 copy_value (SET_DEST (set), SET_SRC (set), vd);
1081 }
1082
1083 if (insn == BB_END (bb))
1084 break;
1085 }
1086
1087 return anything_changed;
1088 }
1089
1090 /* Dump the value chain data to stderr. */
1091
1092 DEBUG_FUNCTION void
1093 debug_value_data (struct value_data *vd)
1094 {
1095 HARD_REG_SET set;
1096 unsigned int i, j;
1097
1098 CLEAR_HARD_REG_SET (set);
1099
1100 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1101 if (vd->e[i].oldest_regno == i)
1102 {
1103 if (vd->e[i].mode == VOIDmode)
1104 {
1105 if (vd->e[i].next_regno != INVALID_REGNUM)
1106 fprintf (stderr, "[%u] Bad next_regno for empty chain (%u)\n",
1107 i, vd->e[i].next_regno);
1108 continue;
1109 }
1110
1111 SET_HARD_REG_BIT (set, i);
1112 fprintf (stderr, "[%u %s] ", i, GET_MODE_NAME (vd->e[i].mode));
1113
1114 for (j = vd->e[i].next_regno;
1115 j != INVALID_REGNUM;
1116 j = vd->e[j].next_regno)
1117 {
1118 if (TEST_HARD_REG_BIT (set, j))
1119 {
1120 fprintf (stderr, "[%u] Loop in regno chain\n", j);
1121 return;
1122 }
1123
1124 if (vd->e[j].oldest_regno != i)
1125 {
1126 fprintf (stderr, "[%u] Bad oldest_regno (%u)\n",
1127 j, vd->e[j].oldest_regno);
1128 return;
1129 }
1130 SET_HARD_REG_BIT (set, j);
1131 fprintf (stderr, "[%u %s] ", j, GET_MODE_NAME (vd->e[j].mode));
1132 }
1133 fputc ('\n', stderr);
1134 }
1135
1136 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1137 if (! TEST_HARD_REG_BIT (set, i)
1138 && (vd->e[i].mode != VOIDmode
1139 || vd->e[i].oldest_regno != i
1140 || vd->e[i].next_regno != INVALID_REGNUM))
1141 fprintf (stderr, "[%u] Non-empty reg in chain (%s %u %i)\n",
1142 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1143 vd->e[i].next_regno);
1144 }
1145
1146 /* Do copyprop_hardreg_forward_1 for a single basic block BB.
1147 DEBUG_INSNs are skipped since we do not want to involve the DF-related
1148 machinery that pass_cprop_hardreg::execute uses to handle them.
1149
1150 NOTE: Currently this is only used for shrink-wrapping. It could be
1151 extended to handle DEBUG_INSNs for other uses. */
1152
1153 void
1154 copyprop_hardreg_forward_bb_without_debug_insn (basic_block bb)
1155 {
1156 struct value_data *vd;
1157 vd = XNEWVEC (struct value_data, 1);
1158 init_value_data (vd);
1159
1160 skip_debug_insn_p = true;
1161 copyprop_hardreg_forward_1 (bb, vd);
1162 free (vd);
1163 skip_debug_insn_p = false;
1164 }
1165
1166 #ifdef ENABLE_CHECKING
1167 static void
1168 validate_value_data (struct value_data *vd)
1169 {
1170 HARD_REG_SET set;
1171 unsigned int i, j;
1172
1173 CLEAR_HARD_REG_SET (set);
1174
1175 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1176 if (vd->e[i].oldest_regno == i)
1177 {
1178 if (vd->e[i].mode == VOIDmode)
1179 {
1180 if (vd->e[i].next_regno != INVALID_REGNUM)
1181 internal_error ("validate_value_data: [%u] Bad next_regno for empty chain (%u)",
1182 i, vd->e[i].next_regno);
1183 continue;
1184 }
1185
1186 SET_HARD_REG_BIT (set, i);
1187
1188 for (j = vd->e[i].next_regno;
1189 j != INVALID_REGNUM;
1190 j = vd->e[j].next_regno)
1191 {
1192 if (TEST_HARD_REG_BIT (set, j))
1193 internal_error ("validate_value_data: Loop in regno chain (%u)",
1194 j);
1195 if (vd->e[j].oldest_regno != i)
1196 internal_error ("validate_value_data: [%u] Bad oldest_regno (%u)",
1197 j, vd->e[j].oldest_regno);
1198
1199 SET_HARD_REG_BIT (set, j);
1200 }
1201 }
1202
1203 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1204 if (! TEST_HARD_REG_BIT (set, i)
1205 && (vd->e[i].mode != VOIDmode
1206 || vd->e[i].oldest_regno != i
1207 || vd->e[i].next_regno != INVALID_REGNUM))
1208 internal_error ("validate_value_data: [%u] Non-empty reg in chain (%s %u %i)",
1209 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1210 vd->e[i].next_regno);
1211 }
1212 #endif
1213 \f
1214 namespace {
1215
1216 const pass_data pass_data_cprop_hardreg =
1217 {
1218 RTL_PASS, /* type */
1219 "cprop_hardreg", /* name */
1220 OPTGROUP_NONE, /* optinfo_flags */
1221 TV_CPROP_REGISTERS, /* tv_id */
1222 0, /* properties_required */
1223 0, /* properties_provided */
1224 0, /* properties_destroyed */
1225 0, /* todo_flags_start */
1226 TODO_df_finish, /* todo_flags_finish */
1227 };
1228
1229 class pass_cprop_hardreg : public rtl_opt_pass
1230 {
1231 public:
1232 pass_cprop_hardreg (gcc::context *ctxt)
1233 : rtl_opt_pass (pass_data_cprop_hardreg, ctxt)
1234 {}
1235
1236 /* opt_pass methods: */
1237 virtual bool gate (function *)
1238 {
1239 return (optimize > 0 && (flag_cprop_registers));
1240 }
1241
1242 virtual unsigned int execute (function *);
1243
1244 }; // class pass_cprop_hardreg
1245
1246 unsigned int
1247 pass_cprop_hardreg::execute (function *fun)
1248 {
1249 struct value_data *all_vd;
1250 basic_block bb;
1251 sbitmap visited;
1252 bool analyze_called = false;
1253
1254 all_vd = XNEWVEC (struct value_data, last_basic_block_for_fn (fun));
1255
1256 visited = sbitmap_alloc (last_basic_block_for_fn (fun));
1257 bitmap_clear (visited);
1258
1259 FOR_EACH_BB_FN (bb, fun)
1260 {
1261 bitmap_set_bit (visited, bb->index);
1262
1263 /* If a block has a single predecessor, that we've already
1264 processed, begin with the value data that was live at
1265 the end of the predecessor block. */
1266 /* ??? Ought to use more intelligent queuing of blocks. */
1267 if (single_pred_p (bb)
1268 && bitmap_bit_p (visited, single_pred (bb)->index)
1269 && ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
1270 {
1271 all_vd[bb->index] = all_vd[single_pred (bb)->index];
1272 if (all_vd[bb->index].n_debug_insn_changes)
1273 {
1274 unsigned int regno;
1275
1276 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1277 {
1278 if (all_vd[bb->index].e[regno].debug_insn_changes)
1279 {
1280 all_vd[bb->index].e[regno].debug_insn_changes = NULL;
1281 if (--all_vd[bb->index].n_debug_insn_changes == 0)
1282 break;
1283 }
1284 }
1285 }
1286 }
1287 else
1288 init_value_data (all_vd + bb->index);
1289
1290 copyprop_hardreg_forward_1 (bb, all_vd + bb->index);
1291 }
1292
1293 if (MAY_HAVE_DEBUG_INSNS)
1294 {
1295 FOR_EACH_BB_FN (bb, fun)
1296 if (bitmap_bit_p (visited, bb->index)
1297 && all_vd[bb->index].n_debug_insn_changes)
1298 {
1299 unsigned int regno;
1300 bitmap live;
1301
1302 if (!analyze_called)
1303 {
1304 df_analyze ();
1305 analyze_called = true;
1306 }
1307 live = df_get_live_out (bb);
1308 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1309 if (all_vd[bb->index].e[regno].debug_insn_changes)
1310 {
1311 if (REGNO_REG_SET_P (live, regno))
1312 apply_debug_insn_changes (all_vd + bb->index, regno);
1313 if (all_vd[bb->index].n_debug_insn_changes == 0)
1314 break;
1315 }
1316 }
1317
1318 queued_debug_insn_change::pool.release ();
1319 }
1320
1321 sbitmap_free (visited);
1322 free (all_vd);
1323 return 0;
1324 }
1325
1326 } // anon namespace
1327
1328 rtl_opt_pass *
1329 make_pass_cprop_hardreg (gcc::context *ctxt)
1330 {
1331 return new pass_cprop_hardreg (ctxt);
1332 }