1 /* Copy propagation on hard registers for the GNU compiler.
2 Copyright (C) 2000-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "df.h"
26 #include "memmodel.h"
27 #include "tm_p.h"
28 #include "insn-config.h"
29 #include "regs.h"
30 #include "emit-rtl.h"
31 #include "recog.h"
32 #include "diagnostic-core.h"
33 #include "addresses.h"
34 #include "tree-pass.h"
35 #include "rtl-iter.h"
36 #include "cfgrtl.h"
37 #include "target.h"
38
39 /* The following code does forward propagation of hard register copies.
40 The object is to eliminate as many dependencies as possible, so that
41 we have the most scheduling freedom. As a side effect, we also clean
42 up some silly register allocation decisions made by reload. This
43 code may be obsoleted by a new register allocator. */
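/* For illustration (an assumed example, not tied to any particular target):
   given a hard-register copy followed by a use of the copy,

     (set (reg:SI 1) (reg:SI 0))
     (set (reg:SI 2) (plus:SI (reg:SI 1) (const_int 4)))

   this pass rewrites the second insn to read from the oldest register
   that holds the value,

     (set (reg:SI 2) (plus:SI (reg:SI 0) (const_int 4)))

   which removes the scheduling dependency of the addition on the copy.  */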
44
45 /* DEBUG_INSNs aren't changed right away, as doing so might extend the
46 lifetime of a register and get the DEBUG_INSN subsequently reset.
47 So they are queued instead, and updated only when the register is
48 used in some subsequent real insn before it is set. */
49 struct queued_debug_insn_change
50 {
51 struct queued_debug_insn_change *next;
52 rtx_insn *insn;
53 rtx *loc;
54 rtx new_rtx;
55 };
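/* Lifecycle of a queued change, as implemented below:
   replace_oldest_value_reg allocates a queued_debug_insn_change and chains
   it onto the entry of the replacement register; cprop_find_used_regs
   applies the queued changes via apply_debug_insn_changes when that
   register is used by a later real insn; free_debug_insn_changes releases
   them when the register is killed instead.  */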
56
57 /* For each register, we have a list of registers that contain the same
58 value. The OLDEST_REGNO field points to the head of the list, and
59 the NEXT_REGNO field runs through the list. The MODE field indicates
60 what mode the data is known to be in; this field is VOIDmode when the
61 register is not known to contain valid data. */
62
63 struct value_data_entry
64 {
65 machine_mode mode;
66 unsigned int oldest_regno;
67 unsigned int next_regno;
68 struct queued_debug_insn_change *debug_insn_changes;
69 };
70
71 struct value_data
72 {
73 struct value_data_entry e[FIRST_PSEUDO_REGISTER];
74 unsigned int max_value_regs;
75 unsigned int n_debug_insn_changes;
76 };
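/* For illustration: after processing the copies

     (set (reg:SI 1) (reg:SI 0))
     (set (reg:SI 2) (reg:SI 1))

   the table contains the single chain 0 -> 1 -> 2, i.e.
   e[0].next_regno == 1, e[1].next_regno == 2, and all three entries
   have oldest_regno == 0.  */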
77
78 static object_allocator<queued_debug_insn_change> queued_debug_insn_change_pool
79 ("debug insn changes pool");
80
81 static bool skip_debug_insn_p;
82
83 static void kill_value_one_regno (unsigned, struct value_data *);
84 static void kill_value_regno (unsigned, unsigned, struct value_data *);
85 static void kill_value (const_rtx, struct value_data *);
86 static void set_value_regno (unsigned, machine_mode, struct value_data *);
87 static void init_value_data (struct value_data *);
88 static void kill_clobbered_value (rtx, const_rtx, void *);
89 static void kill_set_value (rtx, const_rtx, void *);
90 static void copy_value (rtx, rtx, struct value_data *);
91 static bool mode_change_ok (machine_mode, machine_mode,
92 unsigned int);
93 static rtx maybe_mode_change (machine_mode, machine_mode,
94 machine_mode, unsigned int, unsigned int);
95 static rtx find_oldest_value_reg (enum reg_class, rtx, struct value_data *);
96 static bool replace_oldest_value_reg (rtx *, enum reg_class, rtx_insn *,
97 struct value_data *);
98 static bool replace_oldest_value_addr (rtx *, enum reg_class,
99 machine_mode, addr_space_t,
100 rtx_insn *, struct value_data *);
101 static bool replace_oldest_value_mem (rtx, rtx_insn *, struct value_data *);
102 static bool copyprop_hardreg_forward_1 (basic_block, struct value_data *);
103 extern void debug_value_data (struct value_data *);
104 static void validate_value_data (struct value_data *);
105
106 /* Free all queued updates for DEBUG_INSNs that change some reg to
107 register REGNO. */
108
109 static void
110 free_debug_insn_changes (struct value_data *vd, unsigned int regno)
111 {
112 struct queued_debug_insn_change *cur, *next;
113 for (cur = vd->e[regno].debug_insn_changes; cur; cur = next)
114 {
115 next = cur->next;
116 --vd->n_debug_insn_changes;
117 queued_debug_insn_change_pool.remove (cur);
118 }
119 vd->e[regno].debug_insn_changes = NULL;
120 }
121
122 /* Kill register REGNO. This involves removing it from any value
123 lists, and resetting the value mode to VOIDmode. This is only a
124 helper function; it does not handle any hard registers overlapping
125 with REGNO. */
126
127 static void
128 kill_value_one_regno (unsigned int regno, struct value_data *vd)
129 {
130 unsigned int i, next;
131
132 if (vd->e[regno].oldest_regno != regno)
133 {
134 for (i = vd->e[regno].oldest_regno;
135 vd->e[i].next_regno != regno;
136 i = vd->e[i].next_regno)
137 continue;
138 vd->e[i].next_regno = vd->e[regno].next_regno;
139 }
140 else if ((next = vd->e[regno].next_regno) != INVALID_REGNUM)
141 {
142 for (i = next; i != INVALID_REGNUM; i = vd->e[i].next_regno)
143 vd->e[i].oldest_regno = next;
144 }
145
146 vd->e[regno].mode = VOIDmode;
147 vd->e[regno].oldest_regno = regno;
148 vd->e[regno].next_regno = INVALID_REGNUM;
149 if (vd->e[regno].debug_insn_changes)
150 free_debug_insn_changes (vd, regno);
151
152 if (flag_checking)
153 validate_value_data (vd);
154 }
155
156 /* Kill the value in register REGNO for NREGS, and any other registers
157 whose values overlap. */
158
159 static void
160 kill_value_regno (unsigned int regno, unsigned int nregs,
161 struct value_data *vd)
162 {
163 unsigned int j;
164
165 /* Kill the value we're told to kill. */
166 for (j = 0; j < nregs; ++j)
167 kill_value_one_regno (regno + j, vd);
168
169 /* Kill everything that overlapped what we're told to kill. */
170 if (regno < vd->max_value_regs)
171 j = 0;
172 else
173 j = regno - vd->max_value_regs;
174 for (; j < regno; ++j)
175 {
176 unsigned int i, n;
177 if (vd->e[j].mode == VOIDmode)
178 continue;
179 n = hard_regno_nregs (j, vd->e[j].mode);
180 if (j + n > regno)
181 for (i = 0; i < n; ++i)
182 kill_value_one_regno (j + i, vd);
183 }
184 }
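/* For example, on a target where (reg:DI 2) occupies hard regs 2 and 3,
   killing reg 3 alone must also kill the DImode value recorded for reg 2,
   since part of that value has just died; the second loop above walks back
   over the preceding max_value_regs registers to catch such overlaps.  */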
185
186 /* Kill X. This is a convenience function wrapping kill_value_regno
187 so that we mind the mode the register is in. */
188
189 static void
190 kill_value (const_rtx x, struct value_data *vd)
191 {
192 if (GET_CODE (x) == SUBREG)
193 {
194 rtx tmp = simplify_subreg (GET_MODE (x), SUBREG_REG (x),
195 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
196 x = tmp ? tmp : SUBREG_REG (x);
197 }
198 if (REG_P (x))
199 kill_value_regno (REGNO (x), REG_NREGS (x), vd);
200 }
201
202 /* Remember that REGNO is valid in MODE. */
203
204 static void
205 set_value_regno (unsigned int regno, machine_mode mode,
206 struct value_data *vd)
207 {
208 unsigned int nregs;
209
210 vd->e[regno].mode = mode;
211
212 nregs = hard_regno_nregs (regno, mode);
213 if (nregs > vd->max_value_regs)
214 vd->max_value_regs = nregs;
215 }
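/* E.g., assuming 32-bit hard registers, recording a DImode value bumps
   max_value_regs to at least 2, which is what tells kill_value_regno how
   far back to look for overlapping multi-register values.  */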
216
217 /* Initialize VD such that there are no known relationships between regs. */
218
219 static void
220 init_value_data (struct value_data *vd)
221 {
222 int i;
223 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
224 {
225 vd->e[i].mode = VOIDmode;
226 vd->e[i].oldest_regno = i;
227 vd->e[i].next_regno = INVALID_REGNUM;
228 vd->e[i].debug_insn_changes = NULL;
229 }
230 vd->max_value_regs = 0;
231 vd->n_debug_insn_changes = 0;
232 }
233
234 /* Called through note_stores. If X is clobbered, kill its value. */
235
236 static void
237 kill_clobbered_value (rtx x, const_rtx set, void *data)
238 {
239 struct value_data *const vd = (struct value_data *) data;
240 if (GET_CODE (set) == CLOBBER)
241 kill_value (x, vd);
242 }
243
244 /* A structure passed as data to kill_set_value through note_stores. */
245 struct kill_set_value_data
246 {
247 struct value_data *vd;
248 rtx ignore_set_reg;
249 };
250
251 /* Called through note_stores. If X is set, not clobbered, kill its
252 current value and install it as the root of its own value list. */
253
254 static void
255 kill_set_value (rtx x, const_rtx set, void *data)
256 {
257 struct kill_set_value_data *ksvd = (struct kill_set_value_data *) data;
258 if (rtx_equal_p (x, ksvd->ignore_set_reg))
259 return;
260 if (GET_CODE (set) != CLOBBER)
261 {
262 kill_value (x, ksvd->vd);
263 if (REG_P (x))
264 set_value_regno (REGNO (x), GET_MODE (x), ksvd->vd);
265 }
266 }
267
268 /* Kill any register used in INSN as the base of an auto-increment
269 expression, and install that register as the root of its own value list. */

270
271 static void
272 kill_autoinc_value (rtx_insn *insn, struct value_data *vd)
273 {
274 subrtx_iterator::array_type array;
275 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
276 {
277 const_rtx x = *iter;
278 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
279 {
280 x = XEXP (x, 0);
281 kill_value (x, vd);
282 set_value_regno (REGNO (x), GET_MODE (x), vd);
283 iter.skip_subrtxes ();
284 }
285 }
286 }
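/* For example, in (set (reg:SI 0) (mem:SI (post_inc:SI (reg:SI 1))))
   the auto-increment modifies reg 1, so any value recorded for reg 1
   is killed and reg 1 becomes the root of its own new value.  */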
287
288 /* Assert that SRC has been copied to DEST. Adjust the data structures
289 to reflect that SRC contains an older copy of the shared value. */
290
291 static void
292 copy_value (rtx dest, rtx src, struct value_data *vd)
293 {
294 unsigned int dr = REGNO (dest);
295 unsigned int sr = REGNO (src);
296 unsigned int dn, sn;
297 unsigned int i;
298
299 /* ??? At present, it's possible to see noop sets. It'd be nice if
300 this were cleaned up beforehand... */
301 if (sr == dr)
302 return;
303
304 /* Do not propagate copies to the stack pointer, as that can leave
305 memory accesses with no scheduling dependency on the stack update. */
306 if (dr == STACK_POINTER_REGNUM)
307 return;
308
309 /* Likewise with the frame pointer, if we're using one. */
310 if (frame_pointer_needed && dr == HARD_FRAME_POINTER_REGNUM)
311 return;
312
313 /* Do not propagate copies to fixed or global registers: patterns
314 may rely on seeing a particular fixed register, and users may
315 expect the chosen global register in an asm. */
316 if (fixed_regs[dr] || global_regs[dr])
317 return;
318
319 /* If SRC and DEST overlap, don't record anything. */
320 dn = REG_NREGS (dest);
321 sn = REG_NREGS (src);
322 if ((dr > sr && dr < sr + sn)
323 || (sr > dr && sr < dr + dn))
324 return;
325
326 /* If SRC had no assigned mode (i.e. we didn't know it was live)
327 assign it now and assume the value came from an input argument
328 or somesuch. */
329 if (vd->e[sr].mode == VOIDmode)
330 set_value_regno (sr, vd->e[dr].mode, vd);
331
332 /* If we are narrowing the input to a smaller number of hard regs,
333 and it is in big endian, we are really extracting a high part.
334 Since we generally associate a low part of a value with the value itself,
335 we must not do the same for the high part.
336 Note we can still get low parts for the same mode combination through
337 a two-step copy involving differently sized hard regs.
338 Assume hard regs fr* are 32 bits each, while r* are 64 bits each:
339 (set (reg:DI r0) (reg:DI fr0))
340 (set (reg:SI fr2) (reg:SI r0))
341 loads the low part of (reg:DI fr0) - i.e. fr1 - into fr2, while:
342 (set (reg:SI fr2) (reg:SI fr0))
343 loads the high part of (reg:DI fr0) into fr2.
344
345 We can't properly represent the latter case in our tables, so don't
346 record anything then. */
347 else if (sn < hard_regno_nregs (sr, vd->e[sr].mode)
348 && maybe_ne (subreg_lowpart_offset (GET_MODE (dest),
349 vd->e[sr].mode), 0U))
350 return;
351
352 /* If SRC had been assigned a mode narrower than the copy, we can't
353 link DEST into the chain, because not all of the pieces of the
354 copy came from oldest_regno. */
355 else if (sn > hard_regno_nregs (sr, vd->e[sr].mode))
356 return;
357
358 /* Link DR at the end of the value chain used by SR. */
359
360 vd->e[dr].oldest_regno = vd->e[sr].oldest_regno;
361
362 for (i = sr; vd->e[i].next_regno != INVALID_REGNUM; i = vd->e[i].next_regno)
363 continue;
364 vd->e[i].next_regno = dr;
365
366 if (flag_checking)
367 validate_value_data (vd);
368 }
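/* For illustration: with the chain 0 -> 1 already recorded, processing
   (set (reg:SI 2) (reg:SI 1)) appends 2 at the end of that chain rather
   than starting a new one, so a later use of reg 2 can still be replaced
   by reg 0, the oldest copy of the value.  */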
369
370 /* Return true if a mode change from ORIG to NEW is allowed for REGNO. */
371
372 static bool
373 mode_change_ok (machine_mode orig_mode, machine_mode new_mode,
374 unsigned int regno ATTRIBUTE_UNUSED)
375 {
376 if (partial_subreg_p (orig_mode, new_mode))
377 return false;
378
379 return REG_CAN_CHANGE_MODE_P (regno, orig_mode, new_mode);
380 }
381
382 /* Register REGNO was originally set in ORIG_MODE. It - or a copy of it -
383 was copied in COPY_MODE to COPY_REGNO, and then COPY_REGNO was accessed
384 in NEW_MODE.
385 Return a NEW_MODE rtx for REGNO if that's OK, otherwise return NULL_RTX. */
386
387 static rtx
388 maybe_mode_change (machine_mode orig_mode, machine_mode copy_mode,
389 machine_mode new_mode, unsigned int regno,
390 unsigned int copy_regno ATTRIBUTE_UNUSED)
391 {
392 if (partial_subreg_p (copy_mode, orig_mode)
393 && partial_subreg_p (copy_mode, new_mode))
394 return NULL_RTX;
395
396 /* Avoid creating multiple copies of the stack pointer. Some ports
397 assume there is one and only one stack pointer.
398
399 It's unclear if we need to do the same for other special registers. */
400 if (regno == STACK_POINTER_REGNUM)
401 return NULL_RTX;
402
403 if (orig_mode == new_mode)
404 return gen_raw_REG (new_mode, regno);
405 else if (mode_change_ok (orig_mode, new_mode, regno))
406 {
407 int copy_nregs = hard_regno_nregs (copy_regno, copy_mode);
408 int use_nregs = hard_regno_nregs (copy_regno, new_mode);
409 int copy_offset
410 = GET_MODE_SIZE (copy_mode) / copy_nregs * (copy_nregs - use_nregs);
411 poly_uint64 offset
412 = subreg_size_lowpart_offset (GET_MODE_SIZE (new_mode) + copy_offset,
413 GET_MODE_SIZE (orig_mode));
414 regno += subreg_regno_offset (regno, orig_mode, offset, new_mode);
415 if (targetm.hard_regno_mode_ok (regno, new_mode))
416 return gen_raw_REG (new_mode, regno);
417 }
418 return NULL_RTX;
419 }
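/* For example, if REGNO was set in SImode, copied in SImode, and the copy
   is now read in SImode as well, the result is simply a fresh SImode REG
   for REGNO; the more involved computation above is only needed when
   NEW_MODE differs from ORIG_MODE and the hard register holding the
   requested sub-part of the multi-register value has to be located.  */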
420
421 /* Find the oldest copy of the value contained in REGNO that is in
422 register class CL and has mode MODE. If found, return an rtx
423 of that oldest register, otherwise return NULL. */
424
425 static rtx
426 find_oldest_value_reg (enum reg_class cl, rtx reg, struct value_data *vd)
427 {
428 unsigned int regno = REGNO (reg);
429 machine_mode mode = GET_MODE (reg);
430 unsigned int i;
431
432 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
433
434 /* If we are accessing REG in some mode other than what we set it in,
435 make sure that the replacement is valid. In particular, consider
436 (set (reg:DI r11) (...))
437 (set (reg:SI r9) (reg:SI r11))
438 (set (reg:SI r10) (...))
439 (set (...) (reg:DI r9))
440 Replacing r9 with r11 is invalid. */
441 if (mode != vd->e[regno].mode
442 && REG_NREGS (reg) > hard_regno_nregs (regno, vd->e[regno].mode))
443 return NULL_RTX;
444
445 for (i = vd->e[regno].oldest_regno; i != regno; i = vd->e[i].next_regno)
446 {
447 machine_mode oldmode = vd->e[i].mode;
448 rtx new_rtx;
449
450 if (!in_hard_reg_set_p (reg_class_contents[cl], mode, i))
451 continue;
452
453 new_rtx = maybe_mode_change (oldmode, vd->e[regno].mode, mode, i, regno);
454 if (new_rtx)
455 {
456 ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (reg);
457 REG_ATTRS (new_rtx) = REG_ATTRS (reg);
458 REG_POINTER (new_rtx) = REG_POINTER (reg);
459 return new_rtx;
460 }
461 }
462
463 return NULL_RTX;
464 }
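/* For illustration: if the recorded chain for the value in REG is
   2 -> 5 -> 8 and REG is (reg:SI 8), the loop above first considers
   hard reg 2 and returns it, provided it belongs to CL and
   maybe_mode_change accepts the mode combination; otherwise it moves
   on to reg 5 before giving up.  */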
465
466 /* If possible, replace the register at *LOC with the oldest register
467 in register class CL. Return true if successfully replaced. */
468
469 static bool
470 replace_oldest_value_reg (rtx *loc, enum reg_class cl, rtx_insn *insn,
471 struct value_data *vd)
472 {
473 rtx new_rtx = find_oldest_value_reg (cl, *loc, vd);
474 if (new_rtx && (!DEBUG_INSN_P (insn) || !skip_debug_insn_p))
475 {
476 if (DEBUG_INSN_P (insn))
477 {
478 struct queued_debug_insn_change *change;
479
480 if (dump_file)
481 fprintf (dump_file, "debug_insn %u: queued replacing reg %u with %u\n",
482 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
483
484 change = queued_debug_insn_change_pool.allocate ();
485 change->next = vd->e[REGNO (new_rtx)].debug_insn_changes;
486 change->insn = insn;
487 change->loc = loc;
488 change->new_rtx = new_rtx;
489 vd->e[REGNO (new_rtx)].debug_insn_changes = change;
490 ++vd->n_debug_insn_changes;
491 return true;
492 }
493 if (dump_file)
494 fprintf (dump_file, "insn %u: replaced reg %u with %u\n",
495 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
496
497 validate_change (insn, loc, new_rtx, 1);
498 return true;
499 }
500 return false;
501 }
502
503 /* Similar to replace_oldest_value_reg, but *LOC contains an address.
504 Adapted from find_reloads_address_1. CL is INDEX_REG_CLASS or
505 BASE_REG_CLASS depending on how the register is being considered. */
506
507 static bool
508 replace_oldest_value_addr (rtx *loc, enum reg_class cl,
509 machine_mode mode, addr_space_t as,
510 rtx_insn *insn, struct value_data *vd)
511 {
512 rtx x = *loc;
513 RTX_CODE code = GET_CODE (x);
514 const char *fmt;
515 int i, j;
516 bool changed = false;
517
518 switch (code)
519 {
520 case PLUS:
521 if (DEBUG_INSN_P (insn))
522 break;
523
524 {
525 rtx orig_op0 = XEXP (x, 0);
526 rtx orig_op1 = XEXP (x, 1);
527 RTX_CODE code0 = GET_CODE (orig_op0);
528 RTX_CODE code1 = GET_CODE (orig_op1);
529 rtx op0 = orig_op0;
530 rtx op1 = orig_op1;
531 rtx *locI = NULL;
532 rtx *locB = NULL;
533 enum rtx_code index_code = SCRATCH;
534
535 if (GET_CODE (op0) == SUBREG)
536 {
537 op0 = SUBREG_REG (op0);
538 code0 = GET_CODE (op0);
539 }
540
541 if (GET_CODE (op1) == SUBREG)
542 {
543 op1 = SUBREG_REG (op1);
544 code1 = GET_CODE (op1);
545 }
546
547 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
548 || code0 == ZERO_EXTEND || code1 == MEM)
549 {
550 locI = &XEXP (x, 0);
551 locB = &XEXP (x, 1);
552 index_code = GET_CODE (*locI);
553 }
554 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
555 || code1 == ZERO_EXTEND || code0 == MEM)
556 {
557 locI = &XEXP (x, 1);
558 locB = &XEXP (x, 0);
559 index_code = GET_CODE (*locI);
560 }
561 else if (code0 == CONST_INT || code0 == CONST
562 || code0 == SYMBOL_REF || code0 == LABEL_REF)
563 {
564 locB = &XEXP (x, 1);
565 index_code = GET_CODE (XEXP (x, 0));
566 }
567 else if (code1 == CONST_INT || code1 == CONST
568 || code1 == SYMBOL_REF || code1 == LABEL_REF)
569 {
570 locB = &XEXP (x, 0);
571 index_code = GET_CODE (XEXP (x, 1));
572 }
573 else if (code0 == REG && code1 == REG)
574 {
575 int index_op;
576 unsigned regno0 = REGNO (op0), regno1 = REGNO (op1);
577
578 if (REGNO_OK_FOR_INDEX_P (regno1)
579 && regno_ok_for_base_p (regno0, mode, as, PLUS, REG))
580 index_op = 1;
581 else if (REGNO_OK_FOR_INDEX_P (regno0)
582 && regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
583 index_op = 0;
584 else if (regno_ok_for_base_p (regno0, mode, as, PLUS, REG)
585 || REGNO_OK_FOR_INDEX_P (regno1))
586 index_op = 1;
587 else if (regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
588 index_op = 0;
589 else
590 index_op = 1;
591
592 locI = &XEXP (x, index_op);
593 locB = &XEXP (x, !index_op);
594 index_code = GET_CODE (*locI);
595 }
596 else if (code0 == REG)
597 {
598 locI = &XEXP (x, 0);
599 locB = &XEXP (x, 1);
600 index_code = GET_CODE (*locI);
601 }
602 else if (code1 == REG)
603 {
604 locI = &XEXP (x, 1);
605 locB = &XEXP (x, 0);
606 index_code = GET_CODE (*locI);
607 }
608
609 if (locI)
610 changed |= replace_oldest_value_addr (locI, INDEX_REG_CLASS,
611 mode, as, insn, vd);
612 if (locB)
613 changed |= replace_oldest_value_addr (locB,
614 base_reg_class (mode, as, PLUS,
615 index_code),
616 mode, as, insn, vd);
617 return changed;
618 }
619
620 case POST_INC:
621 case POST_DEC:
622 case POST_MODIFY:
623 case PRE_INC:
624 case PRE_DEC:
625 case PRE_MODIFY:
626 return false;
627
628 case MEM:
629 return replace_oldest_value_mem (x, insn, vd);
630
631 case REG:
632 return replace_oldest_value_reg (loc, cl, insn, vd);
633
634 default:
635 break;
636 }
637
638 fmt = GET_RTX_FORMAT (code);
639 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
640 {
641 if (fmt[i] == 'e')
642 changed |= replace_oldest_value_addr (&XEXP (x, i), cl, mode, as,
643 insn, vd);
644 else if (fmt[i] == 'E')
645 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
646 changed |= replace_oldest_value_addr (&XVECEXP (x, i, j), cl,
647 mode, as, insn, vd);
648 }
649
650 return changed;
651 }
652
653 /* Similar to replace_oldest_value_reg, but X contains a memory. */
654
655 static bool
656 replace_oldest_value_mem (rtx x, rtx_insn *insn, struct value_data *vd)
657 {
658 enum reg_class cl;
659
660 if (DEBUG_INSN_P (insn))
661 cl = ALL_REGS;
662 else
663 cl = base_reg_class (GET_MODE (x), MEM_ADDR_SPACE (x), MEM, SCRATCH);
664
665 return replace_oldest_value_addr (&XEXP (x, 0), cl,
666 GET_MODE (x), MEM_ADDR_SPACE (x),
667 insn, vd);
668 }
669
670 /* Apply all queued updates for DEBUG_INSNs that change some reg to
671 register REGNO. */
672
673 static void
674 apply_debug_insn_changes (struct value_data *vd, unsigned int regno)
675 {
676 struct queued_debug_insn_change *change;
677 rtx_insn *last_insn = vd->e[regno].debug_insn_changes->insn;
678
679 for (change = vd->e[regno].debug_insn_changes;
680 change;
681 change = change->next)
682 {
683 if (last_insn != change->insn)
684 {
685 apply_change_group ();
686 last_insn = change->insn;
687 }
688 validate_change (change->insn, change->loc, change->new_rtx, 1);
689 }
690 apply_change_group ();
691 }
692
693 /* Called via note_uses. For each register used in a real insn,
694 apply any queued DEBUG_INSN changes that replace some register
695 with that used register. */
696
697 static void
698 cprop_find_used_regs (rtx *loc, void *data)
699 {
700 struct value_data *const vd = (struct value_data *) data;
701 subrtx_iterator::array_type array;
702 FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
703 {
704 const_rtx x = *iter;
705 if (REG_P (x))
706 {
707 unsigned int regno = REGNO (x);
708 if (vd->e[regno].debug_insn_changes)
709 {
710 apply_debug_insn_changes (vd, regno);
711 free_debug_insn_changes (vd, regno);
712 }
713 }
714 }
715 }
716
717 /* Apply clobbers of INSN in PATTERN and C_I_F_U to value_data VD. */
718
719 static void
720 kill_clobbered_values (rtx_insn *insn, struct value_data *vd)
721 {
722 note_stores (PATTERN (insn), kill_clobbered_value, vd);
723
724 if (CALL_P (insn))
725 {
726 rtx exp;
727
728 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
729 {
730 rtx x = XEXP (exp, 0);
731 if (GET_CODE (x) == CLOBBER)
732 kill_value (SET_DEST (x), vd);
733 }
734 }
735 }
736
737 /* Perform the forward copy propagation on basic block BB. */
738
739 static bool
740 copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
741 {
742 bool anything_changed = false;
743 rtx_insn *insn, *next;
744
745 for (insn = BB_HEAD (bb); ; insn = next)
746 {
747 int n_ops, i, predicated;
748 bool is_asm, any_replacements;
749 rtx set;
750 rtx link;
751 bool replaced[MAX_RECOG_OPERANDS];
752 bool changed = false;
753 struct kill_set_value_data ksvd;
754
755 next = NEXT_INSN (insn);
756 if (!NONDEBUG_INSN_P (insn))
757 {
758 if (DEBUG_BIND_INSN_P (insn))
759 {
760 rtx loc = INSN_VAR_LOCATION_LOC (insn);
761 if (!VAR_LOC_UNKNOWN_P (loc))
762 replace_oldest_value_addr (&INSN_VAR_LOCATION_LOC (insn),
763 ALL_REGS, GET_MODE (loc),
764 ADDR_SPACE_GENERIC, insn, vd);
765 }
766
767 if (insn == BB_END (bb))
768 break;
769 else
770 continue;
771 }
772
773 set = single_set (insn);
774
775 /* Detect noop sets and remove them before processing side effects. */
776 if (set && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set)))
777 {
778 unsigned int regno = REGNO (SET_SRC (set));
779 rtx r1 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
780 SET_DEST (set), vd);
781 rtx r2 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
782 SET_SRC (set), vd);
783 if (rtx_equal_p (r1 ? r1 : SET_DEST (set), r2 ? r2 : SET_SRC (set)))
784 {
785 bool last = insn == BB_END (bb);
786 delete_insn (insn);
787 if (last)
788 break;
789 continue;
790 }
791 }
792
793 extract_constrain_insn (insn);
794 preprocess_constraints (insn);
795 const operand_alternative *op_alt = which_op_alt ();
796 n_ops = recog_data.n_operands;
797 is_asm = asm_noperands (PATTERN (insn)) >= 0;
798
799 /* Simplify the code below by promoting OP_OUT to OP_INOUT
800 in predicated instructions. */
801
802 predicated = GET_CODE (PATTERN (insn)) == COND_EXEC;
803 for (i = 0; i < n_ops; ++i)
804 {
805 int matches = op_alt[i].matches;
806 if (matches >= 0 || op_alt[i].matched >= 0
807 || (predicated && recog_data.operand_type[i] == OP_OUT))
808 recog_data.operand_type[i] = OP_INOUT;
809 }
810
811 /* Apply changes to earlier DEBUG_INSNs if possible. */
812 if (vd->n_debug_insn_changes)
813 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
814
815 /* For each earlyclobber operand, zap the value data. */
816 for (i = 0; i < n_ops; i++)
817 if (op_alt[i].earlyclobber)
818 kill_value (recog_data.operand[i], vd);
819
820 /* Within asms, a clobber cannot overlap inputs or outputs.
821 I wouldn't think this were true for regular insns, but
822 scan_rtx treats them like that... */
823 kill_clobbered_values (insn, vd);
824
825 /* Kill all auto-incremented values. */
826 /* ??? REG_INC is useless, since stack pushes aren't done that way. */
827 kill_autoinc_value (insn, vd);
828
829 /* Kill all early-clobbered operands. */
830 for (i = 0; i < n_ops; i++)
831 if (op_alt[i].earlyclobber)
832 kill_value (recog_data.operand[i], vd);
833
834 /* If we have dead sets in the insn, then we need to note these as we
835 would clobbers. */
836 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
837 {
838 if (REG_NOTE_KIND (link) == REG_UNUSED)
839 {
840 kill_value (XEXP (link, 0), vd);
841 /* Furthermore, if the insn looked like a single-set,
842 but the dead store kills the source value of that
843 set, then we can no longer use the plain move
844 special case below. */
845 if (set
846 && reg_overlap_mentioned_p (XEXP (link, 0), SET_SRC (set)))
847 set = NULL;
848 }
849 }
850
851 /* Special-case plain move instructions, since we may well
852 be able to do the move from a different register class. */
853 if (set && REG_P (SET_SRC (set)))
854 {
855 rtx src = SET_SRC (set);
856 unsigned int regno = REGNO (src);
857 machine_mode mode = GET_MODE (src);
858 unsigned int i;
859 rtx new_rtx;
860
861 /* If we are accessing SRC in some mode other than what we
862 set it in, make sure that the replacement is valid. */
863 if (mode != vd->e[regno].mode)
864 {
865 if (REG_NREGS (src)
866 > hard_regno_nregs (regno, vd->e[regno].mode))
867 goto no_move_special_case;
868
869 /* Likewise, if we are narrowing on a big-endian target, the
870 transformation is also invalid. */
871 if (REG_NREGS (src) < hard_regno_nregs (regno, vd->e[regno].mode)
872 && maybe_ne (subreg_lowpart_offset (mode,
873 vd->e[regno].mode), 0U))
874 goto no_move_special_case;
875 }
876
877 /* If the destination is also a register, try to find a source
878 register in the same class. */
879 if (REG_P (SET_DEST (set)))
880 {
881 new_rtx = find_oldest_value_reg (REGNO_REG_CLASS (regno),
882 src, vd);
883
884 if (new_rtx && validate_change (insn, &SET_SRC (set), new_rtx, 0))
885 {
886 if (dump_file)
887 fprintf (dump_file,
888 "insn %u: replaced reg %u with %u\n",
889 INSN_UID (insn), regno, REGNO (new_rtx));
890 changed = true;
891 goto did_replacement;
892 }
893 /* We need to re-extract as validate_change clobbers
894 recog_data. */
895 extract_constrain_insn (insn);
896 preprocess_constraints (insn);
897 }
898
899 /* Otherwise, try every register in the value chain and see if a replacement is valid. */
900 for (i = vd->e[regno].oldest_regno; i != regno;
901 i = vd->e[i].next_regno)
902 {
903 new_rtx = maybe_mode_change (vd->e[i].mode, vd->e[regno].mode,
904 mode, i, regno);
905 if (new_rtx != NULL_RTX)
906 {
907 if (validate_change (insn, &SET_SRC (set), new_rtx, 0))
908 {
909 ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (src);
910 REG_ATTRS (new_rtx) = REG_ATTRS (src);
911 REG_POINTER (new_rtx) = REG_POINTER (src);
912 if (dump_file)
913 fprintf (dump_file,
914 "insn %u: replaced reg %u with %u\n",
915 INSN_UID (insn), regno, REGNO (new_rtx));
916 changed = true;
917 goto did_replacement;
918 }
919 /* We need to re-extract as validate_change clobbers
920 recog_data. */
921 extract_constrain_insn (insn);
922 preprocess_constraints (insn);
923 }
924 }
925 }
926 no_move_special_case:
927
928 any_replacements = false;
929
930 /* For each input operand, replace a hard register with the
931 eldest live copy that's in an appropriate register class. */
932 for (i = 0; i < n_ops; i++)
933 {
934 replaced[i] = false;
935
936 /* Don't scan match_operand here, since we've no reg class
937 information to pass down. Any operands that we could
938 substitute in will be represented elsewhere. */
939 if (recog_data.constraints[i][0] == '\0')
940 continue;
941
942 /* Don't replace in asms intentionally referencing hard regs. */
943 if (is_asm && REG_P (recog_data.operand[i])
944 && (REGNO (recog_data.operand[i])
945 == ORIGINAL_REGNO (recog_data.operand[i])))
946 continue;
947
948 if (recog_data.operand_type[i] == OP_IN)
949 {
950 if (op_alt[i].is_address)
951 replaced[i]
952 = replace_oldest_value_addr (recog_data.operand_loc[i],
953 alternative_class (op_alt, i),
954 VOIDmode, ADDR_SPACE_GENERIC,
955 insn, vd);
956 else if (REG_P (recog_data.operand[i]))
957 replaced[i]
958 = replace_oldest_value_reg (recog_data.operand_loc[i],
959 alternative_class (op_alt, i),
960 insn, vd);
961 else if (MEM_P (recog_data.operand[i]))
962 replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
963 insn, vd);
964 }
965 else if (MEM_P (recog_data.operand[i]))
966 replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
967 insn, vd);
968
969 /* If we performed any replacement, update match_dups. */
970 if (replaced[i])
971 {
972 int j;
973 rtx new_rtx;
974
975 new_rtx = *recog_data.operand_loc[i];
976 recog_data.operand[i] = new_rtx;
977 for (j = 0; j < recog_data.n_dups; j++)
978 if (recog_data.dup_num[j] == i)
979 validate_unshare_change (insn, recog_data.dup_loc[j], new_rtx, 1);
980
981 any_replacements = true;
982 }
983 }
984
985 if (any_replacements)
986 {
987 if (! apply_change_group ())
988 {
989 for (i = 0; i < n_ops; i++)
990 if (replaced[i])
991 {
992 rtx old = *recog_data.operand_loc[i];
993 recog_data.operand[i] = old;
994 }
995
996 if (dump_file)
997 fprintf (dump_file,
998 "insn %u: reg replacements not verified\n",
999 INSN_UID (insn));
1000 }
1001 else
1002 changed = true;
1003 }
1004
1005 did_replacement:
1006 if (changed)
1007 {
1008 anything_changed = true;
1009
1010 /* If something changed, perhaps further changes to earlier
1011 DEBUG_INSNs can be applied. */
1012 if (vd->n_debug_insn_changes)
1013 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
1014 }
1015
1016 ksvd.vd = vd;
1017 ksvd.ignore_set_reg = NULL_RTX;
1018
1019 /* Clobber call-clobbered registers. */
1020 if (CALL_P (insn))
1021 {
1022 unsigned int set_regno = INVALID_REGNUM;
1023 unsigned int set_nregs = 0;
1024 unsigned int regno;
1025 rtx exp;
1026 HARD_REG_SET regs_invalidated_by_this_call;
1027
1028 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
1029 {
1030 rtx x = XEXP (exp, 0);
1031 if (GET_CODE (x) == SET)
1032 {
1033 rtx dest = SET_DEST (x);
1034 kill_value (dest, vd);
1035 set_value_regno (REGNO (dest), GET_MODE (dest), vd);
1036 copy_value (dest, SET_SRC (x), vd);
1037 ksvd.ignore_set_reg = dest;
1038 set_regno = REGNO (dest);
1039 set_nregs = REG_NREGS (dest);
1040 break;
1041 }
1042 }
1043
1044 get_call_reg_set_usage (insn,
1045 &regs_invalidated_by_this_call,
1046 regs_invalidated_by_call);
1047 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1048 if ((TEST_HARD_REG_BIT (regs_invalidated_by_this_call, regno)
1049 || (targetm.hard_regno_call_part_clobbered
1050 (regno, vd->e[regno].mode)))
1051 && (regno < set_regno || regno >= set_regno + set_nregs))
1052 kill_value_regno (regno, 1, vd);
1053
1054 /* If SET was seen in CALL_INSN_FUNCTION_USAGE, and SET_SRC
1055 of the SET isn't in regs_invalidated_by_call hard reg set,
1056 but instead among CLOBBERs on the CALL_INSN, we could wrongly
1057 assume the value in it is still live. */
1058 if (ksvd.ignore_set_reg)
1059 kill_clobbered_values (insn, vd);
1060 }
1061
1062 bool copy_p = (set
1063 && REG_P (SET_DEST (set))
1064 && REG_P (SET_SRC (set)));
1065 bool noop_p = (copy_p
1066 && rtx_equal_p (SET_DEST (set), SET_SRC (set)));
1067
1068 /* If a noop move is using narrower mode than we have recorded,
1069 we need to either remove the noop move, or kill_set_value. */
1070 if (noop_p
1071 && partial_subreg_p (GET_MODE (SET_DEST (set)),
1072 vd->e[REGNO (SET_DEST (set))].mode))
1073 {
1074 if (noop_move_p (insn))
1075 {
1076 bool last = insn == BB_END (bb);
1077 delete_insn (insn);
1078 if (last)
1079 break;
1080 }
1081 else
1082 noop_p = false;
1083 }
1084
1085 if (!noop_p)
1086 {
1087 /* Notice stores. */
1088 note_stores (PATTERN (insn), kill_set_value, &ksvd);
1089
1090 /* Notice copies. */
1091 if (copy_p)
1092 copy_value (SET_DEST (set), SET_SRC (set), vd);
1093 }
1094
1095 if (insn == BB_END (bb))
1096 break;
1097 }
1098
1099 return anything_changed;
1100 }
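/* Roughly, the per-insn order used above is: queued DEBUG_INSN changes are
   flushed for registers the insn uses; earlyclobber operands, clobbers and
   auto-increments are killed; plain reg-reg moves get a chance to read from
   an older copy; remaining input operands are replaced where the constraints
   allow; call-clobbered registers are invalidated at calls; and finally the
   sets of the insn are recorded, with copies extending the value chains.  */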
1101
1102 /* Dump the value chain data to stderr. */
1103
1104 DEBUG_FUNCTION void
1105 debug_value_data (struct value_data *vd)
1106 {
1107 HARD_REG_SET set;
1108 unsigned int i, j;
1109
1110 CLEAR_HARD_REG_SET (set);
1111
1112 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1113 if (vd->e[i].oldest_regno == i)
1114 {
1115 if (vd->e[i].mode == VOIDmode)
1116 {
1117 if (vd->e[i].next_regno != INVALID_REGNUM)
1118 fprintf (stderr, "[%u] Bad next_regno for empty chain (%u)\n",
1119 i, vd->e[i].next_regno);
1120 continue;
1121 }
1122
1123 SET_HARD_REG_BIT (set, i);
1124 fprintf (stderr, "[%u %s] ", i, GET_MODE_NAME (vd->e[i].mode));
1125
1126 for (j = vd->e[i].next_regno;
1127 j != INVALID_REGNUM;
1128 j = vd->e[j].next_regno)
1129 {
1130 if (TEST_HARD_REG_BIT (set, j))
1131 {
1132 fprintf (stderr, "[%u] Loop in regno chain\n", j);
1133 return;
1134 }
1135
1136 if (vd->e[j].oldest_regno != i)
1137 {
1138 fprintf (stderr, "[%u] Bad oldest_regno (%u)\n",
1139 j, vd->e[j].oldest_regno);
1140 return;
1141 }
1142 SET_HARD_REG_BIT (set, j);
1143 fprintf (stderr, "[%u %s] ", j, GET_MODE_NAME (vd->e[j].mode));
1144 }
1145 fputc ('\n', stderr);
1146 }
1147
1148 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1149 if (! TEST_HARD_REG_BIT (set, i)
1150 && (vd->e[i].mode != VOIDmode
1151 || vd->e[i].oldest_regno != i
1152 || vd->e[i].next_regno != INVALID_REGNUM))
1153 fprintf (stderr, "[%u] Non-empty reg in chain (%s %u %i)\n",
1154 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1155 vd->e[i].next_regno);
1156 }
1157
1158 /* Do copyprop_hardreg_forward_1 for a single basic block BB.
1159 DEBUG_INSNs are skipped, since we do not want to involve the
1160 DF-related machinery used by pass_cprop_hardreg::execute.
1161
1162 NOTE: Currently this is only used for shrink-wrapping. Maybe extend
1163 it to handle DEBUG_INSNs for other uses. */
1164
1165 void
1166 copyprop_hardreg_forward_bb_without_debug_insn (basic_block bb)
1167 {
1168 struct value_data *vd;
1169 vd = XNEWVEC (struct value_data, 1);
1170 init_value_data (vd);
1171
1172 skip_debug_insn_p = true;
1173 copyprop_hardreg_forward_1 (bb, vd);
1174 free (vd);
1175 skip_debug_insn_p = false;
1176 }
1177
1178 static void
1179 validate_value_data (struct value_data *vd)
1180 {
1181 HARD_REG_SET set;
1182 unsigned int i, j;
1183
1184 CLEAR_HARD_REG_SET (set);
1185
1186 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1187 if (vd->e[i].oldest_regno == i)
1188 {
1189 if (vd->e[i].mode == VOIDmode)
1190 {
1191 if (vd->e[i].next_regno != INVALID_REGNUM)
1192 internal_error ("validate_value_data: [%u] Bad next_regno for empty chain (%u)",
1193 i, vd->e[i].next_regno);
1194 continue;
1195 }
1196
1197 SET_HARD_REG_BIT (set, i);
1198
1199 for (j = vd->e[i].next_regno;
1200 j != INVALID_REGNUM;
1201 j = vd->e[j].next_regno)
1202 {
1203 if (TEST_HARD_REG_BIT (set, j))
1204 internal_error ("validate_value_data: Loop in regno chain (%u)",
1205 j);
1206 if (vd->e[j].oldest_regno != i)
1207 internal_error ("validate_value_data: [%u] Bad oldest_regno (%u)",
1208 j, vd->e[j].oldest_regno);
1209
1210 SET_HARD_REG_BIT (set, j);
1211 }
1212 }
1213
1214 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1215 if (! TEST_HARD_REG_BIT (set, i)
1216 && (vd->e[i].mode != VOIDmode
1217 || vd->e[i].oldest_regno != i
1218 || vd->e[i].next_regno != INVALID_REGNUM))
1219 internal_error ("validate_value_data: [%u] Non-empty reg in chain (%s %u %i)",
1220 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1221 vd->e[i].next_regno);
1222 }
1223
1224 \f
1225 namespace {
1226
1227 const pass_data pass_data_cprop_hardreg =
1228 {
1229 RTL_PASS, /* type */
1230 "cprop_hardreg", /* name */
1231 OPTGROUP_NONE, /* optinfo_flags */
1232 TV_CPROP_REGISTERS, /* tv_id */
1233 0, /* properties_required */
1234 0, /* properties_provided */
1235 0, /* properties_destroyed */
1236 0, /* todo_flags_start */
1237 TODO_df_finish, /* todo_flags_finish */
1238 };
1239
1240 class pass_cprop_hardreg : public rtl_opt_pass
1241 {
1242 public:
1243 pass_cprop_hardreg (gcc::context *ctxt)
1244 : rtl_opt_pass (pass_data_cprop_hardreg, ctxt)
1245 {}
1246
1247 /* opt_pass methods: */
1248 virtual bool gate (function *)
1249 {
1250 return (optimize > 0 && (flag_cprop_registers));
1251 }
1252
1253 virtual unsigned int execute (function *);
1254
1255 }; // class pass_cprop_hardreg
1256
1257 unsigned int
1258 pass_cprop_hardreg::execute (function *fun)
1259 {
1260 struct value_data *all_vd;
1261 basic_block bb;
1262 bool analyze_called = false;
1263
1264 all_vd = XNEWVEC (struct value_data, last_basic_block_for_fn (fun));
1265
1266 auto_sbitmap visited (last_basic_block_for_fn (fun));
1267 bitmap_clear (visited);
1268
1269 FOR_EACH_BB_FN (bb, fun)
1270 {
1271 bitmap_set_bit (visited, bb->index);
1272
1273 /* If a block has a single predecessor that we've already
1274 processed, begin with the value data that was live at
1275 the end of that predecessor block. */
1276 /* ??? Ought to use more intelligent queuing of blocks. */
1277 if (single_pred_p (bb)
1278 && bitmap_bit_p (visited, single_pred (bb)->index)
1279 && ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
1280 {
1281 all_vd[bb->index] = all_vd[single_pred (bb)->index];
1282 if (all_vd[bb->index].n_debug_insn_changes)
1283 {
1284 unsigned int regno;
1285
1286 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1287 {
1288 if (all_vd[bb->index].e[regno].debug_insn_changes)
1289 {
1290 all_vd[bb->index].e[regno].debug_insn_changes = NULL;
1291 if (--all_vd[bb->index].n_debug_insn_changes == 0)
1292 break;
1293 }
1294 }
1295 }
1296 }
1297 else
1298 init_value_data (all_vd + bb->index);
1299
1300 copyprop_hardreg_forward_1 (bb, all_vd + bb->index);
1301 }
1302
1303 if (MAY_HAVE_DEBUG_BIND_INSNS)
1304 {
1305 FOR_EACH_BB_FN (bb, fun)
1306 if (bitmap_bit_p (visited, bb->index)
1307 && all_vd[bb->index].n_debug_insn_changes)
1308 {
1309 unsigned int regno;
1310 bitmap live;
1311
1312 if (!analyze_called)
1313 {
1314 df_analyze ();
1315 analyze_called = true;
1316 }
1317 live = df_get_live_out (bb);
1318 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1319 if (all_vd[bb->index].e[regno].debug_insn_changes)
1320 {
1321 if (REGNO_REG_SET_P (live, regno))
1322 apply_debug_insn_changes (all_vd + bb->index, regno);
1323 if (all_vd[bb->index].n_debug_insn_changes == 0)
1324 break;
1325 }
1326 }
1327
1328 queued_debug_insn_change_pool.release ();
1329 }
1330
1331 free (all_vd);
1332 return 0;
1333 }
1334
1335 } // anon namespace
1336
1337 rtl_opt_pass *
1338 make_pass_cprop_hardreg (gcc::context *ctxt)
1339 {
1340 return new pass_cprop_hardreg (ctxt);
1341 }