backport: As described in http://gcc.gnu.org/ml/gcc/2012-08/msg00015.html...
[gcc.git] / gcc / reload1.c
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 2011, 2012 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26
27 #include "machmode.h"
28 #include "hard-reg-set.h"
29 #include "rtl-error.h"
30 #include "tm_p.h"
31 #include "obstack.h"
32 #include "insn-config.h"
33 #include "ggc.h"
34 #include "flags.h"
35 #include "function.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "regs.h"
39 #include "addresses.h"
40 #include "basic-block.h"
41 #include "df.h"
42 #include "reload.h"
43 #include "recog.h"
44 #include "except.h"
45 #include "tree.h"
46 #include "ira.h"
47 #include "target.h"
48 #include "emit-rtl.h"
49 #include "dumpfile.h"
50
51 /* This file contains the reload pass of the compiler, which is
52 run after register allocation has been done. It checks that
53 each insn is valid (operands required to be in registers really
54 are in registers of the proper class) and fixes up invalid ones
55 by copying values temporarily into registers for the insns
56 that need them.
57
58 The results of register allocation are described by the vector
59 reg_renumber; the insns still contain pseudo regs, but reg_renumber
60 can be used to find which hard reg, if any, a pseudo reg is in.
61
62 The technique we always use is to free up a few hard regs that are
63 called ``reload regs'', and for each place where a pseudo reg
64 must be in a hard reg, copy it temporarily into one of the reload regs.
65
66 Reload regs are allocated locally for every instruction that needs
67 reloads. When there are pseudos which are allocated to a register that
68 has been chosen as a reload reg, such pseudos must be ``spilled''.
69 This means that they go to other hard regs, or to stack slots if no other
70 available hard regs can be found. Spilling can invalidate more
71 insns, requiring additional need for reloads, so we must keep checking
72 until the process stabilizes.
73
74 For machines with different classes of registers, we must keep track
75 of the register class needed for each reload, and make sure that
76 we allocate enough reload registers of each class.
77
78 The file reload.c contains the code that checks one insn for
79 validity and reports the reloads that it needs. This file
80 is in charge of scanning the entire rtl code, accumulating the
81 reload needs, spilling, assigning reload registers to use for
82 fixing up each insn, and generating the new insns to copy values
83 into the reload registers. */
84 \f
85 struct target_reload default_target_reload;
86 #if SWITCHABLE_TARGET
87 struct target_reload *this_target_reload = &default_target_reload;
88 #endif
89
90 #define spill_indirect_levels \
91 (this_target_reload->x_spill_indirect_levels)
92
93 /* During reload_as_needed, element N contains a REG rtx for the hard reg
94 into which reg N has been reloaded (perhaps for a previous insn). */
95 static rtx *reg_last_reload_reg;
96
97 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
98 for an output reload that stores into reg N. */
99 static regset_head reg_has_output_reload;
100
101 /* Indicates which hard regs are reload-registers for an output reload
102 in the current insn. */
103 static HARD_REG_SET reg_is_output_reload;
104
105 /* Widest width in which each pseudo reg is referred to (via subreg). */
106 static unsigned int *reg_max_ref_width;
107
108 /* Vector to remember old contents of reg_renumber before spilling. */
109 static short *reg_old_renumber;
110
111 /* During reload_as_needed, element N contains the last pseudo regno reloaded
112 into hard register N. If that pseudo reg occupied more than one register,
113 reg_reloaded_contents points to that pseudo for each spill register in
114 use; all of these must remain set for an inheritance to occur. */
115 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
116
117 /* During reload_as_needed, element N contains the insn for which
118 hard register N was last used. Its contents are significant only
119 when reg_reloaded_valid is set for this register. */
120 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
121
122 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
123 static HARD_REG_SET reg_reloaded_valid;
124 /* Indicate if the register was dead at the end of the reload.
125 This is only valid if reg_reloaded_contents is set and valid. */
126 static HARD_REG_SET reg_reloaded_dead;
127
128 /* Indicate whether the register's current value is one that is not
129 safe to retain across a call, even for registers that are normally
130 call-saved. This is only meaningful for members of reg_reloaded_valid. */
131 static HARD_REG_SET reg_reloaded_call_part_clobbered;
132
133 /* Number of spill-regs so far; number of valid elements of spill_regs. */
134 static int n_spills;
135
136 /* In parallel with spill_regs, contains REG rtx's for those regs.
137 Holds the last rtx used for any given reg, or 0 if it has never
138 been used for spilling yet. This rtx is reused, provided it has
139 the proper mode. */
140 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
141
142 /* In parallel with spill_regs, contains nonzero for a spill reg
143 that was stored after the last time it was used.
144 The precise value is the insn generated to do the store. */
145 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
146
147 /* This is the register that was stored with spill_reg_store. This is a
148 copy of reload_out / reload_out_reg when the value was stored; if
149 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
150 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
151
152 /* This table is the inverse mapping of spill_regs:
153 indexed by hard reg number,
154 it contains the position of that reg in spill_regs,
155 or -1 for something that is not in spill_regs.
156
157 ?!? This is no longer accurate. */
158 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
159
160 /* This reg set indicates registers that can't be used as spill registers for
161 the currently processed insn. These are the hard registers which are live
162 during the insn, but not allocated to pseudos, as well as fixed
163 registers. */
164 static HARD_REG_SET bad_spill_regs;
165
166 /* These are the hard registers that can't be used as spill register for any
167 insn. This includes registers used for user variables and registers that
168 we can't eliminate. A register that appears in this set also can't be used
169 to retry register allocation. */
170 static HARD_REG_SET bad_spill_regs_global;
171
172 /* Describes order of use of registers for reloading
173 of spilled pseudo-registers. `n_spills' is the number of
174 elements that are actually valid; new ones are added at the end.
175
176 Both spill_regs and spill_reg_order are used on two occasions:
177 once during find_reload_regs, where they keep track of the spill registers
178 for a single insn, but also during reload_as_needed where they show all
179 the registers ever used by reload. For the latter case, the information
180 is calculated during finish_spills. */
181 static short spill_regs[FIRST_PSEUDO_REGISTER];
182
183 /* This vector of reg sets indicates, for each pseudo, which hard registers
184 may not be used for retrying global allocation because the register was
185 formerly spilled from one of them. If we allowed reallocating a pseudo to
186 a register that it was already allocated to, reload might not
187 terminate. */
188 static HARD_REG_SET *pseudo_previous_regs;
189
190 /* This vector of reg sets indicates, for each pseudo, which hard
191 registers may not be used for retrying global allocation because they
192 are used as spill registers during one of the insns in which the
193 pseudo is live. */
194 static HARD_REG_SET *pseudo_forbidden_regs;
195
196 /* All hard regs that have been used as spill registers for any insn are
197 marked in this set. */
198 static HARD_REG_SET used_spill_regs;
199
200 /* Index of last register assigned as a spill register. We allocate in
201 a round-robin fashion. */
202 static int last_spill_reg;
203
204 /* Record the stack slot for each spilled hard register. */
205 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
206
207 /* Width allocated so far for that stack slot. */
208 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
209
210 /* Record which pseudos needed to be spilled. */
211 static regset_head spilled_pseudos;
212
213 /* Record which pseudos changed their allocation in finish_spills. */
214 static regset_head changed_allocation_pseudos;
215
216 /* Used for communication between order_regs_for_reload and count_pseudo.
217 Used to avoid counting one pseudo twice. */
218 static regset_head pseudos_counted;
219
220 /* First uid used by insns created by reload in this function.
221 Used in find_equiv_reg. */
222 int reload_first_uid;
223
224 /* Flag set by local-alloc or global-alloc if anything is live in
225 a call-clobbered reg across calls. */
226 int caller_save_needed;
227
228 /* Set to 1 while reload_as_needed is operating.
229 Required by some machines to handle any generated moves differently. */
230 int reload_in_progress = 0;
231
232 /* This obstack is used for allocation of rtl during register elimination.
233 The allocated storage can be freed once find_reloads has processed the
234 insn. */
235 static struct obstack reload_obstack;
236
237 /* Points to the beginning of the reload_obstack. All insn_chain structures
238 are allocated first. */
239 static char *reload_startobj;
240
241 /* The point after all insn_chain structures. Used to quickly deallocate
242 memory allocated in copy_reloads during calculate_needs_all_insns. */
243 static char *reload_firstobj;
244
245 /* This points before all local rtl generated by register elimination.
246 Used to quickly free all memory after processing one insn. */
247 static char *reload_insn_firstobj;
248
249 /* List of insn_chain instructions, one for every insn that reload needs to
250 examine. */
251 struct insn_chain *reload_insn_chain;
252
253 /* TRUE if we potentially left dead insns in the insn stream and want to
254 run DCE immediately after reload, FALSE otherwise. */
255 static bool need_dce;
256
257 /* List of all insns needing reloads. */
258 static struct insn_chain *insns_need_reload;
259 \f
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.  If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

struct elim_table
{
  int from;			/* Register number to be eliminated.  */
  int to;			/* Register number used as replacement.  */
  HOST_WIDE_INT initial_offset;	/* Initial difference between values.  */
  int can_eliminate;		/* Nonzero if this elimination can be done.  */
  int can_eliminate_previous;	/* Value returned by TARGET_CAN_ELIMINATE
				   target hook in previous scan over insns
				   made by reload.  */
  HOST_WIDE_INT offset;		/* Current offset between the two regs.  */
  HOST_WIDE_INT previous_offset;/* Offset at end of previous insn.  */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated.  */
  rtx to_rtx;			/* REG rtx for the replacement.  */
};
284
285 static struct elim_table *reg_eliminate = 0;
286
287 /* This is an intermediate structure to initialize the table. It has
288 exactly the members provided by ELIMINABLE_REGS. */
289 static const struct elim_table_1
290 {
291 const int from;
292 const int to;
293 } reg_eliminate_1[] =
294
295 /* If a set of eliminable registers was specified, define the table from it.
296 Otherwise, default to the normal case of the frame pointer being
297 replaced by the stack pointer. */
298
299 #ifdef ELIMINABLE_REGS
300 ELIMINABLE_REGS;
301 #else
302 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
303 #endif
304
305 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
306
307 /* Record the number of pending eliminations that have an offset not equal
308 to their initial offset. If nonzero, we use a new copy of each
309 replacement result in any insns encountered. */
310 int num_not_at_initial_offset;
311
312 /* Count the number of registers that we may be able to eliminate. */
313 static int num_eliminable;
314 /* And the number of registers that are equivalent to a constant that
315 can be eliminated to frame_pointer / arg_pointer + constant. */
316 static int num_eliminable_invariants;
317
318 /* For each label, we record the offset of each elimination. If we reach
319 a label by more than one path and an offset differs, we cannot do the
320 elimination. This information is indexed by the difference of the
321 number of the label and the first label number. We can't offset the
322 pointer itself as this can cause problems on machines with segmented
323 memory. The first table is an array of flags that records whether we
324 have yet encountered a label and the second table is an array of arrays,
325 one entry in the latter array for each elimination. */
326
327 static int first_label_num;
328 static char *offsets_known_at;
329 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
330
331 VEC(reg_equivs_t,gc) *reg_equivs;
332
333 /* Stack of addresses where an rtx has been changed. We can undo the
334 changes by popping items off the stack and restoring the original
335 value at each location.
336
337 We use this simplistic undo capability rather than copy_rtx as copy_rtx
338 will not make a deep copy of a normally sharable rtx, such as
339 (const (plus (symbol_ref) (const_int))). If such an expression appears
340 as R1 in gen_reload_chain_without_interm_reg_p, then a shared
341 rtx expression would be changed. See PR 42431. */
342
343 typedef rtx *rtx_p;
344 DEF_VEC_P(rtx_p);
345 DEF_VEC_ALLOC_P(rtx_p,heap);
346 static VEC(rtx_p,heap) *substitute_stack;
347
348 /* Number of labels in the current function. */
349
350 static int num_labels;
351 \f
352 static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
353 static void maybe_fix_stack_asms (void);
354 static void copy_reloads (struct insn_chain *);
355 static void calculate_needs_all_insns (int);
356 static int find_reg (struct insn_chain *, int);
357 static void find_reload_regs (struct insn_chain *);
358 static void select_reload_regs (void);
359 static void delete_caller_save_insns (void);
360
361 static void spill_failure (rtx, enum reg_class);
362 static void count_spilled_pseudo (int, int, int);
363 static void delete_dead_insn (rtx);
364 static void alter_reg (int, int, bool);
365 static void set_label_offsets (rtx, rtx, int);
366 static void check_eliminable_occurrences (rtx);
367 static void elimination_effects (rtx, enum machine_mode);
368 static rtx eliminate_regs_1 (rtx, enum machine_mode, rtx, bool, bool);
369 static int eliminate_regs_in_insn (rtx, int);
370 static void update_eliminable_offsets (void);
371 static void mark_not_eliminable (rtx, const_rtx, void *);
372 static void set_initial_elim_offsets (void);
373 static bool verify_initial_elim_offsets (void);
374 static void set_initial_label_offsets (void);
375 static void set_offsets_for_label (rtx);
376 static void init_eliminable_invariants (rtx, bool);
377 static void init_elim_table (void);
378 static void free_reg_equiv (void);
379 static void update_eliminables (HARD_REG_SET *);
380 static void elimination_costs_in_insn (rtx);
381 static void spill_hard_reg (unsigned int, int);
382 static int finish_spills (int);
383 static void scan_paradoxical_subregs (rtx);
384 static void count_pseudo (int);
385 static void order_regs_for_reload (struct insn_chain *);
386 static void reload_as_needed (int);
387 static void forget_old_reloads_1 (rtx, const_rtx, void *);
388 static void forget_marked_reloads (regset);
389 static int reload_reg_class_lower (const void *, const void *);
390 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
391 enum machine_mode);
392 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
393 enum machine_mode);
394 static int reload_reg_free_p (unsigned int, int, enum reload_type);
395 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
396 rtx, rtx, int, int);
397 static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
398 rtx, rtx, int, int);
399 static int allocate_reload_reg (struct insn_chain *, int, int);
400 static int conflicts_with_override (rtx);
401 static void failed_reload (rtx, int);
402 static int set_reload_reg (int, int);
403 static void choose_reload_regs_init (struct insn_chain *, rtx *);
404 static void choose_reload_regs (struct insn_chain *);
405 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
406 rtx, int);
407 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
408 int);
409 static void do_input_reload (struct insn_chain *, struct reload *, int);
410 static void do_output_reload (struct insn_chain *, struct reload *, int);
411 static void emit_reload_insns (struct insn_chain *);
412 static void delete_output_reload (rtx, int, int, rtx);
413 static void delete_address_reloads (rtx, rtx);
414 static void delete_address_reloads_1 (rtx, rtx, rtx);
415 static void inc_for_reload (rtx, rtx, rtx, int);
416 #ifdef AUTO_INC_DEC
417 static void add_auto_inc_notes (rtx, rtx);
418 #endif
419 static void substitute (rtx *, const_rtx, rtx);
420 static bool gen_reload_chain_without_interm_reg_p (int, int);
421 static int reloads_conflict (int, int);
422 static rtx gen_reload (rtx, rtx, int, enum reload_type);
423 static rtx emit_insn_if_valid_for_reload (rtx);
424 \f
425 /* Initialize the reload pass. This is called at the beginning of compilation
426 and may be called again if the target is reinitialized. */
427
428 void
429 init_reload (void)
430 {
431 int i;
432
433 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
434 Set spill_indirect_levels to the number of levels such addressing is
435 permitted, zero if it is not permitted at all. */
436
437 rtx tem
438 = gen_rtx_MEM (Pmode,
439 gen_rtx_PLUS (Pmode,
440 gen_rtx_REG (Pmode,
441 LAST_VIRTUAL_REGISTER + 1),
442 GEN_INT (4)));
443 spill_indirect_levels = 0;
444
445 while (memory_address_p (QImode, tem))
446 {
447 spill_indirect_levels++;
448 tem = gen_rtx_MEM (Pmode, tem);
449 }
450
451 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
452
453 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
454 indirect_symref_ok = memory_address_p (QImode, tem);
455
456 /* See if reg+reg is a valid (and offsettable) address. */
457
458 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
459 {
460 tem = gen_rtx_PLUS (Pmode,
461 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
462 gen_rtx_REG (Pmode, i));
463
464 /* This way, we make sure that reg+reg is an offsettable address. */
465 tem = plus_constant (Pmode, tem, 4);
466
467 if (memory_address_p (QImode, tem))
468 {
469 double_reg_address_ok = 1;
470 break;
471 }
472 }
473
474 /* Initialize obstack for our rtl allocation. */
475 gcc_obstack_init (&reload_obstack);
476 reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
477
478 INIT_REG_SET (&spilled_pseudos);
479 INIT_REG_SET (&changed_allocation_pseudos);
480 INIT_REG_SET (&pseudos_counted);
481 }
482
483 /* List of insn chains that are currently unused. */
484 static struct insn_chain *unused_insn_chains = 0;
485
486 /* Allocate an empty insn_chain structure. */
487 struct insn_chain *
488 new_insn_chain (void)
489 {
490 struct insn_chain *c;
491
492 if (unused_insn_chains == 0)
493 {
494 c = XOBNEW (&reload_obstack, struct insn_chain);
495 INIT_REG_SET (&c->live_throughout);
496 INIT_REG_SET (&c->dead_or_set);
497 }
498 else
499 {
500 c = unused_insn_chains;
501 unused_insn_chains = c->next;
502 }
503 c->is_caller_save_insn = 0;
504 c->need_operand_change = 0;
505 c->need_reload = 0;
506 c->need_elim = 0;
507 return c;
508 }
509
/* Small utility function to set all regs in hard reg set TO which are
   allocated to pseudos in regset FROM.  */

void
compute_use_by_pseudos (HARD_REG_SET *to, regset from)
{
  unsigned int regno;
  reg_set_iterator rsi;

  /* Iterate only over pseudo registers in FROM; hard regs are skipped
     by starting the scan at FIRST_PSEUDO_REGISTER.  */
  EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
    {
      int r = reg_renumber[regno];

      if (r < 0)
	{
	  /* reload_combine uses the information from DF_LIVE_IN,
	     which might still contain registers that have not
	     actually been allocated since they have an
	     equivalence.  */
	  gcc_assert (ira_conflicts_p || reload_completed);
	}
      else
	/* Mark every hard reg occupied by this pseudo (it may span
	   several, depending on its mode).  */
	add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
    }
}
535
/* Replace all pseudos found in LOC with their corresponding
   equivalences.  MEM_MODE is the mode of an enclosing MEM (VOIDmode if
   none); USAGE is passed through to eliminate_regs_1.  */

static void
replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
{
  rtx x = *loc;
  enum rtx_code code;
  const char *fmt;
  int i, j;

  if (! x)
    return;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);

      /* Hard registers are left alone; only pseudos have equivalences.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	return;

      /* First give register elimination a chance to rewrite the reg;
	 if it produced something new, rescan that result from scratch.  */
      x = eliminate_regs_1 (x, mem_mode, usage, true, false);
      if (x != *loc)
	{
	  *loc = x;
	  replace_pseudos_in (loc, mem_mode, usage);
	  return;
	}

      /* Otherwise substitute whichever equivalence was recorded for this
	 pseudo: a constant, an invariant, a MEM, or an address that must
	 be wrapped in a fresh MEM of X's mode.  */
      if (reg_equiv_constant (regno))
	*loc = reg_equiv_constant (regno);
      else if (reg_equiv_invariant (regno))
	*loc = reg_equiv_invariant (regno);
      else if (reg_equiv_mem (regno))
	*loc = reg_equiv_mem (regno);
      else if (reg_equiv_address (regno))
	*loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address (regno));
      else
	{
	  /* No equivalence: fall back to the canonical REG rtx, which
	     must not be REGNO itself or we would make no progress.  */
	  gcc_assert (!REG_P (regno_reg_rtx[regno])
		      || REGNO (regno_reg_rtx[regno]) != regno);
	  *loc = regno_reg_rtx[regno];
	}

      return;
    }
  else if (code == MEM)
    {
      /* The address inside a MEM is rewritten using the MEM's mode.  */
      replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
      return;
    }

  /* Process each of our operands recursively.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
}
598
599 /* Determine if the current function has an exception receiver block
600 that reaches the exit block via non-exceptional edges */
601
602 static bool
603 has_nonexceptional_receiver (void)
604 {
605 edge e;
606 edge_iterator ei;
607 basic_block *tos, *worklist, bb;
608
609 /* If we're not optimizing, then just err on the safe side. */
610 if (!optimize)
611 return true;
612
613 /* First determine which blocks can reach exit via normal paths. */
614 tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);
615
616 FOR_EACH_BB (bb)
617 bb->flags &= ~BB_REACHABLE;
618
619 /* Place the exit block on our worklist. */
620 EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
621 *tos++ = EXIT_BLOCK_PTR;
622
623 /* Iterate: find everything reachable from what we've already seen. */
624 while (tos != worklist)
625 {
626 bb = *--tos;
627
628 FOR_EACH_EDGE (e, ei, bb->preds)
629 if (!(e->flags & EDGE_ABNORMAL))
630 {
631 basic_block src = e->src;
632
633 if (!(src->flags & BB_REACHABLE))
634 {
635 src->flags |= BB_REACHABLE;
636 *tos++ = src;
637 }
638 }
639 }
640 free (worklist);
641
642 /* Now see if there's a reachable block with an exceptional incoming
643 edge. */
644 FOR_EACH_BB (bb)
645 if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb))
646 return true;
647
648 /* No exceptional block reached exit unexceptionally. */
649 return false;
650 }
651
/* Grow (or allocate) the REG_EQUIVS array from its current size (which may be
   zero elements) to MAX_REG_NUM elements.

   Initialize all new fields to NULL and update REG_EQUIVS_SIZE.  */
void
grow_reg_equivs (void)
{
  int old_size = VEC_length (reg_equivs_t, reg_equivs);
  int max_regno = max_reg_num ();
  int i;

  /* Ensure capacity up front so the quick_insert calls below are valid.  */
  VEC_reserve (reg_equivs_t, gc, reg_equivs, max_regno);
  for (i = old_size; i < max_regno; i++)
    {
      /* Append a new element, then zero every field of the
	 reg_equivs_t aggregate.  */
      VEC_quick_insert (reg_equivs_t, reg_equivs, i, 0);
      memset (&VEC_index (reg_equivs_t, reg_equivs, i), 0,
	      sizeof (reg_equivs_t));
    }

}
672
673 \f
674 /* Global variables used by reload and its subroutines. */
675
676 /* The current basic block while in calculate_elim_costs_all_insns. */
677 static basic_block elim_bb;
678
679 /* Set during calculate_needs if an insn needs register elimination. */
680 static int something_needs_elimination;
681 /* Set during calculate_needs if an insn needs an operand changed. */
682 static int something_needs_operands_changed;
683 /* Set by alter_regs if we spilled a register to the stack. */
684 static bool something_was_spilled;
685
686 /* Nonzero means we couldn't get enough spill regs. */
687 static int failure;
688
689 /* Temporary array of pseudo-register number. */
690 static int *temp_pseudo_reg_arr;
691
692 /* Main entry point for the reload pass.
693
694 FIRST is the first insn of the function being compiled.
695
696 GLOBAL nonzero means we were called from global_alloc
697 and should attempt to reallocate any pseudoregs that we
698 displace from hard regs we will use for reloads.
699 If GLOBAL is zero, we do not have enough information to do that,
700 so any pseudo reg that is spilled must go to the stack.
701
702 Return value is TRUE if reload likely left dead insns in the
703 stream and a DCE pass should be run to elimiante them. Else the
704 return value is FALSE. */
705
706 bool
707 reload (rtx first, int global)
708 {
709 int i, n;
710 rtx insn;
711 struct elim_table *ep;
712 basic_block bb;
713 bool inserted;
714
715 /* Make sure even insns with volatile mem refs are recognizable. */
716 init_recog ();
717
718 failure = 0;
719
720 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
721
722 /* Make sure that the last insn in the chain
723 is not something that needs reloading. */
724 emit_note (NOTE_INSN_DELETED);
725
726 /* Enable find_equiv_reg to distinguish insns made by reload. */
727 reload_first_uid = get_max_uid ();
728
729 #ifdef SECONDARY_MEMORY_NEEDED
730 /* Initialize the secondary memory table. */
731 clear_secondary_mem ();
732 #endif
733
734 /* We don't have a stack slot for any spill reg yet. */
735 memset (spill_stack_slot, 0, sizeof spill_stack_slot);
736 memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
737
738 /* Initialize the save area information for caller-save, in case some
739 are needed. */
740 init_save_areas ();
741
742 /* Compute which hard registers are now in use
743 as homes for pseudo registers.
744 This is done here rather than (eg) in global_alloc
745 because this point is reached even if not optimizing. */
746 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
747 mark_home_live (i);
748
749 /* A function that has a nonlocal label that can reach the exit
750 block via non-exceptional paths must save all call-saved
751 registers. */
752 if (cfun->has_nonlocal_label
753 && has_nonexceptional_receiver ())
754 crtl->saves_all_registers = 1;
755
756 if (crtl->saves_all_registers)
757 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
758 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
759 df_set_regs_ever_live (i, true);
760
761 /* Find all the pseudo registers that didn't get hard regs
762 but do have known equivalent constants or memory slots.
763 These include parameters (known equivalent to parameter slots)
764 and cse'd or loop-moved constant memory addresses.
765
766 Record constant equivalents in reg_equiv_constant
767 so they will be substituted by find_reloads.
768 Record memory equivalents in reg_mem_equiv so they can
769 be substituted eventually by altering the REG-rtx's. */
770
771 grow_reg_equivs ();
772 reg_old_renumber = XCNEWVEC (short, max_regno);
773 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
774 pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
775 pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
776
777 CLEAR_HARD_REG_SET (bad_spill_regs_global);
778
779 init_eliminable_invariants (first, true);
780 init_elim_table ();
781
782 /* Alter each pseudo-reg rtx to contain its hard reg number. Assign
783 stack slots to the pseudos that lack hard regs or equivalents.
784 Do not touch virtual registers. */
785
786 temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
787 for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
788 temp_pseudo_reg_arr[n++] = i;
789
790 if (ira_conflicts_p)
791 /* Ask IRA to order pseudo-registers for better stack slot
792 sharing. */
793 ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);
794
795 for (i = 0; i < n; i++)
796 alter_reg (temp_pseudo_reg_arr[i], -1, false);
797
798 /* If we have some registers we think can be eliminated, scan all insns to
799 see if there is an insn that sets one of these registers to something
800 other than itself plus a constant. If so, the register cannot be
801 eliminated. Doing this scan here eliminates an extra pass through the
802 main reload loop in the most common case where register elimination
803 cannot be done. */
804 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
805 if (INSN_P (insn))
806 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
807
808 maybe_fix_stack_asms ();
809
810 insns_need_reload = 0;
811 something_needs_elimination = 0;
812
813 /* Initialize to -1, which means take the first spill register. */
814 last_spill_reg = -1;
815
816 /* Spill any hard regs that we know we can't eliminate. */
817 CLEAR_HARD_REG_SET (used_spill_regs);
818 /* There can be multiple ways to eliminate a register;
819 they should be listed adjacently.
820 Elimination for any register fails only if all possible ways fail. */
821 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
822 {
823 int from = ep->from;
824 int can_eliminate = 0;
825 do
826 {
827 can_eliminate |= ep->can_eliminate;
828 ep++;
829 }
830 while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
831 if (! can_eliminate)
832 spill_hard_reg (from, 1);
833 }
834
835 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
836 if (frame_pointer_needed)
837 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
838 #endif
839 finish_spills (global);
840
841 /* From now on, we may need to generate moves differently. We may also
842 allow modifications of insns which cause them to not be recognized.
843 Any such modifications will be cleaned up during reload itself. */
844 reload_in_progress = 1;
845
846 /* This loop scans the entire function each go-round
847 and repeats until one repetition spills no additional hard regs. */
848 for (;;)
849 {
850 int something_changed;
851 int did_spill;
852 HOST_WIDE_INT starting_frame_size;
853
854 starting_frame_size = get_frame_size ();
855 something_was_spilled = false;
856
857 set_initial_elim_offsets ();
858 set_initial_label_offsets ();
859
860 /* For each pseudo register that has an equivalent location defined,
861 try to eliminate any eliminable registers (such as the frame pointer)
862 assuming initial offsets for the replacement register, which
863 is the normal case.
864
865 If the resulting location is directly addressable, substitute
866 the MEM we just got directly for the old REG.
867
868 If it is not addressable but is a constant or the sum of a hard reg
869 and constant, it is probably not addressable because the constant is
870 out of range, in that case record the address; we will generate
871 hairy code to compute the address in a register each time it is
872 needed. Similarly if it is a hard register, but one that is not
873 valid as an address register.
874
875 If the location is not addressable, but does not have one of the
876 above forms, assign a stack slot. We have to do this to avoid the
877 potential of producing lots of reloads if, e.g., a location involves
878 a pseudo that didn't get a hard register and has an equivalent memory
879 location that also involves a pseudo that didn't get a hard register.
880
881 Perhaps at some point we will improve reload_when_needed handling
882 so this problem goes away. But that's very hairy. */
883
884 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
885 if (reg_renumber[i] < 0 && reg_equiv_memory_loc (i))
886 {
887 rtx x = eliminate_regs (reg_equiv_memory_loc (i), VOIDmode,
888 NULL_RTX);
889
890 if (strict_memory_address_addr_space_p
891 (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
892 MEM_ADDR_SPACE (x)))
893 reg_equiv_mem (i) = x, reg_equiv_address (i) = 0;
894 else if (CONSTANT_P (XEXP (x, 0))
895 || (REG_P (XEXP (x, 0))
896 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
897 || (GET_CODE (XEXP (x, 0)) == PLUS
898 && REG_P (XEXP (XEXP (x, 0), 0))
899 && (REGNO (XEXP (XEXP (x, 0), 0))
900 < FIRST_PSEUDO_REGISTER)
901 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
902 reg_equiv_address (i) = XEXP (x, 0), reg_equiv_mem (i) = 0;
903 else
904 {
905 /* Make a new stack slot. Then indicate that something
906 changed so we go back and recompute offsets for
907 eliminable registers because the allocation of memory
908 below might change some offset. reg_equiv_{mem,address}
909 will be set up for this pseudo on the next pass around
910 the loop. */
911 reg_equiv_memory_loc (i) = 0;
912 reg_equiv_init (i) = 0;
913 alter_reg (i, -1, true);
914 }
915 }
916
917 if (caller_save_needed)
918 setup_save_areas ();
919
920 /* If we allocated another stack slot, redo elimination bookkeeping. */
921 if (something_was_spilled || starting_frame_size != get_frame_size ())
922 continue;
923 if (starting_frame_size && crtl->stack_alignment_needed)
924 {
925 /* If we have a stack frame, we must align it now. The
926 stack size may be a part of the offset computation for
927 register elimination. So if this changes the stack size,
928 then repeat the elimination bookkeeping. We don't
929 realign when there is no stack, as that will cause a
930 stack frame when none is needed should
931 STARTING_FRAME_OFFSET not be already aligned to
932 STACK_BOUNDARY. */
933 assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
934 if (starting_frame_size != get_frame_size ())
935 continue;
936 }
937
938 if (caller_save_needed)
939 {
940 save_call_clobbered_regs ();
941 /* That might have allocated new insn_chain structures. */
942 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
943 }
944
945 calculate_needs_all_insns (global);
946
947 if (! ira_conflicts_p)
948 /* Don't do it for IRA. We need this info because we don't
949 change live_throughout and dead_or_set for chains when IRA
950 is used. */
951 CLEAR_REG_SET (&spilled_pseudos);
952
953 did_spill = 0;
954
955 something_changed = 0;
956
957 /* If we allocated any new memory locations, make another pass
958 since it might have changed elimination offsets. */
959 if (something_was_spilled || starting_frame_size != get_frame_size ())
960 something_changed = 1;
961
962 /* Even if the frame size remained the same, we might still have
963 changed elimination offsets, e.g. if find_reloads called
964 force_const_mem requiring the back end to allocate a constant
965 pool base register that needs to be saved on the stack. */
966 else if (!verify_initial_elim_offsets ())
967 something_changed = 1;
968
969 {
970 HARD_REG_SET to_spill;
971 CLEAR_HARD_REG_SET (to_spill);
972 update_eliminables (&to_spill);
973 AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
974
975 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
976 if (TEST_HARD_REG_BIT (to_spill, i))
977 {
978 spill_hard_reg (i, 1);
979 did_spill = 1;
980
981 /* Regardless of the state of spills, if we previously had
982 a register that we thought we could eliminate, but now can
983 not eliminate, we must run another pass.
984
985 Consider pseudos which have an entry in reg_equiv_* which
986 reference an eliminable register. We must make another pass
987 to update reg_equiv_* so that we do not substitute in the
988 old value from when we thought the elimination could be
989 performed. */
990 something_changed = 1;
991 }
992 }
993
994 select_reload_regs ();
995 if (failure)
996 goto failed;
997
998 if (insns_need_reload != 0 || did_spill)
999 something_changed |= finish_spills (global);
1000
1001 if (! something_changed)
1002 break;
1003
1004 if (caller_save_needed)
1005 delete_caller_save_insns ();
1006
1007 obstack_free (&reload_obstack, reload_firstobj);
1008 }
1009
1010 /* If global-alloc was run, notify it of any register eliminations we have
1011 done. */
1012 if (global)
1013 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1014 if (ep->can_eliminate)
1015 mark_elimination (ep->from, ep->to);
1016
1017 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1018 If that insn didn't set the register (i.e., it copied the register to
1019 memory), just delete that insn instead of the equivalencing insn plus
1020 anything now dead. If we call delete_dead_insn on that insn, we may
1021 delete the insn that actually sets the register if the register dies
1022 there and that is incorrect. */
1023
1024 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1025 {
1026 if (reg_renumber[i] < 0 && reg_equiv_init (i) != 0)
1027 {
1028 rtx list;
1029 for (list = reg_equiv_init (i); list; list = XEXP (list, 1))
1030 {
1031 rtx equiv_insn = XEXP (list, 0);
1032
1033 /* If we already deleted the insn or if it may trap, we can't
1034 delete it. The latter case shouldn't happen, but can
1035 if an insn has a variable address, gets a REG_EH_REGION
1036 note added to it, and then gets converted into a load
1037 from a constant address. */
1038 if (NOTE_P (equiv_insn)
1039 || can_throw_internal (equiv_insn))
1040 ;
1041 else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1042 delete_dead_insn (equiv_insn);
1043 else
1044 SET_INSN_DELETED (equiv_insn);
1045 }
1046 }
1047 }
1048
1049 /* Use the reload registers where necessary
1050 by generating move instructions to move the must-be-register
1051 values into or out of the reload registers. */
1052
1053 if (insns_need_reload != 0 || something_needs_elimination
1054 || something_needs_operands_changed)
1055 {
1056 HOST_WIDE_INT old_frame_size = get_frame_size ();
1057
1058 reload_as_needed (global);
1059
1060 gcc_assert (old_frame_size == get_frame_size ());
1061
1062 gcc_assert (verify_initial_elim_offsets ());
1063 }
1064
1065 /* If we were able to eliminate the frame pointer, show that it is no
1066 longer live at the start of any basic block. If it ls live by
1067 virtue of being in a pseudo, that pseudo will be marked live
1068 and hence the frame pointer will be known to be live via that
1069 pseudo. */
1070
1071 if (! frame_pointer_needed)
1072 FOR_EACH_BB (bb)
1073 bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1074
1075 /* Come here (with failure set nonzero) if we can't get enough spill
1076 regs. */
1077 failed:
1078
1079 CLEAR_REG_SET (&changed_allocation_pseudos);
1080 CLEAR_REG_SET (&spilled_pseudos);
1081 reload_in_progress = 0;
1082
1083 /* Now eliminate all pseudo regs by modifying them into
1084 their equivalent memory references.
1085 The REG-rtx's for the pseudos are modified in place,
1086 so all insns that used to refer to them now refer to memory.
1087
1088 For a reg that has a reg_equiv_address, all those insns
1089 were changed by reloading so that no insns refer to it any longer;
1090 but the DECL_RTL of a variable decl may refer to it,
1091 and if so this causes the debugging info to mention the variable. */
1092
1093 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1094 {
1095 rtx addr = 0;
1096
1097 if (reg_equiv_mem (i))
1098 addr = XEXP (reg_equiv_mem (i), 0);
1099
1100 if (reg_equiv_address (i))
1101 addr = reg_equiv_address (i);
1102
1103 if (addr)
1104 {
1105 if (reg_renumber[i] < 0)
1106 {
1107 rtx reg = regno_reg_rtx[i];
1108
1109 REG_USERVAR_P (reg) = 0;
1110 PUT_CODE (reg, MEM);
1111 XEXP (reg, 0) = addr;
1112 if (reg_equiv_memory_loc (i))
1113 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc (i));
1114 else
1115 MEM_ATTRS (reg) = 0;
1116 MEM_NOTRAP_P (reg) = 1;
1117 }
1118 else if (reg_equiv_mem (i))
1119 XEXP (reg_equiv_mem (i), 0) = addr;
1120 }
1121
1122 /* We don't want complex addressing modes in debug insns
1123 if simpler ones will do, so delegitimize equivalences
1124 in debug insns. */
1125 if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0)
1126 {
1127 rtx reg = regno_reg_rtx[i];
1128 rtx equiv = 0;
1129 df_ref use, next;
1130
1131 if (reg_equiv_constant (i))
1132 equiv = reg_equiv_constant (i);
1133 else if (reg_equiv_invariant (i))
1134 equiv = reg_equiv_invariant (i);
1135 else if (reg && MEM_P (reg))
1136 equiv = targetm.delegitimize_address (reg);
1137 else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1138 equiv = reg;
1139
1140 if (equiv == reg)
1141 continue;
1142
1143 for (use = DF_REG_USE_CHAIN (i); use; use = next)
1144 {
1145 insn = DF_REF_INSN (use);
1146
1147 /* Make sure the next ref is for a different instruction,
1148 so that we're not affected by the rescan. */
1149 next = DF_REF_NEXT_REG (use);
1150 while (next && DF_REF_INSN (next) == insn)
1151 next = DF_REF_NEXT_REG (next);
1152
1153 if (DEBUG_INSN_P (insn))
1154 {
1155 if (!equiv)
1156 {
1157 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1158 df_insn_rescan_debug_internal (insn);
1159 }
1160 else
1161 INSN_VAR_LOCATION_LOC (insn)
1162 = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1163 reg, equiv);
1164 }
1165 }
1166 }
1167 }
1168
1169 /* We must set reload_completed now since the cleanup_subreg_operands call
1170 below will re-recognize each insn and reload may have generated insns
1171 which are only valid during and after reload. */
1172 reload_completed = 1;
1173
1174 /* Make a pass over all the insns and delete all USEs which we inserted
1175 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1176 notes. Delete all CLOBBER insns, except those that refer to the return
1177 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1178 from misarranging variable-array code, and simplify (subreg (reg))
1179 operands. Strip and regenerate REG_INC notes that may have been moved
1180 around. */
1181
1182 for (insn = first; insn; insn = NEXT_INSN (insn))
1183 if (INSN_P (insn))
1184 {
1185 rtx *pnote;
1186
1187 if (CALL_P (insn))
1188 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1189 VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1190
1191 if ((GET_CODE (PATTERN (insn)) == USE
1192 /* We mark with QImode USEs introduced by reload itself. */
1193 && (GET_MODE (insn) == QImode
1194 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1195 || (GET_CODE (PATTERN (insn)) == CLOBBER
1196 && (!MEM_P (XEXP (PATTERN (insn), 0))
1197 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1198 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1199 && XEXP (XEXP (PATTERN (insn), 0), 0)
1200 != stack_pointer_rtx))
1201 && (!REG_P (XEXP (PATTERN (insn), 0))
1202 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1203 {
1204 delete_insn (insn);
1205 continue;
1206 }
1207
1208 /* Some CLOBBERs may survive until here and still reference unassigned
1209 pseudos with const equivalent, which may in turn cause ICE in later
1210 passes if the reference remains in place. */
1211 if (GET_CODE (PATTERN (insn)) == CLOBBER)
1212 replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1213 VOIDmode, PATTERN (insn));
1214
1215 /* Discard obvious no-ops, even without -O. This optimization
1216 is fast and doesn't interfere with debugging. */
1217 if (NONJUMP_INSN_P (insn)
1218 && GET_CODE (PATTERN (insn)) == SET
1219 && REG_P (SET_SRC (PATTERN (insn)))
1220 && REG_P (SET_DEST (PATTERN (insn)))
1221 && (REGNO (SET_SRC (PATTERN (insn)))
1222 == REGNO (SET_DEST (PATTERN (insn)))))
1223 {
1224 delete_insn (insn);
1225 continue;
1226 }
1227
1228 pnote = &REG_NOTES (insn);
1229 while (*pnote != 0)
1230 {
1231 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1232 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1233 || REG_NOTE_KIND (*pnote) == REG_INC)
1234 *pnote = XEXP (*pnote, 1);
1235 else
1236 pnote = &XEXP (*pnote, 1);
1237 }
1238
1239 #ifdef AUTO_INC_DEC
1240 add_auto_inc_notes (insn, PATTERN (insn));
1241 #endif
1242
1243 /* Simplify (subreg (reg)) if it appears as an operand. */
1244 cleanup_subreg_operands (insn);
1245
1246 /* Clean up invalid ASMs so that they don't confuse later passes.
1247 See PR 21299. */
1248 if (asm_noperands (PATTERN (insn)) >= 0)
1249 {
1250 extract_insn (insn);
1251 if (!constrain_operands (1))
1252 {
1253 error_for_asm (insn,
1254 "%<asm%> operand has impossible constraints");
1255 delete_insn (insn);
1256 continue;
1257 }
1258 }
1259 }
1260
1261 /* If we are doing generic stack checking, give a warning if this
1262 function's frame size is larger than we expect. */
1263 if (flag_stack_check == GENERIC_STACK_CHECK)
1264 {
1265 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1266 static int verbose_warned = 0;
1267
1268 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1269 if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
1270 size += UNITS_PER_WORD;
1271
1272 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1273 {
1274 warning (0, "frame size too large for reliable stack checking");
1275 if (! verbose_warned)
1276 {
1277 warning (0, "try reducing the number of local variables");
1278 verbose_warned = 1;
1279 }
1280 }
1281 }
1282
1283 free (temp_pseudo_reg_arr);
1284
1285 /* Indicate that we no longer have known memory locations or constants. */
1286 free_reg_equiv ();
1287
1288 free (reg_max_ref_width);
1289 free (reg_old_renumber);
1290 free (pseudo_previous_regs);
1291 free (pseudo_forbidden_regs);
1292
1293 CLEAR_HARD_REG_SET (used_spill_regs);
1294 for (i = 0; i < n_spills; i++)
1295 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1296
1297 /* Free all the insn_chain structures at once. */
1298 obstack_free (&reload_obstack, reload_startobj);
1299 unused_insn_chains = 0;
1300
1301 inserted = fixup_abnormal_edges ();
1302
1303 /* We've possibly turned single trapping insn into multiple ones. */
1304 if (cfun->can_throw_non_call_exceptions)
1305 {
1306 sbitmap blocks;
1307 blocks = sbitmap_alloc (last_basic_block);
1308 sbitmap_ones (blocks);
1309 find_many_sub_basic_blocks (blocks);
1310 sbitmap_free (blocks);
1311 }
1312
1313 if (inserted)
1314 commit_edge_insertions ();
1315
1316 /* Replacing pseudos with their memory equivalents might have
1317 created shared rtx. Subsequent passes would get confused
1318 by this, so unshare everything here. */
1319 unshare_all_rtl_again (first);
1320
1321 #ifdef STACK_BOUNDARY
1322 /* init_emit has set the alignment of the hard frame pointer
1323 to STACK_BOUNDARY. It is very likely no longer valid if
1324 the hard frame pointer was used for register allocation. */
1325 if (!frame_pointer_needed)
1326 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1327 #endif
1328
1329 VEC_free (rtx_p, heap, substitute_stack);
1330
1331 gcc_assert (bitmap_empty_p (&spilled_pseudos));
1332
1333 reload_completed = !failure;
1334
1335 return need_dce;
1336 }
1337
/* Yet another special case.  Unfortunately, reg-stack forces people to
   write incorrect clobbers in asm statements.  These clobbers must not
   cause the register to appear in bad_spill_regs, otherwise we'll call
   fatal_insn later.  We clear the corresponding regnos in the live
   register sets to avoid this.
   The whole thing is rather sick, I'm afraid.  */

static void
maybe_fix_stack_asms (void)
{
#ifdef STACK_REGS
  const char *constraints[MAX_RECOG_OPERANDS];
  enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
  struct insn_chain *chain;

  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
    {
      int i, noperands;
      HARD_REG_SET clobbered, allowed;
      rtx pat;

      /* Only look at real asm insns; asm_noperands is negative for
	 anything that is not an asm with operands.  */
      if (! INSN_P (chain->insn)
	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
	continue;
      pat = PATTERN (chain->insn);
      /* Clobbers only appear inside a PARALLEL wrapper.  */
      if (GET_CODE (pat) != PARALLEL)
	continue;

      CLEAR_HARD_REG_SET (clobbered);
      CLEAR_HARD_REG_SET (allowed);

      /* First, make a mask of all stack regs that are clobbered.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx t = XVECEXP (pat, 0, i);
	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
	}

      /* Get the operand values and constraints out of the insn.  */
      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
			   constraints, operand_mode, NULL);

      /* For every operand, see what registers are allowed.  */
      for (i = 0; i < noperands; i++)
	{
	  const char *p = constraints[i];
	  /* For every alternative, we compute the class of registers allowed
	     for reloading in CLS, and merge its contents into the reg set
	     ALLOWED.  */
	  int cls = (int) NO_REGS;

	  /* Walk the constraint string one letter (or multi-letter
	     constraint) at a time.  */
	  for (;;)
	    {
	      char c = *p;

	      if (c == '\0' || c == ',' || c == '#')
		{
		  /* End of one alternative - mark the regs in the current
		     class, and reset the class.  */
		  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
		  cls = NO_REGS;
		  p++;
		  /* '#' starts a comment; skip to the next alternative.  */
		  if (c == '#')
		    do {
		      c = *p++;
		    } while (c != '\0' && c != ',');
		  if (c == '\0')
		    break;
		  continue;
		}

	      switch (c)
		{
		  /* Modifiers, matching constraints and non-register
		     constraints contribute no register class.  */
		case '=': case '+': case '*': case '%': case '?': case '!':
		case '0': case '1': case '2': case '3': case '4': case '<':
		case '>': case 'V': case 'o': case '&': case 'E': case 'F':
		case 's': case 'i': case 'n': case 'X': case 'I': case 'J':
		case 'K': case 'L': case 'M': case 'N': case 'O': case 'P':
		case TARGET_MEM_CONSTRAINT:
		  break;

		  /* 'p' accepts anything usable as an address, i.e. the
		     target's base register class.  */
		case 'p':
		  cls = (int) reg_class_subunion[cls]
		    [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
					   ADDRESS, SCRATCH)];
		  break;

		case 'g':
		case 'r':
		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
		  break;

		default:
		  /* Target-defined constraints: address constraints map to
		     the base register class, others to whatever class the
		     target declares for the letter.  */
		  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
		    cls = (int) reg_class_subunion[cls]
		      [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
					     ADDRESS, SCRATCH)];
		  else
		    cls = (int) reg_class_subunion[cls]
		      [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
		}
	      /* Multi-character constraints advance by their full length.  */
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
      /* Those of the registers which are clobbered, but allowed by the
	 constraints, must be usable as reload registers.  So clear them
	 out of the life information.  */
      AND_HARD_REG_SET (allowed, clobbered);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allowed, i))
	  {
	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
	  }
    }

#endif
}
1457 \f
/* Copy the global variables n_reloads and rld into the corresponding elts
   of CHAIN.  The copies live on reload_obstack and are released wholesale
   when the obstack is freed.  */
static void
copy_reloads (struct insn_chain *chain)
{
  chain->n_reloads = n_reloads;
  chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
  /* Record the current obstack position so callers (see
     calculate_needs_all_insns) can free back to just past this copy.
     This allocation must follow the XOBNEWVEC above.  */
  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
}
1468
/* Walk the chain of insns, and determine for each whether it needs reloads
   and/or eliminations.  Build the corresponding insns_need_reload list, and
   set something_needs_elimination as appropriate.  */
static void
calculate_needs_all_insns (int global)
{
  struct insn_chain **pprev_reload = &insns_need_reload;
  struct insn_chain *chain, *next = 0;

  something_needs_elimination = 0;

  /* Remember the obstack position so per-insn elimination scratch data
     can be discarded below.  */
  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
  for (chain = reload_insn_chain; chain != 0; chain = next)
    {
      rtx insn = chain->insn;

      /* Fetch NEXT up front; the chain element may be unlinked and
	 recycled if the insn turns out to be a deletable no-op move.  */
      next = chain->next;

      /* Clear out the shortcuts.  */
      chain->n_reloads = 0;
      chain->need_elim = 0;
      chain->need_reload = 0;
      chain->need_operand_change = 0;

      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	 what effects this has on the known offsets at labels.  */

      if (LABEL_P (insn) || JUMP_P (insn)
	  || (INSN_P (insn) && REG_NOTES (insn) != 0))
	set_label_offsets (insn, insn, 0);

      if (INSN_P (insn))
	{
	  rtx old_body = PATTERN (insn);
	  int old_code = INSN_CODE (insn);
	  rtx old_notes = REG_NOTES (insn);
	  int did_elimination = 0;
	  int operands_changed = 0;
	  rtx set = single_set (insn);

	  /* Skip insns that only set an equivalence.  */
	  if (set && REG_P (SET_DEST (set))
	      && reg_renumber[REGNO (SET_DEST (set))] < 0
	      && (reg_equiv_constant (REGNO (SET_DEST (set)))
		  || (reg_equiv_invariant (REGNO (SET_DEST (set)))))
	      && reg_equiv_init (REGNO (SET_DEST (set))))
	    continue;

	  /* If needed, eliminate any eliminable registers.  */
	  if (num_eliminable || num_eliminable_invariants)
	    did_elimination = eliminate_regs_in_insn (insn, 0);

	  /* Analyze the instruction.  */
	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
					   global, spill_reg_order);

	  /* If a no-op set needs more than one reload, this is likely
	     to be something that needs input address reloads.  We
	     can't get rid of this cleanly later, and it is of no use
	     anyway, so discard it now.
	     We only do this when expensive_optimizations is enabled,
	     since this complements reload inheritance / output
	     reload deletion, and it can make debugging harder.  */
	  if (flag_expensive_optimizations && n_reloads > 1)
	    {
	      rtx set = single_set (insn);
	      if (set
		  &&
		  ((SET_SRC (set) == SET_DEST (set)
		    && REG_P (SET_SRC (set))
		    && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
		   || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
		       && reg_renumber[REGNO (SET_SRC (set))] < 0
		       && reg_renumber[REGNO (SET_DEST (set))] < 0
		       && reg_equiv_memory_loc (REGNO (SET_SRC (set))) != NULL
		       && reg_equiv_memory_loc (REGNO (SET_DEST (set))) != NULL
		       && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set))),
				       reg_equiv_memory_loc (REGNO (SET_DEST (set)))))))
		{
		  if (ira_conflicts_p)
		    /* Inform IRA about the insn deletion.  */
		    ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
						   REGNO (SET_SRC (set)));
		  delete_insn (insn);
		  /* Delete it from the reload chain.  */
		  if (chain->prev)
		    chain->prev->next = next;
		  else
		    reload_insn_chain = next;
		  if (next)
		    next->prev = chain->prev;
		  /* Put the chain element on the free list for reuse.  */
		  chain->next = unused_insn_chains;
		  unused_insn_chains = chain;
		  continue;
		}
	    }
	  if (num_eliminable)
	    update_eliminable_offsets ();

	  /* Remember for later shortcuts which insns had any reloads or
	     register eliminations.  */
	  chain->need_elim = did_elimination;
	  chain->need_reload = n_reloads > 0;
	  chain->need_operand_change = operands_changed;

	  /* Discard any register replacements done.  This pass only
	     gathers needs; the insn is restored to its pre-elimination
	     form and the real substitution happens later.  */
	  if (did_elimination)
	    {
	      obstack_free (&reload_obstack, reload_insn_firstobj);
	      PATTERN (insn) = old_body;
	      INSN_CODE (insn) = old_code;
	      REG_NOTES (insn) = old_notes;
	      something_needs_elimination = 1;
	    }

	  something_needs_operands_changed |= operands_changed;

	  /* Append this insn to the insns_need_reload list.  */
	  if (n_reloads != 0)
	    {
	      copy_reloads (chain);
	      *pprev_reload = chain;
	      pprev_reload = &chain->next_need_reload;
	    }
	}
    }
  /* Terminate the insns_need_reload list.  */
  *pprev_reload = 0;
}
1597 \f
/* This function is called from the register allocator to set up estimates
   for the cost of eliminating pseudos which have REG_EQUIV equivalences to
   an invariant.  The structure is similar to calculate_needs_all_insns.  */

void
calculate_elim_costs_all_insns (void)
{
  int *reg_equiv_init_cost;
  basic_block bb;
  int i;

  /* Per-pseudo accumulated cost of the equivalencing insns, weighted by
     block frequency; zero-initialized.  */
  reg_equiv_init_cost = XCNEWVEC (int, max_regno);
  init_elim_table ();
  init_eliminable_invariants (get_insns (), false);

  set_initial_elim_offsets ();
  set_initial_label_offsets ();

  FOR_EACH_BB (bb)
    {
      rtx insn;
      elim_bb = bb;

      FOR_BB_INSNS (bb, insn)
	{
	  /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	     include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	     what effects this has on the known offsets at labels.  */

	  if (LABEL_P (insn) || JUMP_P (insn)
	      || (INSN_P (insn) && REG_NOTES (insn) != 0))
	    set_label_offsets (insn, insn, 0);

	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Skip insns that only set an equivalence.  */
	      if (set && REG_P (SET_DEST (set))
		  && reg_renumber[REGNO (SET_DEST (set))] < 0
		  && (reg_equiv_constant (REGNO (SET_DEST (set)))
		      || reg_equiv_invariant (REGNO (SET_DEST (set)))))
		{
		  unsigned regno = REGNO (SET_DEST (set));
		  rtx init = reg_equiv_init (regno);
		  if (init)
		    {
		      /* Cost the equivalencing insn as it would look after
			 elimination, scaled by execution frequency.  */
		      rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn,
						false, true);
		      int cost = set_src_cost (t, optimize_bb_for_speed_p (bb));
		      int freq = REG_FREQ_FROM_BB (bb);

		      reg_equiv_init_cost[regno] = cost * freq;
		      continue;
		    }
		}
	      /* If needed, eliminate any eliminable registers.  */
	      if (num_eliminable || num_eliminable_invariants)
		elimination_costs_in_insn (insn);

	      if (num_eliminable)
		update_eliminable_offsets ();
	    }
	}
    }
  /* Report the accumulated costs back to IRA.  A pseudo whose
     equivalence survived gets its initial gain; one whose equivalence
     could not be kept gets a cost adjustment of zero.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      if (reg_equiv_invariant (i))
	{
	  if (reg_equiv_init (i))
	    {
	      int cost = reg_equiv_init_cost[i];
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d has equivalence, initial gains %d\n", i, cost);
	      if (cost != 0)
		ira_adjust_equiv_reg_cost (i, cost);
	    }
	  else
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d had equivalence, but can't be eliminated\n",
			 i);
	      ira_adjust_equiv_reg_cost (i, 0);
	    }
	}
    }

  /* Release the cost array and the label-offset tables allocated above.  */
  free (reg_equiv_init_cost);
  free (offsets_known_at);
  free (offsets_at);
  offsets_at = NULL;
  offsets_known_at = NULL;
}
1693 \f
1694 /* Comparison function for qsort to decide which of two reloads
1695 should be handled first. *P1 and *P2 are the reload numbers. */
1696
1697 static int
1698 reload_reg_class_lower (const void *r1p, const void *r2p)
1699 {
1700 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1701 int t;
1702
1703 /* Consider required reloads before optional ones. */
1704 t = rld[r1].optional - rld[r2].optional;
1705 if (t != 0)
1706 return t;
1707
1708 /* Count all solitary classes before non-solitary ones. */
1709 t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1710 - (reg_class_size[(int) rld[r1].rclass] == 1));
1711 if (t != 0)
1712 return t;
1713
1714 /* Aside from solitaires, consider all multi-reg groups first. */
1715 t = rld[r2].nregs - rld[r1].nregs;
1716 if (t != 0)
1717 return t;
1718
1719 /* Consider reloads in order of increasing reg-class number. */
1720 t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1721 if (t != 0)
1722 return t;
1723
1724 /* If reloads are equally urgent, sort by reload number,
1725 so that the results of qsort leave nothing to chance. */
1726 return r1 - r2;
1727 }
1728 \f
/* The cost of spilling each hard reg, i.e. the summed frequencies of the
   pseudos currently assigned to it (see count_pseudo).  */
static int spill_cost[FIRST_PSEUDO_REGISTER];

/* When spilling multiple hard registers, we use SPILL_COST for the first
   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
   covers only the first hard reg for a multi-reg pseudo.  */
static int spill_add_cost[FIRST_PSEUDO_REGISTER];

/* Map of hard regno to pseudo regno currently occupying the hard
   reg.  -1 means no pseudo is recorded for that hard reg
   (see order_regs_for_reload).  */
static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1740
1741 /* Update the spill cost arrays, considering that pseudo REG is live. */
1742
1743 static void
1744 count_pseudo (int reg)
1745 {
1746 int freq = REG_FREQ (reg);
1747 int r = reg_renumber[reg];
1748 int nregs;
1749
1750 /* Ignore spilled pseudo-registers which can be here only if IRA is used. */
1751 if (ira_conflicts_p && r < 0)
1752 return;
1753
1754 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1755 || REGNO_REG_SET_P (&spilled_pseudos, reg))
1756 return;
1757
1758 SET_REGNO_REG_SET (&pseudos_counted, reg);
1759
1760 gcc_assert (r >= 0);
1761
1762 spill_add_cost[r] += freq;
1763 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1764 while (nregs-- > 0)
1765 {
1766 hard_regno_to_pseudo_regno[r + nregs] = reg;
1767 spill_cost[r + nregs] += freq;
1768 }
1769 }
1770
/* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
   contents of BAD_SPILL_REGS for the insn described by CHAIN.  */

static void
order_regs_for_reload (struct insn_chain *chain)
{
  unsigned i;
  HARD_REG_SET used_by_pseudos;
  HARD_REG_SET used_by_pseudos2;
  reg_set_iterator rsi;

  /* Fixed registers can never be used for spilling.  */
  COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);

  /* Reset the per-insn cost accumulators filled in by count_pseudo.  */
  memset (spill_cost, 0, sizeof spill_cost);
  memset (spill_add_cost, 0, sizeof spill_add_cost);
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    hard_regno_to_pseudo_regno[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  First exclude hard registers
     that are live in or across this insn.  */

  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);

  /* Now find out which pseudos are allocated to it, and update
     hard_reg_n_uses.  */
  CLEAR_REG_SET (&pseudos_counted);

  /* Accumulate costs for every pseudo live through or set/dead in this
     insn; pseudos_counted prevents double counting.  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  CLEAR_REG_SET (&pseudos_counted);
}
1814 \f
/* Vector of reload-numbers showing the order in which the reloads should
   be processed.  Sorted by reload_reg_class_lower; indexed by find_reg.  */
static short reload_order[MAX_RELOADS];

/* This is used to keep track of the spill regs used in one insn.  */
static HARD_REG_SET used_spill_regs_local;
1821
/* We decided to spill hard register SPILLED, which has a size of
   SPILLED_NREGS.  Determine how pseudo REG, which is live during the insn,
   is affected.  We will add it to SPILLED_PSEUDOS if necessary, and we will
   update SPILL_COST/SPILL_ADD_COST.  */

static void
count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
{
  int freq = REG_FREQ (reg);
  int r = reg_renumber[reg];
  int nregs;

  /* Ignore spilled pseudo-registers which can be here only if IRA is used.  */
  if (ira_conflicts_p && r < 0)
    return;

  /* Without IRA, every pseudo reaching this point must still have a
     hard register assigned.  */
  gcc_assert (r >= 0);

  nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];

  /* Nothing to do if REG was already recorded as spilled, or if its hard
     registers [r, r + nregs) do not overlap the spilled range
     [spilled, spilled + spilled_nregs).  */
  if (REGNO_REG_SET_P (&spilled_pseudos, reg)
      || spilled + spilled_nregs <= r || r + nregs <= spilled)
    return;

  SET_REGNO_REG_SET (&spilled_pseudos, reg);

  /* REG loses its hard registers: undo the cost contributions that were
     added for it and mark its hard registers as free again.  */
  spill_add_cost[r] -= freq;
  while (nregs-- > 0)
    {
      hard_regno_to_pseudo_regno[r + nregs] = -1;
      spill_cost[r + nregs] -= freq;
    }
}
1855
/* Find reload register to use for reload number ORDER in CHAIN.
   Record the chosen register in rld[reload_order[order]].regno/nregs
   and in USED_SPILL_REGS_LOCAL, and mark any pseudos it evicts via
   count_spilled_pseudo.  Return 1 on success, 0 if no suitable hard
   register could be found.  */

static int
find_reg (struct insn_chain *chain, int order)
{
  int rnum = reload_order[order];
  struct reload *rl = rld + rnum;
  int best_cost = INT_MAX;
  int best_reg = -1;
  unsigned int i, j, n;
  int k;
  HARD_REG_SET not_usable;
  HARD_REG_SET used_by_other_reload;
  reg_set_iterator rsi;
  static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
  static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];

  /* Registers we may not use: locally bad spill regs, globally bad
     spill regs, and everything outside this reload's register class.  */
  COPY_HARD_REG_SET (not_usable, bad_spill_regs);
  IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
  IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);

  /* Collect the hard registers already claimed by earlier reloads of
     this insn that conflict with reload RNUM.  */
  CLEAR_HARD_REG_SET (used_by_other_reload);
  for (k = 0; k < order; k++)
    {
      int other = reload_order[k];

      if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
	for (j = 0; j < rld[other].nregs; j++)
	  SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
    }

  /* Scan all hard registers (in allocation order if the target defines
     one) looking for the cheapest usable candidate.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
#ifdef REG_ALLOC_ORDER
      unsigned int regno = reg_alloc_order[i];
#else
      unsigned int regno = i;
#endif

      if (! TEST_HARD_REG_BIT (not_usable, regno)
	  && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
	  && HARD_REGNO_MODE_OK (regno, rl->mode))
	{
	  int this_cost = spill_cost[regno];
	  int ok = 1;
	  unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];

	  /* A multi-register value must have every covered hard register
	     usable; accumulate the extra cost of the additional regs.  */
	  for (j = 1; j < this_nregs; j++)
	    {
	      this_cost += spill_add_cost[regno + j];
	      if ((TEST_HARD_REG_BIT (not_usable, regno + j))
		  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
		ok = 0;
	    }
	  if (! ok)
	    continue;

	  if (ira_conflicts_p)
	    {
	      /* Ask IRA to find a better pseudo-register for
		 spilling.  Build the -1-terminated list of pseudos
		 currently occupying this candidate's hard registers
		 (skipping adjacent duplicates).  */
	      for (n = j = 0; j < this_nregs; j++)
		{
		  int r = hard_regno_to_pseudo_regno[regno + j];

		  if (r < 0)
		    continue;
		  if (n == 0 || regno_pseudo_regs[n - 1] != r)
		    regno_pseudo_regs[n++] = r;
		}
	      regno_pseudo_regs[n++] = -1;
	      if (best_reg < 0
		  || ira_better_spill_reload_regno_p (regno_pseudo_regs,
						      best_regno_pseudo_regs,
						      rl->in, rl->out,
						      chain->insn))
		{
		  best_reg = regno;
		  /* Remember this candidate's pseudo list for future
		     comparisons; copy up to and including the -1.  */
		  for (j = 0;; j++)
		    {
		      best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
		      if (regno_pseudo_regs[j] < 0)
			break;
		    }
		}
	      continue;
	    }

	  /* Non-IRA path: slightly prefer a register that already holds
	     the reload's input or output value.  */
	  if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
	    this_cost--;
	  if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
	    this_cost--;
	  if (this_cost < best_cost
	      /* Among registers with equal cost, prefer caller-saved ones, or
		 use REG_ALLOC_ORDER if it is defined.  */
	      || (this_cost == best_cost
#ifdef REG_ALLOC_ORDER
		  && (inv_reg_alloc_order[regno]
		      < inv_reg_alloc_order[best_reg])
#else
		  && call_used_regs[regno]
		  && ! call_used_regs[best_reg]
#endif
		  ))
	    {
	      best_reg = regno;
	      best_cost = this_cost;
	    }
	}
    }
  if (best_reg == -1)
    return 0;

  if (dump_file)
    fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);

  rl->nregs = hard_regno_nregs[best_reg][rl->mode];
  rl->regno = best_reg;

  /* Spill every pseudo (live through or set/dead in this insn) that
     overlaps the chosen registers.  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  /* After the spills above, the chosen registers must carry no residual
     cost or pseudo assignment; claim them for this insn.  */
  for (i = 0; i < rl->nregs; i++)
    {
      gcc_assert (spill_cost[best_reg + i] == 0);
      gcc_assert (spill_add_cost[best_reg + i] == 0);
      gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
      SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
    }
  return 1;
}
1996
/* Find more reload regs to satisfy the remaining need of an insn, which
   is given by CHAIN.
   Do it by ascending class number, since otherwise a reg
   might be spilled for a big class and might fail to count
   for a smaller class even though it belongs to that class.

   On failure to find a register for some reload, report it via
   spill_failure and set the global FAILURE flag.  */

static void
find_reload_regs (struct insn_chain *chain)
{
  int i;

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.  */
  for (i = 0; i < chain->n_reloads; i++)
    {
      /* Show whether this reload already has a hard reg.  */
      if (chain->rld[i].reg_rtx)
	{
	  int regno = REGNO (chain->rld[i].reg_rtx);
	  chain->rld[i].regno = regno;
	  chain->rld[i].nregs
	    = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
	}
      else
	chain->rld[i].regno = -1;
      reload_order[i] = i;
    }

  /* Work on a copy of the chain's reloads in the global RLD array,
     which find_reg and reload_reg_class_lower operate on; copy the
     results back at the end.  */
  n_reloads = chain->n_reloads;
  memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));

  CLEAR_HARD_REG_SET (used_spill_regs_local);

  if (dump_file)
    fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));

  qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  /* Compute the order of preference for hard registers to spill.  */

  order_regs_for_reload (chain);

  for (i = 0; i < n_reloads; i++)
    {
      int r = reload_order[i];

      /* Ignore reloads that got marked inoperative.  */
      if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
	  && ! rld[r].optional
	  && rld[r].regno == -1)
	if (! find_reg (chain, i))
	  {
	    if (dump_file)
	      fprintf (dump_file, "reload failure for reload %d\n", r);
	    spill_failure (chain->insn, rld[r].rclass);
	    failure = 1;
	    return;
	  }
    }

  /* Record the spill registers chosen for this insn and accumulate them
     into the function-wide set.  */
  COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
  IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);

  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
}
2064
2065 static void
2066 select_reload_regs (void)
2067 {
2068 struct insn_chain *chain;
2069
2070 /* Try to satisfy the needs for each insn. */
2071 for (chain = insns_need_reload; chain != 0;
2072 chain = chain->next_need_reload)
2073 find_reload_regs (chain);
2074 }
2075 \f
/* Delete all insns that were inserted by emit_caller_save_insns during
   this iteration.  */
static void
delete_caller_save_insns (void)
{
  struct insn_chain *c = reload_insn_chain;

  while (c != 0)
    {
      /* Remove each maximal run of caller-save insns from both the RTL
	 stream and the insn chain.  */
      while (c != 0 && c->is_caller_save_insn)
	{
	  struct insn_chain *next = c->next;
	  rtx insn = c->insn;

	  /* Keep the chain's head pointer up to date if we delete
	     the first element.  */
	  if (c == reload_insn_chain)
	    reload_insn_chain = next;
	  delete_insn (insn);

	  /* Unlink C from the doubly-linked insn chain...  */
	  if (next)
	    next->prev = c->prev;
	  if (c->prev)
	    c->prev->next = next;
	  /* ...and push it on the free list for later reuse.  */
	  c->next = unused_insn_chains;
	  unused_insn_chains = c;
	  c = next;
	}
      if (c != 0)
	c = c->next;
    }
}
2106 \f
2107 /* Handle the failure to find a register to spill.
2108 INSN should be one of the insns which needed this particular spill reg. */
2109
2110 static void
2111 spill_failure (rtx insn, enum reg_class rclass)
2112 {
2113 if (asm_noperands (PATTERN (insn)) >= 0)
2114 error_for_asm (insn, "can%'t find a register in class %qs while "
2115 "reloading %<asm%>",
2116 reg_class_names[rclass]);
2117 else
2118 {
2119 error ("unable to find a register to spill in class %qs",
2120 reg_class_names[rclass]);
2121
2122 if (dump_file)
2123 {
2124 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2125 debug_reload_to_stream (dump_file);
2126 }
2127 fatal_insn ("this is the insn:", insn);
2128 }
2129 }
2130 \f
2131 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2132 data that is dead in INSN. */
2133
2134 static void
2135 delete_dead_insn (rtx insn)
2136 {
2137 rtx prev = prev_active_insn (insn);
2138 rtx prev_dest;
2139
2140 /* If the previous insn sets a register that dies in our insn make
2141 a note that we want to run DCE immediately after reload.
2142
2143 We used to delete the previous insn & recurse, but that's wrong for
2144 block local equivalences. Instead of trying to figure out the exact
2145 circumstances where we can delete the potentially dead insns, just
2146 let DCE do the job. */
2147 if (prev && GET_CODE (PATTERN (prev)) == SET
2148 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2149 && reg_mentioned_p (prev_dest, PATTERN (insn))
2150 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2151 && ! side_effects_p (SET_SRC (PATTERN (prev))))
2152 need_dce = 1;
2153
2154 SET_INSN_DELETED (insn);
2155 }
2156
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.

   If DONT_SHARE_P is true, never reuse or share an existing stack slot
   for this pseudo.  */

static void
alter_reg (int i, int from_reg, bool dont_share_p)
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (!REG_P (regno_reg_rtx[i]))
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  SET_REGNO (regno_reg_rtx[i],
	     reg_renumber[i] >= 0 ? reg_renumber[i] : i);

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && REG_N_REFS (i) > 0
      && reg_equiv_constant (i) == 0
      && (reg_equiv_invariant (i) == 0
	  || reg_equiv_init (i) == 0)
      && reg_equiv_memory_loc (i) == 0)
    {
      rtx x = NULL_RTX;
      enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
      unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
      unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
      /* The slot must be big and aligned enough for the widest reference
	 ever made to the pseudo, not just for its own mode.  */
      unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
      int adjust = 0;

      something_was_spilled = true;

      if (ira_conflicts_p)
	{
	  /* Mark the spill for IRA.  */
	  SET_REGNO_REG_SET (&spilled_pseudos, i);
	  if (!dont_share_p)
	    x = ira_reuse_stack_slot (i, inherent_size, total_size);
	}

      /* If IRA supplied a slot to reuse, nothing more to choose.  */
      if (x)
	;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, than the previous slot.  */
      else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
	{
	  rtx stack_slot;

	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || total_size > inherent_size ? -1 : 0);

	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = inherent_size - total_size;
	      if (adjust)
		stack_slot
		  = adjust_address_nv (x, mode_for_size (total_size
							 * BITS_PER_UNIT,
							 MODE_INT, 1),
				       adjust);
	    }

	  if (! dont_share_p && ira_conflicts_p)
	    /* Inform IRA about allocation of a new stack slot.  */
	    ira_mark_new_stack_slot (stack_slot, i, total_size);
	}

      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size)
	       && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
	x = spill_stack_slot[from_reg];

      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  The new slot must also satisfy any
	     previous slot shared via FROM_REG.  */
	  rtx stack_slot;

	  if (spill_stack_slot[from_reg])
	    {
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	      if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
		min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
	    }

	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || total_size > inherent_size ? -1 : 0);
	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (adjust)
		stack_slot
		  = adjust_address_nv (x, mode_for_size (total_size
							 * BITS_PER_UNIT,
							 MODE_INT, 1),
				       adjust);
	    }

	  /* Record the slot so later spills from FROM_REG can share it.  */
	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
	adjust += (total_size - inherent_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);

      /* Set all of the memory attributes as appropriate for a spill.  */
      set_mem_attrs_for_spill (x);

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc (i) = x;
    }
}
2318
2319 /* Mark the slots in regs_ever_live for the hard regs used by
2320 pseudo-reg number REGNO, accessed in MODE. */
2321
2322 static void
2323 mark_home_live_1 (int regno, enum machine_mode mode)
2324 {
2325 int i, lim;
2326
2327 i = reg_renumber[regno];
2328 if (i < 0)
2329 return;
2330 lim = end_hard_regno (mode, i);
2331 while (i < lim)
2332 df_set_regs_ever_live(i++, true);
2333 }
2334
2335 /* Mark the slots in regs_ever_live for the hard regs
2336 used by pseudo-reg number REGNO. */
2337
2338 void
2339 mark_home_live (int regno)
2340 {
2341 if (reg_renumber[regno] >= 0)
2342 mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2343 }
2344 \f
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is nonzero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.

   Labels whose recorded offsets disagree with the current offsets cause
   the affected eliminations to be disabled (can_eliminate = 0).  */

static void
set_label_offsets (rtx x, rtx insn, int initial_p)
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  unsigned int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      /* Non-local labels are handled elsewhere; look through the
	 reference to the CODE_LABEL itself.  */
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = XEXP (x, 0);

      /* ... fall through ... */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && BARRIER_P (tem))
	set_offsets_for_label (insn);
      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_INSN:
      /* Process the jump's pattern, then handle its notes below.  */
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ... */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
	 to indirectly and hence must have all eliminations at their
	 initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case PARALLEL:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the parallel or address vector must be
	 at their initial offsets.  We want the first field for PARALLEL
	 and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */

      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (SET_SRC (x), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Check both arms; a break here falls out to the
	     "disable eliminations" code below.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;

	default:
	  break;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
      break;

    default:
      break;
    }
}
2493 \f
2494 /* Called through for_each_rtx, this function examines every reg that occurs
2495 in PX and adjusts the costs for its elimination which are gathered by IRA.
2496 DATA is the insn in which PX occurs. We do not recurse into MEM
2497 expressions. */
2498
2499 static int
2500 note_reg_elim_costly (rtx *px, void *data)
2501 {
2502 rtx insn = (rtx)data;
2503 rtx x = *px;
2504
2505 if (MEM_P (x))
2506 return -1;
2507
2508 if (REG_P (x)
2509 && REGNO (x) >= FIRST_PSEUDO_REGISTER
2510 && reg_equiv_init (REGNO (x))
2511 && reg_equiv_invariant (REGNO (x)))
2512 {
2513 rtx t = reg_equiv_invariant (REGNO (x));
2514 rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
2515 int cost = set_src_cost (new_rtx, optimize_bb_for_speed_p (elim_bb));
2516 int freq = REG_FREQ_FROM_BB (elim_bb);
2517
2518 if (cost != 0)
2519 ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
2520 }
2521 return 0;
2522 }
2523
2524 /* Scan X and replace any eliminable registers (such as fp) with a
2525 replacement (such as sp), plus an offset.
2526
2527 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2528 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2529 MEM, we are allowed to replace a sum of a register and the constant zero
2530 with the register, which we cannot do outside a MEM. In addition, we need
2531 to record the fact that a register is referenced outside a MEM.
2532
2533 If INSN is an insn, it is the insn containing X. If we replace a REG
2534 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2535 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2536 the REG is being modified.
2537
2538 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2539 That's used when we eliminate in expressions stored in notes.
2540 This means, do not set ref_outside_mem even if the reference
2541 is outside of MEMs.
2542
2543 If FOR_COSTS is true, we are being called before reload in order to
2544 estimate the costs of keeping registers with an equivalence unallocated.
2545
2546 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2547 replacements done assuming all offsets are at their initial values. If
2548 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2549 encounter, return the actual location so that find_reloads will do
2550 the proper thing. */
2551
2552 static rtx
2553 eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
2554 bool may_use_invariant, bool for_costs)
2555 {
2556 enum rtx_code code = GET_CODE (x);
2557 struct elim_table *ep;
2558 int regno;
2559 rtx new_rtx;
2560 int i, j;
2561 const char *fmt;
2562 int copied = 0;
2563
2564 if (! current_function_decl)
2565 return x;
2566
2567 switch (code)
2568 {
2569 case CONST_INT:
2570 case CONST_DOUBLE:
2571 case CONST_FIXED:
2572 case CONST_VECTOR:
2573 case CONST:
2574 case SYMBOL_REF:
2575 case CODE_LABEL:
2576 case PC:
2577 case CC0:
2578 case ASM_INPUT:
2579 case ADDR_VEC:
2580 case ADDR_DIFF_VEC:
2581 case RETURN:
2582 return x;
2583
2584 case REG:
2585 regno = REGNO (x);
2586
2587 /* First handle the case where we encounter a bare register that
2588 is eliminable. Replace it with a PLUS. */
2589 if (regno < FIRST_PSEUDO_REGISTER)
2590 {
2591 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2592 ep++)
2593 if (ep->from_rtx == x && ep->can_eliminate)
2594 return plus_constant (Pmode, ep->to_rtx, ep->previous_offset);
2595
2596 }
2597 else if (reg_renumber && reg_renumber[regno] < 0
2598 && reg_equivs
2599 && reg_equiv_invariant (regno))
2600 {
2601 if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
2602 return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)),
2603 mem_mode, insn, true, for_costs);
2604 /* There exists at least one use of REGNO that cannot be
2605 eliminated. Prevent the defining insn from being deleted. */
2606 reg_equiv_init (regno) = NULL_RTX;
2607 if (!for_costs)
2608 alter_reg (regno, -1, true);
2609 }
2610 return x;
2611
2612 /* You might think handling MINUS in a manner similar to PLUS is a
2613 good idea. It is not. It has been tried multiple times and every
2614 time the change has had to have been reverted.
2615
2616 Other parts of reload know a PLUS is special (gen_reload for example)
2617 and require special code to handle code a reloaded PLUS operand.
2618
2619 Also consider backends where the flags register is clobbered by a
2620 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2621 lea instruction comes to mind). If we try to reload a MINUS, we
2622 may kill the flags register that was holding a useful value.
2623
2624 So, please before trying to handle MINUS, consider reload as a
2625 whole instead of this little section as well as the backend issues. */
2626 case PLUS:
2627 /* If this is the sum of an eliminable register and a constant, rework
2628 the sum. */
2629 if (REG_P (XEXP (x, 0))
2630 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2631 && CONSTANT_P (XEXP (x, 1)))
2632 {
2633 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2634 ep++)
2635 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2636 {
2637 /* The only time we want to replace a PLUS with a REG (this
2638 occurs when the constant operand of the PLUS is the negative
2639 of the offset) is when we are inside a MEM. We won't want
2640 to do so at other times because that would change the
2641 structure of the insn in a way that reload can't handle.
2642 We special-case the commonest situation in
2643 eliminate_regs_in_insn, so just replace a PLUS with a
2644 PLUS here, unless inside a MEM. */
2645 if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
2646 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2647 return ep->to_rtx;
2648 else
2649 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2650 plus_constant (Pmode, XEXP (x, 1),
2651 ep->previous_offset));
2652 }
2653
2654 /* If the register is not eliminable, we are done since the other
2655 operand is a constant. */
2656 return x;
2657 }
2658
2659 /* If this is part of an address, we want to bring any constant to the
2660 outermost PLUS. We will do this by doing register replacement in
2661 our operands and seeing if a constant shows up in one of them.
2662
2663 Note that there is no risk of modifying the structure of the insn,
2664 since we only get called for its operands, thus we are either
2665 modifying the address inside a MEM, or something like an address
2666 operand of a load-address insn. */
2667
2668 {
2669 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2670 for_costs);
2671 rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2672 for_costs);
2673
2674 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2675 {
2676 /* If one side is a PLUS and the other side is a pseudo that
2677 didn't get a hard register but has a reg_equiv_constant,
2678 we must replace the constant here since it may no longer
2679 be in the position of any operand. */
2680 if (GET_CODE (new0) == PLUS && REG_P (new1)
2681 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2682 && reg_renumber[REGNO (new1)] < 0
2683 && reg_equivs
2684 && reg_equiv_constant (REGNO (new1)) != 0)
2685 new1 = reg_equiv_constant (REGNO (new1));
2686 else if (GET_CODE (new1) == PLUS && REG_P (new0)
2687 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2688 && reg_renumber[REGNO (new0)] < 0
2689 && reg_equiv_constant (REGNO (new0)) != 0)
2690 new0 = reg_equiv_constant (REGNO (new0));
2691
2692 new_rtx = form_sum (GET_MODE (x), new0, new1);
2693
2694 /* As above, if we are not inside a MEM we do not want to
2695 turn a PLUS into something else. We might try to do so here
2696 for an addition of 0 if we aren't optimizing. */
2697 if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2698 return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2699 else
2700 return new_rtx;
2701 }
2702 }
2703 return x;
2704
2705 case MULT:
2706 /* If this is the product of an eliminable register and a
2707 constant, apply the distribute law and move the constant out
2708 so that we have (plus (mult ..) ..). This is needed in order
2709 to keep load-address insns valid. This case is pathological.
2710 We ignore the possibility of overflow here. */
2711 if (REG_P (XEXP (x, 0))
2712 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2713 && CONST_INT_P (XEXP (x, 1)))
2714 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2715 ep++)
2716 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2717 {
2718 if (! mem_mode
2719 /* Refs inside notes or in DEBUG_INSNs don't count for
2720 this purpose. */
2721 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2722 || GET_CODE (insn) == INSN_LIST
2723 || DEBUG_INSN_P (insn))))
2724 ep->ref_outside_mem = 1;
2725
2726 return
2727 plus_constant (Pmode,
2728 gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2729 ep->previous_offset * INTVAL (XEXP (x, 1)));
2730 }
2731
2732 /* ... fall through ... */
2733
2734 case CALL:
2735 case COMPARE:
2736 /* See comments before PLUS about handling MINUS. */
2737 case MINUS:
2738 case DIV: case UDIV:
2739 case MOD: case UMOD:
2740 case AND: case IOR: case XOR:
2741 case ROTATERT: case ROTATE:
2742 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2743 case NE: case EQ:
2744 case GE: case GT: case GEU: case GTU:
2745 case LE: case LT: case LEU: case LTU:
2746 {
2747 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2748 for_costs);
2749 rtx new1 = XEXP (x, 1)
2750 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
2751 for_costs) : 0;
2752
2753 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2754 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2755 }
2756 return x;
2757
2758 case EXPR_LIST:
2759 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2760 if (XEXP (x, 0))
2761 {
2762 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2763 for_costs);
2764 if (new_rtx != XEXP (x, 0))
2765 {
2766 /* If this is a REG_DEAD note, it is not valid anymore.
2767 Using the eliminated version could result in creating a
2768 REG_DEAD note for the stack or frame pointer. */
2769 if (REG_NOTE_KIND (x) == REG_DEAD)
2770 return (XEXP (x, 1)
2771 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2772 for_costs)
2773 : NULL_RTX);
2774
2775 x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2776 }
2777 }
2778
2779 /* ... fall through ... */
2780
2781 case INSN_LIST:
2782 /* Now do eliminations in the rest of the chain. If this was
2783 an EXPR_LIST, this might result in allocating more memory than is
2784 strictly needed, but it simplifies the code. */
2785 if (XEXP (x, 1))
2786 {
2787 new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2788 for_costs);
2789 if (new_rtx != XEXP (x, 1))
2790 return
2791 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2792 }
2793 return x;
2794
2795 case PRE_INC:
2796 case POST_INC:
2797 case PRE_DEC:
2798 case POST_DEC:
2799 /* We do not support elimination of a register that is modified.
2800 elimination_effects has already make sure that this does not
2801 happen. */
2802 return x;
2803
2804 case PRE_MODIFY:
2805 case POST_MODIFY:
      /* We do not support elimination of a register that is modified.
	 elimination_effects has already made sure that this does not
	 happen.  The only remaining case we need to consider here is
	 that the increment value may be an eliminable register.  */
2810 if (GET_CODE (XEXP (x, 1)) == PLUS
2811 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2812 {
2813 rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2814 insn, true, for_costs);
2815
2816 if (new_rtx != XEXP (XEXP (x, 1), 1))
2817 return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2818 gen_rtx_PLUS (GET_MODE (x),
2819 XEXP (x, 0), new_rtx));
2820 }
2821 return x;
2822
2823 case STRICT_LOW_PART:
2824 case NEG: case NOT:
2825 case SIGN_EXTEND: case ZERO_EXTEND:
2826 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2827 case FLOAT: case FIX:
2828 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2829 case ABS:
2830 case SQRT:
2831 case FFS:
2832 case CLZ:
2833 case CTZ:
2834 case POPCOUNT:
2835 case PARITY:
2836 case BSWAP:
2837 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2838 for_costs);
2839 if (new_rtx != XEXP (x, 0))
2840 return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2841 return x;
2842
2843 case SUBREG:
2844 /* Similar to above processing, but preserve SUBREG_BYTE.
2845 Convert (subreg (mem)) to (mem) if not paradoxical.
2846 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2847 pseudo didn't get a hard reg, we must replace this with the
2848 eliminated version of the memory location because push_reload
2849 may do the replacement in certain circumstances. */
2850 if (REG_P (SUBREG_REG (x))
2851 && !paradoxical_subreg_p (x)
2852 && reg_equivs
2853 && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
2854 {
2855 new_rtx = SUBREG_REG (x);
2856 }
2857 else
2858 new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs);
2859
2860 if (new_rtx != SUBREG_REG (x))
2861 {
2862 int x_size = GET_MODE_SIZE (GET_MODE (x));
2863 int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2864
2865 if (MEM_P (new_rtx)
2866 && ((x_size < new_size
2867 #ifdef WORD_REGISTER_OPERATIONS
2868 /* On these machines, combine can create rtl of the form
2869 (set (subreg:m1 (reg:m2 R) 0) ...)
2870 where m1 < m2, and expects something interesting to
2871 happen to the entire word. Moreover, it will use the
2872 (reg:m2 R) later, expecting all bits to be preserved.
2873 So if the number of words is the same, preserve the
2874 subreg so that push_reload can see it. */
2875 && ! ((x_size - 1) / UNITS_PER_WORD
2876 == (new_size -1 ) / UNITS_PER_WORD)
2877 #endif
2878 )
2879 || x_size == new_size)
2880 )
2881 return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2882 else
2883 return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2884 }
2885
2886 return x;
2887
2888 case MEM:
2889 /* Our only special processing is to pass the mode of the MEM to our
2890 recursive call and copy the flags. While we are here, handle this
2891 case more efficiently. */
2892
2893 new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
2894 for_costs);
2895 if (for_costs
2896 && memory_address_p (GET_MODE (x), XEXP (x, 0))
2897 && !memory_address_p (GET_MODE (x), new_rtx))
2898 for_each_rtx (&XEXP (x, 0), note_reg_elim_costly, insn);
2899
2900 return replace_equiv_address_nv (x, new_rtx);
2901
2902 case USE:
2903 /* Handle insn_list USE that a call to a pure function may generate. */
2904 new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
2905 for_costs);
2906 if (new_rtx != XEXP (x, 0))
2907 return gen_rtx_USE (GET_MODE (x), new_rtx);
2908 return x;
2909
2910 case CLOBBER:
2911 case ASM_OPERANDS:
2912 gcc_assert (insn && DEBUG_INSN_P (insn));
2913 break;
2914
2915 case SET:
2916 gcc_unreachable ();
2917
2918 default:
2919 break;
2920 }
2921
2922 /* Process each of our operands recursively. If any have changed, make a
2923 copy of the rtx. */
2924 fmt = GET_RTX_FORMAT (code);
2925 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2926 {
2927 if (*fmt == 'e')
2928 {
2929 new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
2930 for_costs);
2931 if (new_rtx != XEXP (x, i) && ! copied)
2932 {
2933 x = shallow_copy_rtx (x);
2934 copied = 1;
2935 }
2936 XEXP (x, i) = new_rtx;
2937 }
2938 else if (*fmt == 'E')
2939 {
2940 int copied_vec = 0;
2941 for (j = 0; j < XVECLEN (x, i); j++)
2942 {
2943 new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
2944 for_costs);
2945 if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2946 {
2947 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2948 XVEC (x, i)->elem);
2949 if (! copied)
2950 {
2951 x = shallow_copy_rtx (x);
2952 copied = 1;
2953 }
2954 XVEC (x, i) = new_v;
2955 copied_vec = 1;
2956 }
2957 XVECEXP (x, i, j) = new_rtx;
2958 }
2959 }
2960 }
2961
2962 return x;
2963 }
2964
2965 rtx
2966 eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
2967 {
2968 return eliminate_regs_1 (x, mem_mode, insn, false, false);
2969 }
2970
2971 /* Scan rtx X for modifications of elimination target registers. Update
2972 the table of eliminables to reflect the changed state. MEM_MODE is
2973 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2974
static void
elimination_effects (rtx x, enum machine_mode mem_mode)
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  int i, j;
  const char *fmt;

  switch (code)
    {
    /* Constants, labels and similar leaf rtxes contain no register
       references, so they can have no elimination effects.  */
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      return;

    case REG:
      regno = REGNO (x);

      /* A bare reference to an eliminable hard register: if it occurs
	 outside of a MEM, record that fact so the elimination can later
	 be disabled if the offset changes across the insn (see the
	 ref_outside_mem check in eliminate_regs_in_insn).  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      {
		if (! mem_mode)
		  ep->ref_outside_mem = 1;
		return;
	      }

	}
      else if (reg_renumber[regno] < 0
	       && reg_equivs != 0
	       && reg_equiv_constant (regno)
	       && ! function_invariant_p (reg_equiv_constant (regno)))
	/* A spilled pseudo with a non-invariant constant equivalence:
	   the equivalence expression itself may reference eliminable
	   registers, so scan it too.  */
	elimination_effects (reg_equiv_constant (regno), mem_mode);
      return;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_MODIFY:
      /* If we modify the source of an elimination rule, disable it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      /* If we modify the target of an elimination rule by adding a constant,
	 update its offset.  If we modify the target in any other way, we'll
	 have to disable the rule as well.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  {
	    int size = GET_MODE_SIZE (mem_mode);

	    /* If more bytes than MEM_MODE are pushed, account for them.  */
#ifdef PUSH_ROUNDING
	    if (ep->to_rtx == stack_pointer_rtx)
	      size = PUSH_ROUNDING (size);
#endif
	    if (code == PRE_DEC || code == POST_DEC)
	      ep->offset += size;
	    else if (code == PRE_INC || code == POST_INC)
	      ep->offset -= size;
	    else if (code == PRE_MODIFY || code == POST_MODIFY)
	      {
		/* Only (reg = reg + const) forms keep the rule alive;
		   anything else modifies the target unpredictably.  */
		if (GET_CODE (XEXP (x, 1)) == PLUS
		    && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
		    && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
		  ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
		else
		  ep->can_eliminate = 0;
	      }
	  }

      /* These two aren't unary operators.  */
      if (code == POST_MODIFY || code == PRE_MODIFY)
	break;

      /* Fall through to generic unary operation case.  */
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:
    case CTZ:
    case POPCOUNT:
    case PARITY:
    case BSWAP:
      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SUBREG:
      /* A non-paradoxical subreg of a pseudo with a memory-equivalent
	 location is handled via that equivalence (compare the SUBREG
	 case in eliminate_regs_1), so there are no effects to record
	 here; otherwise recurse into the inner expression.  */
      if (REG_P (SUBREG_REG (x))
	  && (GET_MODE_SIZE (GET_MODE (x))
	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	  && reg_equivs != 0
	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
	return;

      elimination_effects (SUBREG_REG (x), mem_mode);
      return;

    case USE:
      /* If using a register that is the source of an eliminate we still
	 think can be performed, note it cannot be performed since we don't
	 know how this register is used.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case CLOBBER:
      /* If clobbering a register that is the replacement register for an
	 elimination we still think can be performed, note that it cannot
	 be performed.  Otherwise, we need not be concerned about it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SET:
      /* Check for setting a register that we know about.  */
      if (REG_P (SET_DEST (x)))
	{
	  /* See if this is setting the replacement register for an
	     elimination.

	     If DEST is the hard frame pointer, we do nothing because we
	     assume that all assignments to the frame pointer are for
	     non-local gotos and are being done at a time when they are valid
	     and do not disturb anything else.  Some machines want to
	     eliminate a fake argument pointer (or even a fake frame pointer)
	     with either the real frame or the stack pointer.  Assignments to
	     the hard frame pointer must not prevent this elimination.  */

	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->to_rtx == SET_DEST (x)
		&& SET_DEST (x) != hard_frame_pointer_rtx)
	      {
		/* If it is being incremented, adjust the offset.  Otherwise,
		   this elimination can't be done.  */
		rtx src = SET_SRC (x);

		if (GET_CODE (src) == PLUS
		    && XEXP (src, 0) == SET_DEST (x)
		    && CONST_INT_P (XEXP (src, 1)))
		  ep->offset -= INTVAL (XEXP (src, 1));
		else
		  ep->can_eliminate = 0;
	      }
	}

      /* Neither side of a SET is inside a MEM at top level, so pass
	 VOIDmode for both recursions.  */
      elimination_effects (SET_DEST (x), VOIDmode);
      elimination_effects (SET_SRC (x), VOIDmode);
      return;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call.  */
      elimination_effects (XEXP (x, 0), GET_MODE (x));
      return;

    default:
      break;
    }

  /* Generic case: recurse into every rtx and rtvec operand of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	elimination_effects (XEXP (x, i), mem_mode);
      else if (*fmt == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  elimination_effects (XVECEXP (x, i, j), mem_mode);
    }
}
3176
3177 /* Descend through rtx X and verify that no references to eliminable registers
3178 remain. If any do remain, mark the involved register as not
3179 eliminable. */
3180
3181 static void
3182 check_eliminable_occurrences (rtx x)
3183 {
3184 const char *fmt;
3185 int i;
3186 enum rtx_code code;
3187
3188 if (x == 0)
3189 return;
3190
3191 code = GET_CODE (x);
3192
3193 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3194 {
3195 struct elim_table *ep;
3196
3197 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3198 if (ep->from_rtx == x)
3199 ep->can_eliminate = 0;
3200 return;
3201 }
3202
3203 fmt = GET_RTX_FORMAT (code);
3204 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3205 {
3206 if (*fmt == 'e')
3207 check_eliminable_occurrences (XEXP (x, i));
3208 else if (*fmt == 'E')
3209 {
3210 int j;
3211 for (j = 0; j < XVECLEN (x, i); j++)
3212 check_eliminable_occurrences (XVECEXP (x, i, j));
3213 }
3214 }
3215 }
3216 \f
/* Scan INSN and eliminate all eliminable registers in it.

   If REPLACE is nonzero, do the replacement destructively.  Also
   delete the insn as dead if it is setting an eliminable register.

   If REPLACE is zero, do all our allocations in reload_obstack.

   If no eliminations were done and this insn doesn't require any elimination
   processing (these are not identical conditions: it might be updating sp,
   but not referencing fp; this needs to be seen during reload_as_needed so
   that the offset between fp and sp can be taken into consideration), zero
   is returned.  Otherwise, 1 is returned.  */
3229
static int
eliminate_regs_in_insn (rtx insn, int replace)
{
  int icode = recog_memoized (insn);
  rtx old_body = PATTERN (insn);
  int insn_is_asm = asm_noperands (old_body) >= 0;
  rtx old_set = single_set (insn);
  rtx new_body;
  int val = 0;		/* Nonzero once anything about INSN has changed.  */
  int i;
  rtx substed_operand[MAX_RECOG_OPERANDS];
  rtx orig_operand[MAX_RECOG_OPERANDS];
  struct elim_table *ep;
  rtx plus_src, plus_cst_src;

  /* Unrecognizable insns are only valid in a few fixed forms; for a
     debug insn we still eliminate registers inside its location
     expression, but no elimination bookkeeping is needed.  */
  if (! insn_is_asm && icode < 0)
    {
      gcc_assert (GET_CODE (PATTERN (insn)) == USE
		  || GET_CODE (PATTERN (insn)) == CLOBBER
		  || GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
		  || GET_CODE (PATTERN (insn)) == ASM_INPUT
		  || DEBUG_INSN_P (insn));
      if (DEBUG_INSN_P (insn))
	INSN_VAR_LOCATION_LOC (insn)
	  = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
      return 0;
    }

  if (old_set != 0 && REG_P (SET_DEST (old_set))
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  {
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
	    /* If this is setting the frame pointer register to the
	       hardware frame pointer register and this is an elimination
	       that will be done (tested above), this insn is really
	       adjusting the frame pointer downward to compensate for
	       the adjustment done before a nonlocal goto.  */
	    if (ep->from == FRAME_POINTER_REGNUM
		&& ep->to == HARD_FRAME_POINTER_REGNUM)
	      {
		rtx base = SET_SRC (old_set);
		rtx base_insn = insn;
		HOST_WIDE_INT offset = 0;

		/* Walk backwards through (plus ... const) wrappers and
		   single-set copies to find the hard frame pointer and
		   accumulate the total constant offset added to it.  */
		while (base != ep->to_rtx)
		  {
		    rtx prev_insn, prev_set;

		    if (GET_CODE (base) == PLUS
			&& CONST_INT_P (XEXP (base, 1)))
		      {
			offset += INTVAL (XEXP (base, 1));
			base = XEXP (base, 0);
		      }
		    else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
			     && (prev_set = single_set (prev_insn)) != 0
			     && rtx_equal_p (SET_DEST (prev_set), base))
		      {
			base = SET_SRC (prev_set);
			base_insn = prev_insn;
		      }
		    else
		      break;
		  }

		if (base == ep->to_rtx)
		  {
		    rtx src = plus_constant (Pmode, ep->to_rtx,
					     offset - ep->offset);

		    new_body = old_body;
		    if (! replace)
		      {
			new_body = copy_insn (old_body);
			if (REG_NOTES (insn))
			  REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
		      }
		    PATTERN (insn) = new_body;
		    old_set = single_set (insn);

		    /* First see if this insn remains valid when we
		       make the change.  If not, keep the INSN_CODE
		       the same and let reload fix it up.  */
		    validate_change (insn, &SET_SRC (old_set), src, 1);
		    validate_change (insn, &SET_DEST (old_set),
				     ep->to_rtx, 1);
		    if (! apply_change_group ())
		      {
			SET_SRC (old_set) = src;
			SET_DEST (old_set) = ep->to_rtx;
		      }

		    val = 1;
		    goto done;
		  }
	      }
#endif

	    /* In this case this insn isn't serving a useful purpose.  We
	       will delete it in reload_as_needed once we know that this
	       elimination is, in fact, being done.

	       If REPLACE isn't set, we can't delete this insn, but needn't
	       process it since it won't be used unless something changes.  */
	    if (replace)
	      {
		delete_dead_insn (insn);
		return 1;
	      }
	    val = 1;
	    goto done;
	  }
    }

  /* We allow one special case which happens to work on all machines we
     currently support: a single set with the source or a REG_EQUAL
     note being a PLUS of an eliminable register and a constant.  */
  plus_src = plus_cst_src = 0;
  if (old_set && REG_P (SET_DEST (old_set)))
    {
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
	plus_src = SET_SRC (old_set);
      /* First see if the source is of the form (plus (...) CST).  */
      if (plus_src
	  && CONST_INT_P (XEXP (plus_src, 1)))
	plus_cst_src = plus_src;
      else if (REG_P (SET_SRC (old_set))
	       || plus_src)
	{
	  /* Otherwise, see if we have a REG_EQUAL note of the form
	     (plus (...) CST).  */
	  rtx links;
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    {
	      if ((REG_NOTE_KIND (links) == REG_EQUAL
		   || REG_NOTE_KIND (links) == REG_EQUIV)
		  && GET_CODE (XEXP (links, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
		{
		  plus_cst_src = XEXP (links, 0);
		  break;
		}
	    }
	}

      /* Check that the first operand of the PLUS is a hard reg or
	 the lowpart subreg of one.  */
      if (plus_cst_src)
	{
	  rtx reg = XEXP (plus_cst_src, 0);
	  if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
	    reg = SUBREG_REG (reg);

	  if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
	    plus_cst_src = 0;
	}
    }
  if (plus_cst_src)
    {
      rtx reg = XEXP (plus_cst_src, 0);
      HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));

      if (GET_CODE (reg) == SUBREG)
	reg = SUBREG_REG (reg);

      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == reg && ep->can_eliminate)
	  {
	    rtx to_rtx = ep->to_rtx;
	    /* Fold the elimination offset into the constant and truncate
	       to the mode of the PLUS so the value stays representable.  */
	    offset += ep->offset;
	    offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));

	    if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
	      to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
				    to_rtx);
	    /* If we have a nonzero offset, and the source is already
	       a simple REG, the following transformation would
	       increase the cost of the insn by replacing a simple REG
	       with (plus (reg sp) CST).  So try only when we already
	       had a PLUS before.  */
	    if (offset == 0 || plus_src)
	      {
		rtx new_src = plus_constant (GET_MODE (to_rtx),
					     to_rtx, offset);

		new_body = old_body;
		if (! replace)
		  {
		    new_body = copy_insn (old_body);
		    if (REG_NOTES (insn))
		      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
		  }
		PATTERN (insn) = new_body;
		old_set = single_set (insn);

		/* First see if this insn remains valid when we make the
		   change.  If not, try to replace the whole pattern with
		   a simple set (this may help if the original insn was a
		   PARALLEL that was only recognized as single_set due to
		   REG_UNUSED notes).  If this isn't valid either, keep
		   the INSN_CODE the same and let reload fix it up.  */
		if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
		  {
		    rtx new_pat = gen_rtx_SET (VOIDmode,
					       SET_DEST (old_set), new_src);

		    if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
		      SET_SRC (old_set) = new_src;
		  }
	      }
	    else
	      break;

	    val = 1;
	    /* This can't have an effect on elimination offsets, so skip right
	       to the end.  */
	    goto done;
	  }
    }

  /* Determine the effects of this insn on elimination offsets.  */
  elimination_effects (old_body, VOIDmode);

  /* Eliminate all eliminable registers occurring in operands that
     can be handled by reload.  */
  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      orig_operand[i] = recog_data.operand[i];
      substed_operand[i] = recog_data.operand[i];

      /* For an asm statement, every operand is eliminable.  */
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
	{
	  bool is_set_src, in_plus;

	  /* Check for setting a register that we know about.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i]))
	    {
	      /* If we are assigning to a register that can be eliminated, it
		 must be as part of a PARALLEL, since the code above handles
		 single SETs.  We must indicate that we can no longer
		 eliminate this reg.  */
	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
		   ep++)
		if (ep->from_rtx == orig_operand[i])
		  ep->can_eliminate = 0;
	    }

	  /* Companion to the above plus substitution, we can allow
	     invariants as the source of a plain move.  */
	  is_set_src = false;
	  if (old_set
	      && recog_data.operand_loc[i] == &SET_SRC (old_set))
	    is_set_src = true;
	  in_plus = false;
	  if (plus_src
	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
	    in_plus = true;

	  substed_operand[i]
	    = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
				replace ? insn : NULL_RTX,
				is_set_src || in_plus, false);
	  if (substed_operand[i] != orig_operand[i])
	    val = 1;
	  /* Terminate the search in check_eliminable_occurrences at
	     this point.  */
	  *recog_data.operand_loc[i] = 0;

	  /* If an output operand changed from a REG to a MEM and INSN is an
	     insn, write a CLOBBER insn.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i])
	      && MEM_P (substed_operand[i])
	      && replace)
	    emit_insn_after (gen_clobber (orig_operand[i]), insn);
	}
    }

  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i]
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];

  /* If any eliminable remain, they aren't eliminable anymore.  */
  check_eliminable_occurrences (old_body);

  /* Substitute the operands; the new values are in the substed_operand
     array.  */
  for (i = 0; i < recog_data.n_operands; i++)
    *recog_data.operand_loc[i] = substed_operand[i];
  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];

  /* If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.
     If re-recognition fails, the old insn code number will still be used,
     and some register operands may have changed into PLUS expressions.
     These will be handled by find_reloads by loading them into a register
     again.  */

  if (val)
    {
      /* If we aren't replacing things permanently and we changed something,
	 make another copy to ensure that all the RTL is new.  Otherwise
	 things can go wrong if find_reloads swaps commutative operands
	 and one is inside RTL that has been copied while the other is not.  */
      new_body = old_body;
      if (! replace)
	{
	  new_body = copy_insn (old_body);
	  if (REG_NOTES (insn))
	    REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
	}
      PATTERN (insn) = new_body;

      /* If we had a move insn but now we don't, rerecognize it.  This will
	 cause spurious re-recognition if the old move had a PARALLEL since
	 the new one still will, but we can't call single_set without
	 having put NEW_BODY into the insn and the re-recognition won't
	 hurt in this rare case.  */
      /* ??? Why this huge if statement - why don't we just rerecognize the
	 thing always?  */
      if (! insn_is_asm
	  && old_set != 0
	  && ((REG_P (SET_SRC (old_set))
	       && (GET_CODE (new_body) != SET
		   || !REG_P (SET_SRC (new_body))))
	      /* If this was a load from or store to memory, compare
		 the MEM in recog_data.operand to the one in the insn.
		 If they are not equal, then rerecognize the insn.  */
	      || (old_set != 0
		  && ((MEM_P (SET_SRC (old_set))
		       && SET_SRC (old_set) != recog_data.operand[1])
		      || (MEM_P (SET_DEST (old_set))
			  && SET_DEST (old_set) != recog_data.operand[0])))
	      /* If this was an add insn before, rerecognize.  */
	      || GET_CODE (SET_SRC (old_set)) == PLUS))
	{
	  int new_icode = recog (PATTERN (insn), insn, 0);
	  if (new_icode >= 0)
	    INSN_CODE (insn) = new_icode;
	}
    }

  /* Restore the old body.  If there were any changes to it, we made a copy
     of it while the changes were still in place, so we'll correctly return
     a modified insn below.  */
  if (! replace)
    {
      /* Restore the old body.  */
      for (i = 0; i < recog_data.n_operands; i++)
	/* Restoring a top-level match_parallel would clobber the new_body
	   we installed in the insn.  */
	if (recog_data.operand_loc[i] != &PATTERN (insn))
	  *recog_data.operand_loc[i] = orig_operand[i];
      for (i = 0; i < recog_data.n_dups; i++)
	*recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
    }

  /* Update all elimination pairs to reflect the status after the current
     insn.  The changes we make were determined by the earlier call to
     elimination_effects.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;

      if (ep->previous_offset != ep->offset)
	val = 1;
    }

 done:
  /* If we changed something, perform elimination in REG_NOTES.  This is
     needed even when REPLACE is zero because a REG_DEAD note might refer
     to a register that we eliminate and could cause a different number
     of spill registers to be needed in the final reload pass than in
     the pre-passes.  */
  if (val && REG_NOTES (insn) != 0)
    REG_NOTES (insn)
      = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true,
			  false);

  return val;
}
3636
3637 /* Like eliminate_regs_in_insn, but only estimate costs for the use of the
3638 register allocator. INSN is the instruction we need to examine, we perform
3639 eliminations in its operands and record cases where eliminating a reg with
3640 an invariant equivalence would add extra cost. */
3641
static void
elimination_costs_in_insn (rtx insn)
{
  int icode = recog_memoized (insn);
  rtx old_body = PATTERN (insn);
  int insn_is_asm = asm_noperands (old_body) >= 0;
  rtx old_set = single_set (insn);
  int i;
  rtx orig_operand[MAX_RECOG_OPERANDS];
  rtx orig_dup[MAX_RECOG_OPERANDS];
  struct elim_table *ep;
  rtx plus_src, plus_cst_src;
  bool sets_reg_p;

  /* Unrecognizable insns are only valid in a few fixed forms; they
     contribute nothing to the cost estimate.  */
  if (! insn_is_asm && icode < 0)
    {
      gcc_assert (GET_CODE (PATTERN (insn)) == USE
		  || GET_CODE (PATTERN (insn)) == CLOBBER
		  || GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
		  || GET_CODE (PATTERN (insn)) == ASM_INPUT
		  || DEBUG_INSN_P (insn));
      return;
    }

  /* An insn that just sets an eliminable register is handled specially
     by eliminate_regs_in_insn (deleted or rewritten); no costs here.  */
  if (old_set != 0 && REG_P (SET_DEST (old_set))
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  return;
    }

  /* We allow one special case which happens to work on all machines we
     currently support: a single set with the source or a REG_EQUAL
     note being a PLUS of an eliminable register and a constant.  */
  plus_src = plus_cst_src = 0;
  sets_reg_p = false;
  if (old_set && REG_P (SET_DEST (old_set)))
    {
      sets_reg_p = true;
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
	plus_src = SET_SRC (old_set);
      /* First see if the source is of the form (plus (...) CST).  */
      if (plus_src
	  && CONST_INT_P (XEXP (plus_src, 1)))
	plus_cst_src = plus_src;
      else if (REG_P (SET_SRC (old_set))
	       || plus_src)
	{
	  /* Otherwise, see if we have a REG_EQUAL note of the form
	     (plus (...) CST).  */
	  rtx links;
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    {
	      if ((REG_NOTE_KIND (links) == REG_EQUAL
		   || REG_NOTE_KIND (links) == REG_EQUIV)
		  && GET_CODE (XEXP (links, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
		{
		  plus_cst_src = XEXP (links, 0);
		  break;
		}
	    }
	}
    }

  /* Determine the effects of this insn on elimination offsets.  */
  elimination_effects (old_body, VOIDmode);

  /* Eliminate all eliminable registers occurring in operands that
     can be handled by reload.  */
  extract_insn (insn);
  /* Save the duplicate operand locations so they can be restored after
     the scan (this function must not modify the insn).  */
  for (i = 0; i < recog_data.n_dups; i++)
    orig_dup[i] = *recog_data.dup_loc[i];

  for (i = 0; i < recog_data.n_operands; i++)
    {
      orig_operand[i] = recog_data.operand[i];

      /* For an asm statement, every operand is eliminable.  */
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
	{
	  bool is_set_src, in_plus;

	  /* Check for setting a register that we know about.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i]))
	    {
	      /* If we are assigning to a register that can be eliminated, it
		 must be as part of a PARALLEL, since the code above handles
		 single SETs.  We must indicate that we can no longer
		 eliminate this reg.  */
	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
		   ep++)
		if (ep->from_rtx == orig_operand[i])
		  ep->can_eliminate = 0;
	    }

	  /* Companion to the above plus substitution, we can allow
	     invariants as the source of a plain move.  */
	  is_set_src = false;
	  if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
	    is_set_src = true;
	  if (is_set_src && !sets_reg_p)
	    note_reg_elim_costly (&SET_SRC (old_set), insn);
	  in_plus = false;
	  if (plus_src && sets_reg_p
	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
	    in_plus = true;

	  /* Run the eliminator in costs-only mode; the result rtx is
	     discarded, only the recorded costs matter.  */
	  eliminate_regs_1 (recog_data.operand[i], VOIDmode,
			    NULL_RTX,
			    is_set_src || in_plus, true);
	  /* Terminate the search in check_eliminable_occurrences at
	     this point.  */
	  *recog_data.operand_loc[i] = 0;
	}
    }

  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i]
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];

  /* If any eliminable remain, they aren't eliminable anymore.  */
  check_eliminable_occurrences (old_body);

  /* Restore the old body.  */
  for (i = 0; i < recog_data.n_operands; i++)
    *recog_data.operand_loc[i] = orig_operand[i];
  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i] = orig_dup[i];

  /* Update all elimination pairs to reflect the status after the current
     insn.  The changes we make were determined by the earlier call to
     elimination_effects.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;
    }

  return;
}
3791
3792 /* Loop through all elimination pairs.
3793 Recalculate the number not at initial offset.
3794
3795 Compute the maximum offset (minimum offset if the stack does not
3796 grow downward) for each elimination pair. */
3797
3798 static void
3799 update_eliminable_offsets (void)
3800 {
3801 struct elim_table *ep;
3802
3803 num_not_at_initial_offset = 0;
3804 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3805 {
3806 ep->previous_offset = ep->offset;
3807 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3808 num_not_at_initial_offset++;
3809 }
3810 }
3811
3812 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3813 replacement we currently believe is valid, mark it as not eliminable if X
3814 modifies DEST in any way other than by adding a constant integer to it.
3815
3816 If DEST is the frame pointer, we do nothing because we assume that
3817 all assignments to the hard frame pointer are nonlocal gotos and are being
3818 done at a time when they are valid and do not disturb anything else.
3819 Some machines want to eliminate a fake argument pointer with either the
3820 frame or stack pointer. Assignments to the hard frame pointer must not
3821 prevent this elimination.
3822
3823 Called via note_stores from reload before starting its passes to scan
3824 the insns of the function. */
3825
3826 static void
3827 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3828 {
3829 unsigned int i;
3830
3831 /* A SUBREG of a hard register here is just changing its mode. We should
3832 not see a SUBREG of an eliminable hard register, but check just in
3833 case. */
3834 if (GET_CODE (dest) == SUBREG)
3835 dest = SUBREG_REG (dest);
3836
3837 if (dest == hard_frame_pointer_rtx)
3838 return;
3839
3840 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3841 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3842 && (GET_CODE (x) != SET
3843 || GET_CODE (SET_SRC (x)) != PLUS
3844 || XEXP (SET_SRC (x), 0) != dest
3845 || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3846 {
3847 reg_eliminate[i].can_eliminate_previous
3848 = reg_eliminate[i].can_eliminate = 0;
3849 num_eliminable--;
3850 }
3851 }
3852
3853 /* Verify that the initial elimination offsets did not change since the
3854 last call to set_initial_elim_offsets. This is used to catch cases
3855 where something illegal happened during reload_as_needed that could
3856 cause incorrect code to be generated if we did not check for it. */
3857
static bool
verify_initial_elim_offsets (void)
{
  HOST_WIDE_INT t;

  /* Nothing to check when no eliminations are possible.  */
  if (!num_eliminable)
    return true;

#ifdef ELIMINABLE_REGS
  {
    struct elim_table *ep;

    /* Re-query the target for the offset of every elimination pair and
       compare against what set_initial_elim_offsets recorded.  */
    for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
      {
	INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
	if (t != ep->initial_offset)
	  return false;
      }
  }
#else
  /* Without ELIMINABLE_REGS there is only the single frame-pointer
     elimination in slot 0.  */
  INITIAL_FRAME_POINTER_OFFSET (t);
  if (t != reg_eliminate[0].initial_offset)
    return false;
#endif

  return true;
}
3885
3886 /* Reset all offsets on eliminable registers to their initial values. */
3887
static void
set_initial_elim_offsets (void)
{
  struct elim_table *ep = reg_eliminate;

#ifdef ELIMINABLE_REGS
  for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      /* Ask the target for the starting offset of each pair and make
	 both the current and previous offsets start from it.  */
      INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
      ep->previous_offset = ep->offset = ep->initial_offset;
    }
#else
  /* Only the frame pointer -> stack pointer elimination exists here.  */
  INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
  ep->previous_offset = ep->offset = ep->initial_offset;
#endif

  /* By construction, every elimination is now at its initial offset.  */
  num_not_at_initial_offset = 0;
}
3906
3907 /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3908
static void
set_initial_eh_label_offset (rtx label)
{
  /* EH labels are assumed to be reached with every elimination at its
     initial offset; record that as LABEL's known offset state.  */
  set_label_offsets (label, NULL_RTX, 1);
}
3914
3915 /* Initialize the known label offsets.
3916 Set a known offset for each forced label to be at the initial offset
3917 of each elimination. We do this because we assume that all
3918 computed jumps occur from a location where each elimination is
3919 at its initial offset.
3920 For all other labels, show that we don't know the offsets. */
3921
3922 static void
3923 set_initial_label_offsets (void)
3924 {
3925 rtx x;
3926 memset (offsets_known_at, 0, num_labels);
3927
3928 for (x = forced_labels; x; x = XEXP (x, 1))
3929 if (XEXP (x, 0))
3930 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3931
3932 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
3933 if (XEXP (x, 0))
3934 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3935
3936 for_each_eh_label (set_initial_eh_label_offset);
3937 }
3938
3939 /* Set all elimination offsets to the known values for the code label given
3940 by INSN. */
3941
3942 static void
3943 set_offsets_for_label (rtx insn)
3944 {
3945 unsigned int i;
3946 int label_nr = CODE_LABEL_NUMBER (insn);
3947 struct elim_table *ep;
3948
3949 num_not_at_initial_offset = 0;
3950 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3951 {
3952 ep->offset = ep->previous_offset
3953 = offsets_at[label_nr - first_label_num][i];
3954 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3955 num_not_at_initial_offset++;
3956 }
3957 }
3958
3959 /* See if anything that happened changes which eliminations are valid.
3960 For example, on the SPARC, whether or not the frame pointer can
3961 be eliminated can depend on what registers have been used. We need
3962 not check some conditions again (such as flag_omit_frame_pointer)
3963 since they can't have changed. */
3964
static void
update_eliminables (HARD_REG_SET *pset)
{
  int previous_frame_pointer_needed = frame_pointer_needed;
  struct elim_table *ep;

  /* Re-ask the target whether each elimination is still allowed.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if ((ep->from == HARD_FRAME_POINTER_REGNUM
         && targetm.frame_pointer_required ())
#ifdef ELIMINABLE_REGS
	|| ! targetm.can_eliminate (ep->from, ep->to)
#endif
	)
      ep->can_eliminate = 0;

  /* Look for the case where we have discovered that we can't replace
     register A with register B and that means that we will now be
     trying to replace register A with register C.  This means we can
     no longer replace register C with register B and we need to disable
     such an elimination, if it exists.  This occurs often with A == ap,
     B == sp, and C == fp.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      struct elim_table *op;
      int new_to = -1;

      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  /* Find the current elimination for ep->from, if there is a
	     new one.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == ep->from && op->can_eliminate)
	      {
		new_to = op->to;
		break;
	      }

	  /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
	     disable it.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == new_to && op->to == ep->to)
	      op->can_eliminate = 0;
	}
    }

  /* See if any registers that we thought we could eliminate the previous
     time are no longer eliminable.  If so, something has changed and we
     must spill the register.  Also, recompute the number of eliminable
     registers and see if the frame pointer is needed; it is if there is
     no elimination of the frame pointer that we can perform.  */

  frame_pointer_needed = 1;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      /* Any surviving elimination of the frame pointer into something
	 other than the hard frame pointer means we don't need one --
	 unless stack realignment forces it.  */
      if (ep->can_eliminate
	  && ep->from == FRAME_POINTER_REGNUM
	  && ep->to != HARD_FRAME_POINTER_REGNUM
	  && (! SUPPORTS_STACK_ALIGNMENT
	      || ! crtl->stack_realign_needed))
	frame_pointer_needed = 0;

      /* An elimination that just became invalid: report its source
	 register to the caller via PSET so it gets spilled.  */
      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  ep->can_eliminate_previous = 0;
	  SET_HARD_REG_BIT (*pset, ep->from);
	  num_eliminable--;
	}
    }

  /* If we didn't need a frame pointer last time, but we do now, spill
     the hard frame pointer.  */
  if (frame_pointer_needed && ! previous_frame_pointer_needed)
    SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
}
4042
4043 /* Return true if X is used as the target register of an elimination. */
4044
4045 bool
4046 elimination_target_reg_p (rtx x)
4047 {
4048 struct elim_table *ep;
4049
4050 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4051 if (ep->to_rtx == x && ep->can_eliminate)
4052 return true;
4053
4054 return false;
4055 }
4056
4057 /* Initialize the table of registers to eliminate.
4058 Pre-condition: global flag frame_pointer_needed has been set before
4059 calling this function. */
4060
static void
init_elim_table (void)
{
  struct elim_table *ep;
#ifdef ELIMINABLE_REGS
  const struct elim_table_1 *ep1;
#endif

  /* Allocate the table lazily on first use.  */
  if (!reg_eliminate)
    reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);

  num_eliminable = 0;

#ifdef ELIMINABLE_REGS
  /* Copy each (from, to) pair from the target's static table and ask
     the target whether the elimination is currently allowed.  */
  for (ep = reg_eliminate, ep1 = reg_eliminate_1;
       ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
    {
      ep->from = ep1->from;
      ep->to = ep1->to;
      /* Eliminating into the stack pointer is impossible when a frame
	 pointer is required (unless realignment rules say otherwise).  */
      ep->can_eliminate = ep->can_eliminate_previous
	= (targetm.can_eliminate (ep->from, ep->to)
	   && ! (ep->to == STACK_POINTER_REGNUM
		 && frame_pointer_needed
		 && (! SUPPORTS_STACK_ALIGNMENT
		     || ! stack_realign_fp)));
    }
#else
  /* Only the single frame-pointer elimination exists; it is valid
     exactly when no frame pointer is needed.  */
  reg_eliminate[0].from = reg_eliminate_1[0].from;
  reg_eliminate[0].to = reg_eliminate_1[0].to;
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
    = ! frame_pointer_needed;
#endif

  /* Count the number of eliminable registers and build the FROM and TO
     REG rtx's.  Note that code in gen_rtx_REG will cause, e.g.,
     gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
     We depend on this.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      num_eliminable += ep->can_eliminate;
      ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
      ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
    }
}
4105
4106 /* Find all the pseudo registers that didn't get hard regs
4107 but do have known equivalent constants or memory slots.
4108 These include parameters (known equivalent to parameter slots)
4109 and cse'd or loop-moved constant memory addresses.
4110
4111 Record constant equivalents in reg_equiv_constant
4112 so they will be substituted by find_reloads.
4113 Record memory equivalents in reg_mem_equiv so they can
4114 be substituted eventually by altering the REG-rtx's. */
4115
static void
init_eliminable_invariants (rtx first, bool do_subregs)
{
  int i;
  rtx insn;

  grow_reg_equivs ();
  /* reg_max_ref_width is only needed when paradoxical subregs are
     being tracked.  */
  if (do_subregs)
    reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
  else
    reg_max_ref_width = NULL;

  num_eliminable_invariants = 0;

  first_label_num = get_first_label_num ();
  num_labels = max_label_num () - first_label_num;

  /* Allocate the tables used to store offset information at labels.  */
  offsets_known_at = XNEWVEC (char, num_labels);
  offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));

  /* Look for REG_EQUIV notes; record what each pseudo is equivalent
     to.  If DO_SUBREGS is true, also find all paradoxical subregs and
     find largest such for each pseudo.  FIRST is the head of the insn
     list.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      /* We may introduce USEs that we want to remove at the end, so
	 we'll mark them with QImode.  Make sure there are no
	 previously-marked insns left by say regmove.  */
      if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
	  && GET_MODE (insn) != VOIDmode)
	PUT_MODE (insn, VOIDmode);

      if (do_subregs && NONDEBUG_INSN_P (insn))
	scan_paradoxical_subregs (PATTERN (insn));

      if (set != 0 && REG_P (SET_DEST (set)))
	{
	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
	  rtx x;

	  if (! note)
	    continue;

	  i = REGNO (SET_DEST (set));
	  x = XEXP (note, 0);

	  /* Hard and virtual registers have no equivalences here.  */
	  if (i <= LAST_VIRTUAL_REGISTER)
	    continue;

	  /* If flag_pic and we have constant, verify it's legitimate.  */
	  if (!CONSTANT_P (x)
	      || !flag_pic || LEGITIMATE_PIC_OPERAND_P (x))
	    {
	      /* It can happen that a REG_EQUIV note contains a MEM
		 that is not a legitimate memory operand.  As later
		 stages of reload assume that all addresses found
		 in the reg_equiv_* arrays were originally legitimate,
		 we ignore such REG_EQUIV notes.  */
	      if (memory_operand (x, VOIDmode))
		{
		  /* Always unshare the equivalence, so we can
		     substitute into this insn without touching the
		     equivalence.  */
		  reg_equiv_memory_loc (i) = copy_rtx (x);
		}
	      else if (function_invariant_p (x))
		{
		  enum machine_mode mode;

		  mode = GET_MODE (SET_DEST (set));
		  if (GET_CODE (x) == PLUS)
		    {
		      /* This is PLUS of frame pointer and a constant,
			 and might be shared.  Unshare it.  */
		      reg_equiv_invariant (i) = copy_rtx (x);
		      num_eliminable_invariants++;
		    }
		  else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
		    {
		      reg_equiv_invariant (i) = x;
		      num_eliminable_invariants++;
		    }
		  else if (targetm.legitimate_constant_p (mode, x))
		    reg_equiv_constant (i) = x;
		  else
		    {
		      /* Constants the target cannot use directly are
			 forced into the constant pool; if even that
			 fails, forget the initializing insns.  */
		      reg_equiv_memory_loc (i) = force_const_mem (mode, x);
		      if (! reg_equiv_memory_loc (i))
			reg_equiv_init (i) = NULL_RTX;
		    }
		}
	      else
		{
		  reg_equiv_init (i) = NULL_RTX;
		  continue;
		}
	    }
	  else
	    reg_equiv_init (i) = NULL_RTX;
	}
    }

  /* Dump the surviving initializing insns for each pseudo.  */
  if (dump_file)
    for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
      if (reg_equiv_init (i))
	{
	  fprintf (dump_file, "init_insns for %u: ", i);
	  print_inline_rtx (dump_file, reg_equiv_init (i), 20);
	  fprintf (dump_file, "\n");
	}
}
4232
4233 /* Indicate that we no longer have known memory locations or constants.
4234 Free all data involved in tracking these. */
4235
4236 static void
4237 free_reg_equiv (void)
4238 {
4239 int i;
4240
4241
4242 free (offsets_known_at);
4243 free (offsets_at);
4244 offsets_at = 0;
4245 offsets_known_at = 0;
4246
4247 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4248 if (reg_equiv_alt_mem_list (i))
4249 free_EXPR_LIST_list (&reg_equiv_alt_mem_list (i));
4250 VEC_free (reg_equivs_t, gc, reg_equivs);
4251 reg_equivs = NULL;
4252
4253 }
4254 \f
/* Kick all pseudos out of hard register REGNO.

   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In this case, no
   pseudos are allowed to be in the register, even if they are only in a
   block that doesn't require spill registers, unlike the case when we
   are spilling this hard reg to produce another spill register.

   Each pseudo kicked out is recorded in the spilled_pseudos set.  */
4264
4265 static void
4266 spill_hard_reg (unsigned int regno, int cant_eliminate)
4267 {
4268 int i;
4269
4270 if (cant_eliminate)
4271 {
4272 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
4273 df_set_regs_ever_live (regno, true);
4274 }
4275
4276 /* Spill every pseudo reg that was allocated to this reg
4277 or to something that overlaps this reg. */
4278
4279 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4280 if (reg_renumber[i] >= 0
4281 && (unsigned int) reg_renumber[i] <= regno
4282 && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
4283 SET_REGNO_REG_SET (&spilled_pseudos, i);
4284 }
4285
/* After find_reload_regs has been run for all insns that need reloads,
   and/or spill_hard_regs was called, this function is used to actually
   spill pseudo registers and try to reallocate them.  It also sets up the
   spill_regs array for use by choose_reload_regs.  */
4290
static int
finish_spills (int global)
{
  struct insn_chain *chain;
  int something_changed = 0;
  unsigned i;
  reg_set_iterator rsi;

  /* Build the spill_regs array for the function.  */
  /* If there are some registers still to eliminate and one of the spill regs
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */

  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (TEST_HARD_REG_BIT (used_spill_regs, i))
      {
	spill_reg_order[i] = n_spills;
	spill_regs[n_spills++] = i;
	if (num_eliminable && ! df_regs_ever_live_p (i))
	  something_changed = 1;
	df_set_regs_ever_live (i, true);
      }
    else
      spill_reg_order[i] = -1;

  /* Evict each spilled pseudo from its hard register.  With IRA,
     pseudos it may yet reallocate (reg_renumber < 0) are left alone.  */
  EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
    if (! ira_conflicts_p || reg_renumber[i] >= 0)
      {
	/* Record the current hard register the pseudo is allocated to
	   in pseudo_previous_regs so we avoid reallocating it to the
	   same hard reg in a later pass.  */
	gcc_assert (reg_renumber[i] >= 0);

	SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	if (ira_conflicts_p)
	  /* Inform IRA about the change.  */
	  ira_mark_allocation_change (i);
	/* We will need to scan everything again.  */
	something_changed = 1;
      }

  /* Retry global register allocation if possible.  */
  if (global && ira_conflicts_p)
    {
      unsigned int n;

      memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
      /* For every insn that needs reloads, set the registers used as spill
	 regs in pseudo_forbidden_regs for every pseudo live across the
	 insn.  */
      for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
	{
	  EXECUTE_IF_SET_IN_REG_SET
	    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
	    {
	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
				chain->used_spill_regs);
	    }
	  EXECUTE_IF_SET_IN_REG_SET
	    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
	    {
	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
				chain->used_spill_regs);
	    }
	}

      /* Retry allocating the pseudos spilled in IRA and the
	 reload.  For each reg, merge the various reg sets that
	 indicate which hard regs can't be used, and call
	 ira_reassign_pseudos.  */
      for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
	if (reg_old_renumber[i] != reg_renumber[i])
	  {
	    if (reg_renumber[i] < 0)
	      temp_pseudo_reg_arr[n++] = i;
	    else
	      CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
	  }
      if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
				bad_spill_regs_global,
				pseudo_forbidden_regs, pseudo_previous_regs,
				&spilled_pseudos))
	something_changed = 1;
    }
  /* Fix up the register information in the insn chain.
     This involves deleting those of the spilled pseudos which did not get
     a new hard register home from the live_{before,after} sets.  */
  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      HARD_REG_SET used_by_pseudos;
      HARD_REG_SET used_by_pseudos2;

      if (! ira_conflicts_p)
	{
	  /* Don't do it for IRA because IRA and the reload still can
	     assign hard registers to the spilled pseudos on next
	     reload iterations.  */
	  AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
	  AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
	}
      /* Mark any unallocated hard regs as available for spills.  That
	 makes inheritance work somewhat better.  */
      if (chain->need_reload)
	{
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
	  IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);

	  compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
	  compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
	  /* Value of chain->used_spill_regs from previous iteration
	     may be not included in the value calculated here because
	     of possible removing caller-saves insns (see function
	     delete_caller_save_insns.  */
	  COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
	  AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
	}
    }

  CLEAR_REG_SET (&changed_allocation_pseudos);
  /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
  for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (reg_old_renumber[i] == regno)
	continue;

      SET_REGNO_REG_SET (&changed_allocation_pseudos, i);

      alter_reg (i, reg_old_renumber[i], false);
      reg_old_renumber[i] = regno;
      if (dump_file)
	{
	  if (regno == -1)
	    fprintf (dump_file, " Register %d now on stack.\n\n", i);
	  else
	    fprintf (dump_file, " Register %d now in %d.\n\n",
		     i, reg_renumber[i]);
	}
    }

  /* Nonzero means another full reload pass is required.  */
  return something_changed;
}
4444 \f
4445 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
4446
4447 static void
4448 scan_paradoxical_subregs (rtx x)
4449 {
4450 int i;
4451 const char *fmt;
4452 enum rtx_code code = GET_CODE (x);
4453
4454 switch (code)
4455 {
4456 case REG:
4457 case CONST_INT:
4458 case CONST:
4459 case SYMBOL_REF:
4460 case LABEL_REF:
4461 case CONST_DOUBLE:
4462 case CONST_FIXED:
4463 case CONST_VECTOR: /* shouldn't happen, but just in case. */
4464 case CC0:
4465 case PC:
4466 case USE:
4467 case CLOBBER:
4468 return;
4469
4470 case SUBREG:
4471 if (REG_P (SUBREG_REG (x))
4472 && (GET_MODE_SIZE (GET_MODE (x))
4473 > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4474 {
4475 reg_max_ref_width[REGNO (SUBREG_REG (x))]
4476 = GET_MODE_SIZE (GET_MODE (x));
4477 mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4478 }
4479 return;
4480
4481 default:
4482 break;
4483 }
4484
4485 fmt = GET_RTX_FORMAT (code);
4486 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4487 {
4488 if (fmt[i] == 'e')
4489 scan_paradoxical_subregs (XEXP (x, i));
4490 else if (fmt[i] == 'E')
4491 {
4492 int j;
4493 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4494 scan_paradoxical_subregs (XVECEXP (x, i, j));
4495 }
4496 }
4497 }
4498
4499 /* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload.
4500 If *OP_PTR is a paradoxical subreg, try to remove that subreg
4501 and apply the corresponding narrowing subreg to *OTHER_PTR.
4502 Return true if the operands were changed, false otherwise. */
4503
4504 static bool
4505 strip_paradoxical_subreg (rtx *op_ptr, rtx *other_ptr)
4506 {
4507 rtx op, inner, other, tem;
4508
4509 op = *op_ptr;
4510 if (!paradoxical_subreg_p (op))
4511 return false;
4512 inner = SUBREG_REG (op);
4513
4514 other = *other_ptr;
4515 tem = gen_lowpart_common (GET_MODE (inner), other);
4516 if (!tem)
4517 return false;
4518
4519 /* If the lowpart operation turned a hard register into a subreg,
4520 rather than simplifying it to another hard register, then the
4521 mode change cannot be properly represented. For example, OTHER
4522 might be valid in its current mode, but not in the new one. */
4523 if (GET_CODE (tem) == SUBREG
4524 && REG_P (other)
4525 && HARD_REGISTER_P (other))
4526 return false;
4527
4528 *op_ptr = inner;
4529 *other_ptr = tem;
4530 return true;
4531 }
4532 \f
4533 /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
4534 examine all of the reload insns between PREV and NEXT exclusive, and
4535 annotate all that may trap. */
4536
4537 static void
4538 fixup_eh_region_note (rtx insn, rtx prev, rtx next)
4539 {
4540 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4541 if (note == NULL)
4542 return;
4543 if (!insn_could_throw_p (insn))
4544 remove_note (insn, note);
4545 copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4546 }
4547
4548 /* Reload pseudo-registers into hard regs around each insn as needed.
4549 Additional register load insns are output before the insn that needs it
4550 and perhaps store insns after insns that modify the reloaded pseudo reg.
4551
4552 reg_last_reload_reg and reg_reloaded_contents keep track of
4553 which registers are already available in reload registers.
4554 We update these for the reloads that we perform,
4555 as the insns are scanned. */
4556
4557 static void
4558 reload_as_needed (int live_known)
4559 {
4560 struct insn_chain *chain;
4561 #if defined (AUTO_INC_DEC)
4562 int i;
4563 #endif
4564 rtx x, marker;
4565
4566 memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4567 memset (spill_reg_store, 0, sizeof spill_reg_store);
4568 reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4569 INIT_REG_SET (&reg_has_output_reload);
4570 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4571 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
4572
4573 set_initial_elim_offsets ();
4574
4575 /* Generate a marker insn that we will move around. */
4576 marker = emit_note (NOTE_INSN_DELETED);
4577 unlink_insn_chain (marker, marker);
4578
4579 for (chain = reload_insn_chain; chain; chain = chain->next)
4580 {
4581 rtx prev = 0;
4582 rtx insn = chain->insn;
4583 rtx old_next = NEXT_INSN (insn);
4584 #ifdef AUTO_INC_DEC
4585 rtx old_prev = PREV_INSN (insn);
4586 #endif
4587
4588 /* If we pass a label, copy the offsets from the label information
4589 into the current offsets of each elimination. */
4590 if (LABEL_P (insn))
4591 set_offsets_for_label (insn);
4592
4593 else if (INSN_P (insn))
4594 {
4595 regset_head regs_to_forget;
4596 INIT_REG_SET (&regs_to_forget);
4597 note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
4598
4599 /* If this is a USE and CLOBBER of a MEM, ensure that any
4600 references to eliminable registers have been removed. */
4601
4602 if ((GET_CODE (PATTERN (insn)) == USE
4603 || GET_CODE (PATTERN (insn)) == CLOBBER)
4604 && MEM_P (XEXP (PATTERN (insn), 0)))
4605 XEXP (XEXP (PATTERN (insn), 0), 0)
4606 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4607 GET_MODE (XEXP (PATTERN (insn), 0)),
4608 NULL_RTX);
4609
4610 /* If we need to do register elimination processing, do so.
4611 This might delete the insn, in which case we are done. */
4612 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4613 {
4614 eliminate_regs_in_insn (insn, 1);
4615 if (NOTE_P (insn))
4616 {
4617 update_eliminable_offsets ();
4618 CLEAR_REG_SET (&regs_to_forget);
4619 continue;
4620 }
4621 }
4622
4623 /* If need_elim is nonzero but need_reload is zero, one might think
4624 that we could simply set n_reloads to 0. However, find_reloads
4625 could have done some manipulation of the insn (such as swapping
4626 commutative operands), and these manipulations are lost during
4627 the first pass for every insn that needs register elimination.
4628 So the actions of find_reloads must be redone here. */
4629
4630 if (! chain->need_elim && ! chain->need_reload
4631 && ! chain->need_operand_change)
4632 n_reloads = 0;
4633 /* First find the pseudo regs that must be reloaded for this insn.
4634 This info is returned in the tables reload_... (see reload.h).
4635 Also modify the body of INSN by substituting RELOAD
4636 rtx's for those pseudo regs. */
4637 else
4638 {
4639 CLEAR_REG_SET (&reg_has_output_reload);
4640 CLEAR_HARD_REG_SET (reg_is_output_reload);
4641
4642 find_reloads (insn, 1, spill_indirect_levels, live_known,
4643 spill_reg_order);
4644 }
4645
4646 if (n_reloads > 0)
4647 {
4648 rtx next = NEXT_INSN (insn);
4649 rtx p;
4650
4651 /* ??? PREV can get deleted by reload inheritance.
4652 Work around this by emitting a marker note. */
4653 prev = PREV_INSN (insn);
4654 reorder_insns_nobb (marker, marker, prev);
4655
4656 /* Now compute which reload regs to reload them into. Perhaps
4657 reusing reload regs from previous insns, or else output
4658 load insns to reload them. Maybe output store insns too.
4659 Record the choices of reload reg in reload_reg_rtx. */
4660 choose_reload_regs (chain);
4661
4662 /* Generate the insns to reload operands into or out of
4663 their reload regs. */
4664 emit_reload_insns (chain);
4665
4666 /* Substitute the chosen reload regs from reload_reg_rtx
4667 into the insn's body (or perhaps into the bodies of other
4668 load and store insn that we just made for reloading
4669 and that we moved the structure into). */
4670 subst_reloads (insn);
4671
4672 prev = PREV_INSN (marker);
4673 unlink_insn_chain (marker, marker);
4674
4675 /* Adjust the exception region notes for loads and stores. */
4676 if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
4677 fixup_eh_region_note (insn, prev, next);
4678
4679 /* Adjust the location of REG_ARGS_SIZE. */
4680 p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
4681 if (p)
4682 {
4683 remove_note (insn, p);
4684 fixup_args_size_notes (prev, PREV_INSN (next),
4685 INTVAL (XEXP (p, 0)));
4686 }
4687
4688 /* If this was an ASM, make sure that all the reload insns
4689 we have generated are valid. If not, give an error
4690 and delete them. */
4691 if (asm_noperands (PATTERN (insn)) >= 0)
4692 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4693 if (p != insn && INSN_P (p)
4694 && GET_CODE (PATTERN (p)) != USE
4695 && (recog_memoized (p) < 0
4696 || (extract_insn (p), ! constrain_operands (1))))
4697 {
4698 error_for_asm (insn,
4699 "%<asm%> operand requires "
4700 "impossible reload");
4701 delete_insn (p);
4702 }
4703 }
4704
4705 if (num_eliminable && chain->need_elim)
4706 update_eliminable_offsets ();
4707
4708 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4709 is no longer validly lying around to save a future reload.
4710 Note that this does not detect pseudos that were reloaded
4711 for this insn in order to be stored in
4712 (obeying register constraints). That is correct; such reload
4713 registers ARE still valid. */
4714 forget_marked_reloads (&regs_to_forget);
4715 CLEAR_REG_SET (&regs_to_forget);
4716
4717 /* There may have been CLOBBER insns placed after INSN. So scan
4718 between INSN and NEXT and use them to forget old reloads. */
4719 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4720 if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4721 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4722
4723 #ifdef AUTO_INC_DEC
4724 /* Likewise for regs altered by auto-increment in this insn.
4725 REG_INC notes have been changed by reloading:
4726 find_reloads_address_1 records substitutions for them,
4727 which have been performed by subst_reloads above. */
4728 for (i = n_reloads - 1; i >= 0; i--)
4729 {
4730 rtx in_reg = rld[i].in_reg;
4731 if (in_reg)
4732 {
4733 enum rtx_code code = GET_CODE (in_reg);
4734 /* PRE_INC / PRE_DEC will have the reload register ending up
4735 with the same value as the stack slot, but that doesn't
4736 hold true for POST_INC / POST_DEC. Either we have to
4737 convert the memory access to a true POST_INC / POST_DEC,
4738 or we can't use the reload register for inheritance. */
4739 if ((code == POST_INC || code == POST_DEC)
4740 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4741 REGNO (rld[i].reg_rtx))
4742 /* Make sure it is the inc/dec pseudo, and not
4743 some other (e.g. output operand) pseudo. */
4744 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4745 == REGNO (XEXP (in_reg, 0))))
4746
4747 {
4748 rtx reload_reg = rld[i].reg_rtx;
4749 enum machine_mode mode = GET_MODE (reload_reg);
4750 int n = 0;
4751 rtx p;
4752
4753 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4754 {
4755 /* We really want to ignore REG_INC notes here, so
4756 use PATTERN (p) as argument to reg_set_p . */
4757 if (reg_set_p (reload_reg, PATTERN (p)))
4758 break;
4759 n = count_occurrences (PATTERN (p), reload_reg, 0);
4760 if (! n)
4761 continue;
4762 if (n == 1)
4763 {
4764 rtx replace_reg
4765 = gen_rtx_fmt_e (code, mode, reload_reg);
4766
4767 validate_replace_rtx_group (reload_reg,
4768 replace_reg, p);
4769 n = verify_changes (0);
4770
4771 /* We must also verify that the constraints
4772 are met after the replacement. Make sure
4773 extract_insn is only called for an insn
4774 where the replacements were found to be
4775 valid so far. */
4776 if (n)
4777 {
4778 extract_insn (p);
4779 n = constrain_operands (1);
4780 }
4781
4782 /* If the constraints were not met, then
4783 undo the replacement, else confirm it. */
4784 if (!n)
4785 cancel_changes (0);
4786 else
4787 confirm_change_group ();
4788 }
4789 break;
4790 }
4791 if (n == 1)
4792 {
4793 add_reg_note (p, REG_INC, reload_reg);
4794 /* Mark this as having an output reload so that the
4795 REG_INC processing code below won't invalidate
4796 the reload for inheritance. */
4797 SET_HARD_REG_BIT (reg_is_output_reload,
4798 REGNO (reload_reg));
4799 SET_REGNO_REG_SET (&reg_has_output_reload,
4800 REGNO (XEXP (in_reg, 0)));
4801 }
4802 else
4803 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4804 NULL);
4805 }
4806 else if ((code == PRE_INC || code == PRE_DEC)
4807 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4808 REGNO (rld[i].reg_rtx))
4809 /* Make sure it is the inc/dec pseudo, and not
4810 some other (e.g. output operand) pseudo. */
4811 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4812 == REGNO (XEXP (in_reg, 0))))
4813 {
4814 SET_HARD_REG_BIT (reg_is_output_reload,
4815 REGNO (rld[i].reg_rtx));
4816 SET_REGNO_REG_SET (&reg_has_output_reload,
4817 REGNO (XEXP (in_reg, 0)));
4818 }
4819 else if (code == PRE_INC || code == PRE_DEC
4820 || code == POST_INC || code == POST_DEC)
4821 {
4822 int in_regno = REGNO (XEXP (in_reg, 0));
4823
4824 if (reg_last_reload_reg[in_regno] != NULL_RTX)
4825 {
4826 int in_hard_regno;
4827 bool forget_p = true;
4828
4829 in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
4830 if (TEST_HARD_REG_BIT (reg_reloaded_valid,
4831 in_hard_regno))
4832 {
4833 for (x = old_prev ? NEXT_INSN (old_prev) : insn;
4834 x != old_next;
4835 x = NEXT_INSN (x))
4836 if (x == reg_reloaded_insn[in_hard_regno])
4837 {
4838 forget_p = false;
4839 break;
4840 }
4841 }
4842 /* If for some reasons, we didn't set up
4843 reg_last_reload_reg in this insn,
4844 invalidate inheritance from previous
4845 insns for the incremented/decremented
4846 register. Such registers will be not in
4847 reg_has_output_reload. Invalidate it
4848 also if the corresponding element in
4849 reg_reloaded_insn is also
4850 invalidated. */
4851 if (forget_p)
4852 forget_old_reloads_1 (XEXP (in_reg, 0),
4853 NULL_RTX, NULL);
4854 }
4855 }
4856 }
4857 }
4858 /* If a pseudo that got a hard register is auto-incremented,
4859 we must purge records of copying it into pseudos without
4860 hard registers. */
4861 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4862 if (REG_NOTE_KIND (x) == REG_INC)
4863 {
4864 /* See if this pseudo reg was reloaded in this insn.
4865 If so, its last-reload info is still valid
4866 because it is based on this insn's reload. */
4867 for (i = 0; i < n_reloads; i++)
4868 if (rld[i].out == XEXP (x, 0))
4869 break;
4870
4871 if (i == n_reloads)
4872 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4873 }
4874 #endif
4875 }
4876 /* A reload reg's contents are unknown after a label. */
4877 if (LABEL_P (insn))
4878 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4879
4880 /* Don't assume a reload reg is still good after a call insn
4881 if it is a call-used reg, or if it contains a value that will
4882 be partially clobbered by the call. */
4883 else if (CALL_P (insn))
4884 {
4885 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4886 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4887
4888 /* If this is a call to a setjmp-type function, we must not
4889 reuse any reload reg contents across the call; that will
4890 just be clobbered by other uses of the register in later
4891 code, before the longjmp. */
4892 if (find_reg_note (insn, REG_SETJMP, NULL_RTX))
4893 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4894 }
4895 }
4896
4897 /* Clean up. */
4898 free (reg_last_reload_reg);
4899 CLEAR_REG_SET (&reg_has_output_reload);
4900 }
4901
4902 /* Discard all record of any value reloaded from X,
4903 or reloaded in X from someplace else;
4904 unless X is an output reload reg of the current insn.
4905
4906 X may be a hard reg (the reload reg)
4907 or it may be a pseudo reg that was reloaded from.
4908
4909 When DATA is non-NULL just mark the registers in regset
4910 to be forgotten later. */
4911
4912 static void
4913 forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4914 void *data)
4915 {
4916 unsigned int regno;
4917 unsigned int nr;
4918 regset regs = (regset) data;
4919
4920 /* note_stores does give us subregs of hard regs,
4921 subreg_regno_offset requires a hard reg. */
4922 while (GET_CODE (x) == SUBREG)
4923 {
4924 /* We ignore the subreg offset when calculating the regno,
4925 because we are using the entire underlying hard register
4926 below. */
4927 x = SUBREG_REG (x);
4928 }
4929
4930 if (!REG_P (x))
4931 return;
4932
4933 regno = REGNO (x);
4934
4935 if (regno >= FIRST_PSEUDO_REGISTER)
4936 nr = 1;
4937 else
4938 {
4939 unsigned int i;
4940
4941 nr = hard_regno_nregs[regno][GET_MODE (x)];
4942 /* Storing into a spilled-reg invalidates its contents.
4943 This can happen if a block-local pseudo is allocated to that reg
4944 and it wasn't spilled because this block's total need is 0.
4945 Then some insn might have an optional reload and use this reg. */
4946 if (!regs)
4947 for (i = 0; i < nr; i++)
4948 /* But don't do this if the reg actually serves as an output
4949 reload reg in the current instruction. */
4950 if (n_reloads == 0
4951 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4952 {
4953 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4954 spill_reg_store[regno + i] = 0;
4955 }
4956 }
4957
4958 if (regs)
4959 while (nr-- > 0)
4960 SET_REGNO_REG_SET (regs, regno + nr);
4961 else
4962 {
4963 /* Since value of X has changed,
4964 forget any value previously copied from it. */
4965
4966 while (nr-- > 0)
4967 /* But don't forget a copy if this is the output reload
4968 that establishes the copy's validity. */
4969 if (n_reloads == 0
4970 || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4971 reg_last_reload_reg[regno + nr] = 0;
4972 }
4973 }
4974
4975 /* Forget the reloads marked in regset by previous function. */
4976 static void
4977 forget_marked_reloads (regset regs)
4978 {
4979 unsigned int reg;
4980 reg_set_iterator rsi;
4981 EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4982 {
4983 if (reg < FIRST_PSEUDO_REGISTER
4984 /* But don't do this if the reg actually serves as an output
4985 reload reg in the current instruction. */
4986 && (n_reloads == 0
4987 || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4988 {
4989 CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4990 spill_reg_store[reg] = 0;
4991 }
4992 if (n_reloads == 0
4993 || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4994 reg_last_reload_reg[reg] = 0;
4995 }
4996 }
4997 \f
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  */

/* If reg is unavailable for all reloads.  */
static HARD_REG_SET reload_reg_unavailable;
/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is used as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;

/* Records which hard regs are used in any way, either as explicit use or
   by being allocated to a pseudo during any point of the current insn.  */
static HARD_REG_SET reg_used_in_insn;
5036
5037 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
5038 TYPE. MODE is used to indicate how many consecutive regs are
5039 actually used. */
5040
5041 static void
5042 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
5043 enum machine_mode mode)
5044 {
5045 switch (type)
5046 {
5047 case RELOAD_OTHER:
5048 add_to_hard_reg_set (&reload_reg_used, mode, regno);
5049 break;
5050
5051 case RELOAD_FOR_INPUT_ADDRESS:
5052 add_to_hard_reg_set (&reload_reg_used_in_input_addr[opnum], mode, regno);
5053 break;
5054
5055 case RELOAD_FOR_INPADDR_ADDRESS:
5056 add_to_hard_reg_set (&reload_reg_used_in_inpaddr_addr[opnum], mode, regno);
5057 break;
5058
5059 case RELOAD_FOR_OUTPUT_ADDRESS:
5060 add_to_hard_reg_set (&reload_reg_used_in_output_addr[opnum], mode, regno);
5061 break;
5062
5063 case RELOAD_FOR_OUTADDR_ADDRESS:
5064 add_to_hard_reg_set (&reload_reg_used_in_outaddr_addr[opnum], mode, regno);
5065 break;
5066
5067 case RELOAD_FOR_OPERAND_ADDRESS:
5068 add_to_hard_reg_set (&reload_reg_used_in_op_addr, mode, regno);
5069 break;
5070
5071 case RELOAD_FOR_OPADDR_ADDR:
5072 add_to_hard_reg_set (&reload_reg_used_in_op_addr_reload, mode, regno);
5073 break;
5074
5075 case RELOAD_FOR_OTHER_ADDRESS:
5076 add_to_hard_reg_set (&reload_reg_used_in_other_addr, mode, regno);
5077 break;
5078
5079 case RELOAD_FOR_INPUT:
5080 add_to_hard_reg_set (&reload_reg_used_in_input[opnum], mode, regno);
5081 break;
5082
5083 case RELOAD_FOR_OUTPUT:
5084 add_to_hard_reg_set (&reload_reg_used_in_output[opnum], mode, regno);
5085 break;
5086
5087 case RELOAD_FOR_INSN:
5088 add_to_hard_reg_set (&reload_reg_used_in_insn, mode, regno);
5089 break;
5090 }
5091
5092 add_to_hard_reg_set (&reload_reg_used_at_all, mode, regno);
5093 }
5094
/* Similarly, but show REGNO is no longer in use for a reload.  */

static void
clear_reload_reg_in_use (unsigned int regno, int opnum,
			 enum reload_type type, enum machine_mode mode)
{
  unsigned int nregs = hard_regno_nregs[regno][mode];
  unsigned int start_regno, end_regno, r;
  int i;
  /* A complication is that for some reload types, inheritance might
     allow multiple reloads of the same types to share a reload register.
     We set check_opnum if we have to check only reloads with the same
     operand number, and check_any if we have to check all reloads.  */
  int check_opnum = 0;
  int check_any = 0;
  HARD_REG_SET *used_in_set;

  switch (type)
    {
    case RELOAD_OTHER:
      used_in_set = &reload_reg_used;
      break;

    case RELOAD_FOR_INPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_input_addr[opnum];
      break;

    case RELOAD_FOR_INPADDR_ADDRESS:
      /* These can share a reload reg with other reloads of the same
	 operand, so only reloads with the same operand number need to
	 be re-checked for conflicts below.  */
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
      break;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_output_addr[opnum];
      break;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Likewise per-operand re-checking is needed here.  */
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
      break;

    case RELOAD_FOR_OPERAND_ADDRESS:
      used_in_set = &reload_reg_used_in_op_addr;
      break;

    case RELOAD_FOR_OPADDR_ADDR:
      /* This set is not per-operand, so all remaining reloads of the
	 same type must be checked for sharing.  */
      check_any = 1;
      used_in_set = &reload_reg_used_in_op_addr_reload;
      break;

    case RELOAD_FOR_OTHER_ADDRESS:
      used_in_set = &reload_reg_used_in_other_addr;
      check_any = 1;
      break;

    case RELOAD_FOR_INPUT:
      used_in_set = &reload_reg_used_in_input[opnum];
      break;

    case RELOAD_FOR_OUTPUT:
      used_in_set = &reload_reg_used_in_output[opnum];
      break;

    case RELOAD_FOR_INSN:
      used_in_set = &reload_reg_used_in_insn;
      break;
    default:
      gcc_unreachable ();
    }
  /* We resolve conflicts with remaining reloads of the same type by
     excluding the intervals of reload registers by them from the
     interval of freed reload registers.  Since we only keep track of
     one set of interval bounds, we might have to exclude somewhat
     more than what would be necessary if we used a HARD_REG_SET here.
     But this should only happen very infrequently, so there should
     be no reason to worry about it.  */

  start_regno = regno;
  end_regno = regno + nregs;
  if (check_opnum || check_any)
    {
      for (i = n_reloads - 1; i >= 0; i--)
	{
	  if (rld[i].when_needed == type
	      && (check_any || rld[i].opnum == opnum)
	      && rld[i].reg_rtx)
	    {
	      unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
	      unsigned int conflict_end
		= end_hard_regno (rld[i].mode, conflict_start);

	      /* If there is an overlap with the first to-be-freed register,
		 adjust the interval start.  */
	      if (conflict_start <= start_regno && conflict_end > start_regno)
		start_regno = conflict_end;
	      /* Otherwise, if there is a conflict with one of the other
		 to-be-freed registers, adjust the interval end.  */
	      if (conflict_start > start_regno && conflict_start < end_regno)
		end_regno = conflict_start;
	    }
	}
    }

  /* Clear whatever remains of the [start_regno, end_regno) interval
     after excluding still-live conflicting reload registers.  */
  for (r = start_regno; r < end_regno; r++)
    CLEAR_HARD_REG_BIT (*used_in_set, r);
}
5201
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.  The answer encodes the ordering in
   which the different kinds of reload insns are emitted around the
   reloaded insn: two reloads may share a register only if their live
   ranges within that sequence cannot overlap.  */

static int
reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything.  */
  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      /* An input reload's value must survive until the insn itself,
	 so it conflicts with anything used across the insn or for
	 operand addresses.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Can't use a register if it is used for an input address
	 for this operand or used as an input in an earlier
	 one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  Note
	 that multiple output operands are emitted in reverse order, so
	 the conflicting ones are those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Can't use a register if it is used for an output address
	 for this operand or used as an output in this or a
	 later operand.  Note that multiple output operands are
	 emitted in reverse order, so the conflicting ones are
	 those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Conflicts with every input and with anything used across the
	 insn or for other operand addresses.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Conflicts with every input and with other OPADDR_ADDR reloads.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with higher indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      /* Conflicts with every input and output, with operand addresses,
	 and with other RELOAD_FOR_INSN reloads.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      /* These come earliest; they conflict only with each other.  */
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    default:
      gcc_unreachable ();
    }
}
5362
/* Return 1 if the value in reload reg REGNO, as used by the reload with
   the number RELOADNUM, is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (unsigned int regno, int reloadnum)
{
  int opnum = rld[reloadnum].opnum;
  enum reload_type type = rld[reloadnum].when_needed;
  int i;

  /* See if there is a reload with the same type for this operand, using
     the same register. This case is not handled by the code below.  */
  for (i = reloadnum + 1; i < n_reloads; i++)
    {
      rtx reg;
      int nregs;

      if (rld[i].opnum != opnum || rld[i].when_needed != type)
	continue;
      reg = rld[i].reg_rtx;
      if (reg == NULL_RTX)
	continue;
      nregs = hard_regno_nregs[REGNO (reg)][GET_MODE (reg)];
      /* A later same-type, same-operand reload overwrites REGNO, so
	 our value cannot survive.  */
      if (regno >= REGNO (reg) && regno < REGNO (reg) + nregs)
	return 0;
    }

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_INPADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_INPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_INPADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
	return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Killed by any output, output address, operand address,
	 RELOAD_FOR_INSN, or RELOAD_OTHER use of the reg.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      opnum = reload_n_operands;

      /* ... fall through ...  */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with lower indices.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_OUTADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_OUTPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_OUTADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      return 1;

    default:
      gcc_unreachable ();
    }
}
5529
5530 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5531 every register in REG. */
5532
5533 static bool
5534 reload_reg_rtx_reaches_end_p (rtx reg, int reloadnum)
5535 {
5536 unsigned int i;
5537
5538 for (i = REGNO (reg); i < END_REGNO (reg); i++)
5539 if (!reload_reg_reaches_end_p (i, reloadnum))
5540 return false;
5541 return true;
5542 }
5543 \f
5544
5545 /* Returns whether R1 and R2 are uniquely chained: the value of one
5546 is used by the other, and that value is not used by any other
5547 reload for this insn. This is used to partially undo the decision
5548 made in find_reloads when in the case of multiple
5549 RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5550 RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5551 reloads. This code tries to avoid the conflict created by that
5552 change. It might be cleaner to explicitly keep track of which
5553 RELOAD_FOR_OPADDR_ADDR reload is associated with which
5554 RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5555 this after the fact. */
5556 static bool
5557 reloads_unique_chain_p (int r1, int r2)
5558 {
5559 int i;
5560
5561 /* We only check input reloads. */
5562 if (! rld[r1].in || ! rld[r2].in)
5563 return false;
5564
5565 /* Avoid anything with output reloads. */
5566 if (rld[r1].out || rld[r2].out)
5567 return false;
5568
5569 /* "chained" means one reload is a component of the other reload,
5570 not the same as the other reload. */
5571 if (rld[r1].opnum != rld[r2].opnum
5572 || rtx_equal_p (rld[r1].in, rld[r2].in)
5573 || rld[r1].optional || rld[r2].optional
5574 || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5575 || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5576 return false;
5577
5578 for (i = 0; i < n_reloads; i ++)
5579 /* Look for input reloads that aren't our two */
5580 if (i != r1 && i != r2 && rld[i].in)
5581 {
5582 /* If our reload is mentioned at all, it isn't a simple chain. */
5583 if (reg_mentioned_p (rld[r1].in, rld[i].in))
5584 return false;
5585 }
5586 return true;
5587 }
5588
5589 /* The recursive function change all occurrences of WHAT in *WHERE
5590 to REPL. */
5591 static void
5592 substitute (rtx *where, const_rtx what, rtx repl)
5593 {
5594 const char *fmt;
5595 int i;
5596 enum rtx_code code;
5597
5598 if (*where == 0)
5599 return;
5600
5601 if (*where == what || rtx_equal_p (*where, what))
5602 {
5603 /* Record the location of the changed rtx. */
5604 VEC_safe_push (rtx_p, heap, substitute_stack, where);
5605 *where = repl;
5606 return;
5607 }
5608
5609 code = GET_CODE (*where);
5610 fmt = GET_RTX_FORMAT (code);
5611 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5612 {
5613 if (fmt[i] == 'E')
5614 {
5615 int j;
5616
5617 for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5618 substitute (&XVECEXP (*where, i, j), what, repl);
5619 }
5620 else if (fmt[i] == 'e')
5621 substitute (&XEXP (*where, i), what, repl);
5622 }
5623 }
5624
/* The function returns TRUE if chain of reload R1 and R2 (in any
   order) can be evaluated without usage of intermediate register for
   the reload containing another reload.  It is important to see
   gen_reload to understand what the function is trying to do.  As an
   example, let us have reload chain

      r2: const
      r1: <something> + const

   and reload R2 got reload reg HR.  The function returns true if
   there is a correct insn HR = HR + <something>.  Otherwise,
   gen_reload will use intermediate register (and this is the reload
   reg for R1) to reload <something>.

   We need this function to find a conflict for chain reloads.  In our
   example, if HR = HR + <something> is incorrect insn, then we cannot
   use HR as a reload register for R2.  If we do use it then we get a
   wrong code:

      HR = const
      HR = <something>
      HR = HR + HR

*/
static bool
gen_reload_chain_without_interm_reg_p (int r1, int r2)
{
  /* Assume other cases in gen_reload are not possible for
     chain reloads or do need an intermediate hard registers.  */
  bool result = true;
  int regno, n, code;
  rtx out, in, insn;
  rtx last = get_last_insn ();

  /* Make r2 a component of r1.  */
  if (reg_mentioned_p (rld[r1].in, rld[r2].in))
    {
      n = r1;
      r1 = r2;
      r2 = n;
    }
  gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
  /* Use whichever of the two reloads already has a reload register
     assigned (the "HR" of the comment above).  */
  regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
  gcc_assert (regno >= 0);
  out = gen_rtx_REG (rld[r1].mode, regno);
  in = rld[r1].in;
  /* Build the combined value "HR op <something>" by replacing the
     inner reload's value inside IN with the shared reload register.  */
  substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  strip_paradoxical_subreg (&in, &out);

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* Emit a trial insn and ask the backend whether it is
	 recognizable and satisfies its constraints.  */
      insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
      code = recog_memoized (insn);
      result = false;

      if (code >= 0)
	{
	  extract_insn (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after
	     reload has completed.  */
	  result = constrain_operands (1);
	}

      /* The trial insn served only for the validity check; remove it.  */
      delete_insns_since (last);
    }

  /* Restore the original value at each changed address within R1.  */
  while (!VEC_empty (rtx_p, substitute_stack))
    {
      rtx *where = VEC_pop (rtx_p, substitute_stack);
      *where = rld[r2].in;
    }

  return result;
}
5711
5712 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5713 Return 0 otherwise.
5714
5715 This function uses the same algorithm as reload_reg_free_p above. */
5716
5717 static int
5718 reloads_conflict (int r1, int r2)
5719 {
5720 enum reload_type r1_type = rld[r1].when_needed;
5721 enum reload_type r2_type = rld[r2].when_needed;
5722 int r1_opnum = rld[r1].opnum;
5723 int r2_opnum = rld[r2].opnum;
5724
5725 /* RELOAD_OTHER conflicts with everything. */
5726 if (r2_type == RELOAD_OTHER)
5727 return 1;
5728
5729 /* Otherwise, check conflicts differently for each type. */
5730
5731 switch (r1_type)
5732 {
5733 case RELOAD_FOR_INPUT:
5734 return (r2_type == RELOAD_FOR_INSN
5735 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5736 || r2_type == RELOAD_FOR_OPADDR_ADDR
5737 || r2_type == RELOAD_FOR_INPUT
5738 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5739 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5740 && r2_opnum > r1_opnum));
5741
5742 case RELOAD_FOR_INPUT_ADDRESS:
5743 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5744 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5745
5746 case RELOAD_FOR_INPADDR_ADDRESS:
5747 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5748 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5749
5750 case RELOAD_FOR_OUTPUT_ADDRESS:
5751 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5752 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5753
5754 case RELOAD_FOR_OUTADDR_ADDRESS:
5755 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5756 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5757
5758 case RELOAD_FOR_OPERAND_ADDRESS:
5759 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5760 || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5761 && (!reloads_unique_chain_p (r1, r2)
5762 || !gen_reload_chain_without_interm_reg_p (r1, r2))));
5763
5764 case RELOAD_FOR_OPADDR_ADDR:
5765 return (r2_type == RELOAD_FOR_INPUT
5766 || r2_type == RELOAD_FOR_OPADDR_ADDR);
5767
5768 case RELOAD_FOR_OUTPUT:
5769 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5770 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5771 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5772 && r2_opnum >= r1_opnum));
5773
5774 case RELOAD_FOR_INSN:
5775 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5776 || r2_type == RELOAD_FOR_INSN
5777 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5778
5779 case RELOAD_FOR_OTHER_ADDRESS:
5780 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5781
5782 case RELOAD_OTHER:
5783 return 1;
5784
5785 default:
5786 gcc_unreachable ();
5787 }
5788 }
5789 \f
5790 /* Indexed by reload number, 1 if incoming value
5791 inherited from previous insns. */
5792 static char reload_inherited[MAX_RELOADS];
5793
5794 /* For an inherited reload, this is the insn the reload was inherited from,
5795 if we know it. Otherwise, this is 0. */
5796 static rtx reload_inheritance_insn[MAX_RELOADS];
5797
5798 /* If nonzero, this is a place to get the value of the reload,
5799 rather than using reload_in. */
5800 static rtx reload_override_in[MAX_RELOADS];
5801
5802 /* For each reload, the hard register number of the register used,
5803 or -1 if we did not need a register for this reload. */
5804 static int reload_spill_index[MAX_RELOADS];
5805
5806 /* Index X is the value of rld[X].reg_rtx, adjusted for the input mode. */
5807 static rtx reload_reg_rtx_for_input[MAX_RELOADS];
5808
5809 /* Index X is the value of rld[X].reg_rtx, adjusted for the output mode. */
5810 static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5811
/* Subroutine of free_for_value_p, used to check a single register.
   START_REGNO is the starting regno of the full reload register
   (possibly comprising multiple hard registers) that we are considering.
   Returns 1 if hard register REGNO is free for the proposed use, 0 if
   it conflicts with some existing reload.  */

static int
reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
			     enum reload_type type, rtx value, rtx out,
			     int reloadnum, int ignore_address_reloads)
{
  int time1;
  /* Set if we see an input reload that must not share its reload register
     with any new earlyclobber, but might otherwise share the reload
     register with an output or input-output reload.  */
  int check_earlyclobber = 0;
  int i;
  int copy = 0;

  if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  /* OUT == const0_rtx is a sentinel (see free_for_value_p's header
     comment): we are testing a copy FROM the register, not a use of it
     as the reload register proper.  */
  if (out == const0_rtx)
    {
      copy = 1;
      out = NULL_RTX;
    }

  /* We use some pseudo 'time' value to check if the lifetimes of the
     new register use would overlap with the one of a previous reload
     that is not read-only or uses a different value.
     The 'time' used doesn't have to be linear in any shape or form, just
     monotonic.
     Some reload types use different 'buckets' for each operand.
     So there are MAX_RECOG_OPERANDS different time values for each
     such reload type.
     We compute TIME1 as the time when the register for the prospective
     new reload ceases to be live, and TIME2 for each existing
     reload as the time when that the reload register of that reload
     becomes live.
     Where there is little to be gained by exact lifetime calculations,
     we just make conservative assumptions, i.e. a longer lifetime;
     this is done in the 'default:' cases.  */
  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
      time1 = copy ? 0 : 1;
      break;
    case RELOAD_OTHER:
      time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
      break;
      /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
	 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
	 respectively, to the time values for these, we get distinct time
	 values.  To get distinct time values for each operand, we have to
	 multiply opnum by at least three.  We round that up to four because
	 multiply by four is often cheaper.  */
    case RELOAD_FOR_INPADDR_ADDRESS:
      time1 = opnum * 4 + 2;
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      time1 = opnum * 4 + 3;
      break;
    case RELOAD_FOR_INPUT:
      /* All RELOAD_FOR_INPUT reloads remain live till the instruction
	 executes (inclusive).  */
      time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      /* opnum * 4 + 4
	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
      time1 = MAX_RECOG_OPERANDS * 4 + 1;
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
	 is executed.  */
      time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
      break;
    default:
      time1 = MAX_RECOG_OPERANDS * 5 + 5;
    }

  /* Compare the prospective use against every existing reload whose
     reload register overlaps REGNO.  */
  for (i = 0; i < n_reloads; i++)
    {
      rtx reg = rld[i].reg_rtx;
      if (reg && REG_P (reg)
	  && ((unsigned) regno - true_regnum (reg)
	      <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
	  && i != reloadnum)
	{
	  rtx other_input = rld[i].in;

	  /* If the other reload loads the same input value, that
	     will not cause a conflict only if it's loading it into
	     the same register.  */
	  if (true_regnum (reg) != start_regno)
	    other_input = NULL_RTX;
	  if (! other_input || ! rtx_equal_p (other_input, value)
	      || rld[i].out || out)
	    {
	      int time2;
	      switch (rld[i].when_needed)
		{
		case RELOAD_FOR_OTHER_ADDRESS:
		  time2 = 0;
		  break;
		case RELOAD_FOR_INPADDR_ADDRESS:
		  /* find_reloads makes sure that a
		     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
		     by at most one - the first -
		     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
		     address reload is inherited, the address address reload
		     goes away, so we can ignore this conflict.  */
		  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
			 Then the address address is still needed to store
			 back the new address.  */
		      && ! rld[reloadnum].out)
		    continue;
		  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
		     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
		     reloads go away.  */
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      /* Unless we are reloading an auto_inc expression.  */
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 2;
		  break;
		case RELOAD_FOR_INPUT_ADDRESS:
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 3;
		  break;
		case RELOAD_FOR_INPUT:
		  time2 = rld[i].opnum * 4 + 4;
		  check_earlyclobber = 1;
		  break;
		  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
		     == MAX_RECOG_OPERAND * 4  */
		case RELOAD_FOR_OPADDR_ADDR:
		  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 1;
		  break;
		case RELOAD_FOR_OPERAND_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 2;
		  check_earlyclobber = 1;
		  break;
		case RELOAD_FOR_INSN:
		  time2 = MAX_RECOG_OPERANDS * 4 + 3;
		  break;
		case RELOAD_FOR_OUTPUT:
		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
		     instruction is executed.  */
		  time2 = MAX_RECOG_OPERANDS * 4 + 4;
		  break;
		  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
		     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
		     value.  */
		case RELOAD_FOR_OUTADDR_ADDRESS:
		  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
		  break;
		case RELOAD_FOR_OUTPUT_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
		  break;
		case RELOAD_OTHER:
		  /* If there is no conflict in the input part, handle this
		     like an output reload.  */
		  if (! rld[i].in || rtx_equal_p (other_input, value))
		    {
		      time2 = MAX_RECOG_OPERANDS * 4 + 4;
		      /* Earlyclobbered outputs must conflict with inputs.  */
		      if (earlyclobber_operand_p (rld[i].out))
			time2 = MAX_RECOG_OPERANDS * 4 + 3;

		      break;
		    }
		  time2 = 1;
		  /* RELOAD_OTHER might be live beyond instruction execution,
		     but this is not obvious when we set time2 = 1.  So check
		     here if there might be a problem with the new reload
		     clobbering the register used by the RELOAD_OTHER.  */
		  if (out)
		    return 0;
		  break;
		default:
		  return 0;
		}
	      if ((time1 >= time2
		   && (! rld[i].in || rld[i].out
		       || ! rtx_equal_p (other_input, value)))
		  || (out && rld[reloadnum].out_reg
		      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
		return 0;
	    }
	}
    }

  /* Earlyclobbered outputs must conflict with inputs.  */
  if (check_earlyclobber && out && earlyclobber_operand_p (out))
    return 0;

  return 1;
}
6031
6032 /* Return 1 if the value in reload reg REGNO, as used by a reload
6033 needed for the part of the insn specified by OPNUM and TYPE,
6034 may be used to load VALUE into it.
6035
6036 MODE is the mode in which the register is used, this is needed to
6037 determine how many hard regs to test.
6038
6039 Other read-only reloads with the same value do not conflict
6040 unless OUT is nonzero and these other reloads have to live while
6041 output reloads live.
6042 If OUT is CONST0_RTX, this is a special case: it means that the
6043 test should not be for using register REGNO as reload register, but
6044 for copying from register REGNO into the reload register.
6045
6046 RELOADNUM is the number of the reload we want to load this value for;
6047 a reload does not conflict with itself.
6048
6049 When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
6050 reloads that load an address for the very reload we are considering.
6051
6052 The caller has to make sure that there is no conflict with the return
6053 register. */
6054
6055 static int
6056 free_for_value_p (int regno, enum machine_mode mode, int opnum,
6057 enum reload_type type, rtx value, rtx out, int reloadnum,
6058 int ignore_address_reloads)
6059 {
6060 int nregs = hard_regno_nregs[regno][mode];
6061 while (nregs-- > 0)
6062 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
6063 value, out, reloadnum,
6064 ignore_address_reloads))
6065 return 0;
6066 return 1;
6067 }
6068
6069 /* Return nonzero if the rtx X is invariant over the current function. */
6070 /* ??? Actually, the places where we use this expect exactly what is
6071 tested here, and not everything that is function invariant. In
6072 particular, the frame pointer and arg pointer are special cased;
6073 pic_offset_table_rtx is not, and we must not spill these things to
6074 memory. */
6075
6076 int
6077 function_invariant_p (const_rtx x)
6078 {
6079 if (CONSTANT_P (x))
6080 return 1;
6081 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
6082 return 1;
6083 if (GET_CODE (x) == PLUS
6084 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
6085 && GET_CODE (XEXP (x, 1)) == CONST_INT)
6086 return 1;
6087 return 0;
6088 }
6089
6090 /* Determine whether the reload reg X overlaps any rtx'es used for
6091 overriding inheritance. Return nonzero if so. */
6092
6093 static int
6094 conflicts_with_override (rtx x)
6095 {
6096 int i;
6097 for (i = 0; i < n_reloads; i++)
6098 if (reload_override_in[i]
6099 && reg_overlap_mentioned_p (x, reload_override_in[i]))
6100 return 1;
6101 return 0;
6102 }
6103 \f
6104 /* Give an error message saying we failed to find a reload for INSN,
6105 and clear out reload R. */
6106 static void
6107 failed_reload (rtx insn, int r)
6108 {
6109 if (asm_noperands (PATTERN (insn)) < 0)
6110 /* It's the compiler's fault. */
6111 fatal_insn ("could not find a spill register", insn);
6112
6113 /* It's the user's fault; the operand's mode and constraint
6114 don't match. Disable this reload so we don't crash in final. */
6115 error_for_asm (insn,
6116 "%<asm%> operand constraint incompatible with operand size");
6117 rld[r].in = 0;
6118 rld[r].out = 0;
6119 rld[r].reg_rtx = 0;
6120 rld[r].optional = 1;
6121 rld[r].secondary_p = 1;
6122 }
6123
6124 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
6125 for reload R. If it's valid, get an rtx for it. Return nonzero if
6126 successful. */
6127 static int
6128 set_reload_reg (int i, int r)
6129 {
6130 /* regno is 'set but not used' if HARD_REGNO_MODE_OK doesn't use its first
6131 parameter. */
6132 int regno ATTRIBUTE_UNUSED;
6133 rtx reg = spill_reg_rtx[i];
6134
6135 if (reg == 0 || GET_MODE (reg) != rld[r].mode)
6136 spill_reg_rtx[i] = reg
6137 = gen_rtx_REG (rld[r].mode, spill_regs[i]);
6138
6139 regno = true_regnum (reg);
6140
6141 /* Detect when the reload reg can't hold the reload mode.
6142 This used to be one `if', but Sequent compiler can't handle that. */
6143 if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
6144 {
6145 enum machine_mode test_mode = VOIDmode;
6146 if (rld[r].in)
6147 test_mode = GET_MODE (rld[r].in);
6148 /* If rld[r].in has VOIDmode, it means we will load it
6149 in whatever mode the reload reg has: to wit, rld[r].mode.
6150 We have already tested that for validity. */
6151 /* Aside from that, we need to test that the expressions
6152 to reload from or into have modes which are valid for this
6153 reload register. Otherwise the reload insns would be invalid. */
6154 if (! (rld[r].in != 0 && test_mode != VOIDmode
6155 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
6156 if (! (rld[r].out != 0
6157 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
6158 {
6159 /* The reg is OK. */
6160 last_spill_reg = i;
6161
6162 /* Mark as in use for this insn the reload regs we use
6163 for this. */
6164 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
6165 rld[r].when_needed, rld[r].mode);
6166
6167 rld[r].reg_rtx = reg;
6168 reload_spill_index[r] = spill_regs[i];
6169 return 1;
6170 }
6171 }
6172 return 0;
6173 }
6174
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is nonzero if this is the last reload for the insn being
   processed.

   Set rld[R].reg_rtx to the register allocated.

   We return 1 if successful, or 0 if we couldn't find a spill reg and
   we didn't change anything.  */

static int
allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
		     int last_reload)
{
  int i, pass, count;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = rld[r].nregs > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use three passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers which are not "bad", then
     finally any register.

     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 3; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  */

      i = last_spill_reg;

      for (count = 0; count < n_spills; count++)
	{
	  int rclass = (int) rld[r].rclass;
	  int regnum;

	  /* Advance I round-robin, wrapping at n_spills.  */
	  i++;
	  if (i >= n_spills)
	    i -= n_spills;
	  regnum = spill_regs[i];

	  /* The candidate must be free for this reload's lifetime (or
	     already hold the wanted value), belong to the reload's
	     class, and be valid in the reload's mode.  */
	  if ((reload_reg_free_p (regnum, rld[r].opnum,
				  rld[r].when_needed)
	       || (rld[r].in
		   /* We check reload_reg_used to make sure we
		      don't clobber the return register.  */
		   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
		   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
					rld[r].when_needed, rld[r].in,
					rld[r].out, r, 1)))
	      && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
	      && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 regnum)
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      regnum))))
	    {
	      int nr = hard_regno_nregs[regnum][rld[r].mode];

	      /* During the second pass we want to avoid reload registers
		 which are "bad" for this reload.  */
	      if (pass == 1
		  && ira_bad_reload_regno (regnum, rld[r].in, rld[r].out))
		continue;

	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = rld[r].nregs;
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.  */
	      while (nr > 1)
		{
		  int regno = regnum + nr - 1;
		  if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
			&& spill_reg_order[regno] >= 0
			&& reload_reg_free_p (regno, rld[r].opnum,
					      rld[r].when_needed)))
		    break;
		  nr--;
		}
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on the current pass, omit later passes.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count >= n_spills)
    return 0;

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  return set_reload_reg (i, r);
}
6310 \f
6311 /* Initialize all the tables needed to allocate reload registers.
6312 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
6313 is the array we use to restore the reg_rtx field for every reload. */
6314
6315 static void
6316 choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
6317 {
6318 int i;
6319
6320 for (i = 0; i < n_reloads; i++)
6321 rld[i].reg_rtx = save_reload_reg_rtx[i];
6322
6323 memset (reload_inherited, 0, MAX_RELOADS);
6324 memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
6325 memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
6326
6327 CLEAR_HARD_REG_SET (reload_reg_used);
6328 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
6329 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
6330 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
6331 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
6332 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
6333
6334 CLEAR_HARD_REG_SET (reg_used_in_insn);
6335 {
6336 HARD_REG_SET tmp;
6337 REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
6338 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
6339 REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
6340 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
6341 compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
6342 compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
6343 }
6344
6345 for (i = 0; i < reload_n_operands; i++)
6346 {
6347 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
6348 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
6349 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
6350 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
6351 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
6352 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
6353 }
6354
6355 COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
6356
6357 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
6358
6359 for (i = 0; i < n_reloads; i++)
6360 /* If we have already decided to use a certain register,
6361 don't use it in another way. */
6362 if (rld[i].reg_rtx)
6363 mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
6364 rld[i].when_needed, rld[i].mode);
6365 }
6366
6367 /* Assign hard reg targets for the pseudo-registers we must reload
6368 into hard regs for this insn.
6369 Also output the instructions to copy them in and out of the hard regs.
6370
6371 For machines with register classes, we are responsible for
6372 finding a reload reg in the proper class. */
6373
6374 static void
6375 choose_reload_regs (struct insn_chain *chain)
6376 {
6377 rtx insn = chain->insn;
6378 int i, j;
6379 unsigned int max_group_size = 1;
6380 enum reg_class group_class = NO_REGS;
6381 int pass, win, inheritance;
6382
6383 rtx save_reload_reg_rtx[MAX_RELOADS];
6384
6385 /* In order to be certain of getting the registers we need,
6386 we must sort the reloads into order of increasing register class.
6387 Then our grabbing of reload registers will parallel the process
6388 that provided the reload registers.
6389
6390 Also note whether any of the reloads wants a consecutive group of regs.
6391 If so, record the maximum size of the group desired and what
6392 register class contains all the groups needed by this insn. */
6393
6394 for (j = 0; j < n_reloads; j++)
6395 {
6396 reload_order[j] = j;
6397 if (rld[j].reg_rtx != NULL_RTX)
6398 {
6399 gcc_assert (REG_P (rld[j].reg_rtx)
6400 && HARD_REGISTER_P (rld[j].reg_rtx));
6401 reload_spill_index[j] = REGNO (rld[j].reg_rtx);
6402 }
6403 else
6404 reload_spill_index[j] = -1;
6405
6406 if (rld[j].nregs > 1)
6407 {
6408 max_group_size = MAX (rld[j].nregs, max_group_size);
6409 group_class
6410 = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
6411 }
6412
6413 save_reload_reg_rtx[j] = rld[j].reg_rtx;
6414 }
6415
6416 if (n_reloads > 1)
6417 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
6418
6419 /* If -O, try first with inheritance, then turning it off.
6420 If not -O, don't do inheritance.
6421 Using inheritance when not optimizing leads to paradoxes
6422 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6423 because one side of the comparison might be inherited. */
6424 win = 0;
6425 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
6426 {
6427 choose_reload_regs_init (chain, save_reload_reg_rtx);
6428
6429 /* Process the reloads in order of preference just found.
6430 Beyond this point, subregs can be found in reload_reg_rtx.
6431
6432 This used to look for an existing reloaded home for all of the
6433 reloads, and only then perform any new reloads. But that could lose
6434 if the reloads were done out of reg-class order because a later
6435 reload with a looser constraint might have an old home in a register
6436 needed by an earlier reload with a tighter constraint.
6437
6438 To solve this, we make two passes over the reloads, in the order
6439 described above. In the first pass we try to inherit a reload
6440 from a previous insn. If there is a later reload that needs a
6441 class that is a proper subset of the class being processed, we must
6442 also allocate a spill register during the first pass.
6443
6444 Then make a second pass over the reloads to allocate any reloads
6445 that haven't been given registers yet. */
6446
6447 for (j = 0; j < n_reloads; j++)
6448 {
6449 int r = reload_order[j];
6450 rtx search_equiv = NULL_RTX;
6451
6452 /* Ignore reloads that got marked inoperative. */
6453 if (rld[r].out == 0 && rld[r].in == 0
6454 && ! rld[r].secondary_p)
6455 continue;
6456
6457 /* If find_reloads chose to use reload_in or reload_out as a reload
6458 register, we don't need to chose one. Otherwise, try even if it
6459 found one since we might save an insn if we find the value lying
6460 around.
6461 Try also when reload_in is a pseudo without a hard reg. */
6462 if (rld[r].in != 0 && rld[r].reg_rtx != 0
6463 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6464 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6465 && !MEM_P (rld[r].in)
6466 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6467 continue;
6468
6469 #if 0 /* No longer needed for correct operation.
6470 It might give better code, or might not; worth an experiment? */
6471 /* If this is an optional reload, we can't inherit from earlier insns
6472 until we are sure that any non-optional reloads have been allocated.
6473 The following code takes advantage of the fact that optional reloads
6474 are at the end of reload_order. */
6475 if (rld[r].optional != 0)
6476 for (i = 0; i < j; i++)
6477 if ((rld[reload_order[i]].out != 0
6478 || rld[reload_order[i]].in != 0
6479 || rld[reload_order[i]].secondary_p)
6480 && ! rld[reload_order[i]].optional
6481 && rld[reload_order[i]].reg_rtx == 0)
6482 allocate_reload_reg (chain, reload_order[i], 0);
6483 #endif
6484
6485 /* First see if this pseudo is already available as reloaded
6486 for a previous insn. We cannot try to inherit for reloads
6487 that are smaller than the maximum number of registers needed
6488 for groups unless the register we would allocate cannot be used
6489 for the groups.
6490
6491 We could check here to see if this is a secondary reload for
6492 an object that is already in a register of the desired class.
6493 This would avoid the need for the secondary reload register.
6494 But this is complex because we can't easily determine what
6495 objects might want to be loaded via this reload. So let a
6496 register be allocated here. In `emit_reload_insns' we suppress
6497 one of the loads in the case described above. */
6498
6499 if (inheritance)
6500 {
6501 int byte = 0;
6502 int regno = -1;
6503 enum machine_mode mode = VOIDmode;
6504
6505 if (rld[r].in == 0)
6506 ;
6507 else if (REG_P (rld[r].in))
6508 {
6509 regno = REGNO (rld[r].in);
6510 mode = GET_MODE (rld[r].in);
6511 }
6512 else if (REG_P (rld[r].in_reg))
6513 {
6514 regno = REGNO (rld[r].in_reg);
6515 mode = GET_MODE (rld[r].in_reg);
6516 }
6517 else if (GET_CODE (rld[r].in_reg) == SUBREG
6518 && REG_P (SUBREG_REG (rld[r].in_reg)))
6519 {
6520 regno = REGNO (SUBREG_REG (rld[r].in_reg));
6521 if (regno < FIRST_PSEUDO_REGISTER)
6522 regno = subreg_regno (rld[r].in_reg);
6523 else
6524 byte = SUBREG_BYTE (rld[r].in_reg);
6525 mode = GET_MODE (rld[r].in_reg);
6526 }
6527 #ifdef AUTO_INC_DEC
6528 else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6529 && REG_P (XEXP (rld[r].in_reg, 0)))
6530 {
6531 regno = REGNO (XEXP (rld[r].in_reg, 0));
6532 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6533 rld[r].out = rld[r].in;
6534 }
6535 #endif
6536 #if 0
6537 /* This won't work, since REGNO can be a pseudo reg number.
6538 Also, it takes much more hair to keep track of all the things
6539 that can invalidate an inherited reload of part of a pseudoreg. */
6540 else if (GET_CODE (rld[r].in) == SUBREG
6541 && REG_P (SUBREG_REG (rld[r].in)))
6542 regno = subreg_regno (rld[r].in);
6543 #endif
6544
6545 if (regno >= 0
6546 && reg_last_reload_reg[regno] != 0
6547 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
6548 >= GET_MODE_SIZE (mode) + byte)
6549 #ifdef CANNOT_CHANGE_MODE_CLASS
6550 /* Verify that the register it's in can be used in
6551 mode MODE. */
6552 && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
6553 GET_MODE (reg_last_reload_reg[regno]),
6554 mode)
6555 #endif
6556 )
6557 {
6558 enum reg_class rclass = rld[r].rclass, last_class;
6559 rtx last_reg = reg_last_reload_reg[regno];
6560
6561 i = REGNO (last_reg);
6562 i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6563 last_class = REGNO_REG_CLASS (i);
6564
6565 if (reg_reloaded_contents[i] == regno
6566 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6567 && HARD_REGNO_MODE_OK (i, rld[r].mode)
6568 && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6569 /* Even if we can't use this register as a reload
6570 register, we might use it for reload_override_in,
6571 if copying it to the desired class is cheap
6572 enough. */
6573 || ((register_move_cost (mode, last_class, rclass)
6574 < memory_move_cost (mode, rclass, true))
6575 && (secondary_reload_class (1, rclass, mode,
6576 last_reg)
6577 == NO_REGS)
6578 #ifdef SECONDARY_MEMORY_NEEDED
6579 && ! SECONDARY_MEMORY_NEEDED (last_class, rclass,
6580 mode)
6581 #endif
6582 ))
6583
6584 && (rld[r].nregs == max_group_size
6585 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6586 i))
6587 && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6588 rld[r].when_needed, rld[r].in,
6589 const0_rtx, r, 1))
6590 {
6591 /* If a group is needed, verify that all the subsequent
6592 registers still have their values intact. */
6593 int nr = hard_regno_nregs[i][rld[r].mode];
6594 int k;
6595
6596 for (k = 1; k < nr; k++)
6597 if (reg_reloaded_contents[i + k] != regno
6598 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6599 break;
6600
6601 if (k == nr)
6602 {
6603 int i1;
6604 int bad_for_class;
6605
6606 last_reg = (GET_MODE (last_reg) == mode
6607 ? last_reg : gen_rtx_REG (mode, i));
6608
6609 bad_for_class = 0;
6610 for (k = 0; k < nr; k++)
6611 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6612 i+k);
6613
6614 /* We found a register that contains the
6615 value we need. If this register is the
6616 same as an `earlyclobber' operand of the
6617 current insn, just mark it as a place to
6618 reload from since we can't use it as the
6619 reload register itself. */
6620
6621 for (i1 = 0; i1 < n_earlyclobbers; i1++)
6622 if (reg_overlap_mentioned_for_reload_p
6623 (reg_last_reload_reg[regno],
6624 reload_earlyclobbers[i1]))
6625 break;
6626
6627 if (i1 != n_earlyclobbers
6628 || ! (free_for_value_p (i, rld[r].mode,
6629 rld[r].opnum,
6630 rld[r].when_needed, rld[r].in,
6631 rld[r].out, r, 1))
6632 /* Don't use it if we'd clobber a pseudo reg. */
6633 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6634 && rld[r].out
6635 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6636 /* Don't clobber the frame pointer. */
6637 || (i == HARD_FRAME_POINTER_REGNUM
6638 && frame_pointer_needed
6639 && rld[r].out)
6640 /* Don't really use the inherited spill reg
6641 if we need it wider than we've got it. */
6642 || (GET_MODE_SIZE (rld[r].mode)
6643 > GET_MODE_SIZE (mode))
6644 || bad_for_class
6645
6646 /* If find_reloads chose reload_out as reload
6647 register, stay with it - that leaves the
6648 inherited register for subsequent reloads. */
6649 || (rld[r].out && rld[r].reg_rtx
6650 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6651 {
6652 if (! rld[r].optional)
6653 {
6654 reload_override_in[r] = last_reg;
6655 reload_inheritance_insn[r]
6656 = reg_reloaded_insn[i];
6657 }
6658 }
6659 else
6660 {
6661 int k;
6662 /* We can use this as a reload reg. */
6663 /* Mark the register as in use for this part of
6664 the insn. */
6665 mark_reload_reg_in_use (i,
6666 rld[r].opnum,
6667 rld[r].when_needed,
6668 rld[r].mode);
6669 rld[r].reg_rtx = last_reg;
6670 reload_inherited[r] = 1;
6671 reload_inheritance_insn[r]
6672 = reg_reloaded_insn[i];
6673 reload_spill_index[r] = i;
6674 for (k = 0; k < nr; k++)
6675 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6676 i + k);
6677 }
6678 }
6679 }
6680 }
6681 }
6682
6683 /* Here's another way to see if the value is already lying around. */
6684 if (inheritance
6685 && rld[r].in != 0
6686 && ! reload_inherited[r]
6687 && rld[r].out == 0
6688 && (CONSTANT_P (rld[r].in)
6689 || GET_CODE (rld[r].in) == PLUS
6690 || REG_P (rld[r].in)
6691 || MEM_P (rld[r].in))
6692 && (rld[r].nregs == max_group_size
6693 || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6694 search_equiv = rld[r].in;
6695
6696 if (search_equiv)
6697 {
6698 rtx equiv
6699 = find_equiv_reg (search_equiv, insn, rld[r].rclass,
6700 -1, NULL, 0, rld[r].mode);
6701 int regno = 0;
6702
6703 if (equiv != 0)
6704 {
6705 if (REG_P (equiv))
6706 regno = REGNO (equiv);
6707 else
6708 {
6709 /* This must be a SUBREG of a hard register.
6710 Make a new REG since this might be used in an
6711 address and not all machines support SUBREGs
6712 there. */
6713 gcc_assert (GET_CODE (equiv) == SUBREG);
6714 regno = subreg_regno (equiv);
6715 equiv = gen_rtx_REG (rld[r].mode, regno);
6716 /* If we choose EQUIV as the reload register, but the
6717 loop below decides to cancel the inheritance, we'll
6718 end up reloading EQUIV in rld[r].mode, not the mode
6719 it had originally. That isn't safe when EQUIV isn't
6720 available as a spill register since its value might
6721 still be live at this point. */
6722 for (i = regno; i < regno + (int) rld[r].nregs; i++)
6723 if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6724 equiv = 0;
6725 }
6726 }
6727
6728 /* If we found a spill reg, reject it unless it is free
6729 and of the desired class. */
6730 if (equiv != 0)
6731 {
6732 int regs_used = 0;
6733 int bad_for_class = 0;
6734 int max_regno = regno + rld[r].nregs;
6735
6736 for (i = regno; i < max_regno; i++)
6737 {
6738 regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6739 i);
6740 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6741 i);
6742 }
6743
6744 if ((regs_used
6745 && ! free_for_value_p (regno, rld[r].mode,
6746 rld[r].opnum, rld[r].when_needed,
6747 rld[r].in, rld[r].out, r, 1))
6748 || bad_for_class)
6749 equiv = 0;
6750 }
6751
6752 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6753 equiv = 0;
6754
6755 /* We found a register that contains the value we need.
6756 If this register is the same as an `earlyclobber' operand
6757 of the current insn, just mark it as a place to reload from
6758 since we can't use it as the reload register itself. */
6759
6760 if (equiv != 0)
6761 for (i = 0; i < n_earlyclobbers; i++)
6762 if (reg_overlap_mentioned_for_reload_p (equiv,
6763 reload_earlyclobbers[i]))
6764 {
6765 if (! rld[r].optional)
6766 reload_override_in[r] = equiv;
6767 equiv = 0;
6768 break;
6769 }
6770
6771 /* If the equiv register we have found is explicitly clobbered
6772 in the current insn, it depends on the reload type if we
6773 can use it, use it for reload_override_in, or not at all.
6774 In particular, we then can't use EQUIV for a
6775 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6776
6777 if (equiv != 0)
6778 {
6779 if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6780 switch (rld[r].when_needed)
6781 {
6782 case RELOAD_FOR_OTHER_ADDRESS:
6783 case RELOAD_FOR_INPADDR_ADDRESS:
6784 case RELOAD_FOR_INPUT_ADDRESS:
6785 case RELOAD_FOR_OPADDR_ADDR:
6786 break;
6787 case RELOAD_OTHER:
6788 case RELOAD_FOR_INPUT:
6789 case RELOAD_FOR_OPERAND_ADDRESS:
6790 if (! rld[r].optional)
6791 reload_override_in[r] = equiv;
6792 /* Fall through. */
6793 default:
6794 equiv = 0;
6795 break;
6796 }
6797 else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6798 switch (rld[r].when_needed)
6799 {
6800 case RELOAD_FOR_OTHER_ADDRESS:
6801 case RELOAD_FOR_INPADDR_ADDRESS:
6802 case RELOAD_FOR_INPUT_ADDRESS:
6803 case RELOAD_FOR_OPADDR_ADDR:
6804 case RELOAD_FOR_OPERAND_ADDRESS:
6805 case RELOAD_FOR_INPUT:
6806 break;
6807 case RELOAD_OTHER:
6808 if (! rld[r].optional)
6809 reload_override_in[r] = equiv;
6810 /* Fall through. */
6811 default:
6812 equiv = 0;
6813 break;
6814 }
6815 }
6816
6817 /* If we found an equivalent reg, say no code need be generated
6818 to load it, and use it as our reload reg. */
6819 if (equiv != 0
6820 && (regno != HARD_FRAME_POINTER_REGNUM
6821 || !frame_pointer_needed))
6822 {
6823 int nr = hard_regno_nregs[regno][rld[r].mode];
6824 int k;
6825 rld[r].reg_rtx = equiv;
6826 reload_spill_index[r] = regno;
6827 reload_inherited[r] = 1;
6828
6829 /* If reg_reloaded_valid is not set for this register,
6830 there might be a stale spill_reg_store lying around.
6831 We must clear it, since otherwise emit_reload_insns
6832 might delete the store. */
6833 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6834 spill_reg_store[regno] = NULL_RTX;
6835 /* If any of the hard registers in EQUIV are spill
6836 registers, mark them as in use for this insn. */
6837 for (k = 0; k < nr; k++)
6838 {
6839 i = spill_reg_order[regno + k];
6840 if (i >= 0)
6841 {
6842 mark_reload_reg_in_use (regno, rld[r].opnum,
6843 rld[r].when_needed,
6844 rld[r].mode);
6845 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6846 regno + k);
6847 }
6848 }
6849 }
6850 }
6851
6852 /* If we found a register to use already, or if this is an optional
6853 reload, we are done. */
6854 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6855 continue;
6856
6857 #if 0
6858 /* No longer needed for correct operation. Might or might
6859 not give better code on the average. Want to experiment? */
6860
6861 /* See if there is a later reload that has a class different from our
6862 class that intersects our class or that requires less register
6863 than our reload. If so, we must allocate a register to this
6864 reload now, since that reload might inherit a previous reload
6865 and take the only available register in our class. Don't do this
6866 for optional reloads since they will force all previous reloads
6867 to be allocated. Also don't do this for reloads that have been
6868 turned off. */
6869
6870 for (i = j + 1; i < n_reloads; i++)
6871 {
6872 int s = reload_order[i];
6873
6874 if ((rld[s].in == 0 && rld[s].out == 0
6875 && ! rld[s].secondary_p)
6876 || rld[s].optional)
6877 continue;
6878
6879 if ((rld[s].rclass != rld[r].rclass
6880 && reg_classes_intersect_p (rld[r].rclass,
6881 rld[s].rclass))
6882 || rld[s].nregs < rld[r].nregs)
6883 break;
6884 }
6885
6886 if (i == n_reloads)
6887 continue;
6888
6889 allocate_reload_reg (chain, r, j == n_reloads - 1);
6890 #endif
6891 }
6892
6893 /* Now allocate reload registers for anything non-optional that
6894 didn't get one yet. */
6895 for (j = 0; j < n_reloads; j++)
6896 {
6897 int r = reload_order[j];
6898
6899 /* Ignore reloads that got marked inoperative. */
6900 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6901 continue;
6902
6903 /* Skip reloads that already have a register allocated or are
6904 optional. */
6905 if (rld[r].reg_rtx != 0 || rld[r].optional)
6906 continue;
6907
6908 if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6909 break;
6910 }
6911
6912 /* If that loop got all the way, we have won. */
6913 if (j == n_reloads)
6914 {
6915 win = 1;
6916 break;
6917 }
6918
6919 /* Loop around and try without any inheritance. */
6920 }
6921
6922 if (! win)
6923 {
6924 /* First undo everything done by the failed attempt
6925 to allocate with inheritance. */
6926 choose_reload_regs_init (chain, save_reload_reg_rtx);
6927
6928 /* Some sanity tests to verify that the reloads found in the first
6929 pass are identical to the ones we have now. */
6930 gcc_assert (chain->n_reloads == n_reloads);
6931
6932 for (i = 0; i < n_reloads; i++)
6933 {
6934 if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6935 continue;
6936 gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6937 for (j = 0; j < n_spills; j++)
6938 if (spill_regs[j] == chain->rld[i].regno)
6939 if (! set_reload_reg (j, i))
6940 failed_reload (chain->insn, i);
6941 }
6942 }
6943
6944 /* If we thought we could inherit a reload, because it seemed that
6945 nothing else wanted the same reload register earlier in the insn,
6946 verify that assumption, now that all reloads have been assigned.
6947 Likewise for reloads where reload_override_in has been set. */
6948
6949 /* If doing expensive optimizations, do one preliminary pass that doesn't
6950 cancel any inheritance, but removes reloads that have been needed only
6951 for reloads that we know can be inherited. */
6952 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6953 {
6954 for (j = 0; j < n_reloads; j++)
6955 {
6956 int r = reload_order[j];
6957 rtx check_reg;
6958 if (reload_inherited[r] && rld[r].reg_rtx)
6959 check_reg = rld[r].reg_rtx;
6960 else if (reload_override_in[r]
6961 && (REG_P (reload_override_in[r])
6962 || GET_CODE (reload_override_in[r]) == SUBREG))
6963 check_reg = reload_override_in[r];
6964 else
6965 continue;
6966 if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6967 rld[r].opnum, rld[r].when_needed, rld[r].in,
6968 (reload_inherited[r]
6969 ? rld[r].out : const0_rtx),
6970 r, 1))
6971 {
6972 if (pass)
6973 continue;
6974 reload_inherited[r] = 0;
6975 reload_override_in[r] = 0;
6976 }
6977 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6978 reload_override_in, then we do not need its related
6979 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6980 likewise for other reload types.
6981 We handle this by removing a reload when its only replacement
6982 is mentioned in reload_in of the reload we are going to inherit.
6983 A special case are auto_inc expressions; even if the input is
6984 inherited, we still need the address for the output. We can
6985 recognize them because they have RELOAD_OUT set to RELOAD_IN.
6986 If we succeeded removing some reload and we are doing a preliminary
6987 pass just to remove such reloads, make another pass, since the
6988 removal of one reload might allow us to inherit another one. */
6989 else if (rld[r].in
6990 && rld[r].out != rld[r].in
6991 && remove_address_replacements (rld[r].in) && pass)
6992 pass = 2;
6993 }
6994 }
6995
6996 /* Now that reload_override_in is known valid,
6997 actually override reload_in. */
6998 for (j = 0; j < n_reloads; j++)
6999 if (reload_override_in[j])
7000 rld[j].in = reload_override_in[j];
7001
7002 /* If this reload won't be done because it has been canceled or is
7003 optional and not inherited, clear reload_reg_rtx so other
7004 routines (such as subst_reloads) don't get confused. */
7005 for (j = 0; j < n_reloads; j++)
7006 if (rld[j].reg_rtx != 0
7007 && ((rld[j].optional && ! reload_inherited[j])
7008 || (rld[j].in == 0 && rld[j].out == 0
7009 && ! rld[j].secondary_p)))
7010 {
7011 int regno = true_regnum (rld[j].reg_rtx);
7012
7013 if (spill_reg_order[regno] >= 0)
7014 clear_reload_reg_in_use (regno, rld[j].opnum,
7015 rld[j].when_needed, rld[j].mode);
7016 rld[j].reg_rtx = 0;
7017 reload_spill_index[j] = -1;
7018 }
7019
7020 /* Record which pseudos and which spill regs have output reloads. */
7021 for (j = 0; j < n_reloads; j++)
7022 {
7023 int r = reload_order[j];
7024
7025 i = reload_spill_index[r];
7026
7027 /* I is nonneg if this reload uses a register.
7028 If rld[r].reg_rtx is 0, this is an optional reload
7029 that we opted to ignore. */
7030 if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
7031 && rld[r].reg_rtx != 0)
7032 {
7033 int nregno = REGNO (rld[r].out_reg);
7034 int nr = 1;
7035
7036 if (nregno < FIRST_PSEUDO_REGISTER)
7037 nr = hard_regno_nregs[nregno][rld[r].mode];
7038
7039 while (--nr >= 0)
7040 SET_REGNO_REG_SET (&reg_has_output_reload,
7041 nregno + nr);
7042
7043 if (i >= 0)
7044 add_to_hard_reg_set (&reg_is_output_reload, rld[r].mode, i);
7045
7046 gcc_assert (rld[r].when_needed == RELOAD_OTHER
7047 || rld[r].when_needed == RELOAD_FOR_OUTPUT
7048 || rld[r].when_needed == RELOAD_FOR_INSN);
7049 }
7050 }
7051 }
7052
7053 /* Deallocate the reload register for reload R. This is called from
7054 remove_address_replacements. */
7055
7056 void
7057 deallocate_reload_reg (int r)
7058 {
7059 int regno;
7060
7061 if (! rld[r].reg_rtx)
7062 return;
7063 regno = true_regnum (rld[r].reg_rtx);
7064 rld[r].reg_rtx = 0;
7065 if (spill_reg_order[regno] >= 0)
7066 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
7067 rld[r].mode);
7068 reload_spill_index[r] = -1;
7069 }
7070 \f
/* These arrays are filled by emit_reload_insns and its subroutines.
   Each slot accumulates the insn sequence for one category of reload
   (input, input-address, output, operand-address, ...), per operand
   where applicable; the sequences are later emitted in the proper
   order around the reloaded insn.  */
static rtx input_reload_insns[MAX_RECOG_OPERANDS];
static rtx other_input_address_reload_insns = 0;
static rtx other_input_reload_insns = 0;
static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx output_reload_insns[MAX_RECOG_OPERANDS];
static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx operand_reload_insns = 0;
static rtx other_operand_reload_insns = 0;
static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];

/* Values to be put in spill_reg_store are put here first.  Instructions
   must only be placed here if the associated reload register reaches
   the end of the instruction's reload sequence.  */
static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
/* Hard registers whose reloaded contents died within the current insn's
   reload sequence.  */
static HARD_REG_SET reg_reloaded_died;
7089
7090 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7091 of class NEW_CLASS with mode NEW_MODE. Or alternatively, if alt_reload_reg
7092 is nonzero, if that is suitable. On success, change *RELOAD_REG to the
7093 adjusted register, and return true. Otherwise, return false. */
7094 static bool
7095 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
7096 enum reg_class new_class,
7097 enum machine_mode new_mode)
7098
7099 {
7100 rtx reg;
7101
7102 for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
7103 {
7104 unsigned regno = REGNO (reg);
7105
7106 if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
7107 continue;
7108 if (GET_MODE (reg) != new_mode)
7109 {
7110 if (!HARD_REGNO_MODE_OK (regno, new_mode))
7111 continue;
7112 if (hard_regno_nregs[regno][new_mode]
7113 > hard_regno_nregs[regno][GET_MODE (reg)])
7114 continue;
7115 reg = reload_adjust_reg_for_mode (reg, new_mode);
7116 }
7117 *reload_reg = reg;
7118 return true;
7119 }
7120 return false;
7121 }
7122
7123 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7124 pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7125 nonzero, if that is suitable. On success, change *RELOAD_REG to the
7126 adjusted register, and return true. Otherwise, return false. */
7127 static bool
7128 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
7129 enum insn_code icode)
7130
7131 {
7132 enum reg_class new_class = scratch_reload_class (icode);
7133 enum machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
7134
7135 return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
7136 new_class, new_mode);
7137 }
7138
/* Generate insns to perform reload RL, which is for the insn in CHAIN and
   has the number J.  OLD contains the value to be used as input.
   The generated insns are collected into the sequence slot matching
   RL->when_needed / RL->opnum (see the static arrays above).  */

static void
emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
			 rtx old, int j)
{
  rtx insn = chain->insn;
  rtx reloadreg;
  rtx oldequiv_reg = 0;
  rtx oldequiv = 0;
  /* Nonzero once one of the special cases below has emitted the reload
     itself, suppressing the generic gen_reload at the end.  */
  int special = 0;
  enum machine_mode mode;
  /* Points at the sequence slot where this reload's insns accumulate.  */
  rtx *where;

  /* delete_output_reload is only invoked properly if old contains
     the original pseudo register.  Since this is replaced with a
     hard reg when RELOAD_OVERRIDE_IN is set, see if we can
     find the pseudo in RELOAD_IN_REG.  */
  if (reload_override_in[j]
      && REG_P (rl->in_reg))
    {
      oldequiv = old;
      old = rl->in_reg;
    }
  if (oldequiv == 0)
    oldequiv = old;
  else if (REG_P (oldequiv))
    oldequiv_reg = oldequiv;
  else if (GET_CODE (oldequiv) == SUBREG)
    oldequiv_reg = SUBREG_REG (oldequiv);

  reloadreg = reload_reg_rtx_for_input[j];
  mode = GET_MODE (reloadreg);

  /* If we are reloading from a register that was recently stored in
     with an output-reload, see if we can prove there was
     actually no need to store the old value in it.  */

  if (optimize && REG_P (oldequiv)
      && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
      && spill_reg_store[REGNO (oldequiv)]
      && REG_P (old)
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
	  || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
			  rl->out_reg)))
    delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);

  /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
     OLDEQUIV.  */

  /* Strip SUBREGs until the mode matches, then wrap in a lowpart SUBREG
     if the modes still differ.  */
  while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
    oldequiv = SUBREG_REG (oldequiv);
  if (GET_MODE (oldequiv) != VOIDmode
      && mode != GET_MODE (oldequiv))
    oldequiv = gen_lowpart_SUBREG (mode, oldequiv);

  /* Switch to the right place to emit the reload insns.  */
  switch (rl->when_needed)
    {
    case RELOAD_OTHER:
      where = &other_input_reload_insns;
      break;
    case RELOAD_FOR_INPUT:
      where = &input_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      where = &input_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_INPADDR_ADDRESS:
      where = &inpaddr_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      where = &output_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      where = &outaddr_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      where = &operand_reload_insns;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      where = &other_operand_reload_insns;
      break;
    case RELOAD_FOR_OTHER_ADDRESS:
      where = &other_input_address_reload_insns;
      break;
    default:
      gcc_unreachable ();
    }

  push_to_sequence (*where);

  /* Auto-increment addresses must be reloaded in a special way.  */
  if (rl->out && ! rl->out_reg)
    {
      /* We are not going to bother supporting the case where a
	 incremented register can't be copied directly from
	 OLDEQUIV since this seems highly unlikely.  */
      gcc_assert (rl->secondary_in_reload < 0);

      if (reload_inherited[j])
	oldequiv = reloadreg;

      old = XEXP (rl->in_reg, 0);

      /* Prevent normal processing of this reload.  */
      special = 1;
      /* Output a special code sequence for this case.  */
      inc_for_reload (reloadreg, oldequiv, rl->out, rl->inc);
    }

  /* If we are reloading a pseudo-register that was set by the previous
     insn, see if we can get rid of that pseudo-register entirely
     by redirecting the previous insn into our reload register.  */

  else if (optimize && REG_P (old)
	   && REGNO (old) >= FIRST_PSEUDO_REGISTER
	   && dead_or_set_p (insn, old)
	   /* This is unsafe if some other reload
	      uses the same reg first.  */
	   && ! conflicts_with_override (reloadreg)
	   && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
				rl->when_needed, old, rl->out, j, 0))
    {
      rtx temp = PREV_INSN (insn);
      while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
	temp = PREV_INSN (temp);
      if (temp
	  && NONJUMP_INSN_P (temp)
	  && GET_CODE (PATTERN (temp)) == SET
	  && SET_DEST (PATTERN (temp)) == old
	  /* Make sure we can access insn_operand_constraint.  */
	  && asm_noperands (PATTERN (temp)) < 0
	  /* This is unsafe if operand occurs more than once in current
	     insn.  Perhaps some occurrences aren't reloaded.  */
	  && count_occurrences (PATTERN (insn), old, 0) == 1)
	{
	  /* NOTE(review): this inner OLD deliberately shadows the
	     function parameter for the rest of this scope.  */
	  rtx old = SET_DEST (PATTERN (temp));
	  /* Store into the reload register instead of the pseudo.  */
	  SET_DEST (PATTERN (temp)) = reloadreg;

	  /* Verify that resulting insn is valid.  */
	  extract_insn (temp);
	  if (constrain_operands (1))
	    {
	      /* If the previous insn is an output reload, the source is
		 a reload register, and its spill_reg_store entry will
		 contain the previous destination.  This is now
		 invalid.  */
	      if (REG_P (SET_SRC (PATTERN (temp)))
		  && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
		{
		  spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
		  spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
		}

	      /* If these are the only uses of the pseudo reg,
		 pretend for GDB it lives in the reload reg we used.  */
	      if (REG_N_DEATHS (REGNO (old)) == 1
		  && REG_N_SETS (REGNO (old)) == 1)
		{
		  reg_renumber[REGNO (old)] = REGNO (reloadreg);
		  if (ira_conflicts_p)
		    /* Inform IRA about the change.  */
		    ira_mark_allocation_change (REGNO (old));
		  alter_reg (REGNO (old), -1, false);
		}
	      special = 1;

	      /* Adjust any debug insns between temp and insn.  */
	      while ((temp = NEXT_INSN (temp)) != insn)
		if (DEBUG_INSN_P (temp))
		  replace_rtx (PATTERN (temp), old, reloadreg);
		else
		  gcc_assert (NOTE_P (temp));
	    }
	  else
	    {
	      /* Constraints rejected the redirected insn; restore it.  */
	      SET_DEST (PATTERN (temp)) = old;
	    }
	}
    }

  /* We can't do that, so output an insn to load RELOADREG.  */

  /* If we have a secondary reload, pick up the secondary register
     and icode, if any.  If OLDEQUIV and OLD are different or
     if this is an in-out reload, recompute whether or not we
     still need a secondary register and what the icode should
     be.  If we still need a secondary register and the class or
     icode is different, go back to reloading from OLD if using
     OLDEQUIV means that we got the wrong type of register.  We
     cannot have different class or icode due to an in-out reload
     because we don't make such reloads when both the input and
     output need secondary reload registers.  */

  if (! special && rl->secondary_in_reload >= 0)
    {
      rtx second_reload_reg = 0;
      rtx third_reload_reg = 0;
      int secondary_reload = rl->secondary_in_reload;
      rtx real_oldequiv = oldequiv;
      rtx real_old = old;
      rtx tmp;
      enum insn_code icode;
      enum insn_code tertiary_icode = CODE_FOR_nothing;

      /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
	 and similarly for OLD.
	 See comments in get_secondary_reload in reload.c.  */
      /* If it is a pseudo that cannot be replaced with its
	 equivalent MEM, we must fall back to reload_in, which
	 will have all the necessary substitutions registered.
	 Likewise for a pseudo that can't be replaced with its
	 equivalent constant.

	 Take extra care for subregs of such pseudos.  Note that
	 we cannot use reg_equiv_mem in this case because it is
	 not in the right mode.  */

      tmp = oldequiv;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (REG_P (tmp)
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
	  && (reg_equiv_memory_loc (REGNO (tmp)) != 0
	      || reg_equiv_constant (REGNO (tmp)) != 0))
	{
	  if (! reg_equiv_mem (REGNO (tmp))
	      || num_not_at_initial_offset
	      || GET_CODE (oldequiv) == SUBREG)
	    real_oldequiv = rl->in;
	  else
	    real_oldequiv = reg_equiv_mem (REGNO (tmp));
	}

      tmp = old;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (REG_P (tmp)
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
	  && (reg_equiv_memory_loc (REGNO (tmp)) != 0
	      || reg_equiv_constant (REGNO (tmp)) != 0))
	{
	  if (! reg_equiv_mem (REGNO (tmp))
	      || num_not_at_initial_offset
	      || GET_CODE (old) == SUBREG)
	    real_old = rl->in;
	  else
	    real_old = reg_equiv_mem (REGNO (tmp));
	}

      second_reload_reg = rld[secondary_reload].reg_rtx;
      if (rld[secondary_reload].secondary_in_reload >= 0)
	{
	  int tertiary_reload = rld[secondary_reload].secondary_in_reload;

	  third_reload_reg = rld[tertiary_reload].reg_rtx;
	  tertiary_icode = rld[secondary_reload].secondary_in_icode;
	  /* We'd have to add more code for quartary reloads.  */
	  gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
	}
      icode = rl->secondary_in_icode;

      if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
	  || (rl->in != 0 && rl->out != 0))
	{
	  secondary_reload_info sri, sri2;
	  enum reg_class new_class, new_t_class;

	  sri.icode = CODE_FOR_nothing;
	  sri.prev_sri = NULL;
	  new_class
	    = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
							 rl->rclass, mode,
							 &sri);

	  if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
	    second_reload_reg = 0;
	  else if (new_class == NO_REGS)
	    {
	      if (reload_adjust_reg_for_icode (&second_reload_reg,
					       third_reload_reg,
					       (enum insn_code) sri.icode))
		{
		  icode = (enum insn_code) sri.icode;
		  third_reload_reg = 0;
		}
	      else
		{
		  oldequiv = old;
		  real_oldequiv = real_old;
		}
	    }
	  else if (sri.icode != CODE_FOR_nothing)
	    /* We currently lack a way to express this in reloads.  */
	    gcc_unreachable ();
	  else
	    {
	      sri2.icode = CODE_FOR_nothing;
	      sri2.prev_sri = &sri;
	      /* NOTE(review): this call passes &sri but the code below
		 inspects sri2.icode, which stays CODE_FOR_nothing —
		 confirm whether &sri2 was intended here.  */
	      new_t_class
		= (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
							     new_class, mode,
							     &sri);
	      if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
		{
		  if (reload_adjust_reg_for_temp (&second_reload_reg,
						  third_reload_reg,
						  new_class, mode))
		    {
		      third_reload_reg = 0;
		      tertiary_icode = (enum insn_code) sri2.icode;
		    }
		  else
		    {
		      oldequiv = old;
		      real_oldequiv = real_old;
		    }
		}
	      else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
		{
		  rtx intermediate = second_reload_reg;

		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
						  new_class, mode)
		      && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
						      ((enum insn_code)
						       sri2.icode)))
		    {
		      second_reload_reg = intermediate;
		      tertiary_icode = (enum insn_code) sri2.icode;
		    }
		  else
		    {
		      oldequiv = old;
		      real_oldequiv = real_old;
		    }
		}
	      else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
		{
		  rtx intermediate = second_reload_reg;

		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
						  new_class, mode)
		      && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
						     new_t_class, mode))
		    {
		      second_reload_reg = intermediate;
		      tertiary_icode = (enum insn_code) sri2.icode;
		    }
		  else
		    {
		      oldequiv = old;
		      real_oldequiv = real_old;
		    }
		}
	      else
		{
		  /* This could be handled more intelligently too.  */
		  oldequiv = old;
		  real_oldequiv = real_old;
		}
	    }
	}

      /* If we still need a secondary reload register, check
	 to see if it is being used as a scratch or intermediate
	 register and generate code appropriately.  If we need
	 a scratch register, use REAL_OLDEQUIV since the form of
	 the insn may depend on the actual address if it is
	 a MEM.  */

      if (second_reload_reg)
	{
	  if (icode != CODE_FOR_nothing)
	    {
	      /* We'd have to add extra code to handle this case.  */
	      gcc_assert (!third_reload_reg);

	      emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
					  second_reload_reg));
	      special = 1;
	    }
	  else
	    {
	      /* See if we need a scratch register to load the
		 intermediate register (a tertiary reload).  */
	      if (tertiary_icode != CODE_FOR_nothing)
		{
		  emit_insn ((GEN_FCN (tertiary_icode)
			      (second_reload_reg, real_oldequiv,
			       third_reload_reg)));
		}
	      else if (third_reload_reg)
		{
		  /* Chain: third <- oldequiv, second <- third.  */
		  gen_reload (third_reload_reg, real_oldequiv,
			      rl->opnum,
			      rl->when_needed);
		  gen_reload (second_reload_reg, third_reload_reg,
			      rl->opnum,
			      rl->when_needed);
		}
	      else
		gen_reload (second_reload_reg, real_oldequiv,
			    rl->opnum,
			    rl->when_needed);

	      oldequiv = second_reload_reg;
	    }
	}
    }

  if (! special && ! rtx_equal_p (reloadreg, oldequiv))
    {
      rtx real_oldequiv = oldequiv;

      /* Fall back to rl->in when OLDEQUIV is a pseudo (or a subreg of
	 one) with a known memory/constant equivalence, or a constant
	 unsuitable for RELOADREG's class.  */
      if ((REG_P (oldequiv)
	   && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
	   && (reg_equiv_memory_loc (REGNO (oldequiv)) != 0
	       || reg_equiv_constant (REGNO (oldequiv)) != 0))
	  || (GET_CODE (oldequiv) == SUBREG
	      && REG_P (SUBREG_REG (oldequiv))
	      && (REGNO (SUBREG_REG (oldequiv))
		  >= FIRST_PSEUDO_REGISTER)
	      && ((reg_equiv_memory_loc (REGNO (SUBREG_REG (oldequiv))) != 0)
		  || (reg_equiv_constant (REGNO (SUBREG_REG (oldequiv))) != 0)))
	  || (CONSTANT_P (oldequiv)
	      && (targetm.preferred_reload_class (oldequiv,
						  REGNO_REG_CLASS (REGNO (reloadreg)))
		  == NO_REGS)))
	real_oldequiv = rl->in;
      gen_reload (reloadreg, real_oldequiv, rl->opnum,
		  rl->when_needed);
    }

  if (cfun->can_throw_non_call_exceptions)
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);

  /* End this sequence.  */
  *where = get_insns ();
  end_sequence ();

  /* Update reload_override_in so that delete_address_reloads_1
     can see the actual register usage.  */
  if (oldequiv_reg)
    reload_override_in[j] = oldequiv;
}
7588
/* Generate insns for the output reload RL, which is for the insn described
   by CHAIN and has the number J.  */
static void
emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
			  int j)
{
  rtx reloadreg;
  rtx insn = chain->insn;
  /* Set nonzero once the store to OLD has already been emitted by a
     secondary-reload insn pattern, so the generic copy at the end must
     be suppressed.  */
  int special = 0;
  rtx old = rl->out;
  enum machine_mode mode;
  rtx p;
  rtx rl_reg_rtx;

  /* Accumulate the reload insns into the per-operand (or RELOAD_OTHER)
     sequence; emit_reload_insns later splices these around INSN.  */
  if (rl->when_needed == RELOAD_OTHER)
    start_sequence ();
  else
    push_to_sequence (output_reload_insns[rl->opnum]);

  rl_reg_rtx = reload_reg_rtx_for_output[j];
  mode = GET_MODE (rl_reg_rtx);

  reloadreg = rl_reg_rtx;

  /* If we need two reload regs, set RELOADREG to the intermediate
     one, since it will be stored into OLD.  We might need a secondary
     register only for an input reload, so check again here.  */

  if (rl->secondary_out_reload >= 0)
    {
      rtx real_old = old;
      int secondary_reload = rl->secondary_out_reload;
      int tertiary_reload = rld[secondary_reload].secondary_out_reload;

      /* Prefer the pseudo's memory equivalent as the real destination,
	 since the secondary-reload pattern may depend on the address.  */
      if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
	  && reg_equiv_mem (REGNO (old)) != 0)
	real_old = reg_equiv_mem (REGNO (old));

      if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
	{
	  rtx second_reloadreg = reloadreg;
	  reloadreg = rld[secondary_reload].reg_rtx;

	  /* See if RELOADREG is to be used as a scratch register
	     or as an intermediate register.  */
	  if (rl->secondary_out_icode != CODE_FOR_nothing)
	    {
	      /* We'd have to add extra code to handle this case.  */
	      gcc_assert (tertiary_reload < 0);

	      /* Scratch case: the named pattern performs the whole store,
		 with RELOADREG only as a scratch.  */
	      emit_insn ((GEN_FCN (rl->secondary_out_icode)
			  (real_old, second_reloadreg, reloadreg)));
	      special = 1;
	    }
	  else
	    {
	      /* See if we need both a scratch and intermediate reload
		 register.  */

	      enum insn_code tertiary_icode
		= rld[secondary_reload].secondary_out_icode;

	      /* We'd have to add more code for quartary reloads.  */
	      gcc_assert (tertiary_reload < 0
			  || rld[tertiary_reload].secondary_out_reload < 0);

	      if (GET_MODE (reloadreg) != mode)
		reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);

	      if (tertiary_icode != CODE_FOR_nothing)
		{
		  rtx third_reloadreg = rld[tertiary_reload].reg_rtx;

		  /* Copy primary reload reg to secondary reload reg.
		     (Note that these have been swapped above, then
		     secondary reload reg to OLD using our insn.)  */

		  /* If REAL_OLD is a paradoxical SUBREG, remove it
		     and try to put the opposite SUBREG on
		     RELOADREG.  */
		  strip_paradoxical_subreg (&real_old, &reloadreg);

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  emit_insn ((GEN_FCN (tertiary_icode)
			      (real_old, reloadreg, third_reloadreg)));
		  special = 1;
		}

	      else
		{
		  /* Copy between the reload regs here and then to
		     OUT later.  */

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  if (tertiary_reload >= 0)
		    {
		      rtx third_reloadreg = rld[tertiary_reload].reg_rtx;

		      gen_reload (third_reloadreg, reloadreg,
				  rl->opnum, rl->when_needed);
		      reloadreg = third_reloadreg;
		    }
		}
	    }
	}
    }

  /* Output the last reload insn.  */
  if (! special)
    {
      rtx set;

      /* Don't output the last reload if OLD is not the dest of
	 INSN and is in the src and is clobbered by INSN.  */
      if (! flag_expensive_optimizations
	  || !REG_P (old)
	  || !(set = single_set (insn))
	  || rtx_equal_p (old, SET_DEST (set))
	  || !reg_mentioned_p (old, SET_SRC (set))
	  || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
	       && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
	gen_reload (old, reloadreg, rl->opnum,
		    rl->when_needed);
    }

  /* Look at all insns we emitted, just to be safe.  */
  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);

	/* If this output reload doesn't come from a spill reg,
	   clear any memory of reloaded copies of the pseudo reg.
	   If this output reload comes from a spill reg,
	   reg_has_output_reload will make this do nothing.  */
	note_stores (pat, forget_old_reloads_1, NULL);

	if (reg_mentioned_p (rl_reg_rtx, pat))
	  {
	    rtx set = single_set (insn);
	    /* Record that the reload reg was loaded from INSN's source,
	       so later insns can inherit its contents.  */
	    if (reload_spill_index[j] < 0
		&& set
		&& SET_SRC (set) == rl_reg_rtx)
	      {
		int src = REGNO (SET_SRC (set));

		reload_spill_index[j] = src;
		SET_HARD_REG_BIT (reg_is_output_reload, src);
		if (find_regno_note (insn, REG_DEAD, src))
		  SET_HARD_REG_BIT (reg_reloaded_died, src);
	      }
	    if (HARD_REGISTER_P (rl_reg_rtx))
	      {
		int s = rl->secondary_out_reload;
		set = single_set (p);
		/* If this reload copies only to the secondary reload
		   register, the secondary reload does the actual
		   store.  */
		if (s >= 0 && set == NULL_RTX)
		  /* We can't tell what function the secondary reload
		     has and where the actual store to the pseudo is
		     made; leave new_spill_reg_store alone.  */
		  ;
		else if (s >= 0
			 && SET_SRC (set) == rl_reg_rtx
			 && SET_DEST (set) == rld[s].reg_rtx)
		  {
		    /* Usually the next instruction will be the
		       secondary reload insn;  if we can confirm
		       that it is, setting new_spill_reg_store to
		       that insn will allow an extra optimization.  */
		    rtx s_reg = rld[s].reg_rtx;
		    rtx next = NEXT_INSN (p);
		    rld[s].out = rl->out;
		    rld[s].out_reg = rl->out_reg;
		    set = single_set (next);
		    if (set && SET_SRC (set) == s_reg
			&& reload_reg_rtx_reaches_end_p (s_reg, s))
		      {
			SET_HARD_REG_BIT (reg_is_output_reload,
					  REGNO (s_reg));
			new_spill_reg_store[REGNO (s_reg)] = next;
		      }
		  }
		else if (reload_reg_rtx_reaches_end_p (rl_reg_rtx, j))
		  new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
	      }
	  }
      }

  /* Store the accumulated sequence back where emit_reload_insns will
     find it.  RELOAD_OTHER output reloads are chained in descending
     reload order: this reload's insns go after any already recorded.  */
  if (rl->when_needed == RELOAD_OTHER)
    {
      emit_insn (other_output_reload_insns[rl->opnum]);
      other_output_reload_insns[rl->opnum] = get_insns ();
    }
  else
    output_reload_insns[rl->opnum] = get_insns ();

  if (cfun->can_throw_non_call_exceptions)
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);

  end_sequence ();
}
7794
7795 /* Do input reloading for reload RL, which is for the insn described by CHAIN
7796 and has the number J. */
static void
do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
{
  rtx insn = chain->insn;
  /* For a MEM input, reload from the original (pre-spill) register
     expression rather than the stack slot.  */
  rtx old = (rl->in && MEM_P (rl->in)
	     ? rl->in_reg : rl->in);
  rtx reg_rtx = rl->reg_rtx;

  if (old && reg_rtx)
    {
      enum machine_mode mode;

      /* Determine the mode to reload in.
	 This is very tricky because we have three to choose from.
	 There is the mode the insn operand wants (rl->inmode).
	 There is the mode of the reload register RELOADREG.
	 There is the intrinsic mode of the operand, which we could find
	 by stripping some SUBREGs.
	 It turns out that RELOADREG's mode is irrelevant:
	 we can change that arbitrarily.

	 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
	 then the reload reg may not support QImode moves, so use SImode.
	 If foo is in memory due to spilling a pseudo reg, this is safe,
	 because the QImode value is in the least significant part of a
	 slot big enough for a SImode.  If foo is some other sort of
	 memory reference, then it is impossible to reload this case,
	 so previous passes had better make sure this never happens.

	 Then consider a one-word union which has SImode and one of its
	 members is a float, being fetched as (SUBREG:SF union:SI).
	 We must fetch that as SFmode because we could be loading into
	 a float-only register.  In this case OLD's mode is correct.

	 Consider an immediate integer: it has VOIDmode.  Here we need
	 to get a mode from something else.

	 In some cases, there is a fourth mode, the operand's
	 containing mode.  If the insn specifies a containing mode for
	 this operand, it overrides all others.

	 I am not sure whether the algorithm here is always right,
	 but it does the right things in those cases.  */

      mode = GET_MODE (old);
      if (mode == VOIDmode)
	mode = rl->inmode;

      /* We cannot use gen_lowpart_common since it can do the wrong thing
	 when REG_RTX has a multi-word mode.  Note that REG_RTX must
	 always be a REG here.  */
      if (GET_MODE (reg_rtx) != mode)
	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
    }
  /* Record the (possibly mode-adjusted) reload reg for the inheritance
     bookkeeping in emit_reload_insns.  */
  reload_reg_rtx_for_input[j] = reg_rtx;

  if (old != 0
      /* AUTO_INC reloads need to be handled even if inherited.  We got an
	 AUTO_INC reload if reload_out is set but reload_out_reg isn't.  */
      && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
      && ! rtx_equal_p (reg_rtx, old)
      && reg_rtx != 0)
    emit_input_reload_insns (chain, rld + j, old, j);

  /* When inheriting a wider reload, we have a MEM in rl->in,
     e.g. inheriting a SImode output reload for
     (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
  if (optimize && reload_inherited[j] && rl->in
      && MEM_P (rl->in)
      && MEM_P (rl->in_reg)
      && reload_spill_index[j] >= 0
      && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
    rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];

  /* If we are reloading a register that was recently stored in with an
     output-reload, see if we can prove there was
     actually no need to store the old value in it.  */

  if (optimize
      && (reload_inherited[j] || reload_override_in[j])
      && reg_rtx
      && REG_P (reg_rtx)
      && spill_reg_store[REGNO (reg_rtx)] != 0
#if 0
      /* There doesn't seem to be any reason to restrict this to pseudos
	 and doing so loses in the case where we are copying from a
	 register of the wrong class.  */
      && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
#endif
      /* The insn might have already some references to stackslots
	 replaced by MEMs, while reload_out_reg still names the
	 original pseudo.  */
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
	  || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
    delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
}
7893
7894 /* Do output reloading for reload RL, which is for the insn described by
7895 CHAIN and has the number J.
7896 ??? At some point we need to support handling output reloads of
7897 JUMP_INSNs or insns that set cc0. */
static void
do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
{
  rtx note, old;
  rtx insn = chain->insn;
  /* If this is an output reload that stores something that is
     not loaded in this same reload, see if we can eliminate a previous
     store.  */
  rtx pseudo = rl->out_reg;
  rtx reg_rtx = rl->reg_rtx;

  if (rl->out && reg_rtx)
    {
      enum machine_mode mode;

      /* Determine the mode to reload in.
	 See comments above (for input reloading).  */
      mode = GET_MODE (rl->out);
      if (mode == VOIDmode)
	{
	  /* VOIDmode should never happen for an output.  */
	  if (asm_noperands (PATTERN (insn)) < 0)
	    /* It's the compiler's fault.  */
	    fatal_insn ("VOIDmode on an output", insn);
	  error_for_asm (insn, "output operand is constant in %<asm%>");
	  /* Prevent crash--use something we know is valid.  */
	  mode = word_mode;
	  rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
	}
      if (GET_MODE (reg_rtx) != mode)
	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
    }
  /* Record the (possibly mode-adjusted) reload reg for the inheritance
     bookkeeping in emit_reload_insns.  */
  reload_reg_rtx_for_output[j] = reg_rtx;

  if (pseudo
      && optimize
      && REG_P (pseudo)
      && ! rtx_equal_p (rl->in_reg, pseudo)
      && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
      && reg_last_reload_reg[REGNO (pseudo)])
    {
      int pseudo_no = REGNO (pseudo);
      int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);

      /* We don't need to test full validity of last_regno for
	 inherit here; we only want to know if the store actually
	 matches the pseudo.  */
      if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
	  && reg_reloaded_contents[last_regno] == pseudo_no
	  && spill_reg_store[last_regno]
	  && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
	delete_output_reload (insn, j, last_regno, reg_rtx);
    }

  /* Nothing to emit if there is no destination, no reload reg, or the
     value is already in place.  */
  old = rl->out_reg;
  if (old == 0
      || reg_rtx == 0
      || rtx_equal_p (old, reg_rtx))
    return;

  /* An output operand that dies right away does need a reload,
     but need not be copied from it.  Show the new location in the
     REG_UNUSED note.  */
  if ((REG_P (old) || GET_CODE (old) == SCRATCH)
      && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
    {
      XEXP (note, 0) = reg_rtx;
      return;
    }
  /* Likewise for a SUBREG of an operand that dies.  */
  else if (GET_CODE (old) == SUBREG
	   && REG_P (SUBREG_REG (old))
	   && 0 != (note = find_reg_note (insn, REG_UNUSED,
					  SUBREG_REG (old))))
    {
      XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
      return;
    }
  else if (GET_CODE (old) == SCRATCH)
    /* If we aren't optimizing, there won't be a REG_UNUSED note,
       but we don't want to make an output reload.  */
    return;

  /* If this is a JUMP_INSN, we can't support output reloads yet.  */
  gcc_assert (NONJUMP_INSN_P (insn));

  emit_output_reload_insns (chain, rld + j, j);
}
7986
7987 /* A reload copies values of MODE from register SRC to register DEST.
7988 Return true if it can be treated for inheritance purposes like a
7989 group of reloads, each one reloading a single hard register. The
7990 caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
7991 occupy the same number of hard registers. */
7992
7993 static bool
7994 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
7995 int src ATTRIBUTE_UNUSED,
7996 enum machine_mode mode ATTRIBUTE_UNUSED)
7997 {
7998 #ifdef CANNOT_CHANGE_MODE_CLASS
7999 return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
8000 && !REG_CANNOT_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
8001 #else
8002 return true;
8003 #endif
8004 }
8005
8006 /* Output insns to reload values in and out of the chosen reload regs. */
8007
static void
emit_reload_insns (struct insn_chain *chain)
{
  rtx insn = chain->insn;

  int j;

  CLEAR_HARD_REG_SET (reg_reloaded_died);

  /* Reset all per-operand insn sequences; do_input_reload and
     do_output_reload accumulate into these.  */
  for (j = 0; j < reload_n_operands; j++)
    input_reload_insns[j] = input_address_reload_insns[j]
      = inpaddr_address_reload_insns[j]
      = output_reload_insns[j] = output_address_reload_insns[j]
      = outaddr_address_reload_insns[j]
      = other_output_reload_insns[j] = 0;
  other_input_address_reload_insns = 0;
  other_input_reload_insns = 0;
  operand_reload_insns = 0;
  other_operand_reload_insns = 0;

  /* Dump reloads into the dump file.  */
  if (dump_file)
    {
      fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
      debug_reload_to_stream (dump_file);
    }

  for (j = 0; j < n_reloads; j++)
    if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
      {
	unsigned int i;

	for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
	  new_spill_reg_store[i] = 0;
      }

  /* Now output the instructions to copy the data into and out of the
     reload registers.  Do these in the order that the reloads were reported,
     since reloads of base and index registers precede reloads of operands
     and the operands may need the base and index registers reloaded.  */

  for (j = 0; j < n_reloads; j++)
    {
      do_input_reload (chain, rld + j, j);
      do_output_reload (chain, rld + j, j);
    }

  /* Now write all the insns we made for reloads in the order expected by
     the allocation functions.  Prior to the insn being reloaded, we write
     the following reloads:

     RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.

     RELOAD_OTHER reloads.

     For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
     by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
     RELOAD_FOR_INPUT reload for the operand.

     RELOAD_FOR_OPADDR_ADDRS reloads.

     RELOAD_FOR_OPERAND_ADDRESS reloads.

     After the insn being reloaded, we write the following:

     For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
     by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
     RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
     reloads for the operand.  The RELOAD_OTHER output reloads are
     output in descending order by reload number.  */

  emit_insn_before (other_input_address_reload_insns, insn);
  emit_insn_before (other_input_reload_insns, insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      emit_insn_before (inpaddr_address_reload_insns[j], insn);
      emit_insn_before (input_address_reload_insns[j], insn);
      emit_insn_before (input_reload_insns[j], insn);
    }

  emit_insn_before (other_operand_reload_insns, insn);
  emit_insn_before (operand_reload_insns, insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
      x = emit_insn_after (output_address_reload_insns[j], x);
      x = emit_insn_after (output_reload_insns[j], x);
      emit_insn_after (other_output_reload_insns[j], x);
    }

  /* For all the spill regs newly reloaded in this instruction,
     record what they were reloaded from, so subsequent instructions
     can inherit the reloads.

     Update spill_reg_store for the reloads of this insn.
     Copy the elements that were updated in the loop above.  */

  for (j = 0; j < n_reloads; j++)
    {
      int r = reload_order[j];
      int i = reload_spill_index[r];

      /* If this is a non-inherited input reload from a pseudo, we must
	 clear any memory of a previous store to the same pseudo.  Only do
	 something if there will not be an output reload for the pseudo
	 being reloaded.  */
      if (rld[r].in_reg != 0
	  && ! (reload_inherited[r] || reload_override_in[r]))
	{
	  rtx reg = rld[r].in_reg;

	  if (GET_CODE (reg) == SUBREG)
	    reg = SUBREG_REG (reg);

	  if (REG_P (reg)
	      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
	      && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
	    {
	      int nregno = REGNO (reg);

	      if (reg_last_reload_reg[nregno])
		{
		  int last_regno = REGNO (reg_last_reload_reg[nregno]);

		  if (reg_reloaded_contents[last_regno] == nregno)
		    spill_reg_store[last_regno] = 0;
		}
	    }
	}

      /* I is nonneg if this reload used a register.
	 If rld[r].reg_rtx is 0, this is an optional reload
	 that we opted to ignore.  */

      if (i >= 0 && rld[r].reg_rtx != 0)
	{
	  int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
	  int k;

	  /* For a multi register reload, we need to check if all or part
	     of the value lives to the end.  */
	  for (k = 0; k < nr; k++)
	    if (reload_reg_reaches_end_p (i + k, r))
	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);

	  /* Maybe the spill reg contains a copy of reload_out.  */
	  if (rld[r].out != 0
	      && (REG_P (rld[r].out)
		  || (rld[r].out_reg
		      ? REG_P (rld[r].out_reg)
		      /* The reload value is an auto-modification of
			 some kind.  For PRE_INC, POST_INC, PRE_DEC
			 and POST_DEC, we record an equivalence
			 between the reload register and the operand
			 on the optimistic assumption that we can make
			 the equivalence hold.  reload_as_needed must
			 then either make it hold or invalidate the
			 equivalence.

			 PRE_MODIFY and POST_MODIFY addresses are reloaded
			 somewhat differently, and allowing them here leads
			 to problems.  */
		      : (GET_CODE (rld[r].out) != POST_MODIFY
			 && GET_CODE (rld[r].out) != PRE_MODIFY))))
	    {
	      rtx reg;

	      reg = reload_reg_rtx_for_output[r];
	      if (reload_reg_rtx_reaches_end_p (reg, r))
		{
		  enum machine_mode mode = GET_MODE (reg);
		  int regno = REGNO (reg);
		  int nregs = hard_regno_nregs[regno][mode];
		  rtx out = (REG_P (rld[r].out)
			     ? rld[r].out
			     : rld[r].out_reg
			     ? rld[r].out_reg
			     /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
		  int out_regno = REGNO (out);
		  int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
				   : hard_regno_nregs[out_regno][mode]);
		  bool piecemeal;

		  spill_reg_store[regno] = new_spill_reg_store[regno];
		  spill_reg_stored_to[regno] = out;
		  reg_last_reload_reg[out_regno] = reg;

		  piecemeal = (HARD_REGISTER_NUM_P (out_regno)
			       && nregs == out_nregs
			       && inherit_piecemeal_p (out_regno, regno, mode));

		  /* If OUT_REGNO is a hard register, it may occupy more than
		     one register.  If it does, say what is in the
		     rest of the registers assuming that both registers
		     agree on how many words the object takes.  If not,
		     invalidate the subsequent registers.  */

		  if (HARD_REGISTER_NUM_P (out_regno))
		    for (k = 1; k < out_nregs; k++)
		      reg_last_reload_reg[out_regno + k]
			= (piecemeal ? regno_reg_rtx[regno + k] : 0);

		  /* Now do the inverse operation.  */
		  for (k = 0; k < nregs; k++)
		    {
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
		      reg_reloaded_contents[regno + k]
			= (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
			   ? out_regno
			   : out_regno + k);
		      reg_reloaded_insn[regno + k] = insn;
		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
		      if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					  regno + k);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					    regno + k);
		    }
		}
	    }
	  /* Maybe the spill reg contains a copy of reload_in.  Only do
	     something if there will not be an output reload for
	     the register being reloaded.  */
	  else if (rld[r].out_reg == 0
		   && rld[r].in != 0
		   && ((REG_P (rld[r].in)
			&& !HARD_REGISTER_P (rld[r].in)
			&& !REGNO_REG_SET_P (&reg_has_output_reload,
					     REGNO (rld[r].in)))
		       || (REG_P (rld[r].in_reg)
			   && !REGNO_REG_SET_P (&reg_has_output_reload,
						REGNO (rld[r].in_reg))))
		   && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
	    {
	      rtx reg;

	      reg = reload_reg_rtx_for_input[r];
	      if (reload_reg_rtx_reaches_end_p (reg, r))
		{
		  enum machine_mode mode;
		  int regno;
		  int nregs;
		  int in_regno;
		  int in_nregs;
		  rtx in;
		  bool piecemeal;

		  mode = GET_MODE (reg);
		  regno = REGNO (reg);
		  nregs = hard_regno_nregs[regno][mode];
		  if (REG_P (rld[r].in)
		      && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
		    in = rld[r].in;
		  else if (REG_P (rld[r].in_reg))
		    in = rld[r].in_reg;
		  else
		    in = XEXP (rld[r].in_reg, 0);
		  in_regno = REGNO (in);

		  in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
			      : hard_regno_nregs[in_regno][mode]);

		  reg_last_reload_reg[in_regno] = reg;

		  piecemeal = (HARD_REGISTER_NUM_P (in_regno)
			       && nregs == in_nregs
			       && inherit_piecemeal_p (regno, in_regno, mode));

		  if (HARD_REGISTER_NUM_P (in_regno))
		    for (k = 1; k < in_nregs; k++)
		      reg_last_reload_reg[in_regno + k]
			= (piecemeal ? regno_reg_rtx[regno + k] : 0);

		  /* Unless we inherited this reload, show we haven't
		     recently done a store.
		     Previous stores of inherited auto_inc expressions
		     also have to be discarded.  */
		  if (! reload_inherited[r]
		      || (rld[r].out && ! rld[r].out_reg))
		    spill_reg_store[regno] = 0;

		  for (k = 0; k < nregs; k++)
		    {
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
		      reg_reloaded_contents[regno + k]
			= (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
			   ? in_regno
			   : in_regno + k);
		      reg_reloaded_insn[regno + k] = insn;
		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
		      if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					  regno + k);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					    regno + k);
		    }
		}
	    }
	}

      /* The following if-statement was #if 0'd in 1.34 (or before...).
	 It's reenabled in 1.35 because supposedly nothing else
	 deals with this problem.  */

      /* If a register gets output-reloaded from a non-spill register,
	 that invalidates any previous reloaded copy of it.
	 But forget_old_reloads_1 won't get to see it, because
	 it thinks only about the original insn.  So invalidate it here.
	 Also do the same thing for RELOAD_OTHER constraints where the
	 output is discarded.  */
      if (i < 0
	  && ((rld[r].out != 0
	       && (REG_P (rld[r].out)
		   || (MEM_P (rld[r].out)
		       && REG_P (rld[r].out_reg))))
	      || (rld[r].out == 0 && rld[r].out_reg
		  && REG_P (rld[r].out_reg))))
	{
	  rtx out = ((rld[r].out && REG_P (rld[r].out))
		     ? rld[r].out : rld[r].out_reg);
	  int out_regno = REGNO (out);
	  enum machine_mode mode = GET_MODE (out);

	  /* REG_RTX is now set or clobbered by the main instruction.
	     As the comment above explains, forget_old_reloads_1 only
	     sees the original instruction, and there is no guarantee
	     that the original instruction also clobbered REG_RTX.
	     For example, if find_reloads sees that the input side of
	     a matched operand pair dies in this instruction, it may
	     use the input register as the reload register.

	     Calling forget_old_reloads_1 is a waste of effort if
	     REG_RTX is also the output register.

	     If we know that REG_RTX holds the value of a pseudo
	     register, the code after the call will record that fact.  */
	  if (rld[r].reg_rtx && rld[r].reg_rtx != out)
	    forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);

	  if (!HARD_REGISTER_NUM_P (out_regno))
	    {
	      rtx src_reg, store_insn = NULL_RTX;

	      reg_last_reload_reg[out_regno] = 0;

	      /* If we can find a hard register that is stored, record
		 the storing insn so that we may delete this insn with
		 delete_output_reload.  */
	      src_reg = reload_reg_rtx_for_output[r];

	      if (src_reg)
		{
		  if (reload_reg_rtx_reaches_end_p (src_reg, r))
		    store_insn = new_spill_reg_store[REGNO (src_reg)];
		  else
		    src_reg = NULL_RTX;
		}
	      else
		{
		  /* If this is an optional reload, try to find the
		     source reg from an input reload.  */
		  rtx set = single_set (insn);
		  if (set && SET_DEST (set) == rld[r].out)
		    {
		      int k;

		      src_reg = SET_SRC (set);
		      store_insn = insn;
		      for (k = 0; k < n_reloads; k++)
			{
			  if (rld[k].in == src_reg)
			    {
			      src_reg = reload_reg_rtx_for_input[k];
			      break;
			    }
			}
		    }
		}
	      if (src_reg && REG_P (src_reg)
		  && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
		{
		  int src_regno, src_nregs, k;
		  rtx note;

		  gcc_assert (GET_MODE (src_reg) == mode);
		  src_regno = REGNO (src_reg);
		  src_nregs = hard_regno_nregs[src_regno][mode];
		  /* The place where to find a death note varies with
		     PRESERVE_DEATH_INFO_REGNO_P .  The condition is not
		     necessarily checked exactly in the code that moves
		     notes, so just check both locations.  */
		  note = find_regno_note (insn, REG_DEAD, src_regno);
		  if (! note && store_insn)
		    note = find_regno_note (store_insn, REG_DEAD, src_regno);
		  for (k = 0; k < src_nregs; k++)
		    {
		      spill_reg_store[src_regno + k] = store_insn;
		      spill_reg_stored_to[src_regno + k] = out;
		      reg_reloaded_contents[src_regno + k] = out_regno;
		      reg_reloaded_insn[src_regno + k] = store_insn;
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
		      SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
		      if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k,
							  mode))
			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					  src_regno + k);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					    src_regno + k);
		      SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
		      /* NOTE(review): these two operate on SRC_REGNO, not
			 SRC_REGNO + K, unlike the surrounding updates --
			 confirm this asymmetry is intentional.  */
		      if (note)
			SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
		    }
		  reg_last_reload_reg[out_regno] = src_reg;
		  /* We have to set reg_has_output_reload here, or else
		     forget_old_reloads_1 will clear reg_last_reload_reg
		     right away.  */
		  SET_REGNO_REG_SET (&reg_has_output_reload,
				     out_regno);
		}
	    }
	  else
	    {
	      int k, out_nregs = hard_regno_nregs[out_regno][mode];

	      for (k = 0; k < out_nregs; k++)
		reg_last_reload_reg[out_regno + k] = 0;
	    }
	}
    }
  IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
}
8446 \f
8447 /* Go through the motions to emit INSN and test if it is strictly valid.
8448 Return the emitted insn if valid, else return NULL. */
8449
8450 static rtx
8451 emit_insn_if_valid_for_reload (rtx insn)
8452 {
8453 rtx last = get_last_insn ();
8454 int code;
8455
8456 insn = emit_insn (insn);
8457 code = recog_memoized (insn);
8458
8459 if (code >= 0)
8460 {
8461 extract_insn (insn);
8462 /* We want constrain operands to treat this insn strictly in its
8463 validity determination, i.e., the way it would after reload has
8464 completed. */
8465 if (constrain_operands (1))
8466 return insn;
8467 }
8468
8469 delete_insns_since (last);
8470 return NULL;
8471 }
8472
8473 #ifdef SECONDARY_MEMORY_NEEDED
8474 /* If X is not a subreg, return it unmodified. If it is a subreg,
8475 look up whether we made a replacement for the SUBREG_REG. Return
8476 either the replacement or the SUBREG_REG. */
8477
8478 static rtx
8479 replaced_subreg (rtx x)
8480 {
8481 if (GET_CODE (x) == SUBREG)
8482 return find_replacement (&SUBREG_REG (x));
8483 return x;
8484 }
8485 #endif
8486
/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.

   Returns first insn emitted.  */

static rtx
gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
{
  /* Remember where the insn stream ends now, so we can return the first
     insn emitted by this call at the end.  */
  rtx last = get_last_insn ();
  rtx tem;
#ifdef SECONDARY_MEMORY_NEEDED
  rtx tem1, tem2;
#endif

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (!strip_paradoxical_subreg (&in, &out))
    strip_paradoxical_subreg (&out, &in);

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Or we can be asked to reload an unary operand that was a fragment of
     an addressing mode, into a register.  If it isn't recognized as-is,
     we try making the unop operand and the reload-register the same:
     (set reg:X (unop:X expr:Y))
     -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `extract_insn' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      enum insn_code code;

      /* Parts of IN may be being reloaded separately; use the replacement
	 values so the insn we emit refers to the spill registers.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (REG_P (XEXP (in, 1))
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);

      /* First attempt: a single three-operand add of the whole sum.  */
      insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
      if (insn)
	return insn;

      /* If that failed, we must use a conservative two-insn sequence.

	 Use a move to copy one operand into the reload register.  Prefer
	 to reload a constant, MEM or pseudo since the move patterns can
	 handle an arbitrary operand.  If OP1 is not a constant, MEM or
	 pseudo and OP1 is not a valid operand for an add instruction, then
	 reload OP1.

	 After reloading one of the operands into the reload register, add
	 the reload register to the output register.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      code = optab_handler (add_optab, GET_MODE (out));

      if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
	  || (REG_P (op1)
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
	  || (code != CODE_FOR_nothing
	      && !insn_operand_matches (code, 2, op1)))
	tem = op0, op0 = op1, op1 = tem;

      /* Recursively reload the first operand into OUT.  */
      gen_reload (out, op0, opnum, type);

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
      if (insn)
	{
	  /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
	  set_dst_reg_note (insn, REG_EQUIV, in, out);
	  return insn;
	}

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      /* OUT must not overlap OP0, or the add below would clobber the
	 operand we still need.  */
      gcc_assert (!reg_overlap_mentioned_p (out, op0));
      gen_reload (out, op1, opnum, type);
      insn = emit_insn (gen_add2_insn (out, op0));
      set_dst_reg_note (insn, REG_EQUIV, in, out);
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if ((tem1 = replaced_subreg (in), tem2 = replaced_subreg (out),
	    (REG_P (tem1) && REG_P (tem2)))
	   && REGNO (tem1) < FIRST_PSEUDO_REGISTER
	   && REGNO (tem2) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (tem1)),
				       REGNO_REG_CLASS (REGNO (tem2)),
				       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (in));

      /* Do the move in two steps through the secondary memory slot.  */
      gen_reload (loc, in, opnum, type);
      gen_reload (out, loc, opnum, type);
    }
#endif
  else if (REG_P (out) && UNARY_P (in))
    {
      rtx insn;
      rtx op1;
      rtx out_moded;
      rtx set;

      op1 = find_replacement (&XEXP (in, 0));
      if (op1 != XEXP (in, 0))
	in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);

      /* First, try a plain SET.  */
      set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
      if (set)
	return set;

      /* If that failed, move the inner operand to the reload
	 register, and try the same unop with the inner expression
	 replaced with the reload register.  */

      if (GET_MODE (op1) != GET_MODE (out))
	out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
      else
	out_moded = out;

      gen_reload (out_moded, op1, opnum, type);

      insn
	= gen_rtx_SET (VOIDmode, out,
		       gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
				      out_moded));
      insn = emit_insn_if_valid_for_reload (insn);
      if (insn)
	{
	  set_unique_reg_note (insn, REG_EQUIV, in);
	  return insn;
	}

      /* No strategy worked for the unary operand; this is a compiler
	 error, not a user error.  */
      fatal_insn ("failure trying to reload:", set);
    }
  /* If IN is a simple operand, use gen_move_insn.  */
  else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
    {
      tem = emit_insn (gen_move_insn (out, in));
      /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note.  */
      mark_jump_label (in, tem, 0);
    }

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (out, in));
#endif

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx_SET (VOIDmode, out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
8723 \f
/* Delete a previously made output-reload whose result we now believe
   is not needed.  First we double-check.

   INSN is the insn now being processed.
   LAST_RELOAD_REG is the hard register number for which we want to delete
   the last output reload.
   J is the reload-number that originally used REG.  The caller has made
   certain that reload J doesn't use REG any longer for input.
   NEW_RELOAD_REG is reload register that reload J is using for REG.  */

static void
delete_output_reload (rtx insn, int j, int last_reload_reg, rtx new_reload_reg)
{
  rtx output_reload_insn = spill_reg_store[last_reload_reg];
  rtx reg = spill_reg_stored_to[last_reload_reg];
  int k;
  int n_occurrences;
  int n_inherited = 0;
  rtx i1;
  rtx substed;
  unsigned regno;
  int nregs;

  /* It is possible that this reload has been only used to set another reload
     we eliminated earlier and thus deleted this instruction too.  */
  if (INSN_DELETED_P (output_reload_insn))
    return;

  /* Get the raw pseudo-register referred to.  */

  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  substed = reg_equiv_memory_loc (REGNO (reg));

  /* This is unsafe if the operand occurs more often in the current
     insn than it is inherited.  Count how many of the reloads for
     INSN refer to REG via inheritance or override.  */
  for (k = n_reloads - 1; k >= 0; k--)
    {
      rtx reg2 = rld[k].in;
      if (! reg2)
	continue;
      if (MEM_P (reg2) || reload_override_in[k])
	reg2 = rld[k].in_reg;
#ifdef AUTO_INC_DEC
      if (rld[k].out && ! rld[k].out_reg)
	reg2 = XEXP (rld[k].in_reg, 0);
#endif
      while (GET_CODE (reg2) == SUBREG)
	reg2 = SUBREG_REG (reg2);
      if (rtx_equal_p (reg2, reg))
	{
	  if (reload_inherited[k] || reload_override_in[k] || k == j)
	    n_inherited++;
	  else
	    /* REG is used by a reload we can't account for; deleting the
	       store would be unsafe.  */
	    return;
	}
    }
  /* Count every appearance of REG (including its memory equivalents) in
     INSN; each must be covered by an inherited reload counted above.  */
  n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
    n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
					reg, 0);
  if (substed)
    n_occurrences += count_occurrences (PATTERN (insn),
					eliminate_regs (substed, VOIDmode,
							NULL_RTX), 0);
  for (i1 = reg_equiv_alt_mem_list (REGNO (reg)); i1; i1 = XEXP (i1, 1))
    {
      gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
      n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
    }
  if (n_occurrences > n_inherited)
    return;

  regno = REGNO (reg);
  if (regno >= FIRST_PSEUDO_REGISTER)
    nregs = 1;
  else
    nregs = hard_regno_nregs[regno][GET_MODE (reg)];

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and we're within the same basic block, then the value can only
     pass through the reload reg and end up here.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (i1))
	return;
      if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
	  && refers_to_regno_p (regno, regno + nregs, PATTERN (i1), NULL))
	{
	  /* If this is USE in front of INSN, we only have to check that
	     there are no more references than accounted for by inheritance.  */
	  while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
	    {
	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
	      i1 = NEXT_INSN (i1);
	    }
	  if (n_occurrences <= n_inherited && i1 == insn)
	    break;
	  return;
	}
    }

  /* We will be deleting the insn.  Remove the spill reg information.  */
  for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
    {
      spill_reg_store[last_reload_reg + k] = 0;
      spill_reg_stored_to[last_reload_reg + k] = 0;
    }

  /* The caller has already checked that REG dies or is set in INSN.
     It has also checked that we are optimizing, and thus some
     inaccuracies in the debugging information are acceptable.
     So we could just delete output_reload_insn.  But in some cases
     we can improve the debugging information without sacrificing
     optimization - maybe even improving the code:  See if the pseudo
     reg has been completely replaced with reload regs.  If so, delete
     the store insn and forget we had a stack slot for the pseudo.  */
  if (rld[j].out != rld[j].in
      && REG_N_DEATHS (REGNO (reg)) == 1
      && REG_N_SETS (REGNO (reg)) == 1
      && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here and the beginning of
	 the current basic block.  (We also know that the last use before
	 INSN was the output reload we are thinking of deleting, but never
	 mind that.)  Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (LABEL_P (i2)
	      || JUMP_P (i2))
	    break;
	  if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    {
	      /* Some other ref remains; just delete the output reload we
		 know to be dead.  */
	      delete_address_reloads (output_reload_insn, insn);
	      delete_insn (output_reload_insn);
	      return;
	    }
	}

      /* Delete the now-dead stores into this pseudo.  Note that this
	 loop also takes care of deleting output_reload_insn.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    {
	      delete_address_reloads (i2, insn);
	      delete_insn (i2);
	    }
	  if (LABEL_P (i2)
	      || JUMP_P (i2))
	    break;
	}

      /* For the debugging info, say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
      if (ira_conflicts_p)
	/* Inform IRA about the change.  */
	ira_mark_allocation_change (REGNO (reg));
      alter_reg (REGNO (reg), -1, false);
    }
  else
    {
      /* REG is still live; just delete the output reload itself, along
	 with any reloads that only fed its address.  */
      delete_address_reloads (output_reload_insn, insn);
      delete_insn (output_reload_insn);
    }
}
8907
8908 /* We are going to delete DEAD_INSN. Recursively delete loads of
8909 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8910 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
8911 static void
8912 delete_address_reloads (rtx dead_insn, rtx current_insn)
8913 {
8914 rtx set = single_set (dead_insn);
8915 rtx set2, dst, prev, next;
8916 if (set)
8917 {
8918 rtx dst = SET_DEST (set);
8919 if (MEM_P (dst))
8920 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8921 }
8922 /* If we deleted the store from a reloaded post_{in,de}c expression,
8923 we can delete the matching adds. */
8924 prev = PREV_INSN (dead_insn);
8925 next = NEXT_INSN (dead_insn);
8926 if (! prev || ! next)
8927 return;
8928 set = single_set (next);
8929 set2 = single_set (prev);
8930 if (! set || ! set2
8931 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8932 || !CONST_INT_P (XEXP (SET_SRC (set), 1))
8933 || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
8934 return;
8935 dst = SET_DEST (set);
8936 if (! rtx_equal_p (dst, SET_DEST (set2))
8937 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8938 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8939 || (INTVAL (XEXP (SET_SRC (set), 1))
8940 != -INTVAL (XEXP (SET_SRC (set2), 1))))
8941 return;
8942 delete_related_insns (prev);
8943 delete_related_insns (next);
8944 }
8945
/* Subfunction of delete_address_reloads: process registers found in X.
   If X (or a register inside it) is a reload register whose defining
   load is no longer needed once DEAD_INSN goes away, delete that load
   too, recursively.  */
static void
delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
{
  rtx prev, set, dst, i2;
  int i, j;
  enum rtx_code code = GET_CODE (x);

  /* For anything but a bare REG, recurse into all sub-expressions.  */
  if (code != REG)
    {
      const char *fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
	  else if (fmt[i] == 'E')
	    {
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
					  current_insn);
	    }
	}
      return;
    }

  /* Only registers currently used as spill/reload registers interest us.  */
  if (spill_reg_order[REGNO (x)] < 0)
    return;

  /* Scan backwards for the insn that sets x.  This might be a way back due
     to inheritance.  */
  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
    {
      code = GET_CODE (prev);
      if (code == CODE_LABEL || code == JUMP_INSN)
	return;
      if (!INSN_P (prev))
	continue;
      if (reg_set_p (x, PATTERN (prev)))
	break;
      /* X is used before being set again: its load is still needed.  */
      if (reg_referenced_p (x, PATTERN (prev)))
	return;
    }
  /* The setter must be an insn generated by this reload pass.  */
  if (! prev || INSN_UID (prev) < reload_first_uid)
    return;
  /* Check that PREV only sets the reload register.  */
  set = single_set (prev);
  if (! set)
    return;
  dst = SET_DEST (set);
  if (!REG_P (dst)
      || ! rtx_equal_p (dst, x))
    return;
  if (! reg_set_p (dst, PATTERN (dead_insn)))
    {
      /* Check if DST was used in a later insn -
	 it might have been inherited.  */
      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
	{
	  if (LABEL_P (i2))
	    break;
	  if (! INSN_P (i2))
	    continue;
	  if (reg_referenced_p (dst, PATTERN (i2)))
	    {
	      /* If there is a reference to the register in the current insn,
		 it might be loaded in a non-inherited reload.  If no other
		 reload uses it, that means the register is set before
		 referenced.  */
	      if (i2 == current_insn)
		{
		  for (j = n_reloads - 1; j >= 0; j--)
		    if ((rld[j].reg_rtx == dst && reload_inherited[j])
			|| reload_override_in[j] == dst)
		      return;
		  for (j = n_reloads - 1; j >= 0; j--)
		    if (rld[j].in && rld[j].reg_rtx == dst)
		      break;
		  if (j >= 0)
		    break;
		}
	      return;
	    }
	  if (JUMP_P (i2))
	    break;
	  /* If DST is still live at CURRENT_INSN, check if it is used for
	     any reload.  Note that even if CURRENT_INSN sets DST, we still
	     have to check the reloads.  */
	  if (i2 == current_insn)
	    {
	      for (j = n_reloads - 1; j >= 0; j--)
		if ((rld[j].reg_rtx == dst && reload_inherited[j])
		    || reload_override_in[j] == dst)
		  return;
	      /* ??? We can't finish the loop here, because dst might be
		 allocated to a pseudo in this block if no reload in this
		 block needs any of the classes containing DST - see
		 spill_hard_reg.  There is no easy way to tell this, so we
		 have to scan till the end of the basic block.  */
	    }
	  if (reg_set_p (dst, PATTERN (i2)))
	    break;
	}
    }
  /* PREV is dead too; its own address registers may now be deletable.  */
  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
  reg_reloaded_contents[REGNO (dst)] = -1;
  delete_insn (prev);
}
9053 \f
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload from.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.  */

static void
inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = find_replacement (&XEXP (value, 0));
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;
  /* The actual source to copy from: INCLOC itself unless the caller
     supplied a cheaper equivalent in IN.  */
  rtx real_in = in == value ? incloc : in;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (REG_P (incloc))
    reg_last_reload_reg[REGNO (incloc)] = 0;

  /* Determine the increment: for {PRE,POST}_MODIFY take it from the
     expression itself; otherwise build it from INC_AMOUNT, negated
     for decrements.  */
  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
      inc = find_replacement (&XEXP (XEXP (value, 1), 1));
    }
  else
    {
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = GEN_INT (inc_amount);
    }

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post && real_in != reloadreg)
    emit_insn (gen_move_insn (reloadreg, real_in));

  if (in == value)
    {
      /* See if we can directly increment INCLOC.  Use a method similar to
	 that in gen_reload.  */

      last = get_last_insn ();
      add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
					 gen_rtx_PLUS (GET_MODE (incloc),
						       incloc, inc)));

      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  extract_insn (add_insn);
	  if (constrain_operands (1))
	    {
	      /* If this is a pre-increment and we have incremented the value
		 where it lives, copy the incremented value to RELOADREG to
		 be used as an address.  */

	      if (! post)
		emit_insn (gen_move_insn (reloadreg, incloc));
	      return;
	    }
	}
      /* Direct increment was not valid; discard it and fall through to
	 the RELOADREG-based sequence below.  */
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      if (in != reloadreg)
	emit_insn (gen_move_insn (reloadreg, real_in));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied IN to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      if (CONST_INT_P (inc))
	emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL (inc))));
      else
	emit_insn (gen_sub2_insn (reloadreg, inc));
    }
}
9160 \f
9161 #ifdef AUTO_INC_DEC
9162 static void
9163 add_auto_inc_notes (rtx insn, rtx x)
9164 {
9165 enum rtx_code code = GET_CODE (x);
9166 const char *fmt;
9167 int i, j;
9168
9169 if (code == MEM && auto_inc_p (XEXP (x, 0)))
9170 {
9171 add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
9172 return;
9173 }
9174
9175 /* Scan all the operand sub-expressions. */
9176 fmt = GET_RTX_FORMAT (code);
9177 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9178 {
9179 if (fmt[i] == 'e')
9180 add_auto_inc_notes (insn, XEXP (x, i));
9181 else if (fmt[i] == 'E')
9182 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9183 add_auto_inc_notes (insn, XVECEXP (x, i, j));
9184 }
9185 }
9186 #endif