1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
32 #include "insn-config.h"
38 #include "addresses.h"
39 #include "basic-block.h"
51 /* This file contains the reload pass of the compiler, which is
52 run after register allocation has been done. It checks that
53 each insn is valid (operands required to be in registers really
54 are in registers of the proper class) and fixes up invalid ones
55 by copying values temporarily into registers for the insns
58 The results of register allocation are described by the vector
59 reg_renumber; the insns still contain pseudo regs, but reg_renumber
60 can be used to find which hard reg, if any, a pseudo reg is in.
62 The technique we always use is to free up a few hard regs that are
63 called ``reload regs'', and for each place where a pseudo reg
64 must be in a hard reg, copy it temporarily into one of the reload regs.
66 Reload regs are allocated locally for every instruction that needs
67 reloads. When there are pseudos which are allocated to a register that
68 has been chosen as a reload reg, such pseudos must be ``spilled''.
69 This means that they go to other hard regs, or to stack slots if no other
70 available hard regs can be found. Spilling can invalidate more
71 insns, requiring additional need for reloads, so we must keep checking
72 until the process stabilizes.
74 For machines with different classes of registers, we must keep track
75 of the register class needed for each reload, and make sure that
76 we allocate enough reload registers of each class.
78 The file reload.c contains the code that checks one insn for
79 validity and reports the reloads that it needs. This file
80 is in charge of scanning the entire rtl code, accumulating the
81 reload needs, spilling, assigning reload registers to use for
82 fixing up each insn, and generating the new insns to copy values
83 into the reload registers. */
85 /* During reload_as_needed, element N contains a REG rtx for the hard reg
86 into which reg N has been reloaded (perhaps for a previous insn). */
87 static rtx
*reg_last_reload_reg
;
89 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
90 for an output reload that stores into reg N. */
91 static regset_head reg_has_output_reload
;
93 /* Indicates which hard regs are reload-registers for an output reload
94 in the current insn. */
95 static HARD_REG_SET reg_is_output_reload
;
97 /* Element N is the constant value to which pseudo reg N is equivalent,
98 or zero if pseudo reg N is not equivalent to a constant.
99 find_reloads looks at this in order to replace pseudo reg N
100 with the constant it stands for. */
/* NOTE(review): deliberately non-static -- per the comment above it is
   read by find_reloads, which lives in reload.c (see the file header
   comment describing the reload.c / this-file split).  */
101 rtx
*reg_equiv_constant
;
103 /* Element N is an invariant value to which pseudo reg N is equivalent.
104 eliminate_regs_in_insn uses this to replace pseudos in particular
106 rtx
*reg_equiv_invariant
;
108 /* Element N is a memory location to which pseudo reg N is equivalent,
109 prior to any register elimination (such as frame pointer to stack
110 pointer). Depending on whether or not it is a valid address, this value
111 is transferred to either reg_equiv_address or reg_equiv_mem. */
112 rtx
*reg_equiv_memory_loc
;
114 /* We allocate reg_equiv_memory_loc inside a varray so that the garbage
115 collector can keep track of what is inside. */
116 VEC(rtx
,gc
) *reg_equiv_memory_loc_vec
;
118 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
119 This is used when the address is not valid as a memory address
120 (because its displacement is too big for the machine.) */
121 rtx
*reg_equiv_address
;
123 /* Element N is the memory slot to which pseudo reg N is equivalent,
124 or zero if pseudo reg N is not equivalent to a memory slot. */
/* NOTE(review): the declaration this comment describes (reg_equiv_mem,
   which IS referenced later in this chunk inside replace_pseudos_in) is
   missing here -- the original lines between 124 and 127 were dropped
   during extraction.  Confirm against the full file before relying on
   this span.  */
127 /* Element N is an EXPR_LIST of REG_EQUIVs containing MEMs with
128 alternate representations of the location of pseudo reg N. */
129 rtx
*reg_equiv_alt_mem_list
;
131 /* Widest width in which each pseudo reg is referred to (via subreg). */
132 static unsigned int *reg_max_ref_width
;
134 /* Element N is the list of insns that initialized reg N from its equivalent
135 constant or memory slot. */
137 int reg_equiv_init_size
;
139 /* Vector to remember old contents of reg_renumber before spilling. */
140 static short *reg_old_renumber
;
/* The next three objects are all indexed by hard register number (their
   bound is FIRST_PSEUDO_REGISTER) and are only meaningful during
   reload_as_needed, per the comments on each.  */
142 /* During reload_as_needed, element N contains the last pseudo regno reloaded
143 into hard register N. If that pseudo reg occupied more than one register,
144 reg_reloaded_contents points to that pseudo for each spill register in
145 use; all of these must remain set for an inheritance to occur. */
146 static int reg_reloaded_contents
[FIRST_PSEUDO_REGISTER
];
148 /* During reload_as_needed, element N contains the insn for which
149 hard register N was last used. Its contents are significant only
150 when reg_reloaded_valid is set for this register. */
151 static rtx reg_reloaded_insn
[FIRST_PSEUDO_REGISTER
];
153 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
154 static HARD_REG_SET reg_reloaded_valid
;
155 /* Indicate if the register was dead at the end of the reload.
156 This is only valid if reg_reloaded_contents is set and valid. */
157 static HARD_REG_SET reg_reloaded_dead
;
159 /* Indicate whether the register's current value is one that is not
160 safe to retain across a call, even for registers that are normally
161 call-saved. This is only meaningful for members of reg_reloaded_valid. */
162 static HARD_REG_SET reg_reloaded_call_part_clobbered
;
164 /* Number of spill-regs so far; number of valid elements of spill_regs. */
167 /* In parallel with spill_regs, contains REG rtx's for those regs.
168 Holds the last rtx used for any given reg, or 0 if it has never
169 been used for spilling yet. This rtx is reused, provided it has
171 static rtx spill_reg_rtx
[FIRST_PSEUDO_REGISTER
];
173 /* In parallel with spill_regs, contains nonzero for a spill reg
174 that was stored after the last time it was used.
175 The precise value is the insn generated to do the store. */
176 static rtx spill_reg_store
[FIRST_PSEUDO_REGISTER
];
178 /* This is the register that was stored with spill_reg_store. This is a
179 copy of reload_out / reload_out_reg when the value was stored; if
180 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
181 static rtx spill_reg_stored_to
[FIRST_PSEUDO_REGISTER
];
183 /* This table is the inverse mapping of spill_regs:
184 indexed by hard reg number,
185 it contains the position of that reg in spill_regs,
186 or -1 for something that is not in spill_regs.
188 ?!? This is no longer accurate. */
189 static short spill_reg_order
[FIRST_PSEUDO_REGISTER
];
191 /* This reg set indicates registers that can't be used as spill registers for
192 the currently processed insn. These are the hard registers which are live
193 during the insn, but not allocated to pseudos, as well as fixed
195 static HARD_REG_SET bad_spill_regs
;
197 /* These are the hard registers that can't be used as spill register for any
198 insn. This includes registers used for user variables and registers that
199 we can't eliminate. A register that appears in this set also can't be used
200 to retry register allocation. */
201 static HARD_REG_SET bad_spill_regs_global
;
203 /* Describes order of use of registers for reloading
204 of spilled pseudo-registers. `n_spills' is the number of
205 elements that are actually valid; new ones are added at the end.
207 Both spill_regs and spill_reg_order are used on two occasions:
208 once during find_reload_regs, where they keep track of the spill registers
209 for a single insn, but also during reload_as_needed where they show all
210 the registers ever used by reload. For the latter case, the information
211 is calculated during finish_spills. */
212 static short spill_regs
[FIRST_PSEUDO_REGISTER
];
214 /* This vector of reg sets indicates, for each pseudo, which hard registers
215 may not be used for retrying global allocation because the register was
216 formerly spilled from one of them. If we allowed reallocating a pseudo to
217 a register that it was already allocated to, reload might not
219 static HARD_REG_SET
*pseudo_previous_regs
;
221 /* This vector of reg sets indicates, for each pseudo, which hard
222 registers may not be used for retrying global allocation because they
223 are used as spill registers during one of the insns in which the
225 static HARD_REG_SET
*pseudo_forbidden_regs
;
227 /* All hard regs that have been used as spill registers for any insn are
228 marked in this set. */
229 static HARD_REG_SET used_spill_regs
;
231 /* Index of last register assigned as a spill register. We allocate in
232 a round-robin fashion. */
233 static int last_spill_reg
;
235 /* Nonzero if indirect addressing is supported on the machine; this means
236 that spilling (REG n) does not require reloading it into a register in
237 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
238 value indicates the level of indirect addressing supported, e.g., two
239 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
241 static char spill_indirect_levels
;
243 /* Nonzero if indirect addressing is supported when the innermost MEM is
244 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
245 which these are valid is the same as spill_indirect_levels, above. */
246 char indirect_symref_ok
;
248 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
249 char double_reg_address_ok
;
251 /* Record the stack slot for each spilled hard register. */
252 static rtx spill_stack_slot
[FIRST_PSEUDO_REGISTER
];
254 /* Width allocated so far for that stack slot. */
255 static unsigned int spill_stack_slot_width
[FIRST_PSEUDO_REGISTER
];
257 /* Record which pseudos needed to be spilled. */
258 static regset_head spilled_pseudos
;
260 /* Record which pseudos changed their allocation in finish_spills. */
261 static regset_head changed_allocation_pseudos
;
263 /* Used for communication between order_regs_for_reload and count_pseudo.
264 Used to avoid counting one pseudo twice. */
265 static regset_head pseudos_counted
;
267 /* First uid used by insns created by reload in this function.
268 Used in find_equiv_reg. */
269 int reload_first_uid
;
271 /* Flag set by local-alloc or global-alloc if anything is live in
272 a call-clobbered reg across calls. */
273 int caller_save_needed
;
275 /* Set to 1 while reload_as_needed is operating.
276 Required by some machines to handle any generated moves differently. */
277 int reload_in_progress
= 0;
279 /* These arrays record the insn_code of insns that may be needed to
280 perform input and output reloads of special objects. They provide a
281 place to pass a scratch register. */
282 enum insn_code reload_in_optab
[NUM_MACHINE_MODES
];
283 enum insn_code reload_out_optab
[NUM_MACHINE_MODES
];
285 /* This obstack is used for allocation of rtl during register elimination.
286 The allocated storage can be freed once find_reloads has processed the
288 static struct obstack reload_obstack
;
290 /* Points to the beginning of the reload_obstack. All insn_chain structures
291 are allocated first. */
292 static char *reload_startobj
;
294 /* The point after all insn_chain structures. Used to quickly deallocate
295 memory allocated in copy_reloads during calculate_needs_all_insns. */
296 static char *reload_firstobj
;
298 /* This points before all local rtl generated by register elimination.
299 Used to quickly free all memory after processing one insn. */
300 static char *reload_insn_firstobj
;
302 /* List of insn_chain instructions, one for every insn that reload needs to
304 struct insn_chain
*reload_insn_chain
;
306 /* List of all insns needing reloads. */
307 static struct insn_chain
*insns_need_reload
;
309 /* This structure is used to record information about register eliminations.
310 Each array entry describes one possible way of eliminating a register
311 in favor of another. If there is more than one way of eliminating a
312 particular register, the most preferred should be specified first. */
316 int from
; /* Register number to be eliminated. */
317 int to
; /* Register number used as replacement. */
318 HOST_WIDE_INT initial_offset
; /* Initial difference between values. */
319 int can_eliminate
; /* Nonzero if this elimination can be done. */
320 int can_eliminate_previous
; /* Value returned by TARGET_CAN_ELIMINATE
321 target hook in previous scan over insns
323 HOST_WIDE_INT offset
; /* Current offset between the two regs. */
324 HOST_WIDE_INT previous_offset
;/* Offset at end of previous insn. */
325 int ref_outside_mem
; /* "to" has been referenced outside a MEM. */
326 rtx from_rtx
; /* REG rtx for the register to be eliminated.
327 We cannot simply compare the number since
328 we might then spuriously replace a hard
329 register corresponding to a pseudo
330 assigned to the reg to be eliminated. */
331 rtx to_rtx
; /* REG rtx for the replacement. */
334 static struct elim_table
*reg_eliminate
= 0;
336 /* This is an intermediate structure to initialize the table. It has
337 exactly the members provided by ELIMINABLE_REGS. */
338 static const struct elim_table_1
342 } reg_eliminate_1
[] =
344 /* If a set of eliminable registers was specified, define the table from it.
345 Otherwise, default to the normal case of the frame pointer being
346 replaced by the stack pointer. */
348 #ifdef ELIMINABLE_REGS
351 {{ FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
}};
354 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
356 /* Record the number of pending eliminations that have an offset not equal
357 to their initial offset. If nonzero, we use a new copy of each
358 replacement result in any insns encountered. */
359 int num_not_at_initial_offset
;
361 /* Count the number of registers that we may be able to eliminate. */
362 static int num_eliminable
;
363 /* And the number of registers that are equivalent to a constant that
364 can be eliminated to frame_pointer / arg_pointer + constant. */
365 static int num_eliminable_invariants
;
367 /* For each label, we record the offset of each elimination. If we reach
368 a label by more than one path and an offset differs, we cannot do the
369 elimination. This information is indexed by the difference of the
370 number of the label and the first label number. We can't offset the
371 pointer itself as this can cause problems on machines with segmented
372 memory. The first table is an array of flags that records whether we
373 have yet encountered a label and the second table is an array of arrays,
374 one entry in the latter array for each elimination. */
376 static int first_label_num
;
377 static char *offsets_known_at
;
378 static HOST_WIDE_INT (*offsets_at
)[NUM_ELIMINABLE_REGS
];
380 /* Stack of addresses where an rtx has been changed. We can undo the
381 changes by popping items off the stack and restoring the original
382 value at each location.
384 We use this simplistic undo capability rather than copy_rtx as copy_rtx
385 will not make a deep copy of a normally sharable rtx, such as
386 (const (plus (symbol_ref) (const_int))). If such an expression appears
387 as R1 in gen_reload_chain_without_interm_reg_p, then a shared
388 rtx expression would be changed. See PR 42431. */
392 DEF_VEC_ALLOC_P(rtx_p
,heap
);
393 static VEC(rtx_p
,heap
) *substitute_stack
;
395 /* Number of labels in the current function. */
397 static int num_labels
;
399 static void replace_pseudos_in (rtx
*, enum machine_mode
, rtx
);
400 static void maybe_fix_stack_asms (void);
401 static void copy_reloads (struct insn_chain
*);
402 static void calculate_needs_all_insns (int);
403 static int find_reg (struct insn_chain
*, int);
404 static void find_reload_regs (struct insn_chain
*);
405 static void select_reload_regs (void);
406 static void delete_caller_save_insns (void);
408 static void spill_failure (rtx
, enum reg_class
);
409 static void count_spilled_pseudo (int, int, int);
410 static void delete_dead_insn (rtx
);
411 static void alter_reg (int, int, bool);
412 static void set_label_offsets (rtx
, rtx
, int);
413 static void check_eliminable_occurrences (rtx
);
414 static void elimination_effects (rtx
, enum machine_mode
);
415 static rtx
eliminate_regs_1 (rtx
, enum machine_mode
, rtx
, bool, bool);
416 static int eliminate_regs_in_insn (rtx
, int);
417 static void update_eliminable_offsets (void);
418 static void mark_not_eliminable (rtx
, const_rtx
, void *);
419 static void set_initial_elim_offsets (void);
420 static bool verify_initial_elim_offsets (void);
421 static void set_initial_label_offsets (void);
422 static void set_offsets_for_label (rtx
);
423 static void init_eliminable_invariants (rtx
, bool);
424 static void init_elim_table (void);
425 static void free_reg_equiv (void);
426 static void update_eliminables (HARD_REG_SET
*);
427 static void elimination_costs_in_insn (rtx
);
428 static void spill_hard_reg (unsigned int, int);
429 static int finish_spills (int);
430 static void scan_paradoxical_subregs (rtx
);
431 static void count_pseudo (int);
432 static void order_regs_for_reload (struct insn_chain
*);
433 static void reload_as_needed (int);
434 static void forget_old_reloads_1 (rtx
, const_rtx
, void *);
435 static void forget_marked_reloads (regset
);
436 static int reload_reg_class_lower (const void *, const void *);
437 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type
,
439 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type
,
441 static int reload_reg_free_p (unsigned int, int, enum reload_type
);
442 static int reload_reg_free_for_value_p (int, int, int, enum reload_type
,
444 static int free_for_value_p (int, enum machine_mode
, int, enum reload_type
,
446 static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type
);
447 static int allocate_reload_reg (struct insn_chain
*, int, int);
448 static int conflicts_with_override (rtx
);
449 static void failed_reload (rtx
, int);
450 static int set_reload_reg (int, int);
451 static void choose_reload_regs_init (struct insn_chain
*, rtx
*);
452 static void choose_reload_regs (struct insn_chain
*);
453 static void merge_assigned_reloads (rtx
);
454 static void emit_input_reload_insns (struct insn_chain
*, struct reload
*,
456 static void emit_output_reload_insns (struct insn_chain
*, struct reload
*,
458 static void do_input_reload (struct insn_chain
*, struct reload
*, int);
459 static void do_output_reload (struct insn_chain
*, struct reload
*, int);
460 static void emit_reload_insns (struct insn_chain
*);
461 static void delete_output_reload (rtx
, int, int, rtx
);
462 static void delete_address_reloads (rtx
, rtx
);
463 static void delete_address_reloads_1 (rtx
, rtx
, rtx
);
464 static rtx
inc_for_reload (rtx
, rtx
, rtx
, int);
466 static void add_auto_inc_notes (rtx
, rtx
);
468 static void substitute (rtx
*, const_rtx
, rtx
);
469 static bool gen_reload_chain_without_interm_reg_p (int, int);
470 static int reloads_conflict (int, int);
471 static rtx
gen_reload (rtx
, rtx
, int, enum reload_type
);
472 static rtx
emit_insn_if_valid_for_reload (rtx
);
474 /* Initialize the reload pass. This is called at the beginning of compilation
475 and may be called again if the target is reinitialized. */
482 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
483 Set spill_indirect_levels to the number of levels such addressing is
484 permitted, zero if it is not permitted at all. */
487 = gen_rtx_MEM (Pmode
,
490 LAST_VIRTUAL_REGISTER
+ 1),
492 spill_indirect_levels
= 0;
494 while (memory_address_p (QImode
, tem
))
496 spill_indirect_levels
++;
497 tem
= gen_rtx_MEM (Pmode
, tem
);
500 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
502 tem
= gen_rtx_MEM (Pmode
, gen_rtx_SYMBOL_REF (Pmode
, "foo"));
503 indirect_symref_ok
= memory_address_p (QImode
, tem
);
505 /* See if reg+reg is a valid (and offsettable) address. */
507 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
509 tem
= gen_rtx_PLUS (Pmode
,
510 gen_rtx_REG (Pmode
, HARD_FRAME_POINTER_REGNUM
),
511 gen_rtx_REG (Pmode
, i
));
513 /* This way, we make sure that reg+reg is an offsettable address. */
514 tem
= plus_constant (tem
, 4);
516 if (memory_address_p (QImode
, tem
))
518 double_reg_address_ok
= 1;
523 /* Initialize obstack for our rtl allocation. */
524 gcc_obstack_init (&reload_obstack
);
525 reload_startobj
= XOBNEWVAR (&reload_obstack
, char, 0);
527 INIT_REG_SET (&spilled_pseudos
);
528 INIT_REG_SET (&changed_allocation_pseudos
);
529 INIT_REG_SET (&pseudos_counted
);
532 /* List of insn chains that are currently unused. */
533 static struct insn_chain
*unused_insn_chains
= 0;
535 /* Allocate an empty insn_chain structure. */
537 new_insn_chain (void)
539 struct insn_chain
*c
;
541 if (unused_insn_chains
== 0)
543 c
= XOBNEW (&reload_obstack
, struct insn_chain
);
544 INIT_REG_SET (&c
->live_throughout
);
545 INIT_REG_SET (&c
->dead_or_set
);
549 c
= unused_insn_chains
;
550 unused_insn_chains
= c
->next
;
552 c
->is_caller_save_insn
= 0;
553 c
->need_operand_change
= 0;
559 /* Small utility function to set all regs in hard reg set TO which are
560 allocated to pseudos in regset FROM. */
/* NOTE(review): extraction dropped several original lines of this
   function (the return-type line, the braces, the declaration of
   `regno', the tail of the comment starting at original line 574, and
   the branch taken when reg_renumber[regno] is negative, i.e. when the
   pseudo has no hard register).  The surviving statements below are
   reproduced byte-for-byte; confirm the gaps against the full file.  */
563 compute_use_by_pseudos (HARD_REG_SET
*to
, regset from
)
566 reg_set_iterator rsi
;
/* Iterate over every pseudo in FROM; pseudo registers are numbered
   starting at FIRST_PSEUDO_REGISTER.  */
568 EXECUTE_IF_SET_IN_REG_SET (from
, FIRST_PSEUDO_REGISTER
, regno
, rsi
)
/* r is the hard register this pseudo was allocated to (negative if it
   got none -- see the reg_renumber description in the file header).  */
570 int r
= reg_renumber
[regno
];
574 /* reload_combine uses the information from DF_LIVE_IN,
575 which might still contain registers that have not
576 actually been allocated since they have an
578 gcc_assert (ira_conflicts_p
|| reload_completed
);
/* Mark in TO every hard reg occupied by this pseudo's value, starting
   at r, for the pseudo's full mode.  */
581 add_to_hard_reg_set (to
, PSEUDO_REGNO_MODE (regno
), r
);
585 /* Replace all pseudos found in LOC with their corresponding
589 replace_pseudos_in (rtx
*loc
, enum machine_mode mem_mode
, rtx usage
)
602 unsigned int regno
= REGNO (x
);
604 if (regno
< FIRST_PSEUDO_REGISTER
)
607 x
= eliminate_regs (x
, mem_mode
, usage
);
611 replace_pseudos_in (loc
, mem_mode
, usage
);
615 if (reg_equiv_constant
[regno
])
616 *loc
= reg_equiv_constant
[regno
];
617 else if (reg_equiv_mem
[regno
])
618 *loc
= reg_equiv_mem
[regno
];
619 else if (reg_equiv_address
[regno
])
620 *loc
= gen_rtx_MEM (GET_MODE (x
), reg_equiv_address
[regno
]);
623 gcc_assert (!REG_P (regno_reg_rtx
[regno
])
624 || REGNO (regno_reg_rtx
[regno
]) != regno
);
625 *loc
= regno_reg_rtx
[regno
];
630 else if (code
== MEM
)
632 replace_pseudos_in (& XEXP (x
, 0), GET_MODE (x
), usage
);
636 /* Process each of our operands recursively. */
637 fmt
= GET_RTX_FORMAT (code
);
638 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
640 replace_pseudos_in (&XEXP (x
, i
), mem_mode
, usage
);
641 else if (*fmt
== 'E')
642 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
643 replace_pseudos_in (& XVECEXP (x
, i
, j
), mem_mode
, usage
);
646 /* Determine if the current function has an exception receiver block
647 that reaches the exit block via non-exceptional edges */
650 has_nonexceptional_receiver (void)
654 basic_block
*tos
, *worklist
, bb
;
656 /* If we're not optimizing, then just err on the safe side. */
660 /* First determine which blocks can reach exit via normal paths. */
661 tos
= worklist
= XNEWVEC (basic_block
, n_basic_blocks
+ 1);
664 bb
->flags
&= ~BB_REACHABLE
;
666 /* Place the exit block on our worklist. */
667 EXIT_BLOCK_PTR
->flags
|= BB_REACHABLE
;
668 *tos
++ = EXIT_BLOCK_PTR
;
670 /* Iterate: find everything reachable from what we've already seen. */
671 while (tos
!= worklist
)
675 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
676 if (!(e
->flags
& EDGE_ABNORMAL
))
678 basic_block src
= e
->src
;
680 if (!(src
->flags
& BB_REACHABLE
))
682 src
->flags
|= BB_REACHABLE
;
689 /* Now see if there's a reachable block with an exceptional incoming
692 if (bb
->flags
& BB_REACHABLE
)
693 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
694 if (e
->flags
& EDGE_ABNORMAL
)
697 /* No exceptional block reached exit unexceptionally. */
702 /* Global variables used by reload and its subroutines. */
704 /* The current basic block while in calculate_elim_costs_all_insns. */
705 static basic_block elim_bb
;
707 /* Set during calculate_needs if an insn needs register elimination. */
708 static int something_needs_elimination
;
709 /* Set during calculate_needs if an insn needs an operand changed. */
710 static int something_needs_operands_changed
;
711 /* Set by alter_regs if we spilled a register to the stack. */
712 static bool something_was_spilled
;
714 /* Nonzero means we couldn't get enough spill regs. */
717 /* Temporary array of pseudo-register number. */
718 static int *temp_pseudo_reg_arr
;
720 /* Main entry point for the reload pass.
722 FIRST is the first insn of the function being compiled.
724 GLOBAL nonzero means we were called from global_alloc
725 and should attempt to reallocate any pseudoregs that we
726 displace from hard regs we will use for reloads.
727 If GLOBAL is zero, we do not have enough information to do that,
728 so any pseudo reg that is spilled must go to the stack.
730 Return value is nonzero if reload failed
731 and we must not do any more for this function. */
734 reload (rtx first
, int global
)
738 struct elim_table
*ep
;
741 /* Make sure even insns with volatile mem refs are recognizable. */
746 reload_firstobj
= XOBNEWVAR (&reload_obstack
, char, 0);
748 /* Make sure that the last insn in the chain
749 is not something that needs reloading. */
750 emit_note (NOTE_INSN_DELETED
);
752 /* Enable find_equiv_reg to distinguish insns made by reload. */
753 reload_first_uid
= get_max_uid ();
755 #ifdef SECONDARY_MEMORY_NEEDED
756 /* Initialize the secondary memory table. */
757 clear_secondary_mem ();
760 /* We don't have a stack slot for any spill reg yet. */
761 memset (spill_stack_slot
, 0, sizeof spill_stack_slot
);
762 memset (spill_stack_slot_width
, 0, sizeof spill_stack_slot_width
);
764 /* Initialize the save area information for caller-save, in case some
768 /* Compute which hard registers are now in use
769 as homes for pseudo registers.
770 This is done here rather than (eg) in global_alloc
771 because this point is reached even if not optimizing. */
772 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
775 /* A function that has a nonlocal label that can reach the exit
776 block via non-exceptional paths must save all call-saved
778 if (cfun
->has_nonlocal_label
779 && has_nonexceptional_receiver ())
780 crtl
->saves_all_registers
= 1;
782 if (crtl
->saves_all_registers
)
783 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
784 if (! call_used_regs
[i
] && ! fixed_regs
[i
] && ! LOCAL_REGNO (i
))
785 df_set_regs_ever_live (i
, true);
787 reg_old_renumber
= XCNEWVEC (short, max_regno
);
788 memcpy (reg_old_renumber
, reg_renumber
, max_regno
* sizeof (short));
789 pseudo_forbidden_regs
= XNEWVEC (HARD_REG_SET
, max_regno
);
790 pseudo_previous_regs
= XCNEWVEC (HARD_REG_SET
, max_regno
);
792 CLEAR_HARD_REG_SET (bad_spill_regs_global
);
794 init_eliminable_invariants (first
, true);
797 /* Alter each pseudo-reg rtx to contain its hard reg number. Assign
798 stack slots to the pseudos that lack hard regs or equivalents.
799 Do not touch virtual registers. */
801 temp_pseudo_reg_arr
= XNEWVEC (int, max_regno
- LAST_VIRTUAL_REGISTER
- 1);
802 for (n
= 0, i
= LAST_VIRTUAL_REGISTER
+ 1; i
< max_regno
; i
++)
803 temp_pseudo_reg_arr
[n
++] = i
;
806 /* Ask IRA to order pseudo-registers for better stack slot
808 ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr
, n
, reg_max_ref_width
);
810 for (i
= 0; i
< n
; i
++)
811 alter_reg (temp_pseudo_reg_arr
[i
], -1, false);
813 /* If we have some registers we think can be eliminated, scan all insns to
814 see if there is an insn that sets one of these registers to something
815 other than itself plus a constant. If so, the register cannot be
816 eliminated. Doing this scan here eliminates an extra pass through the
817 main reload loop in the most common case where register elimination
819 for (insn
= first
; insn
&& num_eliminable
; insn
= NEXT_INSN (insn
))
821 note_stores (PATTERN (insn
), mark_not_eliminable
, NULL
);
823 maybe_fix_stack_asms ();
825 insns_need_reload
= 0;
826 something_needs_elimination
= 0;
828 /* Initialize to -1, which means take the first spill register. */
831 /* Spill any hard regs that we know we can't eliminate. */
832 CLEAR_HARD_REG_SET (used_spill_regs
);
833 /* There can be multiple ways to eliminate a register;
834 they should be listed adjacently.
835 Elimination for any register fails only if all possible ways fail. */
836 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; )
839 int can_eliminate
= 0;
842 can_eliminate
|= ep
->can_eliminate
;
845 while (ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
] && ep
->from
== from
);
847 spill_hard_reg (from
, 1);
850 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
851 if (frame_pointer_needed
)
852 spill_hard_reg (HARD_FRAME_POINTER_REGNUM
, 1);
854 finish_spills (global
);
856 /* From now on, we may need to generate moves differently. We may also
857 allow modifications of insns which cause them to not be recognized.
858 Any such modifications will be cleaned up during reload itself. */
859 reload_in_progress
= 1;
861 /* This loop scans the entire function each go-round
862 and repeats until one repetition spills no additional hard regs. */
865 int something_changed
;
867 HOST_WIDE_INT starting_frame_size
;
869 starting_frame_size
= get_frame_size ();
870 something_was_spilled
= false;
872 set_initial_elim_offsets ();
873 set_initial_label_offsets ();
875 /* For each pseudo register that has an equivalent location defined,
876 try to eliminate any eliminable registers (such as the frame pointer)
877 assuming initial offsets for the replacement register, which
880 If the resulting location is directly addressable, substitute
881 the MEM we just got directly for the old REG.
883 If it is not addressable but is a constant or the sum of a hard reg
884 and constant, it is probably not addressable because the constant is
885 out of range, in that case record the address; we will generate
886 hairy code to compute the address in a register each time it is
887 needed. Similarly if it is a hard register, but one that is not
888 valid as an address register.
890 If the location is not addressable, but does not have one of the
891 above forms, assign a stack slot. We have to do this to avoid the
892 potential of producing lots of reloads if, e.g., a location involves
893 a pseudo that didn't get a hard register and has an equivalent memory
894 location that also involves a pseudo that didn't get a hard register.
896 Perhaps at some point we will improve reload_when_needed handling
897 so this problem goes away. But that's very hairy. */
899 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
900 if (reg_renumber
[i
] < 0 && reg_equiv_memory_loc
[i
])
902 rtx x
= eliminate_regs (reg_equiv_memory_loc
[i
], VOIDmode
,
905 if (strict_memory_address_addr_space_p
906 (GET_MODE (regno_reg_rtx
[i
]), XEXP (x
, 0),
908 reg_equiv_mem
[i
] = x
, reg_equiv_address
[i
] = 0;
909 else if (CONSTANT_P (XEXP (x
, 0))
910 || (REG_P (XEXP (x
, 0))
911 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
)
912 || (GET_CODE (XEXP (x
, 0)) == PLUS
913 && REG_P (XEXP (XEXP (x
, 0), 0))
914 && (REGNO (XEXP (XEXP (x
, 0), 0))
915 < FIRST_PSEUDO_REGISTER
)
916 && CONSTANT_P (XEXP (XEXP (x
, 0), 1))))
917 reg_equiv_address
[i
] = XEXP (x
, 0), reg_equiv_mem
[i
] = 0;
920 /* Make a new stack slot. Then indicate that something
921 changed so we go back and recompute offsets for
922 eliminable registers because the allocation of memory
923 below might change some offset. reg_equiv_{mem,address}
924 will be set up for this pseudo on the next pass around
926 reg_equiv_memory_loc
[i
] = 0;
927 reg_equiv_init
[i
] = 0;
928 alter_reg (i
, -1, true);
932 if (caller_save_needed
)
935 /* If we allocated another stack slot, redo elimination bookkeeping. */
936 if (something_was_spilled
|| starting_frame_size
!= get_frame_size ())
938 if (starting_frame_size
&& crtl
->stack_alignment_needed
)
940 /* If we have a stack frame, we must align it now. The
941 stack size may be a part of the offset computation for
942 register elimination. So if this changes the stack size,
943 then repeat the elimination bookkeeping. We don't
944 realign when there is no stack, as that will cause a
945 stack frame when none is needed should
946 STARTING_FRAME_OFFSET not be already aligned to
948 assign_stack_local (BLKmode
, 0, crtl
->stack_alignment_needed
);
949 if (starting_frame_size
!= get_frame_size ())
953 if (caller_save_needed
)
955 save_call_clobbered_regs ();
956 /* That might have allocated new insn_chain structures. */
957 reload_firstobj
= XOBNEWVAR (&reload_obstack
, char, 0);
960 calculate_needs_all_insns (global
);
962 if (! ira_conflicts_p
)
963 /* Don't do it for IRA. We need this info because we don't
964 change live_throughout and dead_or_set for chains when IRA
966 CLEAR_REG_SET (&spilled_pseudos
);
970 something_changed
= 0;
972 /* If we allocated any new memory locations, make another pass
973 since it might have changed elimination offsets. */
974 if (something_was_spilled
|| starting_frame_size
!= get_frame_size ())
975 something_changed
= 1;
977 /* Even if the frame size remained the same, we might still have
978 changed elimination offsets, e.g. if find_reloads called
979 force_const_mem requiring the back end to allocate a constant
980 pool base register that needs to be saved on the stack. */
981 else if (!verify_initial_elim_offsets ())
982 something_changed
= 1;
985 HARD_REG_SET to_spill
;
986 CLEAR_HARD_REG_SET (to_spill
);
987 update_eliminables (&to_spill
);
988 AND_COMPL_HARD_REG_SET (used_spill_regs
, to_spill
);
990 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
991 if (TEST_HARD_REG_BIT (to_spill
, i
))
993 spill_hard_reg (i
, 1);
996 /* Regardless of the state of spills, if we previously had
997 a register that we thought we could eliminate, but now can
998 not eliminate, we must run another pass.
1000 Consider pseudos which have an entry in reg_equiv_* which
1001 reference an eliminable register. We must make another pass
1002 to update reg_equiv_* so that we do not substitute in the
1003 old value from when we thought the elimination could be
1005 something_changed
= 1;
1009 select_reload_regs ();
1013 if (insns_need_reload
!= 0 || did_spill
)
1014 something_changed
|= finish_spills (global
);
1016 if (! something_changed
)
1019 if (caller_save_needed
)
1020 delete_caller_save_insns ();
1022 obstack_free (&reload_obstack
, reload_firstobj
);
1025 /* If global-alloc was run, notify it of any register eliminations we have
1028 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
1029 if (ep
->can_eliminate
)
1030 mark_elimination (ep
->from
, ep
->to
);
1032 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1033 If that insn didn't set the register (i.e., it copied the register to
1034 memory), just delete that insn instead of the equivalencing insn plus
1035 anything now dead. If we call delete_dead_insn on that insn, we may
1036 delete the insn that actually sets the register if the register dies
1037 there and that is incorrect. */
1039 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
1041 if (reg_renumber
[i
] < 0 && reg_equiv_init
[i
] != 0)
1044 for (list
= reg_equiv_init
[i
]; list
; list
= XEXP (list
, 1))
1046 rtx equiv_insn
= XEXP (list
, 0);
1048 /* If we already deleted the insn or if it may trap, we can't
1049 delete it. The latter case shouldn't happen, but can
1050 if an insn has a variable address, gets a REG_EH_REGION
1051 note added to it, and then gets converted into a load
1052 from a constant address. */
1053 if (NOTE_P (equiv_insn
)
1054 || can_throw_internal (equiv_insn
))
1056 else if (reg_set_p (regno_reg_rtx
[i
], PATTERN (equiv_insn
)))
1057 delete_dead_insn (equiv_insn
);
1059 SET_INSN_DELETED (equiv_insn
);
1064 /* Use the reload registers where necessary
1065 by generating move instructions to move the must-be-register
1066 values into or out of the reload registers. */
1068 if (insns_need_reload
!= 0 || something_needs_elimination
1069 || something_needs_operands_changed
)
1071 HOST_WIDE_INT old_frame_size
= get_frame_size ();
1073 reload_as_needed (global
);
1075 gcc_assert (old_frame_size
== get_frame_size ());
1077 gcc_assert (verify_initial_elim_offsets ());
1080 /* If we were able to eliminate the frame pointer, show that it is no
1081 longer live at the start of any basic block. If it ls live by
1082 virtue of being in a pseudo, that pseudo will be marked live
1083 and hence the frame pointer will be known to be live via that
1086 if (! frame_pointer_needed
)
1088 bitmap_clear_bit (df_get_live_in (bb
), HARD_FRAME_POINTER_REGNUM
);
1090 /* Come here (with failure set nonzero) if we can't get enough spill
1094 CLEAR_REG_SET (&changed_allocation_pseudos
);
1095 CLEAR_REG_SET (&spilled_pseudos
);
1096 reload_in_progress
= 0;
1098 /* Now eliminate all pseudo regs by modifying them into
1099 their equivalent memory references.
1100 The REG-rtx's for the pseudos are modified in place,
1101 so all insns that used to refer to them now refer to memory.
1103 For a reg that has a reg_equiv_address, all those insns
1104 were changed by reloading so that no insns refer to it any longer;
1105 but the DECL_RTL of a variable decl may refer to it,
1106 and if so this causes the debugging info to mention the variable. */
1108 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
1112 if (reg_equiv_mem
[i
])
1113 addr
= XEXP (reg_equiv_mem
[i
], 0);
1115 if (reg_equiv_address
[i
])
1116 addr
= reg_equiv_address
[i
];
1120 if (reg_renumber
[i
] < 0)
1122 rtx reg
= regno_reg_rtx
[i
];
1124 REG_USERVAR_P (reg
) = 0;
1125 PUT_CODE (reg
, MEM
);
1126 XEXP (reg
, 0) = addr
;
1127 if (reg_equiv_memory_loc
[i
])
1128 MEM_COPY_ATTRIBUTES (reg
, reg_equiv_memory_loc
[i
]);
1131 MEM_IN_STRUCT_P (reg
) = MEM_SCALAR_P (reg
) = 0;
1132 MEM_ATTRS (reg
) = 0;
1134 MEM_NOTRAP_P (reg
) = 1;
1136 else if (reg_equiv_mem
[i
])
1137 XEXP (reg_equiv_mem
[i
], 0) = addr
;
1140 /* We don't want complex addressing modes in debug insns
1141 if simpler ones will do, so delegitimize equivalences
1143 if (MAY_HAVE_DEBUG_INSNS
&& reg_renumber
[i
] < 0)
1145 rtx reg
= regno_reg_rtx
[i
];
1149 if (reg_equiv_constant
[i
])
1150 equiv
= reg_equiv_constant
[i
];
1151 else if (reg_equiv_invariant
[i
])
1152 equiv
= reg_equiv_invariant
[i
];
1153 else if (reg
&& MEM_P (reg
))
1154 equiv
= targetm
.delegitimize_address (reg
);
1155 else if (reg
&& REG_P (reg
) && (int)REGNO (reg
) != i
)
1161 for (use
= DF_REG_USE_CHAIN (i
); use
; use
= next
)
1163 insn
= DF_REF_INSN (use
);
1165 /* Make sure the next ref is for a different instruction,
1166 so that we're not affected by the rescan. */
1167 next
= DF_REF_NEXT_REG (use
);
1168 while (next
&& DF_REF_INSN (next
) == insn
)
1169 next
= DF_REF_NEXT_REG (next
);
1171 if (DEBUG_INSN_P (insn
))
1175 INSN_VAR_LOCATION_LOC (insn
) = gen_rtx_UNKNOWN_VAR_LOC ();
1176 df_insn_rescan_debug_internal (insn
);
1179 INSN_VAR_LOCATION_LOC (insn
)
1180 = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn
),
1187 /* We must set reload_completed now since the cleanup_subreg_operands call
1188 below will re-recognize each insn and reload may have generated insns
1189 which are only valid during and after reload. */
1190 reload_completed
= 1;
1192 /* Make a pass over all the insns and delete all USEs which we inserted
1193 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1194 notes. Delete all CLOBBER insns, except those that refer to the return
1195 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1196 from misarranging variable-array code, and simplify (subreg (reg))
1197 operands. Strip and regenerate REG_INC notes that may have been moved
1200 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
1206 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn
),
1207 VOIDmode
, CALL_INSN_FUNCTION_USAGE (insn
));
1209 if ((GET_CODE (PATTERN (insn
)) == USE
1210 /* We mark with QImode USEs introduced by reload itself. */
1211 && (GET_MODE (insn
) == QImode
1212 || find_reg_note (insn
, REG_EQUAL
, NULL_RTX
)))
1213 || (GET_CODE (PATTERN (insn
)) == CLOBBER
1214 && (!MEM_P (XEXP (PATTERN (insn
), 0))
1215 || GET_MODE (XEXP (PATTERN (insn
), 0)) != BLKmode
1216 || (GET_CODE (XEXP (XEXP (PATTERN (insn
), 0), 0)) != SCRATCH
1217 && XEXP (XEXP (PATTERN (insn
), 0), 0)
1218 != stack_pointer_rtx
))
1219 && (!REG_P (XEXP (PATTERN (insn
), 0))
1220 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn
), 0)))))
1226 /* Some CLOBBERs may survive until here and still reference unassigned
1227 pseudos with const equivalent, which may in turn cause ICE in later
1228 passes if the reference remains in place. */
1229 if (GET_CODE (PATTERN (insn
)) == CLOBBER
)
1230 replace_pseudos_in (& XEXP (PATTERN (insn
), 0),
1231 VOIDmode
, PATTERN (insn
));
1233 /* Discard obvious no-ops, even without -O. This optimization
1234 is fast and doesn't interfere with debugging. */
1235 if (NONJUMP_INSN_P (insn
)
1236 && GET_CODE (PATTERN (insn
)) == SET
1237 && REG_P (SET_SRC (PATTERN (insn
)))
1238 && REG_P (SET_DEST (PATTERN (insn
)))
1239 && (REGNO (SET_SRC (PATTERN (insn
)))
1240 == REGNO (SET_DEST (PATTERN (insn
)))))
1246 pnote
= ®_NOTES (insn
);
1249 if (REG_NOTE_KIND (*pnote
) == REG_DEAD
1250 || REG_NOTE_KIND (*pnote
) == REG_UNUSED
1251 || REG_NOTE_KIND (*pnote
) == REG_INC
)
1252 *pnote
= XEXP (*pnote
, 1);
1254 pnote
= &XEXP (*pnote
, 1);
1258 add_auto_inc_notes (insn
, PATTERN (insn
));
1261 /* Simplify (subreg (reg)) if it appears as an operand. */
1262 cleanup_subreg_operands (insn
);
1264 /* Clean up invalid ASMs so that they don't confuse later passes.
1266 if (asm_noperands (PATTERN (insn
)) >= 0)
1268 extract_insn (insn
);
1269 if (!constrain_operands (1))
1271 error_for_asm (insn
,
1272 "%<asm%> operand has impossible constraints");
1279 /* If we are doing generic stack checking, give a warning if this
1280 function's frame size is larger than we expect. */
1281 if (flag_stack_check
== GENERIC_STACK_CHECK
)
1283 HOST_WIDE_INT size
= get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE
;
1284 static int verbose_warned
= 0;
1286 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1287 if (df_regs_ever_live_p (i
) && ! fixed_regs
[i
] && call_used_regs
[i
])
1288 size
+= UNITS_PER_WORD
;
1290 if (size
> STACK_CHECK_MAX_FRAME_SIZE
)
1292 warning (0, "frame size too large for reliable stack checking");
1293 if (! verbose_warned
)
1295 warning (0, "try reducing the number of local variables");
1301 free (temp_pseudo_reg_arr
);
1303 /* Indicate that we no longer have known memory locations or constants. */
1306 free (reg_max_ref_width
);
1307 free (reg_old_renumber
);
1308 free (pseudo_previous_regs
);
1309 free (pseudo_forbidden_regs
);
1311 CLEAR_HARD_REG_SET (used_spill_regs
);
1312 for (i
= 0; i
< n_spills
; i
++)
1313 SET_HARD_REG_BIT (used_spill_regs
, spill_regs
[i
]);
1315 /* Free all the insn_chain structures at once. */
1316 obstack_free (&reload_obstack
, reload_startobj
);
1317 unused_insn_chains
= 0;
1318 fixup_abnormal_edges ();
1320 /* Replacing pseudos with their memory equivalents might have
1321 created shared rtx. Subsequent passes would get confused
1322 by this, so unshare everything here. */
1323 unshare_all_rtl_again (first
);
1325 #ifdef STACK_BOUNDARY
1326 /* init_emit has set the alignment of the hard frame pointer
1327 to STACK_BOUNDARY. It is very likely no longer valid if
1328 the hard frame pointer was used for register allocation. */
1329 if (!frame_pointer_needed
)
1330 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM
) = BITS_PER_UNIT
;
1333 VEC_free (rtx_p
, heap
, substitute_stack
);
/* Yet another special case.  Unfortunately, reg-stack forces people to
   write incorrect clobbers in asm statements.  These clobbers must not
   cause the register to appear in bad_spill_regs, otherwise we'll call
   fatal_insn later.  We clear the corresponding regnos in the live
   register sets to avoid this.
   The whole thing is rather sick, I'm afraid.  */

/* NOTE(review): body reconstructed from a garbled extraction; verify
   against upstream reload1.c.  */

static void
maybe_fix_stack_asms (void)
{
#ifdef STACK_REGS
  const char *constraints[MAX_RECOG_OPERANDS];
  enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
  struct insn_chain *chain;

  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
    {
      int i, noperands;
      HARD_REG_SET clobbered, allowed;
      rtx pat;

      /* Only PARALLEL asm insns are of interest here.  */
      if (! INSN_P (chain->insn)
	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
	continue;
      pat = PATTERN (chain->insn);
      if (GET_CODE (pat) != PARALLEL)
	continue;

      CLEAR_HARD_REG_SET (clobbered);
      CLEAR_HARD_REG_SET (allowed);

      /* First, make a mask of all stack regs that are clobbered.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx t = XVECEXP (pat, 0, i);
	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
	}

      /* Get the operand values and constraints out of the insn.  */
      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
			   constraints, operand_mode, NULL);

      /* For every operand, see what registers are allowed.  */
      for (i = 0; i < noperands; i++)
	{
	  const char *p = constraints[i];
	  /* For every alternative, we compute the class of registers allowed
	     for reloading in CLS, and merge its contents into the reg set
	     ALLOWED.  */
	  int cls = (int) NO_REGS;

	  for (;;)
	    {
	      char c = *p;

	      if (c == '\0' || c == ',' || c == '#')
		{
		  /* End of one alternative - mark the regs in the current
		     class, and reset the class.  */
		  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
		  cls = NO_REGS;
		  p++;
		  if (c == '#')
		    do {
		      c = *p++;
		    } while (c != '\0' && c != ',');
		  if (c == '\0')
		    break;
		  continue;
		}

	      switch (c)
		{
		case '=': case '+': case '*': case '%': case '?': case '!':
		case '0': case '1': case '2': case '3': case '4': case '<':
		case '>': case 'V': case 'o': case '&': case 'E': case 'F':
		case 's': case 'i': case 'n': case 'X': case 'I': case 'J':
		case 'K': case 'L': case 'M': case 'N': case 'O': case 'P':
		case TARGET_MEM_CONSTRAINT:
		  break;

		case 'p':
		  cls = (int) reg_class_subunion[cls]
		    [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
		  break;

		case 'g':
		case 'r':
		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
		  break;

		default:
		  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
		    cls = (int) reg_class_subunion[cls]
		      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
		  else
		    cls = (int) reg_class_subunion[cls]
		      [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
		}
	      p += CONSTRAINT_LEN (c, p);
	    }
	}

      /* Those of the registers which are clobbered, but allowed by the
	 constraints, must be usable as reload registers.  So clear them
	 out of the life information.  */
      AND_HARD_REG_SET (allowed, clobbered);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allowed, i))
	  {
	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
	  }
    }

#endif
}
1456 /* Copy the global variables n_reloads and rld into the corresponding elts
1459 copy_reloads (struct insn_chain
*chain
)
1461 chain
->n_reloads
= n_reloads
;
1462 chain
->rld
= XOBNEWVEC (&reload_obstack
, struct reload
, n_reloads
);
1463 memcpy (chain
->rld
, rld
, n_reloads
* sizeof (struct reload
));
1464 reload_insn_firstobj
= XOBNEWVAR (&reload_obstack
, char, 0);
1467 /* Walk the chain of insns, and determine for each whether it needs reloads
1468 and/or eliminations. Build the corresponding insns_need_reload list, and
1469 set something_needs_elimination as appropriate. */
1471 calculate_needs_all_insns (int global
)
1473 struct insn_chain
**pprev_reload
= &insns_need_reload
;
1474 struct insn_chain
*chain
, *next
= 0;
1476 something_needs_elimination
= 0;
1478 reload_insn_firstobj
= XOBNEWVAR (&reload_obstack
, char, 0);
1479 for (chain
= reload_insn_chain
; chain
!= 0; chain
= next
)
1481 rtx insn
= chain
->insn
;
1485 /* Clear out the shortcuts. */
1486 chain
->n_reloads
= 0;
1487 chain
->need_elim
= 0;
1488 chain
->need_reload
= 0;
1489 chain
->need_operand_change
= 0;
1491 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1492 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1493 what effects this has on the known offsets at labels. */
1495 if (LABEL_P (insn
) || JUMP_P (insn
)
1496 || (INSN_P (insn
) && REG_NOTES (insn
) != 0))
1497 set_label_offsets (insn
, insn
, 0);
1501 rtx old_body
= PATTERN (insn
);
1502 int old_code
= INSN_CODE (insn
);
1503 rtx old_notes
= REG_NOTES (insn
);
1504 int did_elimination
= 0;
1505 int operands_changed
= 0;
1506 rtx set
= single_set (insn
);
1508 /* Skip insns that only set an equivalence. */
1509 if (set
&& REG_P (SET_DEST (set
))
1510 && reg_renumber
[REGNO (SET_DEST (set
))] < 0
1511 && (reg_equiv_constant
[REGNO (SET_DEST (set
))]
1512 || (reg_equiv_invariant
[REGNO (SET_DEST (set
))]))
1513 && reg_equiv_init
[REGNO (SET_DEST (set
))])
1516 /* If needed, eliminate any eliminable registers. */
1517 if (num_eliminable
|| num_eliminable_invariants
)
1518 did_elimination
= eliminate_regs_in_insn (insn
, 0);
1520 /* Analyze the instruction. */
1521 operands_changed
= find_reloads (insn
, 0, spill_indirect_levels
,
1522 global
, spill_reg_order
);
1524 /* If a no-op set needs more than one reload, this is likely
1525 to be something that needs input address reloads. We
1526 can't get rid of this cleanly later, and it is of no use
1527 anyway, so discard it now.
1528 We only do this when expensive_optimizations is enabled,
1529 since this complements reload inheritance / output
1530 reload deletion, and it can make debugging harder. */
1531 if (flag_expensive_optimizations
&& n_reloads
> 1)
1533 rtx set
= single_set (insn
);
1536 ((SET_SRC (set
) == SET_DEST (set
)
1537 && REG_P (SET_SRC (set
))
1538 && REGNO (SET_SRC (set
)) >= FIRST_PSEUDO_REGISTER
)
1539 || (REG_P (SET_SRC (set
)) && REG_P (SET_DEST (set
))
1540 && reg_renumber
[REGNO (SET_SRC (set
))] < 0
1541 && reg_renumber
[REGNO (SET_DEST (set
))] < 0
1542 && reg_equiv_memory_loc
[REGNO (SET_SRC (set
))] != NULL
1543 && reg_equiv_memory_loc
[REGNO (SET_DEST (set
))] != NULL
1544 && rtx_equal_p (reg_equiv_memory_loc
1545 [REGNO (SET_SRC (set
))],
1546 reg_equiv_memory_loc
1547 [REGNO (SET_DEST (set
))]))))
1549 if (ira_conflicts_p
)
1550 /* Inform IRA about the insn deletion. */
1551 ira_mark_memory_move_deletion (REGNO (SET_DEST (set
)),
1552 REGNO (SET_SRC (set
)));
1554 /* Delete it from the reload chain. */
1556 chain
->prev
->next
= next
;
1558 reload_insn_chain
= next
;
1560 next
->prev
= chain
->prev
;
1561 chain
->next
= unused_insn_chains
;
1562 unused_insn_chains
= chain
;
1567 update_eliminable_offsets ();
1569 /* Remember for later shortcuts which insns had any reloads or
1570 register eliminations. */
1571 chain
->need_elim
= did_elimination
;
1572 chain
->need_reload
= n_reloads
> 0;
1573 chain
->need_operand_change
= operands_changed
;
1575 /* Discard any register replacements done. */
1576 if (did_elimination
)
1578 obstack_free (&reload_obstack
, reload_insn_firstobj
);
1579 PATTERN (insn
) = old_body
;
1580 INSN_CODE (insn
) = old_code
;
1581 REG_NOTES (insn
) = old_notes
;
1582 something_needs_elimination
= 1;
1585 something_needs_operands_changed
|= operands_changed
;
1589 copy_reloads (chain
);
1590 *pprev_reload
= chain
;
1591 pprev_reload
= &chain
->next_need_reload
;
1598 /* This function is called from the register allocator to set up estimates
1599 for the cost of eliminating pseudos which have REG_EQUIV equivalences to
1600 an invariant. The structure is similar to calculate_needs_all_insns. */
1603 calculate_elim_costs_all_insns (void)
1605 int *reg_equiv_init_cost
;
1609 reg_equiv_init_cost
= XCNEWVEC (int, max_regno
);
1611 init_eliminable_invariants (get_insns (), false);
1613 set_initial_elim_offsets ();
1614 set_initial_label_offsets ();
1621 FOR_BB_INSNS (bb
, insn
)
1623 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1624 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1625 what effects this has on the known offsets at labels. */
1627 if (LABEL_P (insn
) || JUMP_P (insn
)
1628 || (INSN_P (insn
) && REG_NOTES (insn
) != 0))
1629 set_label_offsets (insn
, insn
, 0);
1633 rtx set
= single_set (insn
);
1635 /* Skip insns that only set an equivalence. */
1636 if (set
&& REG_P (SET_DEST (set
))
1637 && reg_renumber
[REGNO (SET_DEST (set
))] < 0
1638 && (reg_equiv_constant
[REGNO (SET_DEST (set
))]
1639 || (reg_equiv_invariant
[REGNO (SET_DEST (set
))])))
1641 unsigned regno
= REGNO (SET_DEST (set
));
1642 rtx init
= reg_equiv_init
[regno
];
1645 rtx t
= eliminate_regs_1 (SET_SRC (set
), VOIDmode
, insn
,
1647 int cost
= rtx_cost (t
, SET
,
1648 optimize_bb_for_speed_p (bb
));
1649 int freq
= REG_FREQ_FROM_BB (bb
);
1651 reg_equiv_init_cost
[regno
] = cost
* freq
;
1655 /* If needed, eliminate any eliminable registers. */
1656 if (num_eliminable
|| num_eliminable_invariants
)
1657 elimination_costs_in_insn (insn
);
1660 update_eliminable_offsets ();
1664 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
1666 if (reg_equiv_invariant
[i
])
1668 if (reg_equiv_init
[i
])
1670 int cost
= reg_equiv_init_cost
[i
];
1673 "Reg %d has equivalence, initial gains %d\n", i
, cost
);
1675 ira_adjust_equiv_reg_cost (i
, cost
);
1681 "Reg %d had equivalence, but can't be eliminated\n",
1683 ira_adjust_equiv_reg_cost (i
, 0);
1689 free (reg_equiv_init_cost
);
1692 /* Comparison function for qsort to decide which of two reloads
1693 should be handled first. *P1 and *P2 are the reload numbers. */
1696 reload_reg_class_lower (const void *r1p
, const void *r2p
)
1698 int r1
= *(const short *) r1p
, r2
= *(const short *) r2p
;
1701 /* Consider required reloads before optional ones. */
1702 t
= rld
[r1
].optional
- rld
[r2
].optional
;
1706 /* Count all solitary classes before non-solitary ones. */
1707 t
= ((reg_class_size
[(int) rld
[r2
].rclass
] == 1)
1708 - (reg_class_size
[(int) rld
[r1
].rclass
] == 1));
1712 /* Aside from solitaires, consider all multi-reg groups first. */
1713 t
= rld
[r2
].nregs
- rld
[r1
].nregs
;
1717 /* Consider reloads in order of increasing reg-class number. */
1718 t
= (int) rld
[r1
].rclass
- (int) rld
[r2
].rclass
;
1722 /* If reloads are equally urgent, sort by reload number,
1723 so that the results of qsort leave nothing to chance. */
1727 /* The cost of spilling each hard reg. */
1728 static int spill_cost
[FIRST_PSEUDO_REGISTER
];
1730 /* When spilling multiple hard registers, we use SPILL_COST for the first
1731 spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST
1732 only the first hard reg for a multi-reg pseudo. */
1733 static int spill_add_cost
[FIRST_PSEUDO_REGISTER
];
1735 /* Map of hard regno to pseudo regno currently occupying the hard
1737 static int hard_regno_to_pseudo_regno
[FIRST_PSEUDO_REGISTER
];
1739 /* Update the spill cost arrays, considering that pseudo REG is live. */
1742 count_pseudo (int reg
)
1744 int freq
= REG_FREQ (reg
);
1745 int r
= reg_renumber
[reg
];
1748 if (REGNO_REG_SET_P (&pseudos_counted
, reg
)
1749 || REGNO_REG_SET_P (&spilled_pseudos
, reg
)
1750 /* Ignore spilled pseudo-registers which can be here only if IRA
1752 || (ira_conflicts_p
&& r
< 0))
1755 SET_REGNO_REG_SET (&pseudos_counted
, reg
);
1757 gcc_assert (r
>= 0);
1759 spill_add_cost
[r
] += freq
;
1760 nregs
= hard_regno_nregs
[r
][PSEUDO_REGNO_MODE (reg
)];
1763 hard_regno_to_pseudo_regno
[r
+ nregs
] = reg
;
1764 spill_cost
[r
+ nregs
] += freq
;
1768 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1769 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1772 order_regs_for_reload (struct insn_chain
*chain
)
1775 HARD_REG_SET used_by_pseudos
;
1776 HARD_REG_SET used_by_pseudos2
;
1777 reg_set_iterator rsi
;
1779 COPY_HARD_REG_SET (bad_spill_regs
, fixed_reg_set
);
1781 memset (spill_cost
, 0, sizeof spill_cost
);
1782 memset (spill_add_cost
, 0, sizeof spill_add_cost
);
1783 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1784 hard_regno_to_pseudo_regno
[i
] = -1;
1786 /* Count number of uses of each hard reg by pseudo regs allocated to it
1787 and then order them by decreasing use. First exclude hard registers
1788 that are live in or across this insn. */
1790 REG_SET_TO_HARD_REG_SET (used_by_pseudos
, &chain
->live_throughout
);
1791 REG_SET_TO_HARD_REG_SET (used_by_pseudos2
, &chain
->dead_or_set
);
1792 IOR_HARD_REG_SET (bad_spill_regs
, used_by_pseudos
);
1793 IOR_HARD_REG_SET (bad_spill_regs
, used_by_pseudos2
);
1795 /* Now find out which pseudos are allocated to it, and update
1797 CLEAR_REG_SET (&pseudos_counted
);
1799 EXECUTE_IF_SET_IN_REG_SET
1800 (&chain
->live_throughout
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
1804 EXECUTE_IF_SET_IN_REG_SET
1805 (&chain
->dead_or_set
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
1809 CLEAR_REG_SET (&pseudos_counted
);
1812 /* Vector of reload-numbers showing the order in which the reloads should
1814 static short reload_order
[MAX_RELOADS
];
1816 /* This is used to keep track of the spill regs used in one insn. */
1817 static HARD_REG_SET used_spill_regs_local
;
1819 /* We decided to spill hard register SPILLED, which has a size of
1820 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1821 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1822 update SPILL_COST/SPILL_ADD_COST. */
1825 count_spilled_pseudo (int spilled
, int spilled_nregs
, int reg
)
1827 int freq
= REG_FREQ (reg
);
1828 int r
= reg_renumber
[reg
];
1829 int nregs
= hard_regno_nregs
[r
][PSEUDO_REGNO_MODE (reg
)];
1831 /* Ignore spilled pseudo-registers which can be here only if IRA is
1833 if ((ira_conflicts_p
&& r
< 0)
1834 || REGNO_REG_SET_P (&spilled_pseudos
, reg
)
1835 || spilled
+ spilled_nregs
<= r
|| r
+ nregs
<= spilled
)
1838 SET_REGNO_REG_SET (&spilled_pseudos
, reg
);
1840 spill_add_cost
[r
] -= freq
;
1843 hard_regno_to_pseudo_regno
[r
+ nregs
] = -1;
1844 spill_cost
[r
+ nregs
] -= freq
;
1848 /* Find reload register to use for reload number ORDER. */
1851 find_reg (struct insn_chain
*chain
, int order
)
1853 int rnum
= reload_order
[order
];
1854 struct reload
*rl
= rld
+ rnum
;
1855 int best_cost
= INT_MAX
;
1857 unsigned int i
, j
, n
;
1859 HARD_REG_SET not_usable
;
1860 HARD_REG_SET used_by_other_reload
;
1861 reg_set_iterator rsi
;
1862 static int regno_pseudo_regs
[FIRST_PSEUDO_REGISTER
];
1863 static int best_regno_pseudo_regs
[FIRST_PSEUDO_REGISTER
];
1865 COPY_HARD_REG_SET (not_usable
, bad_spill_regs
);
1866 IOR_HARD_REG_SET (not_usable
, bad_spill_regs_global
);
1867 IOR_COMPL_HARD_REG_SET (not_usable
, reg_class_contents
[rl
->rclass
]);
1869 CLEAR_HARD_REG_SET (used_by_other_reload
);
1870 for (k
= 0; k
< order
; k
++)
1872 int other
= reload_order
[k
];
1874 if (rld
[other
].regno
>= 0 && reloads_conflict (other
, rnum
))
1875 for (j
= 0; j
< rld
[other
].nregs
; j
++)
1876 SET_HARD_REG_BIT (used_by_other_reload
, rld
[other
].regno
+ j
);
1879 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1881 #ifdef REG_ALLOC_ORDER
1882 unsigned int regno
= reg_alloc_order
[i
];
1884 unsigned int regno
= i
;
1887 if (! TEST_HARD_REG_BIT (not_usable
, regno
)
1888 && ! TEST_HARD_REG_BIT (used_by_other_reload
, regno
)
1889 && HARD_REGNO_MODE_OK (regno
, rl
->mode
))
1891 int this_cost
= spill_cost
[regno
];
1893 unsigned int this_nregs
= hard_regno_nregs
[regno
][rl
->mode
];
1895 for (j
= 1; j
< this_nregs
; j
++)
1897 this_cost
+= spill_add_cost
[regno
+ j
];
1898 if ((TEST_HARD_REG_BIT (not_usable
, regno
+ j
))
1899 || TEST_HARD_REG_BIT (used_by_other_reload
, regno
+ j
))
1905 if (ira_conflicts_p
)
1907 /* Ask IRA to find a better pseudo-register for
1909 for (n
= j
= 0; j
< this_nregs
; j
++)
1911 int r
= hard_regno_to_pseudo_regno
[regno
+ j
];
1915 if (n
== 0 || regno_pseudo_regs
[n
- 1] != r
)
1916 regno_pseudo_regs
[n
++] = r
;
1918 regno_pseudo_regs
[n
++] = -1;
1920 || ira_better_spill_reload_regno_p (regno_pseudo_regs
,
1921 best_regno_pseudo_regs
,
1928 best_regno_pseudo_regs
[j
] = regno_pseudo_regs
[j
];
1929 if (regno_pseudo_regs
[j
] < 0)
1936 if (rl
->in
&& REG_P (rl
->in
) && REGNO (rl
->in
) == regno
)
1938 if (rl
->out
&& REG_P (rl
->out
) && REGNO (rl
->out
) == regno
)
1940 if (this_cost
< best_cost
1941 /* Among registers with equal cost, prefer caller-saved ones, or
1942 use REG_ALLOC_ORDER if it is defined. */
1943 || (this_cost
== best_cost
1944 #ifdef REG_ALLOC_ORDER
1945 && (inv_reg_alloc_order
[regno
]
1946 < inv_reg_alloc_order
[best_reg
])
1948 && call_used_regs
[regno
]
1949 && ! call_used_regs
[best_reg
]
1954 best_cost
= this_cost
;
1962 fprintf (dump_file
, "Using reg %d for reload %d\n", best_reg
, rnum
);
1964 rl
->nregs
= hard_regno_nregs
[best_reg
][rl
->mode
];
1965 rl
->regno
= best_reg
;
1967 EXECUTE_IF_SET_IN_REG_SET
1968 (&chain
->live_throughout
, FIRST_PSEUDO_REGISTER
, j
, rsi
)
1970 count_spilled_pseudo (best_reg
, rl
->nregs
, j
);
1973 EXECUTE_IF_SET_IN_REG_SET
1974 (&chain
->dead_or_set
, FIRST_PSEUDO_REGISTER
, j
, rsi
)
1976 count_spilled_pseudo (best_reg
, rl
->nregs
, j
);
1979 for (i
= 0; i
< rl
->nregs
; i
++)
1981 gcc_assert (spill_cost
[best_reg
+ i
] == 0);
1982 gcc_assert (spill_add_cost
[best_reg
+ i
] == 0);
1983 gcc_assert (hard_regno_to_pseudo_regno
[best_reg
+ i
] == -1);
1984 SET_HARD_REG_BIT (used_spill_regs_local
, best_reg
+ i
);
1989 /* Find more reload regs to satisfy the remaining need of an insn, which
1991 Do it by ascending class number, since otherwise a reg
1992 might be spilled for a big class and might fail to count
1993 for a smaller class even though it belongs to that class. */
1996 find_reload_regs (struct insn_chain
*chain
)
2000 /* In order to be certain of getting the registers we need,
2001 we must sort the reloads into order of increasing register class.
2002 Then our grabbing of reload registers will parallel the process
2003 that provided the reload registers. */
2004 for (i
= 0; i
< chain
->n_reloads
; i
++)
2006 /* Show whether this reload already has a hard reg. */
2007 if (chain
->rld
[i
].reg_rtx
)
2009 int regno
= REGNO (chain
->rld
[i
].reg_rtx
);
2010 chain
->rld
[i
].regno
= regno
;
2012 = hard_regno_nregs
[regno
][GET_MODE (chain
->rld
[i
].reg_rtx
)];
2015 chain
->rld
[i
].regno
= -1;
2016 reload_order
[i
] = i
;
2019 n_reloads
= chain
->n_reloads
;
2020 memcpy (rld
, chain
->rld
, n_reloads
* sizeof (struct reload
));
2022 CLEAR_HARD_REG_SET (used_spill_regs_local
);
2025 fprintf (dump_file
, "Spilling for insn %d.\n", INSN_UID (chain
->insn
));
2027 qsort (reload_order
, n_reloads
, sizeof (short), reload_reg_class_lower
);
2029 /* Compute the order of preference for hard registers to spill. */
2031 order_regs_for_reload (chain
);
2033 for (i
= 0; i
< n_reloads
; i
++)
2035 int r
= reload_order
[i
];
2037 /* Ignore reloads that got marked inoperative. */
2038 if ((rld
[r
].out
!= 0 || rld
[r
].in
!= 0 || rld
[r
].secondary_p
)
2039 && ! rld
[r
].optional
2040 && rld
[r
].regno
== -1)
2041 if (! find_reg (chain
, i
))
2044 fprintf (dump_file
, "reload failure for reload %d\n", r
);
2045 spill_failure (chain
->insn
, rld
[r
].rclass
);
2051 COPY_HARD_REG_SET (chain
->used_spill_regs
, used_spill_regs_local
);
2052 IOR_HARD_REG_SET (used_spill_regs
, used_spill_regs_local
);
2054 memcpy (chain
->rld
, rld
, n_reloads
* sizeof (struct reload
));
2058 select_reload_regs (void)
2060 struct insn_chain
*chain
;
2062 /* Try to satisfy the needs for each insn. */
2063 for (chain
= insns_need_reload
; chain
!= 0;
2064 chain
= chain
->next_need_reload
)
2065 find_reload_regs (chain
);
2068 /* Delete all insns that were inserted by emit_caller_save_insns during
2071 delete_caller_save_insns (void)
2073 struct insn_chain
*c
= reload_insn_chain
;
2077 while (c
!= 0 && c
->is_caller_save_insn
)
2079 struct insn_chain
*next
= c
->next
;
2082 if (c
== reload_insn_chain
)
2083 reload_insn_chain
= next
;
2087 next
->prev
= c
->prev
;
2089 c
->prev
->next
= next
;
2090 c
->next
= unused_insn_chains
;
2091 unused_insn_chains
= c
;
2099 /* Handle the failure to find a register to spill.
2100 INSN should be one of the insns which needed this particular spill reg. */
2103 spill_failure (rtx insn
, enum reg_class rclass
)
2105 if (asm_noperands (PATTERN (insn
)) >= 0)
2106 error_for_asm (insn
, "can't find a register in class %qs while "
2107 "reloading %<asm%>",
2108 reg_class_names
[rclass
]);
2111 error ("unable to find a register to spill in class %qs",
2112 reg_class_names
[rclass
]);
2116 fprintf (dump_file
, "\nReloads for insn # %d\n", INSN_UID (insn
));
2117 debug_reload_to_stream (dump_file
);
2119 fatal_insn ("this is the insn:", insn
);
2123 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2124 data that is dead in INSN. */
2127 delete_dead_insn (rtx insn
)
2129 rtx prev
= prev_real_insn (insn
);
2132 /* If the previous insn sets a register that dies in our insn, delete it
2134 if (prev
&& GET_CODE (PATTERN (prev
)) == SET
2135 && (prev_dest
= SET_DEST (PATTERN (prev
)), REG_P (prev_dest
))
2136 && reg_mentioned_p (prev_dest
, PATTERN (insn
))
2137 && find_regno_note (insn
, REG_DEAD
, REGNO (prev_dest
))
2138 && ! side_effects_p (SET_SRC (PATTERN (prev
))))
2139 delete_dead_insn (prev
);
2141 SET_INSN_DELETED (insn
);
2144 /* Modify the home of pseudo-reg I.
2145 The new home is present in reg_renumber[I].
2147 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2148 or it may be -1, meaning there is none or it is not relevant.
2149 This is used so that all pseudos spilled from a given hard reg
2150 can share one stack slot. */
2153 alter_reg (int i
, int from_reg
, bool dont_share_p
)
2155 /* When outputting an inline function, this can happen
2156 for a reg that isn't actually used. */
2157 if (regno_reg_rtx
[i
] == 0)
2160 /* If the reg got changed to a MEM at rtl-generation time,
2162 if (!REG_P (regno_reg_rtx
[i
]))
2165 /* Modify the reg-rtx to contain the new hard reg
2166 number or else to contain its pseudo reg number. */
2167 SET_REGNO (regno_reg_rtx
[i
],
2168 reg_renumber
[i
] >= 0 ? reg_renumber
[i
] : i
);
2170 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2171 allocate a stack slot for it. */
2173 if (reg_renumber
[i
] < 0
2174 && REG_N_REFS (i
) > 0
2175 && reg_equiv_constant
[i
] == 0
2176 && (reg_equiv_invariant
[i
] == 0 || reg_equiv_init
[i
] == 0)
2177 && reg_equiv_memory_loc
[i
] == 0)
2180 enum machine_mode mode
= GET_MODE (regno_reg_rtx
[i
]);
2181 unsigned int inherent_size
= PSEUDO_REGNO_BYTES (i
);
2182 unsigned int inherent_align
= GET_MODE_ALIGNMENT (mode
);
2183 unsigned int total_size
= MAX (inherent_size
, reg_max_ref_width
[i
]);
2184 unsigned int min_align
= reg_max_ref_width
[i
] * BITS_PER_UNIT
;
2187 something_was_spilled
= true;
2189 if (ira_conflicts_p
)
2191 /* Mark the spill for IRA. */
2192 SET_REGNO_REG_SET (&spilled_pseudos
, i
);
2194 x
= ira_reuse_stack_slot (i
, inherent_size
, total_size
);
2200 /* Each pseudo reg has an inherent size which comes from its own mode,
2201 and a total size which provides room for paradoxical subregs
2202 which refer to the pseudo reg in wider modes.
2204 We can use a slot already allocated if it provides both
2205 enough inherent space and enough total space.
2206 Otherwise, we allocate a new slot, making sure that it has no less
2207 inherent space, and no less total space, then the previous slot. */
2208 else if (from_reg
== -1 || (!dont_share_p
&& ira_conflicts_p
))
2212 /* No known place to spill from => no slot to reuse. */
2213 x
= assign_stack_local (mode
, total_size
,
2214 min_align
> inherent_align
2215 || total_size
> inherent_size
? -1 : 0);
2219 /* Cancel the big-endian correction done in assign_stack_local.
2220 Get the address of the beginning of the slot. This is so we
2221 can do a big-endian correction unconditionally below. */
2222 if (BYTES_BIG_ENDIAN
)
2224 adjust
= inherent_size
- total_size
;
2227 = adjust_address_nv (x
, mode_for_size (total_size
2233 if (! dont_share_p
&& ira_conflicts_p
)
2234 /* Inform IRA about allocation a new stack slot. */
2235 ira_mark_new_stack_slot (stack_slot
, i
, total_size
);
2238 /* Reuse a stack slot if possible. */
2239 else if (spill_stack_slot
[from_reg
] != 0
2240 && spill_stack_slot_width
[from_reg
] >= total_size
2241 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot
[from_reg
]))
2243 && MEM_ALIGN (spill_stack_slot
[from_reg
]) >= min_align
)
2244 x
= spill_stack_slot
[from_reg
];
2246 /* Allocate a bigger slot. */
2249 /* Compute maximum size needed, both for inherent size
2250 and for total size. */
2253 if (spill_stack_slot
[from_reg
])
2255 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot
[from_reg
]))
2257 mode
= GET_MODE (spill_stack_slot
[from_reg
]);
2258 if (spill_stack_slot_width
[from_reg
] > total_size
)
2259 total_size
= spill_stack_slot_width
[from_reg
];
2260 if (MEM_ALIGN (spill_stack_slot
[from_reg
]) > min_align
)
2261 min_align
= MEM_ALIGN (spill_stack_slot
[from_reg
]);
2264 /* Make a slot with that size. */
2265 x
= assign_stack_local (mode
, total_size
,
2266 min_align
> inherent_align
2267 || total_size
> inherent_size
? -1 : 0);
2270 /* Cancel the big-endian correction done in assign_stack_local.
2271 Get the address of the beginning of the slot. This is so we
2272 can do a big-endian correction unconditionally below. */
2273 if (BYTES_BIG_ENDIAN
)
2275 adjust
= GET_MODE_SIZE (mode
) - total_size
;
2278 = adjust_address_nv (x
, mode_for_size (total_size
2284 spill_stack_slot
[from_reg
] = stack_slot
;
2285 spill_stack_slot_width
[from_reg
] = total_size
;
2288 /* On a big endian machine, the "address" of the slot
2289 is the address of the low part that fits its inherent mode. */
2290 if (BYTES_BIG_ENDIAN
&& inherent_size
< total_size
)
2291 adjust
+= (total_size
- inherent_size
);
2293 /* If we have any adjustment to make, or if the stack slot is the
2294 wrong mode, make a new stack slot. */
2295 x
= adjust_address_nv (x
, GET_MODE (regno_reg_rtx
[i
]), adjust
);
2297 /* Set all of the memory attributes as appropriate for a spill. */
2298 set_mem_attrs_for_spill (x
);
2300 /* Save the stack slot for later. */
2301 reg_equiv_memory_loc
[i
] = x
;
2305 /* Mark the slots in regs_ever_live for the hard regs used by
2306 pseudo-reg number REGNO, accessed in MODE. */
2309 mark_home_live_1 (int regno
, enum machine_mode mode
)
2313 i
= reg_renumber
[regno
];
2316 lim
= end_hard_regno (mode
, i
);
2318 df_set_regs_ever_live(i
++, true);
2321 /* Mark the slots in regs_ever_live for the hard regs
2322 used by pseudo-reg number REGNO. */
2325 mark_home_live (int regno
)
2327 if (reg_renumber
[regno
] >= 0)
2328 mark_home_live_1 (regno
, PSEUDO_REGNO_MODE (regno
));
2331 /* This function handles the tracking of elimination offsets around branches.
2333 X is a piece of RTL being scanned.
2335 INSN is the insn that it came from, if any.
2337 INITIAL_P is nonzero if we are to set the offset to be the initial
2338 offset and zero if we are setting the offset of the label to be the
2342 set_label_offsets (rtx x
, rtx insn
, int initial_p
)
2344 enum rtx_code code
= GET_CODE (x
);
2347 struct elim_table
*p
;
2352 if (LABEL_REF_NONLOCAL_P (x
))
2357 /* ... fall through ... */
2360 /* If we know nothing about this label, set the desired offsets. Note
2361 that this sets the offset at a label to be the offset before a label
2362 if we don't know anything about the label. This is not correct for
2363 the label after a BARRIER, but is the best guess we can make. If
2364 we guessed wrong, we will suppress an elimination that might have
2365 been possible had we been able to guess correctly. */
2367 if (! offsets_known_at
[CODE_LABEL_NUMBER (x
) - first_label_num
])
2369 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2370 offsets_at
[CODE_LABEL_NUMBER (x
) - first_label_num
][i
]
2371 = (initial_p
? reg_eliminate
[i
].initial_offset
2372 : reg_eliminate
[i
].offset
);
2373 offsets_known_at
[CODE_LABEL_NUMBER (x
) - first_label_num
] = 1;
2376 /* Otherwise, if this is the definition of a label and it is
2377 preceded by a BARRIER, set our offsets to the known offset of
2381 && (tem
= prev_nonnote_insn (insn
)) != 0
2383 set_offsets_for_label (insn
);
2385 /* If neither of the above cases is true, compare each offset
2386 with those previously recorded and suppress any eliminations
2387 where the offsets disagree. */
2389 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2390 if (offsets_at
[CODE_LABEL_NUMBER (x
) - first_label_num
][i
]
2391 != (initial_p
? reg_eliminate
[i
].initial_offset
2392 : reg_eliminate
[i
].offset
))
2393 reg_eliminate
[i
].can_eliminate
= 0;
2398 set_label_offsets (PATTERN (insn
), insn
, initial_p
);
2400 /* ... fall through ... */
2404 /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
2405 to indirectly and hence must have all eliminations at their
2407 for (tem
= REG_NOTES (x
); tem
; tem
= XEXP (tem
, 1))
2408 if (REG_NOTE_KIND (tem
) == REG_LABEL_OPERAND
)
2409 set_label_offsets (XEXP (tem
, 0), insn
, 1);
2415 /* Each of the labels in the parallel or address vector must be
2416 at their initial offsets. We want the first field for PARALLEL
2417 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2419 for (i
= 0; i
< (unsigned) XVECLEN (x
, code
== ADDR_DIFF_VEC
); i
++)
2420 set_label_offsets (XVECEXP (x
, code
== ADDR_DIFF_VEC
, i
),
2425 /* We only care about setting PC. If the source is not RETURN,
2426 IF_THEN_ELSE, or a label, disable any eliminations not at
2427 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2428 isn't one of those possibilities. For branches to a label,
2429 call ourselves recursively.
2431 Note that this can disable elimination unnecessarily when we have
2432 a non-local goto since it will look like a non-constant jump to
2433 someplace in the current function. This isn't a significant
2434 problem since such jumps will normally be when all elimination
2435 pairs are back to their initial offsets. */
2437 if (SET_DEST (x
) != pc_rtx
)
2440 switch (GET_CODE (SET_SRC (x
)))
2447 set_label_offsets (SET_SRC (x
), insn
, initial_p
);
2451 tem
= XEXP (SET_SRC (x
), 1);
2452 if (GET_CODE (tem
) == LABEL_REF
)
2453 set_label_offsets (XEXP (tem
, 0), insn
, initial_p
);
2454 else if (GET_CODE (tem
) != PC
&& GET_CODE (tem
) != RETURN
)
2457 tem
= XEXP (SET_SRC (x
), 2);
2458 if (GET_CODE (tem
) == LABEL_REF
)
2459 set_label_offsets (XEXP (tem
, 0), insn
, initial_p
);
2460 else if (GET_CODE (tem
) != PC
&& GET_CODE (tem
) != RETURN
)
2468 /* If we reach here, all eliminations must be at their initial
2469 offset because we are doing a jump to a variable address. */
2470 for (p
= reg_eliminate
; p
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; p
++)
2471 if (p
->offset
!= p
->initial_offset
)
2472 p
->can_eliminate
= 0;
2480 /* Called through for_each_rtx, this function examines every reg that occurs
2481 in PX and adjusts the costs for its elimination which are gathered by IRA.
2482 DATA is the insn in which PX occurs. We do not recurse into MEM
2486 note_reg_elim_costly (rtx
*px
, void *data
)
2488 rtx insn
= (rtx
)data
;
2495 && REGNO (x
) >= FIRST_PSEUDO_REGISTER
2496 && reg_equiv_init
[REGNO (x
)]
2497 && reg_equiv_invariant
[REGNO (x
)])
2499 rtx t
= reg_equiv_invariant
[REGNO (x
)];
2500 rtx new_rtx
= eliminate_regs_1 (t
, Pmode
, insn
, true, true);
2501 int cost
= rtx_cost (new_rtx
, SET
, optimize_bb_for_speed_p (elim_bb
));
2502 int freq
= REG_FREQ_FROM_BB (elim_bb
);
2505 ira_adjust_equiv_reg_cost (REGNO (x
), -cost
* freq
);
2510 /* Scan X and replace any eliminable registers (such as fp) with a
2511 replacement (such as sp), plus an offset.
2513 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2514 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2515 MEM, we are allowed to replace a sum of a register and the constant zero
2516 with the register, which we cannot do outside a MEM. In addition, we need
2517 to record the fact that a register is referenced outside a MEM.
2519 If INSN is an insn, it is the insn containing X. If we replace a REG
2520 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2521 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2522 the REG is being modified.
2524 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2525 That's used when we eliminate in expressions stored in notes.
2526 This means, do not set ref_outside_mem even if the reference
2529 If FOR_COSTS is true, we are being called before reload in order to
2530 estimate the costs of keeping registers with an equivalence unallocated.
2532 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2533 replacements done assuming all offsets are at their initial values. If
2534 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2535 encounter, return the actual location so that find_reloads will do
2536 the proper thing. */
2539 eliminate_regs_1 (rtx x
, enum machine_mode mem_mode
, rtx insn
,
2540 bool may_use_invariant
, bool for_costs
)
2542 enum rtx_code code
= GET_CODE (x
);
2543 struct elim_table
*ep
;
2550 if (! current_function_decl
)
2573 /* First handle the case where we encounter a bare register that
2574 is eliminable. Replace it with a PLUS. */
2575 if (regno
< FIRST_PSEUDO_REGISTER
)
2577 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2579 if (ep
->from_rtx
== x
&& ep
->can_eliminate
)
2580 return plus_constant (ep
->to_rtx
, ep
->previous_offset
);
2583 else if (reg_renumber
&& reg_renumber
[regno
] < 0
2584 && reg_equiv_invariant
&& reg_equiv_invariant
[regno
])
2586 if (may_use_invariant
|| (insn
&& DEBUG_INSN_P (insn
)))
2587 return eliminate_regs_1 (copy_rtx (reg_equiv_invariant
[regno
]),
2588 mem_mode
, insn
, true, for_costs
);
2589 /* There exists at least one use of REGNO that cannot be
2590 eliminated. Prevent the defining insn from being deleted. */
2591 reg_equiv_init
[regno
] = NULL_RTX
;
2593 alter_reg (regno
, -1, true);
2597 /* You might think handling MINUS in a manner similar to PLUS is a
2598 good idea. It is not. It has been tried multiple times and every
2599 time the change has had to have been reverted.
2601 Other parts of reload know a PLUS is special (gen_reload for example)
2602 and require special code to handle code a reloaded PLUS operand.
2604 Also consider backends where the flags register is clobbered by a
2605 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2606 lea instruction comes to mind). If we try to reload a MINUS, we
2607 may kill the flags register that was holding a useful value.
2609 So, please before trying to handle MINUS, consider reload as a
2610 whole instead of this little section as well as the backend issues. */
2612 /* If this is the sum of an eliminable register and a constant, rework
2614 if (REG_P (XEXP (x
, 0))
2615 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2616 && CONSTANT_P (XEXP (x
, 1)))
2618 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2620 if (ep
->from_rtx
== XEXP (x
, 0) && ep
->can_eliminate
)
2622 /* The only time we want to replace a PLUS with a REG (this
2623 occurs when the constant operand of the PLUS is the negative
2624 of the offset) is when we are inside a MEM. We won't want
2625 to do so at other times because that would change the
2626 structure of the insn in a way that reload can't handle.
2627 We special-case the commonest situation in
2628 eliminate_regs_in_insn, so just replace a PLUS with a
2629 PLUS here, unless inside a MEM. */
2630 if (mem_mode
!= 0 && CONST_INT_P (XEXP (x
, 1))
2631 && INTVAL (XEXP (x
, 1)) == - ep
->previous_offset
)
2634 return gen_rtx_PLUS (Pmode
, ep
->to_rtx
,
2635 plus_constant (XEXP (x
, 1),
2636 ep
->previous_offset
));
2639 /* If the register is not eliminable, we are done since the other
2640 operand is a constant. */
2644 /* If this is part of an address, we want to bring any constant to the
2645 outermost PLUS. We will do this by doing register replacement in
2646 our operands and seeing if a constant shows up in one of them.
2648 Note that there is no risk of modifying the structure of the insn,
2649 since we only get called for its operands, thus we are either
2650 modifying the address inside a MEM, or something like an address
2651 operand of a load-address insn. */
2654 rtx new0
= eliminate_regs_1 (XEXP (x
, 0), mem_mode
, insn
, true,
2656 rtx new1
= eliminate_regs_1 (XEXP (x
, 1), mem_mode
, insn
, true,
2659 if (reg_renumber
&& (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1)))
2661 /* If one side is a PLUS and the other side is a pseudo that
2662 didn't get a hard register but has a reg_equiv_constant,
2663 we must replace the constant here since it may no longer
2664 be in the position of any operand. */
2665 if (GET_CODE (new0
) == PLUS
&& REG_P (new1
)
2666 && REGNO (new1
) >= FIRST_PSEUDO_REGISTER
2667 && reg_renumber
[REGNO (new1
)] < 0
2668 && reg_equiv_constant
!= 0
2669 && reg_equiv_constant
[REGNO (new1
)] != 0)
2670 new1
= reg_equiv_constant
[REGNO (new1
)];
2671 else if (GET_CODE (new1
) == PLUS
&& REG_P (new0
)
2672 && REGNO (new0
) >= FIRST_PSEUDO_REGISTER
2673 && reg_renumber
[REGNO (new0
)] < 0
2674 && reg_equiv_constant
[REGNO (new0
)] != 0)
2675 new0
= reg_equiv_constant
[REGNO (new0
)];
2677 new_rtx
= form_sum (GET_MODE (x
), new0
, new1
);
2679 /* As above, if we are not inside a MEM we do not want to
2680 turn a PLUS into something else. We might try to do so here
2681 for an addition of 0 if we aren't optimizing. */
2682 if (! mem_mode
&& GET_CODE (new_rtx
) != PLUS
)
2683 return gen_rtx_PLUS (GET_MODE (x
), new_rtx
, const0_rtx
);
2691 /* If this is the product of an eliminable register and a
2692 constant, apply the distribute law and move the constant out
2693 so that we have (plus (mult ..) ..). This is needed in order
2694 to keep load-address insns valid. This case is pathological.
2695 We ignore the possibility of overflow here. */
2696 if (REG_P (XEXP (x
, 0))
2697 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2698 && CONST_INT_P (XEXP (x
, 1)))
2699 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2701 if (ep
->from_rtx
== XEXP (x
, 0) && ep
->can_eliminate
)
2704 /* Refs inside notes or in DEBUG_INSNs don't count for
2706 && ! (insn
!= 0 && (GET_CODE (insn
) == EXPR_LIST
2707 || GET_CODE (insn
) == INSN_LIST
2708 || DEBUG_INSN_P (insn
))))
2709 ep
->ref_outside_mem
= 1;
2712 plus_constant (gen_rtx_MULT (Pmode
, ep
->to_rtx
, XEXP (x
, 1)),
2713 ep
->previous_offset
* INTVAL (XEXP (x
, 1)));
2716 /* ... fall through ... */
2720 /* See comments before PLUS about handling MINUS. */
2722 case DIV
: case UDIV
:
2723 case MOD
: case UMOD
:
2724 case AND
: case IOR
: case XOR
:
2725 case ROTATERT
: case ROTATE
:
2726 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
2728 case GE
: case GT
: case GEU
: case GTU
:
2729 case LE
: case LT
: case LEU
: case LTU
:
2731 rtx new0
= eliminate_regs_1 (XEXP (x
, 0), mem_mode
, insn
, false,
2733 rtx new1
= XEXP (x
, 1)
2734 ? eliminate_regs_1 (XEXP (x
, 1), mem_mode
, insn
, false,
2737 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
2738 return gen_rtx_fmt_ee (code
, GET_MODE (x
), new0
, new1
);
2743 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2746 new_rtx
= eliminate_regs_1 (XEXP (x
, 0), mem_mode
, insn
, true,
2748 if (new_rtx
!= XEXP (x
, 0))
2750 /* If this is a REG_DEAD note, it is not valid anymore.
2751 Using the eliminated version could result in creating a
2752 REG_DEAD note for the stack or frame pointer. */
2753 if (REG_NOTE_KIND (x
) == REG_DEAD
)
2755 ? eliminate_regs_1 (XEXP (x
, 1), mem_mode
, insn
, true,
2759 x
= alloc_reg_note (REG_NOTE_KIND (x
), new_rtx
, XEXP (x
, 1));
2763 /* ... fall through ... */
2766 /* Now do eliminations in the rest of the chain. If this was
2767 an EXPR_LIST, this might result in allocating more memory than is
2768 strictly needed, but it simplifies the code. */
2771 new_rtx
= eliminate_regs_1 (XEXP (x
, 1), mem_mode
, insn
, true,
2773 if (new_rtx
!= XEXP (x
, 1))
2775 gen_rtx_fmt_ee (GET_CODE (x
), GET_MODE (x
), XEXP (x
, 0), new_rtx
);
2783 /* We do not support elimination of a register that is modified.
2784 elimination_effects has already make sure that this does not
2790 /* We do not support elimination of a register that is modified.
2791 elimination_effects has already make sure that this does not
2792 happen. The only remaining case we need to consider here is
2793 that the increment value may be an eliminable register. */
2794 if (GET_CODE (XEXP (x
, 1)) == PLUS
2795 && XEXP (XEXP (x
, 1), 0) == XEXP (x
, 0))
2797 rtx new_rtx
= eliminate_regs_1 (XEXP (XEXP (x
, 1), 1), mem_mode
,
2798 insn
, true, for_costs
);
2800 if (new_rtx
!= XEXP (XEXP (x
, 1), 1))
2801 return gen_rtx_fmt_ee (code
, GET_MODE (x
), XEXP (x
, 0),
2802 gen_rtx_PLUS (GET_MODE (x
),
2803 XEXP (x
, 0), new_rtx
));
2807 case STRICT_LOW_PART
:
2809 case SIGN_EXTEND
: case ZERO_EXTEND
:
2810 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
2811 case FLOAT
: case FIX
:
2812 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
2821 new_rtx
= eliminate_regs_1 (XEXP (x
, 0), mem_mode
, insn
, false,
2823 if (new_rtx
!= XEXP (x
, 0))
2824 return gen_rtx_fmt_e (code
, GET_MODE (x
), new_rtx
);
2828 /* Similar to above processing, but preserve SUBREG_BYTE.
2829 Convert (subreg (mem)) to (mem) if not paradoxical.
2830 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2831 pseudo didn't get a hard reg, we must replace this with the
2832 eliminated version of the memory location because push_reload
2833 may do the replacement in certain circumstances. */
2834 if (REG_P (SUBREG_REG (x
))
2835 && (GET_MODE_SIZE (GET_MODE (x
))
2836 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
2837 && reg_equiv_memory_loc
!= 0
2838 && reg_equiv_memory_loc
[REGNO (SUBREG_REG (x
))] != 0)
2840 new_rtx
= SUBREG_REG (x
);
2843 new_rtx
= eliminate_regs_1 (SUBREG_REG (x
), mem_mode
, insn
, false,
2846 if (new_rtx
!= SUBREG_REG (x
))
2848 int x_size
= GET_MODE_SIZE (GET_MODE (x
));
2849 int new_size
= GET_MODE_SIZE (GET_MODE (new_rtx
));
2852 && ((x_size
< new_size
2853 #ifdef WORD_REGISTER_OPERATIONS
2854 /* On these machines, combine can create rtl of the form
2855 (set (subreg:m1 (reg:m2 R) 0) ...)
2856 where m1 < m2, and expects something interesting to
2857 happen to the entire word. Moreover, it will use the
2858 (reg:m2 R) later, expecting all bits to be preserved.
2859 So if the number of words is the same, preserve the
2860 subreg so that push_reload can see it. */
2861 && ! ((x_size
- 1) / UNITS_PER_WORD
2862 == (new_size
-1 ) / UNITS_PER_WORD
)
2865 || x_size
== new_size
)
2867 return adjust_address_nv (new_rtx
, GET_MODE (x
), SUBREG_BYTE (x
));
2869 return gen_rtx_SUBREG (GET_MODE (x
), new_rtx
, SUBREG_BYTE (x
));
2875 /* Our only special processing is to pass the mode of the MEM to our
2876 recursive call and copy the flags. While we are here, handle this
2877 case more efficiently. */
2879 new_rtx
= eliminate_regs_1 (XEXP (x
, 0), GET_MODE (x
), insn
, true,
2882 && memory_address_p (GET_MODE (x
), XEXP (x
, 0))
2883 && !memory_address_p (GET_MODE (x
), new_rtx
))
2884 for_each_rtx (&XEXP (x
, 0), note_reg_elim_costly
, insn
);
2886 return replace_equiv_address_nv (x
, new_rtx
);
2889 /* Handle insn_list USE that a call to a pure function may generate. */
2890 new_rtx
= eliminate_regs_1 (XEXP (x
, 0), VOIDmode
, insn
, false,
2892 if (new_rtx
!= XEXP (x
, 0))
2893 return gen_rtx_USE (GET_MODE (x
), new_rtx
);
2897 gcc_assert (insn
&& DEBUG_INSN_P (insn
));
2908 /* Process each of our operands recursively. If any have changed, make a
2910 fmt
= GET_RTX_FORMAT (code
);
2911 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
2915 new_rtx
= eliminate_regs_1 (XEXP (x
, i
), mem_mode
, insn
, false,
2917 if (new_rtx
!= XEXP (x
, i
) && ! copied
)
2919 x
= shallow_copy_rtx (x
);
2922 XEXP (x
, i
) = new_rtx
;
2924 else if (*fmt
== 'E')
2927 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2929 new_rtx
= eliminate_regs_1 (XVECEXP (x
, i
, j
), mem_mode
, insn
, false,
2931 if (new_rtx
!= XVECEXP (x
, i
, j
) && ! copied_vec
)
2933 rtvec new_v
= gen_rtvec_v (XVECLEN (x
, i
),
2937 x
= shallow_copy_rtx (x
);
2940 XVEC (x
, i
) = new_v
;
2943 XVECEXP (x
, i
, j
) = new_rtx
;
2952 eliminate_regs (rtx x
, enum machine_mode mem_mode
, rtx insn
)
2954 return eliminate_regs_1 (x
, mem_mode
, insn
, false, false);
2957 /* Scan rtx X for modifications of elimination target registers. Update
2958 the table of eliminables to reflect the changed state. MEM_MODE is
2959 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2962 elimination_effects (rtx x
, enum machine_mode mem_mode
)
2964 enum rtx_code code
= GET_CODE (x
);
2965 struct elim_table
*ep
;
2990 /* First handle the case where we encounter a bare register that
2991 is eliminable. Replace it with a PLUS. */
2992 if (regno
< FIRST_PSEUDO_REGISTER
)
2994 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2996 if (ep
->from_rtx
== x
&& ep
->can_eliminate
)
2999 ep
->ref_outside_mem
= 1;
3004 else if (reg_renumber
[regno
] < 0 && reg_equiv_constant
3005 && reg_equiv_constant
[regno
]
3006 && ! function_invariant_p (reg_equiv_constant
[regno
]))
3007 elimination_effects (reg_equiv_constant
[regno
], mem_mode
);
3016 /* If we modify the source of an elimination rule, disable it. */
3017 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3018 if (ep
->from_rtx
== XEXP (x
, 0))
3019 ep
->can_eliminate
= 0;
3021 /* If we modify the target of an elimination rule by adding a constant,
3022 update its offset. If we modify the target in any other way, we'll
3023 have to disable the rule as well. */
3024 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3025 if (ep
->to_rtx
== XEXP (x
, 0))
3027 int size
= GET_MODE_SIZE (mem_mode
);
3029 /* If more bytes than MEM_MODE are pushed, account for them. */
3030 #ifdef PUSH_ROUNDING
3031 if (ep
->to_rtx
== stack_pointer_rtx
)
3032 size
= PUSH_ROUNDING (size
);
3034 if (code
== PRE_DEC
|| code
== POST_DEC
)
3036 else if (code
== PRE_INC
|| code
== POST_INC
)
3038 else if (code
== PRE_MODIFY
|| code
== POST_MODIFY
)
3040 if (GET_CODE (XEXP (x
, 1)) == PLUS
3041 && XEXP (x
, 0) == XEXP (XEXP (x
, 1), 0)
3042 && CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
3043 ep
->offset
-= INTVAL (XEXP (XEXP (x
, 1), 1));
3045 ep
->can_eliminate
= 0;
3049 /* These two aren't unary operators. */
3050 if (code
== POST_MODIFY
|| code
== PRE_MODIFY
)
3053 /* Fall through to generic unary operation case. */
3054 case STRICT_LOW_PART
:
3056 case SIGN_EXTEND
: case ZERO_EXTEND
:
3057 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
3058 case FLOAT
: case FIX
:
3059 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
3068 elimination_effects (XEXP (x
, 0), mem_mode
);
3072 if (REG_P (SUBREG_REG (x
))
3073 && (GET_MODE_SIZE (GET_MODE (x
))
3074 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
3075 && reg_equiv_memory_loc
!= 0
3076 && reg_equiv_memory_loc
[REGNO (SUBREG_REG (x
))] != 0)
3079 elimination_effects (SUBREG_REG (x
), mem_mode
);
3083 /* If using a register that is the source of an eliminate we still
3084 think can be performed, note it cannot be performed since we don't
3085 know how this register is used. */
3086 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3087 if (ep
->from_rtx
== XEXP (x
, 0))
3088 ep
->can_eliminate
= 0;
3090 elimination_effects (XEXP (x
, 0), mem_mode
);
3094 /* If clobbering a register that is the replacement register for an
3095 elimination we still think can be performed, note that it cannot
3096 be performed. Otherwise, we need not be concerned about it. */
3097 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3098 if (ep
->to_rtx
== XEXP (x
, 0))
3099 ep
->can_eliminate
= 0;
3101 elimination_effects (XEXP (x
, 0), mem_mode
);
3105 /* Check for setting a register that we know about. */
3106 if (REG_P (SET_DEST (x
)))
3108 /* See if this is setting the replacement register for an
3111 If DEST is the hard frame pointer, we do nothing because we
3112 assume that all assignments to the frame pointer are for
3113 non-local gotos and are being done at a time when they are valid
3114 and do not disturb anything else. Some machines want to
3115 eliminate a fake argument pointer (or even a fake frame pointer)
3116 with either the real frame or the stack pointer. Assignments to
3117 the hard frame pointer must not prevent this elimination. */
3119 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
3121 if (ep
->to_rtx
== SET_DEST (x
)
3122 && SET_DEST (x
) != hard_frame_pointer_rtx
)
3124 /* If it is being incremented, adjust the offset. Otherwise,
3125 this elimination can't be done. */
3126 rtx src
= SET_SRC (x
);
3128 if (GET_CODE (src
) == PLUS
3129 && XEXP (src
, 0) == SET_DEST (x
)
3130 && CONST_INT_P (XEXP (src
, 1)))
3131 ep
->offset
-= INTVAL (XEXP (src
, 1));
3133 ep
->can_eliminate
= 0;
3137 elimination_effects (SET_DEST (x
), VOIDmode
);
3138 elimination_effects (SET_SRC (x
), VOIDmode
);
3142 /* Our only special processing is to pass the mode of the MEM to our
3144 elimination_effects (XEXP (x
, 0), GET_MODE (x
));
3151 fmt
= GET_RTX_FORMAT (code
);
3152 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3155 elimination_effects (XEXP (x
, i
), mem_mode
);
3156 else if (*fmt
== 'E')
3157 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3158 elimination_effects (XVECEXP (x
, i
, j
), mem_mode
);
3162 /* Descend through rtx X and verify that no references to eliminable registers
3163 remain. If any do remain, mark the involved register as not
3167 check_eliminable_occurrences (rtx x
)
3176 code
= GET_CODE (x
);
3178 if (code
== REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
3180 struct elim_table
*ep
;
3182 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3183 if (ep
->from_rtx
== x
)
3184 ep
->can_eliminate
= 0;
3188 fmt
= GET_RTX_FORMAT (code
);
3189 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3192 check_eliminable_occurrences (XEXP (x
, i
));
3193 else if (*fmt
== 'E')
3196 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3197 check_eliminable_occurrences (XVECEXP (x
, i
, j
));
3202 /* Scan INSN and eliminate all eliminable registers in it.
3204 If REPLACE is nonzero, do the replacement destructively. Also
3205 delete the insn as dead it if it is setting an eliminable register.
3207 If REPLACE is zero, do all our allocations in reload_obstack.
3209 If no eliminations were done and this insn doesn't require any elimination
3210 processing (these are not identical conditions: it might be updating sp,
3211 but not referencing fp; this needs to be seen during reload_as_needed so
3212 that the offset between fp and sp can be taken into consideration), zero
3213 is returned. Otherwise, 1 is returned. */
3216 eliminate_regs_in_insn (rtx insn
, int replace
)
3218 int icode
= recog_memoized (insn
);
3219 rtx old_body
= PATTERN (insn
);
3220 int insn_is_asm
= asm_noperands (old_body
) >= 0;
3221 rtx old_set
= single_set (insn
);
3225 rtx substed_operand
[MAX_RECOG_OPERANDS
];
3226 rtx orig_operand
[MAX_RECOG_OPERANDS
];
3227 struct elim_table
*ep
;
3228 rtx plus_src
, plus_cst_src
;
3230 if (! insn_is_asm
&& icode
< 0)
3232 gcc_assert (GET_CODE (PATTERN (insn
)) == USE
3233 || GET_CODE (PATTERN (insn
)) == CLOBBER
3234 || GET_CODE (PATTERN (insn
)) == ADDR_VEC
3235 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
3236 || GET_CODE (PATTERN (insn
)) == ASM_INPUT
3237 || DEBUG_INSN_P (insn
));
3238 if (DEBUG_INSN_P (insn
))
3239 INSN_VAR_LOCATION_LOC (insn
)
3240 = eliminate_regs (INSN_VAR_LOCATION_LOC (insn
), VOIDmode
, insn
);
3244 if (old_set
!= 0 && REG_P (SET_DEST (old_set
))
3245 && REGNO (SET_DEST (old_set
)) < FIRST_PSEUDO_REGISTER
)
3247 /* Check for setting an eliminable register. */
3248 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3249 if (ep
->from_rtx
== SET_DEST (old_set
) && ep
->can_eliminate
)
3251 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3252 /* If this is setting the frame pointer register to the
3253 hardware frame pointer register and this is an elimination
3254 that will be done (tested above), this insn is really
3255 adjusting the frame pointer downward to compensate for
3256 the adjustment done before a nonlocal goto. */
3257 if (ep
->from
== FRAME_POINTER_REGNUM
3258 && ep
->to
== HARD_FRAME_POINTER_REGNUM
)
3260 rtx base
= SET_SRC (old_set
);
3261 rtx base_insn
= insn
;
3262 HOST_WIDE_INT offset
= 0;
3264 while (base
!= ep
->to_rtx
)
3266 rtx prev_insn
, prev_set
;
3268 if (GET_CODE (base
) == PLUS
3269 && CONST_INT_P (XEXP (base
, 1)))
3271 offset
+= INTVAL (XEXP (base
, 1));
3272 base
= XEXP (base
, 0);
3274 else if ((prev_insn
= prev_nonnote_insn (base_insn
)) != 0
3275 && (prev_set
= single_set (prev_insn
)) != 0
3276 && rtx_equal_p (SET_DEST (prev_set
), base
))
3278 base
= SET_SRC (prev_set
);
3279 base_insn
= prev_insn
;
3285 if (base
== ep
->to_rtx
)
3288 = plus_constant (ep
->to_rtx
, offset
- ep
->offset
);
3290 new_body
= old_body
;
3293 new_body
= copy_insn (old_body
);
3294 if (REG_NOTES (insn
))
3295 REG_NOTES (insn
) = copy_insn_1 (REG_NOTES (insn
));
3297 PATTERN (insn
) = new_body
;
3298 old_set
= single_set (insn
);
3300 /* First see if this insn remains valid when we
3301 make the change. If not, keep the INSN_CODE
3302 the same and let reload fit it up. */
3303 validate_change (insn
, &SET_SRC (old_set
), src
, 1);
3304 validate_change (insn
, &SET_DEST (old_set
),
3306 if (! apply_change_group ())
3308 SET_SRC (old_set
) = src
;
3309 SET_DEST (old_set
) = ep
->to_rtx
;
3318 /* In this case this insn isn't serving a useful purpose. We
3319 will delete it in reload_as_needed once we know that this
3320 elimination is, in fact, being done.
3322 If REPLACE isn't set, we can't delete this insn, but needn't
3323 process it since it won't be used unless something changes. */
3326 delete_dead_insn (insn
);
3334 /* We allow one special case which happens to work on all machines we
3335 currently support: a single set with the source or a REG_EQUAL
3336 note being a PLUS of an eliminable register and a constant. */
3337 plus_src
= plus_cst_src
= 0;
3338 if (old_set
&& REG_P (SET_DEST (old_set
)))
3340 if (GET_CODE (SET_SRC (old_set
)) == PLUS
)
3341 plus_src
= SET_SRC (old_set
);
3342 /* First see if the source is of the form (plus (...) CST). */
3344 && CONST_INT_P (XEXP (plus_src
, 1)))
3345 plus_cst_src
= plus_src
;
3346 else if (REG_P (SET_SRC (old_set
))
3349 /* Otherwise, see if we have a REG_EQUAL note of the form
3350 (plus (...) CST). */
3352 for (links
= REG_NOTES (insn
); links
; links
= XEXP (links
, 1))
3354 if ((REG_NOTE_KIND (links
) == REG_EQUAL
3355 || REG_NOTE_KIND (links
) == REG_EQUIV
)
3356 && GET_CODE (XEXP (links
, 0)) == PLUS
3357 && CONST_INT_P (XEXP (XEXP (links
, 0), 1)))
3359 plus_cst_src
= XEXP (links
, 0);
3365 /* Check that the first operand of the PLUS is a hard reg or
3366 the lowpart subreg of one. */
3369 rtx reg
= XEXP (plus_cst_src
, 0);
3370 if (GET_CODE (reg
) == SUBREG
&& subreg_lowpart_p (reg
))
3371 reg
= SUBREG_REG (reg
);
3373 if (!REG_P (reg
) || REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
3379 rtx reg
= XEXP (plus_cst_src
, 0);
3380 HOST_WIDE_INT offset
= INTVAL (XEXP (plus_cst_src
, 1));
3382 if (GET_CODE (reg
) == SUBREG
)
3383 reg
= SUBREG_REG (reg
);
3385 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3386 if (ep
->from_rtx
== reg
&& ep
->can_eliminate
)
3388 rtx to_rtx
= ep
->to_rtx
;
3389 offset
+= ep
->offset
;
3390 offset
= trunc_int_for_mode (offset
, GET_MODE (plus_cst_src
));
3392 if (GET_CODE (XEXP (plus_cst_src
, 0)) == SUBREG
)
3393 to_rtx
= gen_lowpart (GET_MODE (XEXP (plus_cst_src
, 0)),
3395 /* If we have a nonzero offset, and the source is already
3396 a simple REG, the following transformation would
3397 increase the cost of the insn by replacing a simple REG
3398 with (plus (reg sp) CST). So try only when we already
3399 had a PLUS before. */
3400 if (offset
== 0 || plus_src
)
3402 rtx new_src
= plus_constant (to_rtx
, offset
);
3404 new_body
= old_body
;
3407 new_body
= copy_insn (old_body
);
3408 if (REG_NOTES (insn
))
3409 REG_NOTES (insn
) = copy_insn_1 (REG_NOTES (insn
));
3411 PATTERN (insn
) = new_body
;
3412 old_set
= single_set (insn
);
3414 /* First see if this insn remains valid when we make the
3415 change. If not, try to replace the whole pattern with
3416 a simple set (this may help if the original insn was a
3417 PARALLEL that was only recognized as single_set due to
3418 REG_UNUSED notes). If this isn't valid either, keep
3419 the INSN_CODE the same and let reload fix it up. */
3420 if (!validate_change (insn
, &SET_SRC (old_set
), new_src
, 0))
3422 rtx new_pat
= gen_rtx_SET (VOIDmode
,
3423 SET_DEST (old_set
), new_src
);
3425 if (!validate_change (insn
, &PATTERN (insn
), new_pat
, 0))
3426 SET_SRC (old_set
) = new_src
;
3433 /* This can't have an effect on elimination offsets, so skip right
3439 /* Determine the effects of this insn on elimination offsets. */
3440 elimination_effects (old_body
, VOIDmode
);
3442 /* Eliminate all eliminable registers occurring in operands that
3443 can be handled by reload. */
3444 extract_insn (insn
);
3445 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3447 orig_operand
[i
] = recog_data
.operand
[i
];
3448 substed_operand
[i
] = recog_data
.operand
[i
];
3450 /* For an asm statement, every operand is eliminable. */
3451 if (insn_is_asm
|| insn_data
[icode
].operand
[i
].eliminable
)
3453 bool is_set_src
, in_plus
;
3455 /* Check for setting a register that we know about. */
3456 if (recog_data
.operand_type
[i
] != OP_IN
3457 && REG_P (orig_operand
[i
]))
3459 /* If we are assigning to a register that can be eliminated, it
3460 must be as part of a PARALLEL, since the code above handles
3461 single SETs. We must indicate that we can no longer
3462 eliminate this reg. */
3463 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
3465 if (ep
->from_rtx
== orig_operand
[i
])
3466 ep
->can_eliminate
= 0;
3469 /* Companion to the above plus substitution, we can allow
3470 invariants as the source of a plain move. */
3473 && recog_data
.operand_loc
[i
] == &SET_SRC (old_set
))
3477 && (recog_data
.operand_loc
[i
] == &XEXP (plus_src
, 0)
3478 || recog_data
.operand_loc
[i
] == &XEXP (plus_src
, 1)))
3482 = eliminate_regs_1 (recog_data
.operand
[i
], VOIDmode
,
3483 replace
? insn
: NULL_RTX
,
3484 is_set_src
|| in_plus
, false);
3485 if (substed_operand
[i
] != orig_operand
[i
])
3487 /* Terminate the search in check_eliminable_occurrences at
3489 *recog_data
.operand_loc
[i
] = 0;
3491 /* If an output operand changed from a REG to a MEM and INSN is an
3492 insn, write a CLOBBER insn. */
3493 if (recog_data
.operand_type
[i
] != OP_IN
3494 && REG_P (orig_operand
[i
])
3495 && MEM_P (substed_operand
[i
])
3497 emit_insn_after (gen_clobber (orig_operand
[i
]), insn
);
3501 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3502 *recog_data
.dup_loc
[i
]
3503 = *recog_data
.operand_loc
[(int) recog_data
.dup_num
[i
]];
3505 /* If any eliminable remain, they aren't eliminable anymore. */
3506 check_eliminable_occurrences (old_body
);
3508 /* Substitute the operands; the new values are in the substed_operand
3510 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3511 *recog_data
.operand_loc
[i
] = substed_operand
[i
];
3512 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3513 *recog_data
.dup_loc
[i
] = substed_operand
[(int) recog_data
.dup_num
[i
]];
3515 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3516 re-recognize the insn. We do this in case we had a simple addition
3517 but now can do this as a load-address. This saves an insn in this
3519 If re-recognition fails, the old insn code number will still be used,
3520 and some register operands may have changed into PLUS expressions.
3521 These will be handled by find_reloads by loading them into a register
3526 /* If we aren't replacing things permanently and we changed something,
3527 make another copy to ensure that all the RTL is new. Otherwise
3528 things can go wrong if find_reload swaps commutative operands
3529 and one is inside RTL that has been copied while the other is not. */
3530 new_body
= old_body
;
3533 new_body
= copy_insn (old_body
);
3534 if (REG_NOTES (insn
))
3535 REG_NOTES (insn
) = copy_insn_1 (REG_NOTES (insn
));
3537 PATTERN (insn
) = new_body
;
3539 /* If we had a move insn but now we don't, rerecognize it. This will
3540 cause spurious re-recognition if the old move had a PARALLEL since
3541 the new one still will, but we can't call single_set without
3542 having put NEW_BODY into the insn and the re-recognition won't
3543 hurt in this rare case. */
3544 /* ??? Why this huge if statement - why don't we just rerecognize the
3548 && ((REG_P (SET_SRC (old_set
))
3549 && (GET_CODE (new_body
) != SET
3550 || !REG_P (SET_SRC (new_body
))))
3551 /* If this was a load from or store to memory, compare
3552 the MEM in recog_data.operand to the one in the insn.
3553 If they are not equal, then rerecognize the insn. */
3555 && ((MEM_P (SET_SRC (old_set
))
3556 && SET_SRC (old_set
) != recog_data
.operand
[1])
3557 || (MEM_P (SET_DEST (old_set
))
3558 && SET_DEST (old_set
) != recog_data
.operand
[0])))
3559 /* If this was an add insn before, rerecognize. */
3560 || GET_CODE (SET_SRC (old_set
)) == PLUS
))
3562 int new_icode
= recog (PATTERN (insn
), insn
, 0);
3564 INSN_CODE (insn
) = new_icode
;
3568 /* Restore the old body. If there were any changes to it, we made a copy
3569 of it while the changes were still in place, so we'll correctly return
3570 a modified insn below. */
3573 /* Restore the old body. */
3574 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3575 /* Restoring a top-level match_parallel would clobber the new_body
3576 we installed in the insn. */
3577 if (recog_data
.operand_loc
[i
] != &PATTERN (insn
))
3578 *recog_data
.operand_loc
[i
] = orig_operand
[i
];
3579 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3580 *recog_data
.dup_loc
[i
] = orig_operand
[(int) recog_data
.dup_num
[i
]];
3583 /* Update all elimination pairs to reflect the status after the current
3584 insn. The changes we make were determined by the earlier call to
3585 elimination_effects.
3587 We also detect cases where register elimination cannot be done,
3588 namely, if a register would be both changed and referenced outside a MEM
3589 in the resulting insn since such an insn is often undefined and, even if
3590 not, we cannot know what meaning will be given to it. Note that it is
3591 valid to have a register used in an address in an insn that changes it
3592 (presumably with a pre- or post-increment or decrement).
3594 If anything changes, return nonzero. */
3596 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3598 if (ep
->previous_offset
!= ep
->offset
&& ep
->ref_outside_mem
)
3599 ep
->can_eliminate
= 0;
3601 ep
->ref_outside_mem
= 0;
3603 if (ep
->previous_offset
!= ep
->offset
)
3608 /* If we changed something, perform elimination in REG_NOTES. This is
3609 needed even when REPLACE is zero because a REG_DEAD note might refer
3610 to a register that we eliminate and could cause a different number
3611 of spill registers to be needed in the final reload pass than in
3613 if (val
&& REG_NOTES (insn
) != 0)
3615 = eliminate_regs_1 (REG_NOTES (insn
), VOIDmode
, REG_NOTES (insn
), true,
3621 /* Like eliminate_regs_in_insn, but only estimate costs for the use of the
3622 register allocator. INSN is the instruction we need to examine, we perform
3623 eliminations in its operands and record cases where eliminating a reg with
3624 an invariant equivalence would add extra cost. */
3627 elimination_costs_in_insn (rtx insn
)
3629 int icode
= recog_memoized (insn
);
3630 rtx old_body
= PATTERN (insn
);
3631 int insn_is_asm
= asm_noperands (old_body
) >= 0;
3632 rtx old_set
= single_set (insn
);
3634 rtx orig_operand
[MAX_RECOG_OPERANDS
];
3635 rtx orig_dup
[MAX_RECOG_OPERANDS
];
3636 struct elim_table
*ep
;
3637 rtx plus_src
, plus_cst_src
;
3640 if (! insn_is_asm
&& icode
< 0)
3642 gcc_assert (GET_CODE (PATTERN (insn
)) == USE
3643 || GET_CODE (PATTERN (insn
)) == CLOBBER
3644 || GET_CODE (PATTERN (insn
)) == ADDR_VEC
3645 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
3646 || GET_CODE (PATTERN (insn
)) == ASM_INPUT
3647 || DEBUG_INSN_P (insn
));
3651 if (old_set
!= 0 && REG_P (SET_DEST (old_set
))
3652 && REGNO (SET_DEST (old_set
)) < FIRST_PSEUDO_REGISTER
)
3654 /* Check for setting an eliminable register. */
3655 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3656 if (ep
->from_rtx
== SET_DEST (old_set
) && ep
->can_eliminate
)
3660 /* We allow one special case which happens to work on all machines we
3661 currently support: a single set with the source or a REG_EQUAL
3662 note being a PLUS of an eliminable register and a constant. */
3663 plus_src
= plus_cst_src
= 0;
3665 if (old_set
&& REG_P (SET_DEST (old_set
)))
3668 if (GET_CODE (SET_SRC (old_set
)) == PLUS
)
3669 plus_src
= SET_SRC (old_set
);
3670 /* First see if the source is of the form (plus (...) CST). */
3672 && CONST_INT_P (XEXP (plus_src
, 1)))
3673 plus_cst_src
= plus_src
;
3674 else if (REG_P (SET_SRC (old_set
))
3677 /* Otherwise, see if we have a REG_EQUAL note of the form
3678 (plus (...) CST). */
3680 for (links
= REG_NOTES (insn
); links
; links
= XEXP (links
, 1))
3682 if ((REG_NOTE_KIND (links
) == REG_EQUAL
3683 || REG_NOTE_KIND (links
) == REG_EQUIV
)
3684 && GET_CODE (XEXP (links
, 0)) == PLUS
3685 && CONST_INT_P (XEXP (XEXP (links
, 0), 1)))
3687 plus_cst_src
= XEXP (links
, 0);
3694 /* Determine the effects of this insn on elimination offsets. */
3695 elimination_effects (old_body
, VOIDmode
);
3697 /* Eliminate all eliminable registers occurring in operands that
3698 can be handled by reload. */
3699 extract_insn (insn
);
3700 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3701 orig_dup
[i
] = *recog_data
.dup_loc
[i
];
3703 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3705 orig_operand
[i
] = recog_data
.operand
[i
];
3707 /* For an asm statement, every operand is eliminable. */
3708 if (insn_is_asm
|| insn_data
[icode
].operand
[i
].eliminable
)
3710 bool is_set_src
, in_plus
;
3712 /* Check for setting a register that we know about. */
3713 if (recog_data
.operand_type
[i
] != OP_IN
3714 && REG_P (orig_operand
[i
]))
3716 /* If we are assigning to a register that can be eliminated, it
3717 must be as part of a PARALLEL, since the code above handles
3718 single SETs. We must indicate that we can no longer
3719 eliminate this reg. */
3720 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
3722 if (ep
->from_rtx
== orig_operand
[i
])
3723 ep
->can_eliminate
= 0;
3726 /* Companion to the above plus substitution, we can allow
3727 invariants as the source of a plain move. */
3729 if (old_set
&& recog_data
.operand_loc
[i
] == &SET_SRC (old_set
))
3731 if (is_set_src
&& !sets_reg_p
)
3732 note_reg_elim_costly (&SET_SRC (old_set
), insn
);
3734 if (plus_src
&& sets_reg_p
3735 && (recog_data
.operand_loc
[i
] == &XEXP (plus_src
, 0)
3736 || recog_data
.operand_loc
[i
] == &XEXP (plus_src
, 1)))
3739 eliminate_regs_1 (recog_data
.operand
[i
], VOIDmode
,
3741 is_set_src
|| in_plus
, true);
3742 /* Terminate the search in check_eliminable_occurrences at
3744 *recog_data
.operand_loc
[i
] = 0;
3748 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3749 *recog_data
.dup_loc
[i
]
3750 = *recog_data
.operand_loc
[(int) recog_data
.dup_num
[i
]];
3752 /* If any eliminable remain, they aren't eliminable anymore. */
3753 check_eliminable_occurrences (old_body
);
3755 /* Restore the old body. */
3756 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3757 *recog_data
.operand_loc
[i
] = orig_operand
[i
];
3758 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3759 *recog_data
.dup_loc
[i
] = orig_dup
[i
];
3761 /* Update all elimination pairs to reflect the status after the current
3762 insn. The changes we make were determined by the earlier call to
3763 elimination_effects. */
3765 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3767 if (ep
->previous_offset
!= ep
->offset
&& ep
->ref_outside_mem
)
3768 ep
->can_eliminate
= 0;
3770 ep
->ref_outside_mem
= 0;
3776 /* Loop through all elimination pairs.
3777 Recalculate the number not at initial offset.
3779 Compute the maximum offset (minimum offset if the stack does not
3780 grow downward) for each elimination pair. */
3783 update_eliminable_offsets (void)
3785 struct elim_table
*ep
;
3787 num_not_at_initial_offset
= 0;
3788 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3790 ep
->previous_offset
= ep
->offset
;
3791 if (ep
->can_eliminate
&& ep
->offset
!= ep
->initial_offset
)
3792 num_not_at_initial_offset
++;
3796 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3797 replacement we currently believe is valid, mark it as not eliminable if X
3798 modifies DEST in any way other than by adding a constant integer to it.
3800 If DEST is the frame pointer, we do nothing because we assume that
3801 all assignments to the hard frame pointer are nonlocal gotos and are being
3802 done at a time when they are valid and do not disturb anything else.
3803 Some machines want to eliminate a fake argument pointer with either the
3804 frame or stack pointer. Assignments to the hard frame pointer must not
3805 prevent this elimination.
3807 Called via note_stores from reload before starting its passes to scan
3808 the insns of the function. */
3811 mark_not_eliminable (rtx dest
, const_rtx x
, void *data ATTRIBUTE_UNUSED
)
3815 /* A SUBREG of a hard register here is just changing its mode. We should
3816 not see a SUBREG of an eliminable hard register, but check just in
3818 if (GET_CODE (dest
) == SUBREG
)
3819 dest
= SUBREG_REG (dest
);
3821 if (dest
== hard_frame_pointer_rtx
)
3824 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
3825 if (reg_eliminate
[i
].can_eliminate
&& dest
== reg_eliminate
[i
].to_rtx
3826 && (GET_CODE (x
) != SET
3827 || GET_CODE (SET_SRC (x
)) != PLUS
3828 || XEXP (SET_SRC (x
), 0) != dest
3829 || !CONST_INT_P (XEXP (SET_SRC (x
), 1))))
3831 reg_eliminate
[i
].can_eliminate_previous
3832 = reg_eliminate
[i
].can_eliminate
= 0;
3837 /* Verify that the initial elimination offsets did not change since the
3838 last call to set_initial_elim_offsets. This is used to catch cases
3839 where something illegal happened during reload_as_needed that could
3840 cause incorrect code to be generated if we did not check for it. */
3843 verify_initial_elim_offsets (void)
3847 if (!num_eliminable
)
3850 #ifdef ELIMINABLE_REGS
3852 struct elim_table
*ep
;
3854 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3856 INITIAL_ELIMINATION_OFFSET (ep
->from
, ep
->to
, t
);
3857 if (t
!= ep
->initial_offset
)
3862 INITIAL_FRAME_POINTER_OFFSET (t
);
3863 if (t
!= reg_eliminate
[0].initial_offset
)
3870 /* Reset all offsets on eliminable registers to their initial values. */
3873 set_initial_elim_offsets (void)
3875 struct elim_table
*ep
= reg_eliminate
;
3877 #ifdef ELIMINABLE_REGS
3878 for (; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3880 INITIAL_ELIMINATION_OFFSET (ep
->from
, ep
->to
, ep
->initial_offset
);
3881 ep
->previous_offset
= ep
->offset
= ep
->initial_offset
;
3884 INITIAL_FRAME_POINTER_OFFSET (ep
->initial_offset
);
3885 ep
->previous_offset
= ep
->offset
= ep
->initial_offset
;
3888 num_not_at_initial_offset
= 0;
3891 /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3894 set_initial_eh_label_offset (rtx label
)
3896 set_label_offsets (label
, NULL_RTX
, 1);
3899 /* Initialize the known label offsets.
3900 Set a known offset for each forced label to be at the initial offset
3901 of each elimination. We do this because we assume that all
3902 computed jumps occur from a location where each elimination is
3903 at its initial offset.
3904 For all other labels, show that we don't know the offsets. */
3907 set_initial_label_offsets (void)
3910 memset (offsets_known_at
, 0, num_labels
);
3912 for (x
= forced_labels
; x
; x
= XEXP (x
, 1))
3914 set_label_offsets (XEXP (x
, 0), NULL_RTX
, 1);
3916 for_each_eh_label (set_initial_eh_label_offset
);
3919 /* Set all elimination offsets to the known values for the code label given
3923 set_offsets_for_label (rtx insn
)
3926 int label_nr
= CODE_LABEL_NUMBER (insn
);
3927 struct elim_table
*ep
;
3929 num_not_at_initial_offset
= 0;
3930 for (i
= 0, ep
= reg_eliminate
; i
< NUM_ELIMINABLE_REGS
; ep
++, i
++)
3932 ep
->offset
= ep
->previous_offset
3933 = offsets_at
[label_nr
- first_label_num
][i
];
3934 if (ep
->can_eliminate
&& ep
->offset
!= ep
->initial_offset
)
3935 num_not_at_initial_offset
++;
3939 /* See if anything that happened changes which eliminations are valid.
3940 For example, on the SPARC, whether or not the frame pointer can
3941 be eliminated can depend on what registers have been used. We need
3942 not check some conditions again (such as flag_omit_frame_pointer)
3943 since they can't have changed. */
3946 update_eliminables (HARD_REG_SET
*pset
)
3948 int previous_frame_pointer_needed
= frame_pointer_needed
;
3949 struct elim_table
*ep
;
3951 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3952 if ((ep
->from
== HARD_FRAME_POINTER_REGNUM
3953 && targetm
.frame_pointer_required ())
3954 #ifdef ELIMINABLE_REGS
3955 || ! targetm
.can_eliminate (ep
->from
, ep
->to
)
3958 ep
->can_eliminate
= 0;
3960 /* Look for the case where we have discovered that we can't replace
3961 register A with register B and that means that we will now be
3962 trying to replace register A with register C. This means we can
3963 no longer replace register C with register B and we need to disable
3964 such an elimination, if it exists. This occurs often with A == ap,
3965 B == sp, and C == fp. */
3967 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3969 struct elim_table
*op
;
3972 if (! ep
->can_eliminate
&& ep
->can_eliminate_previous
)
3974 /* Find the current elimination for ep->from, if there is a
3976 for (op
= reg_eliminate
;
3977 op
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; op
++)
3978 if (op
->from
== ep
->from
&& op
->can_eliminate
)
3984 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3986 for (op
= reg_eliminate
;
3987 op
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; op
++)
3988 if (op
->from
== new_to
&& op
->to
== ep
->to
)
3989 op
->can_eliminate
= 0;
3993 /* See if any registers that we thought we could eliminate the previous
3994 time are no longer eliminable. If so, something has changed and we
3995 must spill the register. Also, recompute the number of eliminable
3996 registers and see if the frame pointer is needed; it is if there is
3997 no elimination of the frame pointer that we can perform. */
3999 frame_pointer_needed
= 1;
4000 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
4002 if (ep
->can_eliminate
4003 && ep
->from
== FRAME_POINTER_REGNUM
4004 && ep
->to
!= HARD_FRAME_POINTER_REGNUM
4005 && (! SUPPORTS_STACK_ALIGNMENT
4006 || ! crtl
->stack_realign_needed
))
4007 frame_pointer_needed
= 0;
4009 if (! ep
->can_eliminate
&& ep
->can_eliminate_previous
)
4011 ep
->can_eliminate_previous
= 0;
4012 SET_HARD_REG_BIT (*pset
, ep
->from
);
4017 /* If we didn't need a frame pointer last time, but we do now, spill
4018 the hard frame pointer. */
4019 if (frame_pointer_needed
&& ! previous_frame_pointer_needed
)
4020 SET_HARD_REG_BIT (*pset
, HARD_FRAME_POINTER_REGNUM
);
4023 /* Return true if X is used as the target register of an elimination. */
4026 elimination_target_reg_p (rtx x
)
4028 struct elim_table
*ep
;
4030 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
4031 if (ep
->to_rtx
== x
&& ep
->can_eliminate
)
4037 /* Initialize the table of registers to eliminate.
4038 Pre-condition: global flag frame_pointer_needed has been set before
4039 calling this function. */
4042 init_elim_table (void)
4044 struct elim_table
*ep
;
4045 #ifdef ELIMINABLE_REGS
4046 const struct elim_table_1
*ep1
;
4050 reg_eliminate
= XCNEWVEC (struct elim_table
, NUM_ELIMINABLE_REGS
);
4054 #ifdef ELIMINABLE_REGS
4055 for (ep
= reg_eliminate
, ep1
= reg_eliminate_1
;
4056 ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++, ep1
++)
4058 ep
->from
= ep1
->from
;
4060 ep
->can_eliminate
= ep
->can_eliminate_previous
4061 = (targetm
.can_eliminate (ep
->from
, ep
->to
)
4062 && ! (ep
->to
== STACK_POINTER_REGNUM
4063 && frame_pointer_needed
4064 && (! SUPPORTS_STACK_ALIGNMENT
4065 || ! stack_realign_fp
)));
4068 reg_eliminate
[0].from
= reg_eliminate_1
[0].from
;
4069 reg_eliminate
[0].to
= reg_eliminate_1
[0].to
;
4070 reg_eliminate
[0].can_eliminate
= reg_eliminate
[0].can_eliminate_previous
4071 = ! frame_pointer_needed
;
4074 /* Count the number of eliminable registers and build the FROM and TO
4075 REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
4076 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
4077 We depend on this. */
4078 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
4080 num_eliminable
+= ep
->can_eliminate
;
4081 ep
->from_rtx
= gen_rtx_REG (Pmode
, ep
->from
);
4082 ep
->to_rtx
= gen_rtx_REG (Pmode
, ep
->to
);
4086 /* Find all the pseudo registers that didn't get hard regs
4087 but do have known equivalent constants or memory slots.
4088 These include parameters (known equivalent to parameter slots)
4089 and cse'd or loop-moved constant memory addresses.
4091 Record constant equivalents in reg_equiv_constant
4092 so they will be substituted by find_reloads.
4093 Record memory equivalents in reg_mem_equiv so they can
4094 be substituted eventually by altering the REG-rtx's. */
4097 init_eliminable_invariants (rtx first
, bool do_subregs
)
4102 reg_equiv_constant
= XCNEWVEC (rtx
, max_regno
);
4103 reg_equiv_invariant
= XCNEWVEC (rtx
, max_regno
);
4104 reg_equiv_mem
= XCNEWVEC (rtx
, max_regno
);
4105 reg_equiv_alt_mem_list
= XCNEWVEC (rtx
, max_regno
);
4106 reg_equiv_address
= XCNEWVEC (rtx
, max_regno
);
4108 reg_max_ref_width
= XCNEWVEC (unsigned int, max_regno
);
4110 reg_max_ref_width
= NULL
;
4112 num_eliminable_invariants
= 0;
4114 first_label_num
= get_first_label_num ();
4115 num_labels
= max_label_num () - first_label_num
;
4117 /* Allocate the tables used to store offset information at labels. */
4118 offsets_known_at
= XNEWVEC (char, num_labels
);
4119 offsets_at
= (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS
]) xmalloc (num_labels
* NUM_ELIMINABLE_REGS
* sizeof (HOST_WIDE_INT
));
4121 /* Look for REG_EQUIV notes; record what each pseudo is equivalent
4122 to. If DO_SUBREGS is true, also find all paradoxical subregs and
4123 find largest such for each pseudo. FIRST is the head of the insn
4126 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
4128 rtx set
= single_set (insn
);
4130 /* We may introduce USEs that we want to remove at the end, so
4131 we'll mark them with QImode. Make sure there are no
4132 previously-marked insns left by say regmove. */
4133 if (INSN_P (insn
) && GET_CODE (PATTERN (insn
)) == USE
4134 && GET_MODE (insn
) != VOIDmode
)
4135 PUT_MODE (insn
, VOIDmode
);
4137 if (do_subregs
&& NONDEBUG_INSN_P (insn
))
4138 scan_paradoxical_subregs (PATTERN (insn
));
4140 if (set
!= 0 && REG_P (SET_DEST (set
)))
4142 rtx note
= find_reg_note (insn
, REG_EQUIV
, NULL_RTX
);
4148 i
= REGNO (SET_DEST (set
));
4151 if (i
<= LAST_VIRTUAL_REGISTER
)
4154 /* If flag_pic and we have constant, verify it's legitimate. */
4156 || !flag_pic
|| LEGITIMATE_PIC_OPERAND_P (x
))
4158 /* It can happen that a REG_EQUIV note contains a MEM
4159 that is not a legitimate memory operand. As later
4160 stages of reload assume that all addresses found
4161 in the reg_equiv_* arrays were originally legitimate,
4162 we ignore such REG_EQUIV notes. */
4163 if (memory_operand (x
, VOIDmode
))
4165 /* Always unshare the equivalence, so we can
4166 substitute into this insn without touching the
4168 reg_equiv_memory_loc
[i
] = copy_rtx (x
);
4170 else if (function_invariant_p (x
))
4172 if (GET_CODE (x
) == PLUS
)
4174 /* This is PLUS of frame pointer and a constant,
4175 and might be shared. Unshare it. */
4176 reg_equiv_invariant
[i
] = copy_rtx (x
);
4177 num_eliminable_invariants
++;
4179 else if (x
== frame_pointer_rtx
|| x
== arg_pointer_rtx
)
4181 reg_equiv_invariant
[i
] = x
;
4182 num_eliminable_invariants
++;
4184 else if (LEGITIMATE_CONSTANT_P (x
))
4185 reg_equiv_constant
[i
] = x
;
4188 reg_equiv_memory_loc
[i
]
4189 = force_const_mem (GET_MODE (SET_DEST (set
)), x
);
4190 if (! reg_equiv_memory_loc
[i
])
4191 reg_equiv_init
[i
] = NULL_RTX
;
4196 reg_equiv_init
[i
] = NULL_RTX
;
4201 reg_equiv_init
[i
] = NULL_RTX
;
4206 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
4207 if (reg_equiv_init
[i
])
4209 fprintf (dump_file
, "init_insns for %u: ", i
);
4210 print_inline_rtx (dump_file
, reg_equiv_init
[i
], 20);
4211 fprintf (dump_file
, "\n");
4215 /* Indicate that we no longer have known memory locations or constants.
4216 Free all data involved in tracking these. */
4219 free_reg_equiv (void)
4223 if (reg_equiv_constant
)
4224 free (reg_equiv_constant
);
4225 if (reg_equiv_invariant
)
4226 free (reg_equiv_invariant
);
4227 reg_equiv_constant
= 0;
4228 reg_equiv_invariant
= 0;
4229 VEC_free (rtx
, gc
, reg_equiv_memory_loc_vec
);
4230 reg_equiv_memory_loc
= 0;
4232 if (offsets_known_at
)
4233 free (offsets_known_at
);
4237 offsets_known_at
= 0;
4239 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4240 if (reg_equiv_alt_mem_list
[i
])
4241 free_EXPR_LIST_list (®_equiv_alt_mem_list
[i
]);
4242 free (reg_equiv_alt_mem_list
);
4244 free (reg_equiv_mem
);
4245 free (reg_equiv_address
);
4248 /* Kick all pseudos out of hard register REGNO.
4250 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
4251 because we found we can't eliminate some register. In the case, no pseudos
4252 are allowed to be in the register, even if they are only in a block that
4253 doesn't require spill registers, unlike the case when we are spilling this
4254 hard reg to produce another spill register.
4256 Return nonzero if any pseudos needed to be kicked out. */
4259 spill_hard_reg (unsigned int regno
, int cant_eliminate
)
4265 SET_HARD_REG_BIT (bad_spill_regs_global
, regno
);
4266 df_set_regs_ever_live (regno
, true);
4269 /* Spill every pseudo reg that was allocated to this reg
4270 or to something that overlaps this reg. */
4272 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
4273 if (reg_renumber
[i
] >= 0
4274 && (unsigned int) reg_renumber
[i
] <= regno
4275 && end_hard_regno (PSEUDO_REGNO_MODE (i
), reg_renumber
[i
]) > regno
)
4276 SET_REGNO_REG_SET (&spilled_pseudos
, i
);
4279 /* After find_reload_regs has been run for all insn that need reloads,
4280 and/or spill_hard_regs was called, this function is used to actually
4281 spill pseudo registers and try to reallocate them. It also sets up the
4282 spill_regs array for use by choose_reload_regs. */
4285 finish_spills (int global
)
4287 struct insn_chain
*chain
;
4288 int something_changed
= 0;
4290 reg_set_iterator rsi
;
4292 /* Build the spill_regs array for the function. */
4293 /* If there are some registers still to eliminate and one of the spill regs
4294 wasn't ever used before, additional stack space may have to be
4295 allocated to store this register. Thus, we may have changed the offset
4296 between the stack and frame pointers, so mark that something has changed.
4298 One might think that we need only set VAL to 1 if this is a call-used
4299 register. However, the set of registers that must be saved by the
4300 prologue is not identical to the call-used set. For example, the
4301 register used by the call insn for the return PC is a call-used register,
4302 but must be saved by the prologue. */
4305 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4306 if (TEST_HARD_REG_BIT (used_spill_regs
, i
))
4308 spill_reg_order
[i
] = n_spills
;
4309 spill_regs
[n_spills
++] = i
;
4310 if (num_eliminable
&& ! df_regs_ever_live_p (i
))
4311 something_changed
= 1;
4312 df_set_regs_ever_live (i
, true);
4315 spill_reg_order
[i
] = -1;
4317 EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
4318 if (! ira_conflicts_p
|| reg_renumber
[i
] >= 0)
4320 /* Record the current hard register the pseudo is allocated to
4321 in pseudo_previous_regs so we avoid reallocating it to the
4322 same hard reg in a later pass. */
4323 gcc_assert (reg_renumber
[i
] >= 0);
4325 SET_HARD_REG_BIT (pseudo_previous_regs
[i
], reg_renumber
[i
]);
4326 /* Mark it as no longer having a hard register home. */
4327 reg_renumber
[i
] = -1;
4328 if (ira_conflicts_p
)
4329 /* Inform IRA about the change. */
4330 ira_mark_allocation_change (i
);
4331 /* We will need to scan everything again. */
4332 something_changed
= 1;
4335 /* Retry global register allocation if possible. */
4336 if (global
&& ira_conflicts_p
)
4340 memset (pseudo_forbidden_regs
, 0, max_regno
* sizeof (HARD_REG_SET
));
4341 /* For every insn that needs reloads, set the registers used as spill
4342 regs in pseudo_forbidden_regs for every pseudo live across the
4344 for (chain
= insns_need_reload
; chain
; chain
= chain
->next_need_reload
)
4346 EXECUTE_IF_SET_IN_REG_SET
4347 (&chain
->live_throughout
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
4349 IOR_HARD_REG_SET (pseudo_forbidden_regs
[i
],
4350 chain
->used_spill_regs
);
4352 EXECUTE_IF_SET_IN_REG_SET
4353 (&chain
->dead_or_set
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
4355 IOR_HARD_REG_SET (pseudo_forbidden_regs
[i
],
4356 chain
->used_spill_regs
);
4360 /* Retry allocating the pseudos spilled in IRA and the
4361 reload. For each reg, merge the various reg sets that
4362 indicate which hard regs can't be used, and call
4363 ira_reassign_pseudos. */
4364 for (n
= 0, i
= FIRST_PSEUDO_REGISTER
; i
< (unsigned) max_regno
; i
++)
4365 if (reg_old_renumber
[i
] != reg_renumber
[i
])
4367 if (reg_renumber
[i
] < 0)
4368 temp_pseudo_reg_arr
[n
++] = i
;
4370 CLEAR_REGNO_REG_SET (&spilled_pseudos
, i
);
4372 if (ira_reassign_pseudos (temp_pseudo_reg_arr
, n
,
4373 bad_spill_regs_global
,
4374 pseudo_forbidden_regs
, pseudo_previous_regs
,
4376 something_changed
= 1;
4378 /* Fix up the register information in the insn chain.
4379 This involves deleting those of the spilled pseudos which did not get
4380 a new hard register home from the live_{before,after} sets. */
4381 for (chain
= reload_insn_chain
; chain
; chain
= chain
->next
)
4383 HARD_REG_SET used_by_pseudos
;
4384 HARD_REG_SET used_by_pseudos2
;
4386 if (! ira_conflicts_p
)
4388 /* Don't do it for IRA because IRA and the reload still can
4389 assign hard registers to the spilled pseudos on next
4390 reload iterations. */
4391 AND_COMPL_REG_SET (&chain
->live_throughout
, &spilled_pseudos
);
4392 AND_COMPL_REG_SET (&chain
->dead_or_set
, &spilled_pseudos
);
4394 /* Mark any unallocated hard regs as available for spills. That
4395 makes inheritance work somewhat better. */
4396 if (chain
->need_reload
)
4398 REG_SET_TO_HARD_REG_SET (used_by_pseudos
, &chain
->live_throughout
);
4399 REG_SET_TO_HARD_REG_SET (used_by_pseudos2
, &chain
->dead_or_set
);
4400 IOR_HARD_REG_SET (used_by_pseudos
, used_by_pseudos2
);
4402 compute_use_by_pseudos (&used_by_pseudos
, &chain
->live_throughout
);
4403 compute_use_by_pseudos (&used_by_pseudos
, &chain
->dead_or_set
);
4404 /* Value of chain->used_spill_regs from previous iteration
4405 may be not included in the value calculated here because
4406 of possible removing caller-saves insns (see function
4407 delete_caller_save_insns. */
4408 COMPL_HARD_REG_SET (chain
->used_spill_regs
, used_by_pseudos
);
4409 AND_HARD_REG_SET (chain
->used_spill_regs
, used_spill_regs
);
4413 CLEAR_REG_SET (&changed_allocation_pseudos
);
4414 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
4415 for (i
= FIRST_PSEUDO_REGISTER
; i
< (unsigned)max_regno
; i
++)
4417 int regno
= reg_renumber
[i
];
4418 if (reg_old_renumber
[i
] == regno
)
4421 SET_REGNO_REG_SET (&changed_allocation_pseudos
, i
);
4423 alter_reg (i
, reg_old_renumber
[i
], false);
4424 reg_old_renumber
[i
] = regno
;
4428 fprintf (dump_file
, " Register %d now on stack.\n\n", i
);
4430 fprintf (dump_file
, " Register %d now in %d.\n\n",
4431 i
, reg_renumber
[i
]);
4435 return something_changed
;
4438 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
4441 scan_paradoxical_subregs (rtx x
)
4445 enum rtx_code code
= GET_CODE (x
);
4456 case CONST_VECTOR
: /* shouldn't happen, but just in case. */
4464 if (REG_P (SUBREG_REG (x
))
4465 && (GET_MODE_SIZE (GET_MODE (x
))
4466 > reg_max_ref_width
[REGNO (SUBREG_REG (x
))]))
4468 reg_max_ref_width
[REGNO (SUBREG_REG (x
))]
4469 = GET_MODE_SIZE (GET_MODE (x
));
4470 mark_home_live_1 (REGNO (SUBREG_REG (x
)), GET_MODE (x
));
4478 fmt
= GET_RTX_FORMAT (code
);
4479 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4482 scan_paradoxical_subregs (XEXP (x
, i
));
4483 else if (fmt
[i
] == 'E')
4486 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
4487 scan_paradoxical_subregs (XVECEXP (x
, i
, j
));
4492 /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
4493 examine all of the reload insns between PREV and NEXT exclusive, and
4494 annotate all that may trap. */
4497 fixup_eh_region_note (rtx insn
, rtx prev
, rtx next
)
4499 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
4502 if (!insn_could_throw_p (insn
))
4503 remove_note (insn
, note
);
4504 copy_reg_eh_region_note_forward (note
, NEXT_INSN (prev
), next
);
4507 /* Reload pseudo-registers into hard regs around each insn as needed.
4508 Additional register load insns are output before the insn that needs it
4509 and perhaps store insns after insns that modify the reloaded pseudo reg.
4511 reg_last_reload_reg and reg_reloaded_contents keep track of
4512 which registers are already available in reload registers.
4513 We update these for the reloads that we perform,
4514 as the insns are scanned. */
4517 reload_as_needed (int live_known
)
4519 struct insn_chain
*chain
;
4520 #if defined (AUTO_INC_DEC)
4525 memset (spill_reg_rtx
, 0, sizeof spill_reg_rtx
);
4526 memset (spill_reg_store
, 0, sizeof spill_reg_store
);
4527 reg_last_reload_reg
= XCNEWVEC (rtx
, max_regno
);
4528 INIT_REG_SET (®_has_output_reload
);
4529 CLEAR_HARD_REG_SET (reg_reloaded_valid
);
4530 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered
);
4532 set_initial_elim_offsets ();
4534 for (chain
= reload_insn_chain
; chain
; chain
= chain
->next
)
4537 rtx insn
= chain
->insn
;
4538 rtx old_next
= NEXT_INSN (insn
);
4540 rtx old_prev
= PREV_INSN (insn
);
4543 /* If we pass a label, copy the offsets from the label information
4544 into the current offsets of each elimination. */
4546 set_offsets_for_label (insn
);
4548 else if (INSN_P (insn
))
4550 regset_head regs_to_forget
;
4551 INIT_REG_SET (®s_to_forget
);
4552 note_stores (PATTERN (insn
), forget_old_reloads_1
, ®s_to_forget
);
4554 /* If this is a USE and CLOBBER of a MEM, ensure that any
4555 references to eliminable registers have been removed. */
4557 if ((GET_CODE (PATTERN (insn
)) == USE
4558 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
4559 && MEM_P (XEXP (PATTERN (insn
), 0)))
4560 XEXP (XEXP (PATTERN (insn
), 0), 0)
4561 = eliminate_regs (XEXP (XEXP (PATTERN (insn
), 0), 0),
4562 GET_MODE (XEXP (PATTERN (insn
), 0)),
4565 /* If we need to do register elimination processing, do so.
4566 This might delete the insn, in which case we are done. */
4567 if ((num_eliminable
|| num_eliminable_invariants
) && chain
->need_elim
)
4569 eliminate_regs_in_insn (insn
, 1);
4572 update_eliminable_offsets ();
4573 CLEAR_REG_SET (®s_to_forget
);
4578 /* If need_elim is nonzero but need_reload is zero, one might think
4579 that we could simply set n_reloads to 0. However, find_reloads
4580 could have done some manipulation of the insn (such as swapping
4581 commutative operands), and these manipulations are lost during
4582 the first pass for every insn that needs register elimination.
4583 So the actions of find_reloads must be redone here. */
4585 if (! chain
->need_elim
&& ! chain
->need_reload
4586 && ! chain
->need_operand_change
)
4588 /* First find the pseudo regs that must be reloaded for this insn.
4589 This info is returned in the tables reload_... (see reload.h).
4590 Also modify the body of INSN by substituting RELOAD
4591 rtx's for those pseudo regs. */
4594 CLEAR_REG_SET (®_has_output_reload
);
4595 CLEAR_HARD_REG_SET (reg_is_output_reload
);
4597 find_reloads (insn
, 1, spill_indirect_levels
, live_known
,
4603 rtx next
= NEXT_INSN (insn
);
4606 prev
= PREV_INSN (insn
);
4608 /* Now compute which reload regs to reload them into. Perhaps
4609 reusing reload regs from previous insns, or else output
4610 load insns to reload them. Maybe output store insns too.
4611 Record the choices of reload reg in reload_reg_rtx. */
4612 choose_reload_regs (chain
);
4614 /* Merge any reloads that we didn't combine for fear of
4615 increasing the number of spill registers needed but now
4616 discover can be safely merged. */
4617 if (targetm
.small_register_classes_for_mode_p (VOIDmode
))
4618 merge_assigned_reloads (insn
);
4620 /* Generate the insns to reload operands into or out of
4621 their reload regs. */
4622 emit_reload_insns (chain
);
4624 /* Substitute the chosen reload regs from reload_reg_rtx
4625 into the insn's body (or perhaps into the bodies of other
4626 load and store insn that we just made for reloading
4627 and that we moved the structure into). */
4628 subst_reloads (insn
);
4630 /* Adjust the exception region notes for loads and stores. */
4631 if (cfun
->can_throw_non_call_exceptions
&& !CALL_P (insn
))
4632 fixup_eh_region_note (insn
, prev
, next
);
4634 /* If this was an ASM, make sure that all the reload insns
4635 we have generated are valid. If not, give an error
4637 if (asm_noperands (PATTERN (insn
)) >= 0)
4638 for (p
= NEXT_INSN (prev
); p
!= next
; p
= NEXT_INSN (p
))
4639 if (p
!= insn
&& INSN_P (p
)
4640 && GET_CODE (PATTERN (p
)) != USE
4641 && (recog_memoized (p
) < 0
4642 || (extract_insn (p
), ! constrain_operands (1))))
4644 error_for_asm (insn
,
4645 "%<asm%> operand requires "
4646 "impossible reload");
4651 if (num_eliminable
&& chain
->need_elim
)
4652 update_eliminable_offsets ();
4654 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4655 is no longer validly lying around to save a future reload.
4656 Note that this does not detect pseudos that were reloaded
4657 for this insn in order to be stored in
4658 (obeying register constraints). That is correct; such reload
4659 registers ARE still valid. */
4660 forget_marked_reloads (®s_to_forget
);
4661 CLEAR_REG_SET (®s_to_forget
);
4663 /* There may have been CLOBBER insns placed after INSN. So scan
4664 between INSN and NEXT and use them to forget old reloads. */
4665 for (x
= NEXT_INSN (insn
); x
!= old_next
; x
= NEXT_INSN (x
))
4666 if (NONJUMP_INSN_P (x
) && GET_CODE (PATTERN (x
)) == CLOBBER
)
4667 note_stores (PATTERN (x
), forget_old_reloads_1
, NULL
);
4670 /* Likewise for regs altered by auto-increment in this insn.
4671 REG_INC notes have been changed by reloading:
4672 find_reloads_address_1 records substitutions for them,
4673 which have been performed by subst_reloads above. */
4674 for (i
= n_reloads
- 1; i
>= 0; i
--)
4676 rtx in_reg
= rld
[i
].in_reg
;
4679 enum rtx_code code
= GET_CODE (in_reg
);
4680 /* PRE_INC / PRE_DEC will have the reload register ending up
4681 with the same value as the stack slot, but that doesn't
4682 hold true for POST_INC / POST_DEC. Either we have to
4683 convert the memory access to a true POST_INC / POST_DEC,
4684 or we can't use the reload register for inheritance. */
4685 if ((code
== POST_INC
|| code
== POST_DEC
)
4686 && TEST_HARD_REG_BIT (reg_reloaded_valid
,
4687 REGNO (rld
[i
].reg_rtx
))
4688 /* Make sure it is the inc/dec pseudo, and not
4689 some other (e.g. output operand) pseudo. */
4690 && ((unsigned) reg_reloaded_contents
[REGNO (rld
[i
].reg_rtx
)]
4691 == REGNO (XEXP (in_reg
, 0))))
4694 rtx reload_reg
= rld
[i
].reg_rtx
;
4695 enum machine_mode mode
= GET_MODE (reload_reg
);
4699 for (p
= PREV_INSN (old_next
); p
!= prev
; p
= PREV_INSN (p
))
4701 /* We really want to ignore REG_INC notes here, so
4702 use PATTERN (p) as argument to reg_set_p . */
4703 if (reg_set_p (reload_reg
, PATTERN (p
)))
4705 n
= count_occurrences (PATTERN (p
), reload_reg
, 0);
4711 = gen_rtx_fmt_e (code
, mode
, reload_reg
);
4713 validate_replace_rtx_group (reload_reg
,
4715 n
= verify_changes (0);
4717 /* We must also verify that the constraints
4718 are met after the replacement. Make sure
4719 extract_insn is only called for an insn
4720 where the replacements were found to be
4725 n
= constrain_operands (1);
4728 /* If the constraints were not met, then
4729 undo the replacement, else confirm it. */
4733 confirm_change_group ();
4739 add_reg_note (p
, REG_INC
, reload_reg
);
4740 /* Mark this as having an output reload so that the
4741 REG_INC processing code below won't invalidate
4742 the reload for inheritance. */
4743 SET_HARD_REG_BIT (reg_is_output_reload
,
4744 REGNO (reload_reg
));
4745 SET_REGNO_REG_SET (®_has_output_reload
,
4746 REGNO (XEXP (in_reg
, 0)));
4749 forget_old_reloads_1 (XEXP (in_reg
, 0), NULL_RTX
,
4752 else if ((code
== PRE_INC
|| code
== PRE_DEC
)
4753 && TEST_HARD_REG_BIT (reg_reloaded_valid
,
4754 REGNO (rld
[i
].reg_rtx
))
4755 /* Make sure it is the inc/dec pseudo, and not
4756 some other (e.g. output operand) pseudo. */
4757 && ((unsigned) reg_reloaded_contents
[REGNO (rld
[i
].reg_rtx
)]
4758 == REGNO (XEXP (in_reg
, 0))))
4760 SET_HARD_REG_BIT (reg_is_output_reload
,
4761 REGNO (rld
[i
].reg_rtx
));
4762 SET_REGNO_REG_SET (®_has_output_reload
,
4763 REGNO (XEXP (in_reg
, 0)));
4765 else if (code
== PRE_INC
|| code
== PRE_DEC
4766 || code
== POST_INC
|| code
== POST_DEC
)
4768 int in_regno
= REGNO (XEXP (in_reg
, 0));
4770 if (reg_last_reload_reg
[in_regno
] != NULL_RTX
)
4773 bool forget_p
= true;
4775 in_hard_regno
= REGNO (reg_last_reload_reg
[in_regno
]);
4776 if (TEST_HARD_REG_BIT (reg_reloaded_valid
,
4779 for (x
= old_prev
? NEXT_INSN (old_prev
) : insn
;
4782 if (x
== reg_reloaded_insn
[in_hard_regno
])
4788 /* If for some reasons, we didn't set up
4789 reg_last_reload_reg in this insn,
4790 invalidate inheritance from previous
4791 insns for the incremented/decremented
4792 register. Such registers will be not in
4793 reg_has_output_reload. Invalidate it
4794 also if the corresponding element in
4795 reg_reloaded_insn is also
4798 forget_old_reloads_1 (XEXP (in_reg
, 0),
4804 /* If a pseudo that got a hard register is auto-incremented,
4805 we must purge records of copying it into pseudos without
4807 for (x
= REG_NOTES (insn
); x
; x
= XEXP (x
, 1))
4808 if (REG_NOTE_KIND (x
) == REG_INC
)
4810 /* See if this pseudo reg was reloaded in this insn.
4811 If so, its last-reload info is still valid
4812 because it is based on this insn's reload. */
4813 for (i
= 0; i
< n_reloads
; i
++)
4814 if (rld
[i
].out
== XEXP (x
, 0))
4818 forget_old_reloads_1 (XEXP (x
, 0), NULL_RTX
, NULL
);
4822 /* A reload reg's contents are unknown after a label. */
4824 CLEAR_HARD_REG_SET (reg_reloaded_valid
);
4826 /* Don't assume a reload reg is still good after a call insn
4827 if it is a call-used reg, or if it contains a value that will
4828 be partially clobbered by the call. */
4829 else if (CALL_P (insn
))
4831 AND_COMPL_HARD_REG_SET (reg_reloaded_valid
, call_used_reg_set
);
4832 AND_COMPL_HARD_REG_SET (reg_reloaded_valid
, reg_reloaded_call_part_clobbered
);
4837 free (reg_last_reload_reg
);
4838 CLEAR_REG_SET (®_has_output_reload
);
4841 /* Discard all record of any value reloaded from X,
4842 or reloaded in X from someplace else;
4843 unless X is an output reload reg of the current insn.
4845 X may be a hard reg (the reload reg)
4846 or it may be a pseudo reg that was reloaded from.
4848 When DATA is non-NULL just mark the registers in regset
4849 to be forgotten later. */
4852 forget_old_reloads_1 (rtx x
, const_rtx ignored ATTRIBUTE_UNUSED
,
4857 regset regs
= (regset
) data
;
4859 /* note_stores does give us subregs of hard regs,
4860 subreg_regno_offset requires a hard reg. */
4861 while (GET_CODE (x
) == SUBREG
)
4863 /* We ignore the subreg offset when calculating the regno,
4864 because we are using the entire underlying hard register
4874 if (regno
>= FIRST_PSEUDO_REGISTER
)
4880 nr
= hard_regno_nregs
[regno
][GET_MODE (x
)];
4881 /* Storing into a spilled-reg invalidates its contents.
4882 This can happen if a block-local pseudo is allocated to that reg
4883 and it wasn't spilled because this block's total need is 0.
4884 Then some insn might have an optional reload and use this reg. */
4886 for (i
= 0; i
< nr
; i
++)
4887 /* But don't do this if the reg actually serves as an output
4888 reload reg in the current instruction. */
4890 || ! TEST_HARD_REG_BIT (reg_is_output_reload
, regno
+ i
))
4892 CLEAR_HARD_REG_BIT (reg_reloaded_valid
, regno
+ i
);
4893 spill_reg_store
[regno
+ i
] = 0;
4899 SET_REGNO_REG_SET (regs
, regno
+ nr
);
4902 /* Since value of X has changed,
4903 forget any value previously copied from it. */
4906 /* But don't forget a copy if this is the output reload
4907 that establishes the copy's validity. */
4909 || !REGNO_REG_SET_P (®_has_output_reload
, regno
+ nr
))
4910 reg_last_reload_reg
[regno
+ nr
] = 0;
4914 /* Forget the reloads marked in regset by previous function. */
4916 forget_marked_reloads (regset regs
)
4919 reg_set_iterator rsi
;
4920 EXECUTE_IF_SET_IN_REG_SET (regs
, 0, reg
, rsi
)
4922 if (reg
< FIRST_PSEUDO_REGISTER
4923 /* But don't do this if the reg actually serves as an output
4924 reload reg in the current instruction. */
4926 || ! TEST_HARD_REG_BIT (reg_is_output_reload
, reg
)))
4928 CLEAR_HARD_REG_BIT (reg_reloaded_valid
, reg
);
4929 spill_reg_store
[reg
] = 0;
4932 || !REGNO_REG_SET_P (®_has_output_reload
, reg
))
4933 reg_last_reload_reg
[reg
] = 0;
4937 /* The following HARD_REG_SETs indicate when each hard register is
4938 used for a reload of various parts of the current insn. */
4940 /* If reg is unavailable for all reloads. */
4941 static HARD_REG_SET reload_reg_unavailable
;
4942 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4943 static HARD_REG_SET reload_reg_used
;
4944 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4945 static HARD_REG_SET reload_reg_used_in_input_addr
[MAX_RECOG_OPERANDS
];
4946 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4947 static HARD_REG_SET reload_reg_used_in_inpaddr_addr
[MAX_RECOG_OPERANDS
];
4948 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4949 static HARD_REG_SET reload_reg_used_in_output_addr
[MAX_RECOG_OPERANDS
];
4950 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4951 static HARD_REG_SET reload_reg_used_in_outaddr_addr
[MAX_RECOG_OPERANDS
];
4952 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4953 static HARD_REG_SET reload_reg_used_in_input
[MAX_RECOG_OPERANDS
];
4954 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4955 static HARD_REG_SET reload_reg_used_in_output
[MAX_RECOG_OPERANDS
];
4956 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4957 static HARD_REG_SET reload_reg_used_in_op_addr
;
4958 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4959 static HARD_REG_SET reload_reg_used_in_op_addr_reload
;
4960 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4961 static HARD_REG_SET reload_reg_used_in_insn
;
4962 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4963 static HARD_REG_SET reload_reg_used_in_other_addr
;
4965 /* If reg is in use as a reload reg for any sort of reload. */
4966 static HARD_REG_SET reload_reg_used_at_all
;
4968 /* If reg is use as an inherited reload. We just mark the first register
4970 static HARD_REG_SET reload_reg_used_for_inherit
;
4972 /* Records which hard regs are used in any way, either as explicit use or
4973 by being allocated to a pseudo during any point of the current insn. */
4974 static HARD_REG_SET reg_used_in_insn
;
4976 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4977 TYPE. MODE is used to indicate how many consecutive regs are
4981 mark_reload_reg_in_use (unsigned int regno
, int opnum
, enum reload_type type
,
4982 enum machine_mode mode
)
4984 unsigned int nregs
= hard_regno_nregs
[regno
][mode
];
4987 for (i
= regno
; i
< nregs
+ regno
; i
++)
4992 SET_HARD_REG_BIT (reload_reg_used
, i
);
4995 case RELOAD_FOR_INPUT_ADDRESS
:
4996 SET_HARD_REG_BIT (reload_reg_used_in_input_addr
[opnum
], i
);
4999 case RELOAD_FOR_INPADDR_ADDRESS
:
5000 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[opnum
], i
);
5003 case RELOAD_FOR_OUTPUT_ADDRESS
:
5004 SET_HARD_REG_BIT (reload_reg_used_in_output_addr
[opnum
], i
);
5007 case RELOAD_FOR_OUTADDR_ADDRESS
:
5008 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[opnum
], i
);
5011 case RELOAD_FOR_OPERAND_ADDRESS
:
5012 SET_HARD_REG_BIT (reload_reg_used_in_op_addr
, i
);
5015 case RELOAD_FOR_OPADDR_ADDR
:
5016 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, i
);
5019 case RELOAD_FOR_OTHER_ADDRESS
:
5020 SET_HARD_REG_BIT (reload_reg_used_in_other_addr
, i
);
5023 case RELOAD_FOR_INPUT
:
5024 SET_HARD_REG_BIT (reload_reg_used_in_input
[opnum
], i
);
5027 case RELOAD_FOR_OUTPUT
:
5028 SET_HARD_REG_BIT (reload_reg_used_in_output
[opnum
], i
);
5031 case RELOAD_FOR_INSN
:
5032 SET_HARD_REG_BIT (reload_reg_used_in_insn
, i
);
5036 SET_HARD_REG_BIT (reload_reg_used_at_all
, i
);
5040 /* Similarly, but show REGNO is no longer in use for a reload. */
5043 clear_reload_reg_in_use (unsigned int regno
, int opnum
,
5044 enum reload_type type
, enum machine_mode mode
)
5046 unsigned int nregs
= hard_regno_nregs
[regno
][mode
];
5047 unsigned int start_regno
, end_regno
, r
;
5049 /* A complication is that for some reload types, inheritance might
5050 allow multiple reloads of the same types to share a reload register.
5051 We set check_opnum if we have to check only reloads with the same
5052 operand number, and check_any if we have to check all reloads. */
5053 int check_opnum
= 0;
5055 HARD_REG_SET
*used_in_set
;
5060 used_in_set
= &reload_reg_used
;
5063 case RELOAD_FOR_INPUT_ADDRESS
:
5064 used_in_set
= &reload_reg_used_in_input_addr
[opnum
];
5067 case RELOAD_FOR_INPADDR_ADDRESS
:
5069 used_in_set
= &reload_reg_used_in_inpaddr_addr
[opnum
];
5072 case RELOAD_FOR_OUTPUT_ADDRESS
:
5073 used_in_set
= &reload_reg_used_in_output_addr
[opnum
];
5076 case RELOAD_FOR_OUTADDR_ADDRESS
:
5078 used_in_set
= &reload_reg_used_in_outaddr_addr
[opnum
];
5081 case RELOAD_FOR_OPERAND_ADDRESS
:
5082 used_in_set
= &reload_reg_used_in_op_addr
;
5085 case RELOAD_FOR_OPADDR_ADDR
:
5087 used_in_set
= &reload_reg_used_in_op_addr_reload
;
5090 case RELOAD_FOR_OTHER_ADDRESS
:
5091 used_in_set
= &reload_reg_used_in_other_addr
;
5095 case RELOAD_FOR_INPUT
:
5096 used_in_set
= &reload_reg_used_in_input
[opnum
];
5099 case RELOAD_FOR_OUTPUT
:
5100 used_in_set
= &reload_reg_used_in_output
[opnum
];
5103 case RELOAD_FOR_INSN
:
5104 used_in_set
= &reload_reg_used_in_insn
;
5109 /* We resolve conflicts with remaining reloads of the same type by
5110 excluding the intervals of reload registers by them from the
5111 interval of freed reload registers. Since we only keep track of
5112 one set of interval bounds, we might have to exclude somewhat
5113 more than what would be necessary if we used a HARD_REG_SET here.
5114 But this should only happen very infrequently, so there should
5115 be no reason to worry about it. */
5117 start_regno
= regno
;
5118 end_regno
= regno
+ nregs
;
5119 if (check_opnum
|| check_any
)
5121 for (i
= n_reloads
- 1; i
>= 0; i
--)
5123 if (rld
[i
].when_needed
== type
5124 && (check_any
|| rld
[i
].opnum
== opnum
)
5127 unsigned int conflict_start
= true_regnum (rld
[i
].reg_rtx
);
5128 unsigned int conflict_end
5129 = end_hard_regno (rld
[i
].mode
, conflict_start
);
5131 /* If there is an overlap with the first to-be-freed register,
5132 adjust the interval start. */
5133 if (conflict_start
<= start_regno
&& conflict_end
> start_regno
)
5134 start_regno
= conflict_end
;
5135 /* Otherwise, if there is a conflict with one of the other
5136 to-be-freed registers, adjust the interval end. */
5137 if (conflict_start
> start_regno
&& conflict_start
< end_regno
)
5138 end_regno
= conflict_start
;
5143 for (r
= start_regno
; r
< end_regno
; r
++)
5144 CLEAR_HARD_REG_BIT (*used_in_set
, r
);
5147 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
5148 specified by OPNUM and TYPE. */
5151 reload_reg_free_p (unsigned int regno
, int opnum
, enum reload_type type
)
5155 /* In use for a RELOAD_OTHER means it's not available for anything. */
5156 if (TEST_HARD_REG_BIT (reload_reg_used
, regno
)
5157 || TEST_HARD_REG_BIT (reload_reg_unavailable
, regno
))
5163 /* In use for anything means we can't use it for RELOAD_OTHER. */
5164 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
)
5165 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
5166 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
)
5167 || TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
))
5170 for (i
= 0; i
< reload_n_operands
; i
++)
5171 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
5172 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
)
5173 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5174 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
5175 || TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
)
5176 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5181 case RELOAD_FOR_INPUT
:
5182 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
5183 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
))
5186 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
))
5189 /* If it is used for some other input, can't use it. */
5190 for (i
= 0; i
< reload_n_operands
; i
++)
5191 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5194 /* If it is used in a later operand's address, can't use it. */
5195 for (i
= opnum
+ 1; i
< reload_n_operands
; i
++)
5196 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
5197 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
))
5202 case RELOAD_FOR_INPUT_ADDRESS
:
5203 /* Can't use a register if it is used for an input address for this
5204 operand or used as an input in an earlier one. */
5205 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[opnum
], regno
)
5206 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[opnum
], regno
))
5209 for (i
= 0; i
< opnum
; i
++)
5210 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5215 case RELOAD_FOR_INPADDR_ADDRESS
:
5216 /* Can't use a register if it is used for an input address
5217 for this operand or used as an input in an earlier
5219 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[opnum
], regno
))
5222 for (i
= 0; i
< opnum
; i
++)
5223 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5228 case RELOAD_FOR_OUTPUT_ADDRESS
:
5229 /* Can't use a register if it is used for an output address for this
5230 operand or used as an output in this or a later operand. Note
5231 that multiple output operands are emitted in reverse order, so
5232 the conflicting ones are those with lower indices. */
5233 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[opnum
], regno
))
5236 for (i
= 0; i
<= opnum
; i
++)
5237 if (TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5242 case RELOAD_FOR_OUTADDR_ADDRESS
:
5243 /* Can't use a register if it is used for an output address
5244 for this operand or used as an output in this or a
5245 later operand. Note that multiple output operands are
5246 emitted in reverse order, so the conflicting ones are
5247 those with lower indices. */
5248 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[opnum
], regno
))
5251 for (i
= 0; i
<= opnum
; i
++)
5252 if (TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5257 case RELOAD_FOR_OPERAND_ADDRESS
:
5258 for (i
= 0; i
< reload_n_operands
; i
++)
5259 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5262 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
5263 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
));
5265 case RELOAD_FOR_OPADDR_ADDR
:
5266 for (i
= 0; i
< reload_n_operands
; i
++)
5267 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5270 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
));
5272 case RELOAD_FOR_OUTPUT
:
5273 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
5274 outputs, or an operand address for this or an earlier output.
5275 Note that multiple output operands are emitted in reverse order,
5276 so the conflicting ones are those with higher indices. */
5277 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
))
5280 for (i
= 0; i
< reload_n_operands
; i
++)
5281 if (TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5284 for (i
= opnum
; i
< reload_n_operands
; i
++)
5285 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5286 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
))
5291 case RELOAD_FOR_INSN
:
5292 for (i
= 0; i
< reload_n_operands
; i
++)
5293 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
)
5294 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5297 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
5298 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
));
5300 case RELOAD_FOR_OTHER_ADDRESS
:
5301 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
);
5308 /* Return 1 if the value in reload reg REGNO, as used by a reload
5309 needed for the part of the insn specified by OPNUM and TYPE,
5310 is still available in REGNO at the end of the insn.
5312 We can assume that the reload reg was already tested for availability
5313 at the time it is needed, and we should not check this again,
5314 in case the reg has already been marked in use. */
5317 reload_reg_reaches_end_p (unsigned int regno
, int opnum
, enum reload_type type
)
5324 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
5325 its value must reach the end. */
5328 /* If this use is for part of the insn,
5329 its value reaches if no subsequent part uses the same register.
5330 Just like the above function, don't try to do this with lots
5333 case RELOAD_FOR_OTHER_ADDRESS
:
5334 /* Here we check for everything else, since these don't conflict
5335 with anything else and everything comes later. */
5337 for (i
= 0; i
< reload_n_operands
; i
++)
5338 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5339 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
5340 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
)
5341 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
5342 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
)
5343 || TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5346 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
5347 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
)
5348 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
5349 && ! TEST_HARD_REG_BIT (reload_reg_used
, regno
));
5351 case RELOAD_FOR_INPUT_ADDRESS
:
5352 case RELOAD_FOR_INPADDR_ADDRESS
:
5353 /* Similar, except that we check only for this and subsequent inputs
5354 and the address of only subsequent inputs and we do not need
5355 to check for RELOAD_OTHER objects since they are known not to
5358 for (i
= opnum
; i
< reload_n_operands
; i
++)
5359 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5362 for (i
= opnum
+ 1; i
< reload_n_operands
; i
++)
5363 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
5364 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
))
5367 for (i
= 0; i
< reload_n_operands
; i
++)
5368 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5369 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
5370 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5373 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
))
5376 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
5377 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
5378 && !TEST_HARD_REG_BIT (reload_reg_used
, regno
));
5380 case RELOAD_FOR_INPUT
:
5381 /* Similar to input address, except we start at the next operand for
5382 both input and input address and we do not check for
5383 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5386 for (i
= opnum
+ 1; i
< reload_n_operands
; i
++)
5387 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
5388 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
)
5389 || TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5392 /* ... fall through ... */
5394 case RELOAD_FOR_OPERAND_ADDRESS
:
5395 /* Check outputs and their addresses. */
5397 for (i
= 0; i
< reload_n_operands
; i
++)
5398 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5399 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
5400 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5403 return (!TEST_HARD_REG_BIT (reload_reg_used
, regno
));
5405 case RELOAD_FOR_OPADDR_ADDR
:
5406 for (i
= 0; i
< reload_n_operands
; i
++)
5407 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5408 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
5409 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5412 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
5413 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
5414 && !TEST_HARD_REG_BIT (reload_reg_used
, regno
));
5416 case RELOAD_FOR_INSN
:
5417 /* These conflict with other outputs with RELOAD_OTHER. So
5418 we need only check for output addresses. */
5420 opnum
= reload_n_operands
;
5422 /* ... fall through ... */
5424 case RELOAD_FOR_OUTPUT
:
5425 case RELOAD_FOR_OUTPUT_ADDRESS
:
5426 case RELOAD_FOR_OUTADDR_ADDRESS
:
5427 /* We already know these can't conflict with a later output. So the
5428 only thing to check are later output addresses.
5429 Note that multiple output operands are emitted in reverse order,
5430 so the conflicting ones are those with lower indices. */
5431 for (i
= 0; i
< opnum
; i
++)
5432 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5433 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
))
5443 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5444 every register in the range [REGNO, REGNO + NREGS). */
5447 reload_regs_reach_end_p (unsigned int regno
, int nregs
,
5448 int opnum
, enum reload_type type
)
5452 for (i
= 0; i
< nregs
; i
++)
5453 if (!reload_reg_reaches_end_p (regno
+ i
, opnum
, type
))
5459 /* Returns whether R1 and R2 are uniquely chained: the value of one
5460 is used by the other, and that value is not used by any other
5461 reload for this insn. This is used to partially undo the decision
5462 made in find_reloads when in the case of multiple
5463 RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5464 RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5465 reloads. This code tries to avoid the conflict created by that
5466 change. It might be cleaner to explicitly keep track of which
5467 RELOAD_FOR_OPADDR_ADDR reload is associated with which
5468 RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5469 this after the fact. */
5471 reloads_unique_chain_p (int r1
, int r2
)
5475 /* We only check input reloads. */
5476 if (! rld
[r1
].in
|| ! rld
[r2
].in
)
5479 /* Avoid anything with output reloads. */
5480 if (rld
[r1
].out
|| rld
[r2
].out
)
5483 /* "chained" means one reload is a component of the other reload,
5484 not the same as the other reload. */
5485 if (rld
[r1
].opnum
!= rld
[r2
].opnum
5486 || rtx_equal_p (rld
[r1
].in
, rld
[r2
].in
)
5487 || rld
[r1
].optional
|| rld
[r2
].optional
5488 || ! (reg_mentioned_p (rld
[r1
].in
, rld
[r2
].in
)
5489 || reg_mentioned_p (rld
[r2
].in
, rld
[r1
].in
)))
5492 for (i
= 0; i
< n_reloads
; i
++)
5493 /* Look for input reloads that aren't our two */
5494 if (i
!= r1
&& i
!= r2
&& rld
[i
].in
)
5496 /* If our reload is mentioned at all, it isn't a simple chain. */
5497 if (reg_mentioned_p (rld
[r1
].in
, rld
[i
].in
))
5503 /* The recursive function change all occurrences of WHAT in *WHERE
5506 substitute (rtx
*where
, const_rtx what
, rtx repl
)
5515 if (*where
== what
|| rtx_equal_p (*where
, what
))
5517 /* Record the location of the changed rtx. */
5518 VEC_safe_push (rtx_p
, heap
, substitute_stack
, where
);
5523 code
= GET_CODE (*where
);
5524 fmt
= GET_RTX_FORMAT (code
);
5525 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
5531 for (j
= XVECLEN (*where
, i
) - 1; j
>= 0; j
--)
5532 substitute (&XVECEXP (*where
, i
, j
), what
, repl
);
5534 else if (fmt
[i
] == 'e')
5535 substitute (&XEXP (*where
, i
), what
, repl
);
5539 /* The function returns TRUE if chain of reload R1 and R2 (in any
5540 order) can be evaluated without usage of intermediate register for
5541 the reload containing another reload. It is important to see
5542 gen_reload to understand what the function is trying to do. As an
5543 example, let us have reload chain
5546 r1: <something> + const
5548 and reload R2 got reload reg HR. The function returns true if
5549 there is a correct insn HR = HR + <something>. Otherwise,
5550 gen_reload will use intermediate register (and this is the reload
5551 reg for R1) to reload <something>.
5553 We need this function to find a conflict for chain reloads. In our
5554 example, if HR = HR + <something> is incorrect insn, then we cannot
5555 use HR as a reload register for R2. If we do use it then we get a
5564 gen_reload_chain_without_interm_reg_p (int r1
, int r2
)
5566 /* Assume other cases in gen_reload are not possible for
5567 chain reloads or do need an intermediate hard registers. */
5570 rtx out
, in
, tem
, insn
;
5571 rtx last
= get_last_insn ();
5573 /* Make r2 a component of r1. */
5574 if (reg_mentioned_p (rld
[r1
].in
, rld
[r2
].in
))
5580 gcc_assert (reg_mentioned_p (rld
[r2
].in
, rld
[r1
].in
));
5581 regno
= rld
[r1
].regno
>= 0 ? rld
[r1
].regno
: rld
[r2
].regno
;
5582 gcc_assert (regno
>= 0);
5583 out
= gen_rtx_REG (rld
[r1
].mode
, regno
);
5585 substitute (&in
, rld
[r2
].in
, gen_rtx_REG (rld
[r2
].mode
, regno
));
5587 /* If IN is a paradoxical SUBREG, remove it and try to put the
5588 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
5589 if (GET_CODE (in
) == SUBREG
5590 && (GET_MODE_SIZE (GET_MODE (in
))
5591 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in
))))
5592 && (tem
= gen_lowpart_common (GET_MODE (SUBREG_REG (in
)), out
)) != 0)
5593 in
= SUBREG_REG (in
), out
= tem
;
5595 if (GET_CODE (in
) == PLUS
5596 && (REG_P (XEXP (in
, 0))
5597 || GET_CODE (XEXP (in
, 0)) == SUBREG
5598 || MEM_P (XEXP (in
, 0)))
5599 && (REG_P (XEXP (in
, 1))
5600 || GET_CODE (XEXP (in
, 1)) == SUBREG
5601 || CONSTANT_P (XEXP (in
, 1))
5602 || MEM_P (XEXP (in
, 1))))
5604 insn
= emit_insn (gen_rtx_SET (VOIDmode
, out
, in
));
5605 code
= recog_memoized (insn
);
5610 extract_insn (insn
);
5611 /* We want constrain operands to treat this insn strictly in
5612 its validity determination, i.e., the way it would after
5613 reload has completed. */
5614 result
= constrain_operands (1);
5617 delete_insns_since (last
);
5620 /* Restore the original value at each changed address within R1. */
5621 while (!VEC_empty (rtx_p
, substitute_stack
))
5623 rtx
*where
= VEC_pop (rtx_p
, substitute_stack
);
5624 *where
= rld
[r2
].in
;
5630 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5633 This function uses the same algorithm as reload_reg_free_p above. */
5636 reloads_conflict (int r1
, int r2
)
5638 enum reload_type r1_type
= rld
[r1
].when_needed
;
5639 enum reload_type r2_type
= rld
[r2
].when_needed
;
5640 int r1_opnum
= rld
[r1
].opnum
;
5641 int r2_opnum
= rld
[r2
].opnum
;
5643 /* RELOAD_OTHER conflicts with everything. */
5644 if (r2_type
== RELOAD_OTHER
)
5647 /* Otherwise, check conflicts differently for each type. */
5651 case RELOAD_FOR_INPUT
:
5652 return (r2_type
== RELOAD_FOR_INSN
5653 || r2_type
== RELOAD_FOR_OPERAND_ADDRESS
5654 || r2_type
== RELOAD_FOR_OPADDR_ADDR
5655 || r2_type
== RELOAD_FOR_INPUT
5656 || ((r2_type
== RELOAD_FOR_INPUT_ADDRESS
5657 || r2_type
== RELOAD_FOR_INPADDR_ADDRESS
)
5658 && r2_opnum
> r1_opnum
));
5660 case RELOAD_FOR_INPUT_ADDRESS
:
5661 return ((r2_type
== RELOAD_FOR_INPUT_ADDRESS
&& r1_opnum
== r2_opnum
)
5662 || (r2_type
== RELOAD_FOR_INPUT
&& r2_opnum
< r1_opnum
));
5664 case RELOAD_FOR_INPADDR_ADDRESS
:
5665 return ((r2_type
== RELOAD_FOR_INPADDR_ADDRESS
&& r1_opnum
== r2_opnum
)
5666 || (r2_type
== RELOAD_FOR_INPUT
&& r2_opnum
< r1_opnum
));
5668 case RELOAD_FOR_OUTPUT_ADDRESS
:
5669 return ((r2_type
== RELOAD_FOR_OUTPUT_ADDRESS
&& r2_opnum
== r1_opnum
)
5670 || (r2_type
== RELOAD_FOR_OUTPUT
&& r2_opnum
<= r1_opnum
));
5672 case RELOAD_FOR_OUTADDR_ADDRESS
:
5673 return ((r2_type
== RELOAD_FOR_OUTADDR_ADDRESS
&& r2_opnum
== r1_opnum
)
5674 || (r2_type
== RELOAD_FOR_OUTPUT
&& r2_opnum
<= r1_opnum
));
5676 case RELOAD_FOR_OPERAND_ADDRESS
:
5677 return (r2_type
== RELOAD_FOR_INPUT
|| r2_type
== RELOAD_FOR_INSN
5678 || (r2_type
== RELOAD_FOR_OPERAND_ADDRESS
5679 && (!reloads_unique_chain_p (r1
, r2
)
5680 || !gen_reload_chain_without_interm_reg_p (r1
, r2
))));
5682 case RELOAD_FOR_OPADDR_ADDR
:
5683 return (r2_type
== RELOAD_FOR_INPUT
5684 || r2_type
== RELOAD_FOR_OPADDR_ADDR
);
5686 case RELOAD_FOR_OUTPUT
:
5687 return (r2_type
== RELOAD_FOR_INSN
|| r2_type
== RELOAD_FOR_OUTPUT
5688 || ((r2_type
== RELOAD_FOR_OUTPUT_ADDRESS
5689 || r2_type
== RELOAD_FOR_OUTADDR_ADDRESS
)
5690 && r2_opnum
>= r1_opnum
));
5692 case RELOAD_FOR_INSN
:
5693 return (r2_type
== RELOAD_FOR_INPUT
|| r2_type
== RELOAD_FOR_OUTPUT
5694 || r2_type
== RELOAD_FOR_INSN
5695 || r2_type
== RELOAD_FOR_OPERAND_ADDRESS
);
5697 case RELOAD_FOR_OTHER_ADDRESS
:
5698 return r2_type
== RELOAD_FOR_OTHER_ADDRESS
;
/* Indexed by reload number, 1 if incoming value
   inherited from previous insns.  */
static char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
static rtx reload_inheritance_insn[MAX_RELOADS];

/* If nonzero, this is a place to get the value of the reload,
   rather than using reload_in.  */
static rtx reload_override_in[MAX_RELOADS];

/* For each reload, the hard register number of the register used,
   or -1 if we did not need a register for this reload.  */
static int reload_spill_index[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the input mode.  */
static rtx reload_reg_rtx_for_input[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the output mode.  */
static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5730 /* Subroutine of free_for_value_p, used to check a single register.
5731 START_REGNO is the starting regno of the full reload register
5732 (possibly comprising multiple hard registers) that we are considering. */
5735 reload_reg_free_for_value_p (int start_regno
, int regno
, int opnum
,
5736 enum reload_type type
, rtx value
, rtx out
,
5737 int reloadnum
, int ignore_address_reloads
)
5740 /* Set if we see an input reload that must not share its reload register
5741 with any new earlyclobber, but might otherwise share the reload
5742 register with an output or input-output reload. */
5743 int check_earlyclobber
= 0;
5747 if (TEST_HARD_REG_BIT (reload_reg_unavailable
, regno
))
5750 if (out
== const0_rtx
)
5756 /* We use some pseudo 'time' value to check if the lifetimes of the
5757 new register use would overlap with the one of a previous reload
5758 that is not read-only or uses a different value.
5759 The 'time' used doesn't have to be linear in any shape or form, just
5761 Some reload types use different 'buckets' for each operand.
5762 So there are MAX_RECOG_OPERANDS different time values for each
5764 We compute TIME1 as the time when the register for the prospective
5765 new reload ceases to be live, and TIME2 for each existing
5766 reload as the time when that the reload register of that reload
5768 Where there is little to be gained by exact lifetime calculations,
5769 we just make conservative assumptions, i.e. a longer lifetime;
5770 this is done in the 'default:' cases. */
5773 case RELOAD_FOR_OTHER_ADDRESS
:
5774 /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads. */
5775 time1
= copy
? 0 : 1;
5778 time1
= copy
? 1 : MAX_RECOG_OPERANDS
* 5 + 5;
5780 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5781 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2 ,
5782 respectively, to the time values for these, we get distinct time
5783 values. To get distinct time values for each operand, we have to
5784 multiply opnum by at least three. We round that up to four because
5785 multiply by four is often cheaper. */
5786 case RELOAD_FOR_INPADDR_ADDRESS
:
5787 time1
= opnum
* 4 + 2;
5789 case RELOAD_FOR_INPUT_ADDRESS
:
5790 time1
= opnum
* 4 + 3;
5792 case RELOAD_FOR_INPUT
:
5793 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5794 executes (inclusive). */
5795 time1
= copy
? opnum
* 4 + 4 : MAX_RECOG_OPERANDS
* 4 + 3;
5797 case RELOAD_FOR_OPADDR_ADDR
:
5799 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5800 time1
= MAX_RECOG_OPERANDS
* 4 + 1;
5802 case RELOAD_FOR_OPERAND_ADDRESS
:
5803 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5805 time1
= copy
? MAX_RECOG_OPERANDS
* 4 + 2 : MAX_RECOG_OPERANDS
* 4 + 3;
5807 case RELOAD_FOR_OUTADDR_ADDRESS
:
5808 time1
= MAX_RECOG_OPERANDS
* 4 + 4 + opnum
;
5810 case RELOAD_FOR_OUTPUT_ADDRESS
:
5811 time1
= MAX_RECOG_OPERANDS
* 4 + 5 + opnum
;
5814 time1
= MAX_RECOG_OPERANDS
* 5 + 5;
5817 for (i
= 0; i
< n_reloads
; i
++)
5819 rtx reg
= rld
[i
].reg_rtx
;
5820 if (reg
&& REG_P (reg
)
5821 && ((unsigned) regno
- true_regnum (reg
)
5822 <= hard_regno_nregs
[REGNO (reg
)][GET_MODE (reg
)] - (unsigned) 1)
5825 rtx other_input
= rld
[i
].in
;
5827 /* If the other reload loads the same input value, that
5828 will not cause a conflict only if it's loading it into
5829 the same register. */
5830 if (true_regnum (reg
) != start_regno
)
5831 other_input
= NULL_RTX
;
5832 if (! other_input
|| ! rtx_equal_p (other_input
, value
)
5833 || rld
[i
].out
|| out
)
5836 switch (rld
[i
].when_needed
)
5838 case RELOAD_FOR_OTHER_ADDRESS
:
5841 case RELOAD_FOR_INPADDR_ADDRESS
:
5842 /* find_reloads makes sure that a
5843 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5844 by at most one - the first -
5845 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS . If the
5846 address reload is inherited, the address address reload
5847 goes away, so we can ignore this conflict. */
5848 if (type
== RELOAD_FOR_INPUT_ADDRESS
&& reloadnum
== i
+ 1
5849 && ignore_address_reloads
5850 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5851 Then the address address is still needed to store
5852 back the new address. */
5853 && ! rld
[reloadnum
].out
)
5855 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5856 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5858 if (type
== RELOAD_FOR_INPUT
&& opnum
== rld
[i
].opnum
5859 && ignore_address_reloads
5860 /* Unless we are reloading an auto_inc expression. */
5861 && ! rld
[reloadnum
].out
)
5863 time2
= rld
[i
].opnum
* 4 + 2;
5865 case RELOAD_FOR_INPUT_ADDRESS
:
5866 if (type
== RELOAD_FOR_INPUT
&& opnum
== rld
[i
].opnum
5867 && ignore_address_reloads
5868 && ! rld
[reloadnum
].out
)
5870 time2
= rld
[i
].opnum
* 4 + 3;
5872 case RELOAD_FOR_INPUT
:
5873 time2
= rld
[i
].opnum
* 4 + 4;
5874 check_earlyclobber
= 1;
5876 /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
5877 == MAX_RECOG_OPERAND * 4 */
5878 case RELOAD_FOR_OPADDR_ADDR
:
5879 if (type
== RELOAD_FOR_OPERAND_ADDRESS
&& reloadnum
== i
+ 1
5880 && ignore_address_reloads
5881 && ! rld
[reloadnum
].out
)
5883 time2
= MAX_RECOG_OPERANDS
* 4 + 1;
5885 case RELOAD_FOR_OPERAND_ADDRESS
:
5886 time2
= MAX_RECOG_OPERANDS
* 4 + 2;
5887 check_earlyclobber
= 1;
5889 case RELOAD_FOR_INSN
:
5890 time2
= MAX_RECOG_OPERANDS
* 4 + 3;
5892 case RELOAD_FOR_OUTPUT
:
5893 /* All RELOAD_FOR_OUTPUT reloads become live just after the
5894 instruction is executed. */
5895 time2
= MAX_RECOG_OPERANDS
* 4 + 4;
5897 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5898 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5900 case RELOAD_FOR_OUTADDR_ADDRESS
:
5901 if (type
== RELOAD_FOR_OUTPUT_ADDRESS
&& reloadnum
== i
+ 1
5902 && ignore_address_reloads
5903 && ! rld
[reloadnum
].out
)
5905 time2
= MAX_RECOG_OPERANDS
* 4 + 4 + rld
[i
].opnum
;
5907 case RELOAD_FOR_OUTPUT_ADDRESS
:
5908 time2
= MAX_RECOG_OPERANDS
* 4 + 5 + rld
[i
].opnum
;
5911 /* If there is no conflict in the input part, handle this
5912 like an output reload. */
5913 if (! rld
[i
].in
|| rtx_equal_p (other_input
, value
))
5915 time2
= MAX_RECOG_OPERANDS
* 4 + 4;
5916 /* Earlyclobbered outputs must conflict with inputs. */
5917 if (earlyclobber_operand_p (rld
[i
].out
))
5918 time2
= MAX_RECOG_OPERANDS
* 4 + 3;
5923 /* RELOAD_OTHER might be live beyond instruction execution,
5924 but this is not obvious when we set time2 = 1. So check
5925 here if there might be a problem with the new reload
5926 clobbering the register used by the RELOAD_OTHER. */
5934 && (! rld
[i
].in
|| rld
[i
].out
5935 || ! rtx_equal_p (other_input
, value
)))
5936 || (out
&& rld
[reloadnum
].out_reg
5937 && time2
>= MAX_RECOG_OPERANDS
* 4 + 3))
5943 /* Earlyclobbered outputs must conflict with inputs. */
5944 if (check_earlyclobber
&& out
&& earlyclobber_operand_p (out
))
5950 /* Return 1 if the value in reload reg REGNO, as used by a reload
5951 needed for the part of the insn specified by OPNUM and TYPE,
5952 may be used to load VALUE into it.
5954 MODE is the mode in which the register is used, this is needed to
5955 determine how many hard regs to test.
5957 Other read-only reloads with the same value do not conflict
5958 unless OUT is nonzero and these other reloads have to live while
5959 output reloads live.
5960 If OUT is CONST0_RTX, this is a special case: it means that the
5961 test should not be for using register REGNO as reload register, but
5962 for copying from register REGNO into the reload register.
5964 RELOADNUM is the number of the reload we want to load this value for;
5965 a reload does not conflict with itself.
5967 When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
5968 reloads that load an address for the very reload we are considering.
5970 The caller has to make sure that there is no conflict with the return
5974 free_for_value_p (int regno
, enum machine_mode mode
, int opnum
,
5975 enum reload_type type
, rtx value
, rtx out
, int reloadnum
,
5976 int ignore_address_reloads
)
5978 int nregs
= hard_regno_nregs
[regno
][mode
];
5980 if (! reload_reg_free_for_value_p (regno
, regno
+ nregs
, opnum
, type
,
5981 value
, out
, reloadnum
,
5982 ignore_address_reloads
))
5987 /* Return nonzero if the rtx X is invariant over the current function. */
5988 /* ??? Actually, the places where we use this expect exactly what is
5989 tested here, and not everything that is function invariant. In
5990 particular, the frame pointer and arg pointer are special cased;
5991 pic_offset_table_rtx is not, and we must not spill these things to
5995 function_invariant_p (const_rtx x
)
5999 if (x
== frame_pointer_rtx
|| x
== arg_pointer_rtx
)
6001 if (GET_CODE (x
) == PLUS
6002 && (XEXP (x
, 0) == frame_pointer_rtx
|| XEXP (x
, 0) == arg_pointer_rtx
)
6003 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
6008 /* Determine whether the reload reg X overlaps any rtx'es used for
6009 overriding inheritance. Return nonzero if so. */
6012 conflicts_with_override (rtx x
)
6015 for (i
= 0; i
< n_reloads
; i
++)
6016 if (reload_override_in
[i
]
6017 && reg_overlap_mentioned_p (x
, reload_override_in
[i
]))
6022 /* Give an error message saying we failed to find a reload for INSN,
6023 and clear out reload R. */
6025 failed_reload (rtx insn
, int r
)
6027 if (asm_noperands (PATTERN (insn
)) < 0)
6028 /* It's the compiler's fault. */
6029 fatal_insn ("could not find a spill register", insn
);
6031 /* It's the user's fault; the operand's mode and constraint
6032 don't match. Disable this reload so we don't crash in final. */
6033 error_for_asm (insn
,
6034 "%<asm%> operand constraint incompatible with operand size");
6038 rld
[r
].optional
= 1;
6039 rld
[r
].secondary_p
= 1;
6042 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
6043 for reload R. If it's valid, get an rtx for it. Return nonzero if
6046 set_reload_reg (int i
, int r
)
6049 rtx reg
= spill_reg_rtx
[i
];
6051 if (reg
== 0 || GET_MODE (reg
) != rld
[r
].mode
)
6052 spill_reg_rtx
[i
] = reg
6053 = gen_rtx_REG (rld
[r
].mode
, spill_regs
[i
]);
6055 regno
= true_regnum (reg
);
6057 /* Detect when the reload reg can't hold the reload mode.
6058 This used to be one `if', but Sequent compiler can't handle that. */
6059 if (HARD_REGNO_MODE_OK (regno
, rld
[r
].mode
))
6061 enum machine_mode test_mode
= VOIDmode
;
6063 test_mode
= GET_MODE (rld
[r
].in
);
6064 /* If rld[r].in has VOIDmode, it means we will load it
6065 in whatever mode the reload reg has: to wit, rld[r].mode.
6066 We have already tested that for validity. */
6067 /* Aside from that, we need to test that the expressions
6068 to reload from or into have modes which are valid for this
6069 reload register. Otherwise the reload insns would be invalid. */
6070 if (! (rld
[r
].in
!= 0 && test_mode
!= VOIDmode
6071 && ! HARD_REGNO_MODE_OK (regno
, test_mode
)))
6072 if (! (rld
[r
].out
!= 0
6073 && ! HARD_REGNO_MODE_OK (regno
, GET_MODE (rld
[r
].out
))))
6075 /* The reg is OK. */
6078 /* Mark as in use for this insn the reload regs we use
6080 mark_reload_reg_in_use (spill_regs
[i
], rld
[r
].opnum
,
6081 rld
[r
].when_needed
, rld
[r
].mode
);
6083 rld
[r
].reg_rtx
= reg
;
6084 reload_spill_index
[r
] = spill_regs
[i
];
6091 /* Find a spill register to use as a reload register for reload R.
6092 LAST_RELOAD is nonzero if this is the last reload for the insn being
6095 Set rld[R].reg_rtx to the register allocated.
6097 We return 1 if successful, or 0 if we couldn't find a spill reg and
6098 we didn't change anything. */
6101 allocate_reload_reg (struct insn_chain
*chain ATTRIBUTE_UNUSED
, int r
,
6106 /* If we put this reload ahead, thinking it is a group,
6107 then insist on finding a group. Otherwise we can grab a
6108 reg that some other reload needs.
6109 (That can happen when we have a 68000 DATA_OR_FP_REG
6110 which is a group of data regs or one fp reg.)
6111 We need not be so restrictive if there are no more reloads
6114 ??? Really it would be nicer to have smarter handling
6115 for that kind of reg class, where a problem like this is normal.
6116 Perhaps those classes should be avoided for reloading
6117 by use of more alternatives. */
6119 int force_group
= rld
[r
].nregs
> 1 && ! last_reload
;
6121 /* If we want a single register and haven't yet found one,
6122 take any reg in the right class and not in use.
6123 If we want a consecutive group, here is where we look for it.
6125 We use three passes so we can first look for reload regs to
6126 reuse, which are already in use for other reloads in this insn,
6127 and only then use additional registers which are not "bad", then
6128 finally any register.
6130 I think that maximizing reuse is needed to make sure we don't
6131 run out of reload regs. Suppose we have three reloads, and
6132 reloads A and B can share regs. These need two regs.
6133 Suppose A and B are given different regs.
6134 That leaves none for C. */
6135 for (pass
= 0; pass
< 3; pass
++)
6137 /* I is the index in spill_regs.
6138 We advance it round-robin between insns to use all spill regs
6139 equally, so that inherited reloads have a chance
6140 of leapfrogging each other. */
6144 for (count
= 0; count
< n_spills
; count
++)
6146 int rclass
= (int) rld
[r
].rclass
;
6152 regnum
= spill_regs
[i
];
6154 if ((reload_reg_free_p (regnum
, rld
[r
].opnum
,
6157 /* We check reload_reg_used to make sure we
6158 don't clobber the return register. */
6159 && ! TEST_HARD_REG_BIT (reload_reg_used
, regnum
)
6160 && free_for_value_p (regnum
, rld
[r
].mode
, rld
[r
].opnum
,
6161 rld
[r
].when_needed
, rld
[r
].in
,
6163 && TEST_HARD_REG_BIT (reg_class_contents
[rclass
], regnum
)
6164 && HARD_REGNO_MODE_OK (regnum
, rld
[r
].mode
)
6165 /* Look first for regs to share, then for unshared. But
6166 don't share regs used for inherited reloads; they are
6167 the ones we want to preserve. */
6169 || (TEST_HARD_REG_BIT (reload_reg_used_at_all
,
6171 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit
,
6174 int nr
= hard_regno_nregs
[regnum
][rld
[r
].mode
];
6176 /* During the second pass we want to avoid reload registers
6177 which are "bad" for this reload. */
6179 && ira_bad_reload_regno (regnum
, rld
[r
].in
, rld
[r
].out
))
6182 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
6183 (on 68000) got us two FP regs. If NR is 1,
6184 we would reject both of them. */
6187 /* If we need only one reg, we have already won. */
6190 /* But reject a single reg if we demand a group. */
6195 /* Otherwise check that as many consecutive regs as we need
6196 are available here. */
6199 int regno
= regnum
+ nr
- 1;
6200 if (!(TEST_HARD_REG_BIT (reg_class_contents
[rclass
], regno
)
6201 && spill_reg_order
[regno
] >= 0
6202 && reload_reg_free_p (regno
, rld
[r
].opnum
,
6203 rld
[r
].when_needed
)))
6212 /* If we found something on the current pass, omit later passes. */
6213 if (count
< n_spills
)
6217 /* We should have found a spill register by now. */
6218 if (count
>= n_spills
)
6221 /* I is the index in SPILL_REG_RTX of the reload register we are to
6222 allocate. Get an rtx for it and find its register number. */
6224 return set_reload_reg (i
, r
);
6227 /* Initialize all the tables needed to allocate reload registers.
6228 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
6229 is the array we use to restore the reg_rtx field for every reload. */
6232 choose_reload_regs_init (struct insn_chain
*chain
, rtx
*save_reload_reg_rtx
)
6236 for (i
= 0; i
< n_reloads
; i
++)
6237 rld
[i
].reg_rtx
= save_reload_reg_rtx
[i
];
6239 memset (reload_inherited
, 0, MAX_RELOADS
);
6240 memset (reload_inheritance_insn
, 0, MAX_RELOADS
* sizeof (rtx
));
6241 memset (reload_override_in
, 0, MAX_RELOADS
* sizeof (rtx
));
6243 CLEAR_HARD_REG_SET (reload_reg_used
);
6244 CLEAR_HARD_REG_SET (reload_reg_used_at_all
);
6245 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr
);
6246 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload
);
6247 CLEAR_HARD_REG_SET (reload_reg_used_in_insn
);
6248 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr
);
6250 CLEAR_HARD_REG_SET (reg_used_in_insn
);
6253 REG_SET_TO_HARD_REG_SET (tmp
, &chain
->live_throughout
);
6254 IOR_HARD_REG_SET (reg_used_in_insn
, tmp
);
6255 REG_SET_TO_HARD_REG_SET (tmp
, &chain
->dead_or_set
);
6256 IOR_HARD_REG_SET (reg_used_in_insn
, tmp
);
6257 compute_use_by_pseudos (®_used_in_insn
, &chain
->live_throughout
);
6258 compute_use_by_pseudos (®_used_in_insn
, &chain
->dead_or_set
);
6261 for (i
= 0; i
< reload_n_operands
; i
++)
6263 CLEAR_HARD_REG_SET (reload_reg_used_in_output
[i
]);
6264 CLEAR_HARD_REG_SET (reload_reg_used_in_input
[i
]);
6265 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr
[i
]);
6266 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr
[i
]);
6267 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr
[i
]);
6268 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr
[i
]);
6271 COMPL_HARD_REG_SET (reload_reg_unavailable
, chain
->used_spill_regs
);
6273 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit
);
6275 for (i
= 0; i
< n_reloads
; i
++)
6276 /* If we have already decided to use a certain register,
6277 don't use it in another way. */
6279 mark_reload_reg_in_use (REGNO (rld
[i
].reg_rtx
), rld
[i
].opnum
,
6280 rld
[i
].when_needed
, rld
[i
].mode
);
6283 /* Assign hard reg targets for the pseudo-registers we must reload
6284 into hard regs for this insn.
6285 Also output the instructions to copy them in and out of the hard regs.
6287 For machines with register classes, we are responsible for
6288 finding a reload reg in the proper class. */
6291 choose_reload_regs (struct insn_chain
*chain
)
6293 rtx insn
= chain
->insn
;
6295 unsigned int max_group_size
= 1;
6296 enum reg_class group_class
= NO_REGS
;
6297 int pass
, win
, inheritance
;
6299 rtx save_reload_reg_rtx
[MAX_RELOADS
];
6301 /* In order to be certain of getting the registers we need,
6302 we must sort the reloads into order of increasing register class.
6303 Then our grabbing of reload registers will parallel the process
6304 that provided the reload registers.
6306 Also note whether any of the reloads wants a consecutive group of regs.
6307 If so, record the maximum size of the group desired and what
6308 register class contains all the groups needed by this insn. */
6310 for (j
= 0; j
< n_reloads
; j
++)
6312 reload_order
[j
] = j
;
6313 if (rld
[j
].reg_rtx
!= NULL_RTX
)
6315 gcc_assert (REG_P (rld
[j
].reg_rtx
)
6316 && HARD_REGISTER_P (rld
[j
].reg_rtx
));
6317 reload_spill_index
[j
] = REGNO (rld
[j
].reg_rtx
);
6320 reload_spill_index
[j
] = -1;
6322 if (rld
[j
].nregs
> 1)
6324 max_group_size
= MAX (rld
[j
].nregs
, max_group_size
);
6326 = reg_class_superunion
[(int) rld
[j
].rclass
][(int) group_class
];
6329 save_reload_reg_rtx
[j
] = rld
[j
].reg_rtx
;
6333 qsort (reload_order
, n_reloads
, sizeof (short), reload_reg_class_lower
);
6335 /* If -O, try first with inheritance, then turning it off.
6336 If not -O, don't do inheritance.
6337 Using inheritance when not optimizing leads to paradoxes
6338 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6339 because one side of the comparison might be inherited. */
6341 for (inheritance
= optimize
> 0; inheritance
>= 0; inheritance
--)
6343 choose_reload_regs_init (chain
, save_reload_reg_rtx
);
6345 /* Process the reloads in order of preference just found.
6346 Beyond this point, subregs can be found in reload_reg_rtx.
6348 This used to look for an existing reloaded home for all of the
6349 reloads, and only then perform any new reloads. But that could lose
6350 if the reloads were done out of reg-class order because a later
6351 reload with a looser constraint might have an old home in a register
6352 needed by an earlier reload with a tighter constraint.
6354 To solve this, we make two passes over the reloads, in the order
6355 described above. In the first pass we try to inherit a reload
6356 from a previous insn. If there is a later reload that needs a
6357 class that is a proper subset of the class being processed, we must
6358 also allocate a spill register during the first pass.
6360 Then make a second pass over the reloads to allocate any reloads
6361 that haven't been given registers yet. */
6363 for (j
= 0; j
< n_reloads
; j
++)
6365 int r
= reload_order
[j
];
6366 rtx search_equiv
= NULL_RTX
;
6368 /* Ignore reloads that got marked inoperative. */
6369 if (rld
[r
].out
== 0 && rld
[r
].in
== 0
6370 && ! rld
[r
].secondary_p
)
6373 /* If find_reloads chose to use reload_in or reload_out as a reload
6374 register, we don't need to chose one. Otherwise, try even if it
6375 found one since we might save an insn if we find the value lying
6377 Try also when reload_in is a pseudo without a hard reg. */
6378 if (rld
[r
].in
!= 0 && rld
[r
].reg_rtx
!= 0
6379 && (rtx_equal_p (rld
[r
].in
, rld
[r
].reg_rtx
)
6380 || (rtx_equal_p (rld
[r
].out
, rld
[r
].reg_rtx
)
6381 && !MEM_P (rld
[r
].in
)
6382 && true_regnum (rld
[r
].in
) < FIRST_PSEUDO_REGISTER
)))
6385 #if 0 /* No longer needed for correct operation.
6386 It might give better code, or might not; worth an experiment? */
6387 /* If this is an optional reload, we can't inherit from earlier insns
6388 until we are sure that any non-optional reloads have been allocated.
6389 The following code takes advantage of the fact that optional reloads
6390 are at the end of reload_order. */
6391 if (rld
[r
].optional
!= 0)
6392 for (i
= 0; i
< j
; i
++)
6393 if ((rld
[reload_order
[i
]].out
!= 0
6394 || rld
[reload_order
[i
]].in
!= 0
6395 || rld
[reload_order
[i
]].secondary_p
)
6396 && ! rld
[reload_order
[i
]].optional
6397 && rld
[reload_order
[i
]].reg_rtx
== 0)
6398 allocate_reload_reg (chain
, reload_order
[i
], 0);
6401 /* First see if this pseudo is already available as reloaded
6402 for a previous insn. We cannot try to inherit for reloads
6403 that are smaller than the maximum number of registers needed
6404 for groups unless the register we would allocate cannot be used
6407 We could check here to see if this is a secondary reload for
6408 an object that is already in a register of the desired class.
6409 This would avoid the need for the secondary reload register.
6410 But this is complex because we can't easily determine what
6411 objects might want to be loaded via this reload. So let a
6412 register be allocated here. In `emit_reload_insns' we suppress
6413 one of the loads in the case described above. */
6419 enum machine_mode mode
= VOIDmode
;
6423 else if (REG_P (rld
[r
].in
))
6425 regno
= REGNO (rld
[r
].in
);
6426 mode
= GET_MODE (rld
[r
].in
);
6428 else if (REG_P (rld
[r
].in_reg
))
6430 regno
= REGNO (rld
[r
].in_reg
);
6431 mode
= GET_MODE (rld
[r
].in_reg
);
6433 else if (GET_CODE (rld
[r
].in_reg
) == SUBREG
6434 && REG_P (SUBREG_REG (rld
[r
].in_reg
)))
6436 regno
= REGNO (SUBREG_REG (rld
[r
].in_reg
));
6437 if (regno
< FIRST_PSEUDO_REGISTER
)
6438 regno
= subreg_regno (rld
[r
].in_reg
);
6440 byte
= SUBREG_BYTE (rld
[r
].in_reg
);
6441 mode
= GET_MODE (rld
[r
].in_reg
);
6444 else if (GET_RTX_CLASS (GET_CODE (rld
[r
].in_reg
)) == RTX_AUTOINC
6445 && REG_P (XEXP (rld
[r
].in_reg
, 0)))
6447 regno
= REGNO (XEXP (rld
[r
].in_reg
, 0));
6448 mode
= GET_MODE (XEXP (rld
[r
].in_reg
, 0));
6449 rld
[r
].out
= rld
[r
].in
;
6453 /* This won't work, since REGNO can be a pseudo reg number.
6454 Also, it takes much more hair to keep track of all the things
6455 that can invalidate an inherited reload of part of a pseudoreg. */
6456 else if (GET_CODE (rld
[r
].in
) == SUBREG
6457 && REG_P (SUBREG_REG (rld
[r
].in
)))
6458 regno
= subreg_regno (rld
[r
].in
);
6462 && reg_last_reload_reg
[regno
] != 0
6463 #ifdef CANNOT_CHANGE_MODE_CLASS
6464 /* Verify that the register it's in can be used in
6466 && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg
[regno
]),
6467 GET_MODE (reg_last_reload_reg
[regno
]),
6472 enum reg_class rclass
= rld
[r
].rclass
, last_class
;
6473 rtx last_reg
= reg_last_reload_reg
[regno
];
6474 enum machine_mode need_mode
;
6476 i
= REGNO (last_reg
);
6477 i
+= subreg_regno_offset (i
, GET_MODE (last_reg
), byte
, mode
);
6478 last_class
= REGNO_REG_CLASS (i
);
6484 = smallest_mode_for_size
6485 (GET_MODE_BITSIZE (mode
) + byte
* BITS_PER_UNIT
,
6486 GET_MODE_CLASS (mode
) == MODE_PARTIAL_INT
6487 ? MODE_INT
: GET_MODE_CLASS (mode
));
6489 if ((GET_MODE_SIZE (GET_MODE (last_reg
))
6490 >= GET_MODE_SIZE (need_mode
))
6491 && reg_reloaded_contents
[i
] == regno
6492 && TEST_HARD_REG_BIT (reg_reloaded_valid
, i
)
6493 && HARD_REGNO_MODE_OK (i
, rld
[r
].mode
)
6494 && (TEST_HARD_REG_BIT (reg_class_contents
[(int) rclass
], i
)
6495 /* Even if we can't use this register as a reload
6496 register, we might use it for reload_override_in,
6497 if copying it to the desired class is cheap
6499 || ((REGISTER_MOVE_COST (mode
, last_class
, rclass
)
6500 < memory_move_cost (mode
, rclass
, true))
6501 && (secondary_reload_class (1, rclass
, mode
,
6504 #ifdef SECONDARY_MEMORY_NEEDED
6505 && ! SECONDARY_MEMORY_NEEDED (last_class
, rclass
,
6510 && (rld
[r
].nregs
== max_group_size
6511 || ! TEST_HARD_REG_BIT (reg_class_contents
[(int) group_class
],
6513 && free_for_value_p (i
, rld
[r
].mode
, rld
[r
].opnum
,
6514 rld
[r
].when_needed
, rld
[r
].in
,
6517 /* If a group is needed, verify that all the subsequent
6518 registers still have their values intact. */
6519 int nr
= hard_regno_nregs
[i
][rld
[r
].mode
];
6522 for (k
= 1; k
< nr
; k
++)
6523 if (reg_reloaded_contents
[i
+ k
] != regno
6524 || ! TEST_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
))
6532 last_reg
= (GET_MODE (last_reg
) == mode
6533 ? last_reg
: gen_rtx_REG (mode
, i
));
6536 for (k
= 0; k
< nr
; k
++)
6537 bad_for_class
|= ! TEST_HARD_REG_BIT (reg_class_contents
[(int) rld
[r
].rclass
],
6540 /* We found a register that contains the
6541 value we need. If this register is the
6542 same as an `earlyclobber' operand of the
6543 current insn, just mark it as a place to
6544 reload from since we can't use it as the
6545 reload register itself. */
6547 for (i1
= 0; i1
< n_earlyclobbers
; i1
++)
6548 if (reg_overlap_mentioned_for_reload_p
6549 (reg_last_reload_reg
[regno
],
6550 reload_earlyclobbers
[i1
]))
6553 if (i1
!= n_earlyclobbers
6554 || ! (free_for_value_p (i
, rld
[r
].mode
,
6556 rld
[r
].when_needed
, rld
[r
].in
,
6558 /* Don't use it if we'd clobber a pseudo reg. */
6559 || (TEST_HARD_REG_BIT (reg_used_in_insn
, i
)
6561 && ! TEST_HARD_REG_BIT (reg_reloaded_dead
, i
))
6562 /* Don't clobber the frame pointer. */
6563 || (i
== HARD_FRAME_POINTER_REGNUM
6564 && frame_pointer_needed
6566 /* Don't really use the inherited spill reg
6567 if we need it wider than we've got it. */
6568 || (GET_MODE_SIZE (rld
[r
].mode
)
6569 > GET_MODE_SIZE (mode
))
6572 /* If find_reloads chose reload_out as reload
6573 register, stay with it - that leaves the
6574 inherited register for subsequent reloads. */
6575 || (rld
[r
].out
&& rld
[r
].reg_rtx
6576 && rtx_equal_p (rld
[r
].out
, rld
[r
].reg_rtx
)))
6578 if (! rld
[r
].optional
)
6580 reload_override_in
[r
] = last_reg
;
6581 reload_inheritance_insn
[r
]
6582 = reg_reloaded_insn
[i
];
6588 /* We can use this as a reload reg. */
6589 /* Mark the register as in use for this part of
6591 mark_reload_reg_in_use (i
,
6595 rld
[r
].reg_rtx
= last_reg
;
6596 reload_inherited
[r
] = 1;
6597 reload_inheritance_insn
[r
]
6598 = reg_reloaded_insn
[i
];
6599 reload_spill_index
[r
] = i
;
6600 for (k
= 0; k
< nr
; k
++)
6601 SET_HARD_REG_BIT (reload_reg_used_for_inherit
,
6609 /* Here's another way to see if the value is already lying around. */
6612 && ! reload_inherited
[r
]
6614 && (CONSTANT_P (rld
[r
].in
)
6615 || GET_CODE (rld
[r
].in
) == PLUS
6616 || REG_P (rld
[r
].in
)
6617 || MEM_P (rld
[r
].in
))
6618 && (rld
[r
].nregs
== max_group_size
6619 || ! reg_classes_intersect_p (rld
[r
].rclass
, group_class
)))
6620 search_equiv
= rld
[r
].in
;
6621 /* If this is an output reload from a simple move insn, look
6622 if an equivalence for the input is available. */
6623 else if (inheritance
&& rld
[r
].in
== 0 && rld
[r
].out
!= 0)
6625 rtx set
= single_set (insn
);
6628 && rtx_equal_p (rld
[r
].out
, SET_DEST (set
))
6629 && CONSTANT_P (SET_SRC (set
)))
6630 search_equiv
= SET_SRC (set
);
6636 = find_equiv_reg (search_equiv
, insn
, rld
[r
].rclass
,
6637 -1, NULL
, 0, rld
[r
].mode
);
6643 regno
= REGNO (equiv
);
6646 /* This must be a SUBREG of a hard register.
6647 Make a new REG since this might be used in an
6648 address and not all machines support SUBREGs
6650 gcc_assert (GET_CODE (equiv
) == SUBREG
);
6651 regno
= subreg_regno (equiv
);
6652 equiv
= gen_rtx_REG (rld
[r
].mode
, regno
);
6653 /* If we choose EQUIV as the reload register, but the
6654 loop below decides to cancel the inheritance, we'll
6655 end up reloading EQUIV in rld[r].mode, not the mode
6656 it had originally. That isn't safe when EQUIV isn't
6657 available as a spill register since its value might
6658 still be live at this point. */
6659 for (i
= regno
; i
< regno
+ (int) rld
[r
].nregs
; i
++)
6660 if (TEST_HARD_REG_BIT (reload_reg_unavailable
, i
))
6665 /* If we found a spill reg, reject it unless it is free
6666 and of the desired class. */
6670 int bad_for_class
= 0;
6671 int max_regno
= regno
+ rld
[r
].nregs
;
6673 for (i
= regno
; i
< max_regno
; i
++)
6675 regs_used
|= TEST_HARD_REG_BIT (reload_reg_used_at_all
,
6677 bad_for_class
|= ! TEST_HARD_REG_BIT (reg_class_contents
[(int) rld
[r
].rclass
],
6682 && ! free_for_value_p (regno
, rld
[r
].mode
,
6683 rld
[r
].opnum
, rld
[r
].when_needed
,
6684 rld
[r
].in
, rld
[r
].out
, r
, 1))
6689 if (equiv
!= 0 && ! HARD_REGNO_MODE_OK (regno
, rld
[r
].mode
))
6692 /* We found a register that contains the value we need.
6693 If this register is the same as an `earlyclobber' operand
6694 of the current insn, just mark it as a place to reload from
6695 since we can't use it as the reload register itself. */
6698 for (i
= 0; i
< n_earlyclobbers
; i
++)
6699 if (reg_overlap_mentioned_for_reload_p (equiv
,
6700 reload_earlyclobbers
[i
]))
6702 if (! rld
[r
].optional
)
6703 reload_override_in
[r
] = equiv
;
6708 /* If the equiv register we have found is explicitly clobbered
6709 in the current insn, it depends on the reload type if we
6710 can use it, use it for reload_override_in, or not at all.
6711 In particular, we then can't use EQUIV for a
6712 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6716 if (regno_clobbered_p (regno
, insn
, rld
[r
].mode
, 2))
6717 switch (rld
[r
].when_needed
)
6719 case RELOAD_FOR_OTHER_ADDRESS
:
6720 case RELOAD_FOR_INPADDR_ADDRESS
:
6721 case RELOAD_FOR_INPUT_ADDRESS
:
6722 case RELOAD_FOR_OPADDR_ADDR
:
6725 case RELOAD_FOR_INPUT
:
6726 case RELOAD_FOR_OPERAND_ADDRESS
:
6727 if (! rld
[r
].optional
)
6728 reload_override_in
[r
] = equiv
;
6734 else if (regno_clobbered_p (regno
, insn
, rld
[r
].mode
, 1))
6735 switch (rld
[r
].when_needed
)
6737 case RELOAD_FOR_OTHER_ADDRESS
:
6738 case RELOAD_FOR_INPADDR_ADDRESS
:
6739 case RELOAD_FOR_INPUT_ADDRESS
:
6740 case RELOAD_FOR_OPADDR_ADDR
:
6741 case RELOAD_FOR_OPERAND_ADDRESS
:
6742 case RELOAD_FOR_INPUT
:
6745 if (! rld
[r
].optional
)
6746 reload_override_in
[r
] = equiv
;
6754 /* If we found an equivalent reg, say no code need be generated
6755 to load it, and use it as our reload reg. */
6757 && (regno
!= HARD_FRAME_POINTER_REGNUM
6758 || !frame_pointer_needed
))
6760 int nr
= hard_regno_nregs
[regno
][rld
[r
].mode
];
6762 rld
[r
].reg_rtx
= equiv
;
6763 reload_spill_index
[r
] = regno
;
6764 reload_inherited
[r
] = 1;
6766 /* If reg_reloaded_valid is not set for this register,
6767 there might be a stale spill_reg_store lying around.
6768 We must clear it, since otherwise emit_reload_insns
6769 might delete the store. */
6770 if (! TEST_HARD_REG_BIT (reg_reloaded_valid
, regno
))
6771 spill_reg_store
[regno
] = NULL_RTX
;
6772 /* If any of the hard registers in EQUIV are spill
6773 registers, mark them as in use for this insn. */
6774 for (k
= 0; k
< nr
; k
++)
6776 i
= spill_reg_order
[regno
+ k
];
6779 mark_reload_reg_in_use (regno
, rld
[r
].opnum
,
6782 SET_HARD_REG_BIT (reload_reg_used_for_inherit
,
6789 /* If we found a register to use already, or if this is an optional
6790 reload, we are done. */
6791 if (rld
[r
].reg_rtx
!= 0 || rld
[r
].optional
!= 0)
6795 /* No longer needed for correct operation. Might or might
6796 not give better code on the average. Want to experiment? */
6798 /* See if there is a later reload that has a class different from our
6799 class that intersects our class or that requires less register
6800 than our reload. If so, we must allocate a register to this
6801 reload now, since that reload might inherit a previous reload
6802 and take the only available register in our class. Don't do this
6803 for optional reloads since they will force all previous reloads
6804 to be allocated. Also don't do this for reloads that have been
6807 for (i
= j
+ 1; i
< n_reloads
; i
++)
6809 int s
= reload_order
[i
];
6811 if ((rld
[s
].in
== 0 && rld
[s
].out
== 0
6812 && ! rld
[s
].secondary_p
)
6816 if ((rld
[s
].rclass
!= rld
[r
].rclass
6817 && reg_classes_intersect_p (rld
[r
].rclass
,
6819 || rld
[s
].nregs
< rld
[r
].nregs
)
6826 allocate_reload_reg (chain
, r
, j
== n_reloads
- 1);
6830 /* Now allocate reload registers for anything non-optional that
6831 didn't get one yet. */
6832 for (j
= 0; j
< n_reloads
; j
++)
6834 int r
= reload_order
[j
];
6836 /* Ignore reloads that got marked inoperative. */
6837 if (rld
[r
].out
== 0 && rld
[r
].in
== 0 && ! rld
[r
].secondary_p
)
6840 /* Skip reloads that already have a register allocated or are
6842 if (rld
[r
].reg_rtx
!= 0 || rld
[r
].optional
)
6845 if (! allocate_reload_reg (chain
, r
, j
== n_reloads
- 1))
6849 /* If that loop got all the way, we have won. */
6856 /* Loop around and try without any inheritance. */
6861 /* First undo everything done by the failed attempt
6862 to allocate with inheritance. */
6863 choose_reload_regs_init (chain
, save_reload_reg_rtx
);
6865 /* Some sanity tests to verify that the reloads found in the first
6866 pass are identical to the ones we have now. */
6867 gcc_assert (chain
->n_reloads
== n_reloads
);
6869 for (i
= 0; i
< n_reloads
; i
++)
6871 if (chain
->rld
[i
].regno
< 0 || chain
->rld
[i
].reg_rtx
!= 0)
6873 gcc_assert (chain
->rld
[i
].when_needed
== rld
[i
].when_needed
);
6874 for (j
= 0; j
< n_spills
; j
++)
6875 if (spill_regs
[j
] == chain
->rld
[i
].regno
)
6876 if (! set_reload_reg (j
, i
))
6877 failed_reload (chain
->insn
, i
);
6881 /* If we thought we could inherit a reload, because it seemed that
6882 nothing else wanted the same reload register earlier in the insn,
6883 verify that assumption, now that all reloads have been assigned.
6884 Likewise for reloads where reload_override_in has been set. */
6886 /* If doing expensive optimizations, do one preliminary pass that doesn't
6887 cancel any inheritance, but removes reloads that have been needed only
6888 for reloads that we know can be inherited. */
6889 for (pass
= flag_expensive_optimizations
; pass
>= 0; pass
--)
6891 for (j
= 0; j
< n_reloads
; j
++)
6893 int r
= reload_order
[j
];
6895 if (reload_inherited
[r
] && rld
[r
].reg_rtx
)
6896 check_reg
= rld
[r
].reg_rtx
;
6897 else if (reload_override_in
[r
]
6898 && (REG_P (reload_override_in
[r
])
6899 || GET_CODE (reload_override_in
[r
]) == SUBREG
))
6900 check_reg
= reload_override_in
[r
];
6903 if (! free_for_value_p (true_regnum (check_reg
), rld
[r
].mode
,
6904 rld
[r
].opnum
, rld
[r
].when_needed
, rld
[r
].in
,
6905 (reload_inherited
[r
]
6906 ? rld
[r
].out
: const0_rtx
),
6911 reload_inherited
[r
] = 0;
6912 reload_override_in
[r
] = 0;
6914 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6915 reload_override_in, then we do not need its related
6916 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6917 likewise for other reload types.
6918 We handle this by removing a reload when its only replacement
6919 is mentioned in reload_in of the reload we are going to inherit.
6920 A special case are auto_inc expressions; even if the input is
6921 inherited, we still need the address for the output. We can
6922 recognize them because they have RELOAD_OUT set to RELOAD_IN.
6923 If we succeeded removing some reload and we are doing a preliminary
6924 pass just to remove such reloads, make another pass, since the
6925 removal of one reload might allow us to inherit another one. */
6927 && rld
[r
].out
!= rld
[r
].in
6928 && remove_address_replacements (rld
[r
].in
) && pass
)
6933 /* Now that reload_override_in is known valid,
6934 actually override reload_in. */
6935 for (j
= 0; j
< n_reloads
; j
++)
6936 if (reload_override_in
[j
])
6937 rld
[j
].in
= reload_override_in
[j
];
6939 /* If this reload won't be done because it has been canceled or is
6940 optional and not inherited, clear reload_reg_rtx so other
6941 routines (such as subst_reloads) don't get confused. */
6942 for (j
= 0; j
< n_reloads
; j
++)
6943 if (rld
[j
].reg_rtx
!= 0
6944 && ((rld
[j
].optional
&& ! reload_inherited
[j
])
6945 || (rld
[j
].in
== 0 && rld
[j
].out
== 0
6946 && ! rld
[j
].secondary_p
)))
6948 int regno
= true_regnum (rld
[j
].reg_rtx
);
6950 if (spill_reg_order
[regno
] >= 0)
6951 clear_reload_reg_in_use (regno
, rld
[j
].opnum
,
6952 rld
[j
].when_needed
, rld
[j
].mode
);
6954 reload_spill_index
[j
] = -1;
6957 /* Record which pseudos and which spill regs have output reloads. */
6958 for (j
= 0; j
< n_reloads
; j
++)
6960 int r
= reload_order
[j
];
6962 i
= reload_spill_index
[r
];
6964 /* I is nonneg if this reload uses a register.
6965 If rld[r].reg_rtx is 0, this is an optional reload
6966 that we opted to ignore. */
6967 if (rld
[r
].out_reg
!= 0 && REG_P (rld
[r
].out_reg
)
6968 && rld
[r
].reg_rtx
!= 0)
6970 int nregno
= REGNO (rld
[r
].out_reg
);
6973 if (nregno
< FIRST_PSEUDO_REGISTER
)
6974 nr
= hard_regno_nregs
[nregno
][rld
[r
].mode
];
6977 SET_REGNO_REG_SET (®_has_output_reload
,
6982 nr
= hard_regno_nregs
[i
][rld
[r
].mode
];
6984 SET_HARD_REG_BIT (reg_is_output_reload
, i
+ nr
);
6987 gcc_assert (rld
[r
].when_needed
== RELOAD_OTHER
6988 || rld
[r
].when_needed
== RELOAD_FOR_OUTPUT
6989 || rld
[r
].when_needed
== RELOAD_FOR_INSN
);
6994 /* Deallocate the reload register for reload R. This is called from
6995 remove_address_replacements. */
/* Release the reload register assigned to reload R (called from
   remove_address_replacements, per the comment above).
   NOTE(review): extraction has split statements across lines and dropped
   some original lines here (the function header, braces, and apparently
   an early return); the comments below annotate the visible fragments
   only -- confirm against the original file before further edits.  */
6998 deallocate_reload_reg (int r
)
/* Nothing to deallocate when no hard reg was ever assigned to this
   reload; the early exit presumably sits on a dropped line -- TODO
   confirm.  */
7002 if (! rld
[r
].reg_rtx
)
/* Hard register number actually backing the assigned reload reg.  */
7004 regno
= true_regnum (rld
[r
].reg_rtx
);
/* Clear the in-use bits for this opnum/when_needed combination, but
   only when REGNO is tracked as a spill reg -- the same
   spill_reg_order[regno] >= 0 guard is used for this purpose in
   choose_reload_regs above.  */
7006 if (spill_reg_order
[regno
] >= 0)
7007 clear_reload_reg_in_use (regno
, rld
[r
].opnum
, rld
[r
].when_needed
,
/* Reload R no longer owns a spill register.  */
7009 reload_spill_index
[r
] = -1;
7012 /* If the small_register_classes_for_mode_p target hook returns true for
7013 some machine modes, we may not have merged two reloads of the same item
7014 for fear that we might not have enough reload registers. However,
7015 normally they will get the same reload register and hence actually need
7016 not be loaded twice.
7018 Here we check for the most common case of this phenomenon: when we have
7019 a number of reloads for the same object, each of which were allocated
7020 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
7021 reload, and is not modified in the insn itself. If we find such,
7022 merge all the reloads and set the resulting reload to RELOAD_OTHER.
7023 This will not increase the number of spill registers needed and will
7024 prevent redundant code. */
7027 merge_assigned_reloads (rtx insn
)
7031 /* Scan all the reloads looking for ones that only load values and
7032 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
7033 assigned and not modified by INSN. */
7035 for (i
= 0; i
< n_reloads
; i
++)
7037 int conflicting_input
= 0;
7038 int max_input_address_opnum
= -1;
7039 int min_conflicting_input_opnum
= MAX_RECOG_OPERANDS
;
7041 if (rld
[i
].in
== 0 || rld
[i
].when_needed
== RELOAD_OTHER
7042 || rld
[i
].out
!= 0 || rld
[i
].reg_rtx
== 0
7043 || reg_set_p (rld
[i
].reg_rtx
, insn
))
7046 /* Look at all other reloads. Ensure that the only use of this
7047 reload_reg_rtx is in a reload that just loads the same value
7048 as we do. Note that any secondary reloads must be of the identical
7049 class since the values, modes, and result registers are the
7050 same, so we need not do anything with any secondary reloads. */
7052 for (j
= 0; j
< n_reloads
; j
++)
7054 if (i
== j
|| rld
[j
].reg_rtx
== 0
7055 || ! reg_overlap_mentioned_p (rld
[j
].reg_rtx
,
7059 if (rld
[j
].when_needed
== RELOAD_FOR_INPUT_ADDRESS
7060 && rld
[j
].opnum
> max_input_address_opnum
)
7061 max_input_address_opnum
= rld
[j
].opnum
;
7063 /* If the reload regs aren't exactly the same (e.g, different modes)
7064 or if the values are different, we can't merge this reload.
7065 But if it is an input reload, we might still merge
7066 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
7068 if (! rtx_equal_p (rld
[i
].reg_rtx
, rld
[j
].reg_rtx
)
7069 || rld
[j
].out
!= 0 || rld
[j
].in
== 0
7070 || ! rtx_equal_p (rld
[i
].in
, rld
[j
].in
))
7072 if (rld
[j
].when_needed
!= RELOAD_FOR_INPUT
7073 || ((rld
[i
].when_needed
!= RELOAD_FOR_INPUT_ADDRESS
7074 || rld
[i
].opnum
> rld
[j
].opnum
)
7075 && rld
[i
].when_needed
!= RELOAD_FOR_OTHER_ADDRESS
))
7077 conflicting_input
= 1;
7078 if (min_conflicting_input_opnum
> rld
[j
].opnum
)
7079 min_conflicting_input_opnum
= rld
[j
].opnum
;
7083 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
7084 we, in fact, found any matching reloads. */
7087 && max_input_address_opnum
<= min_conflicting_input_opnum
)
7089 gcc_assert (rld
[i
].when_needed
!= RELOAD_FOR_OUTPUT
);
7091 for (j
= 0; j
< n_reloads
; j
++)
7092 if (i
!= j
&& rld
[j
].reg_rtx
!= 0
7093 && rtx_equal_p (rld
[i
].reg_rtx
, rld
[j
].reg_rtx
)
7094 && (! conflicting_input
7095 || rld
[j
].when_needed
== RELOAD_FOR_INPUT_ADDRESS
7096 || rld
[j
].when_needed
== RELOAD_FOR_OTHER_ADDRESS
))
7098 rld
[i
].when_needed
= RELOAD_OTHER
;
7100 reload_spill_index
[j
] = -1;
7101 transfer_replacements (i
, j
);
7104 /* If this is now RELOAD_OTHER, look for any reloads that
7105 load parts of this operand and set them to
7106 RELOAD_FOR_OTHER_ADDRESS if they were for inputs,
7107 RELOAD_OTHER for outputs. Note that this test is
7108 equivalent to looking for reloads for this operand
7111 We must take special care with RELOAD_FOR_OUTPUT_ADDRESS;
7112 it may share registers with a RELOAD_FOR_INPUT, so we can
7113 not change it to RELOAD_FOR_OTHER_ADDRESS. We should
7114 never need to, since we do not modify RELOAD_FOR_OUTPUT.
7116 It is possible that the RELOAD_FOR_OPERAND_ADDRESS
7117 instruction is assigned the same register as the earlier
7118 RELOAD_FOR_OTHER_ADDRESS instruction. Merging these two
7119 instructions will cause the RELOAD_FOR_OTHER_ADDRESS
7120 instruction to be deleted later on. */
7122 if (rld
[i
].when_needed
== RELOAD_OTHER
)
7123 for (j
= 0; j
< n_reloads
; j
++)
7125 && rld
[j
].when_needed
!= RELOAD_OTHER
7126 && rld
[j
].when_needed
!= RELOAD_FOR_OTHER_ADDRESS
7127 && rld
[j
].when_needed
!= RELOAD_FOR_OUTPUT_ADDRESS
7128 && rld
[j
].when_needed
!= RELOAD_FOR_OPERAND_ADDRESS
7129 && (! conflicting_input
7130 || rld
[j
].when_needed
== RELOAD_FOR_INPUT_ADDRESS
7131 || rld
[j
].when_needed
== RELOAD_FOR_INPADDR_ADDRESS
)
7132 && reg_overlap_mentioned_for_reload_p (rld
[j
].in
,
7138 = ((rld
[j
].when_needed
== RELOAD_FOR_INPUT_ADDRESS
7139 || rld
[j
].when_needed
== RELOAD_FOR_INPADDR_ADDRESS
)
7140 ? RELOAD_FOR_OTHER_ADDRESS
: RELOAD_OTHER
);
7142 /* Check to see if we accidentally converted two
7143 reloads that use the same reload register with
7144 different inputs to the same type. If so, the
7145 resulting code won't work. */
7147 for (k
= 0; k
< j
; k
++)
7148 gcc_assert (rld
[k
].in
== 0 || rld
[k
].reg_rtx
== 0
7149 || rld
[k
].when_needed
!= rld
[j
].when_needed
7150 || !rtx_equal_p (rld
[k
].reg_rtx
,
7152 || rtx_equal_p (rld
[k
].in
,
7159 /* These arrays are filled by emit_reload_insns and its subroutines. */
/* Per-reload-type insn sequences built up by emit_reload_insns and its
   subroutines; the switch in emit_input_reload_insns below selects one
   of these as the emission point for each reload type.  Arrays are
   indexed by operand number.  */
/* RELOAD_FOR_INPUT reloads, per operand.  */
7160 static rtx input_reload_insns
[MAX_RECOG_OPERANDS
];
/* RELOAD_FOR_OTHER_ADDRESS reloads accumulate here.  */
7161 static rtx other_input_address_reload_insns
= 0;
/* Input reloads not tied to a particular operand (selected by a switch
   case whose label was lost in extraction -- confirm).  */
7162 static rtx other_input_reload_insns
= 0;
/* RELOAD_FOR_INPUT_ADDRESS reloads, per operand.  */
7163 static rtx input_address_reload_insns
[MAX_RECOG_OPERANDS
];
/* RELOAD_FOR_INPADDR_ADDRESS reloads, per operand.  */
7164 static rtx inpaddr_address_reload_insns
[MAX_RECOG_OPERANDS
];
/* Output reloads, per operand (emission point not visible in this
   chunk -- presumably emit_output_reload_insns; confirm).  */
7165 static rtx output_reload_insns
[MAX_RECOG_OPERANDS
];
/* RELOAD_FOR_OUTPUT_ADDRESS reloads, per operand.  */
7166 static rtx output_address_reload_insns
[MAX_RECOG_OPERANDS
];
/* RELOAD_FOR_OUTADDR_ADDRESS reloads, per operand.  */
7167 static rtx outaddr_address_reload_insns
[MAX_RECOG_OPERANDS
];
/* RELOAD_FOR_OPERAND_ADDRESS reloads.  */
7168 static rtx operand_reload_insns
= 0;
/* RELOAD_FOR_OPADDR_ADDR reloads.  */
7169 static rtx other_operand_reload_insns
= 0;
/* Remaining output reloads, per operand (emission point not visible in
   this chunk -- confirm).  */
7170 static rtx other_output_reload_insns
[MAX_RECOG_OPERANDS
];
7172 /* Values to be put in spill_reg_store are put here first. */
/* Indexed by hard register number; see new_spill_reg_store uses in
   emit_input_reload_insns below (auto-increment case).  */
7173 static rtx new_spill_reg_store
[FIRST_PSEUDO_REGISTER
];
/* Hard regs whose reloaded contents died in the current insn --
   NOTE(review): exact semantics not visible here, confirm against
   emit_reload_insns.  */
7174 static HARD_REG_SET reg_reloaded_died
;
7176 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7177 of class NEW_CLASS with mode NEW_MODE. Or alternatively, if alt_reload_reg
7178 is nonzero, if that is suitable. On success, change *RELOAD_REG to the
7179 adjusted register, and return true. Otherwise, return false. */
/* Try *RELOAD_REG, then ALT_RELOAD_REG, as an intermediate/scratch reg
   of class NEW_CLASS and mode NEW_MODE (full contract in the comment
   above: on success, store the adjusted reg in *RELOAD_REG and return
   true).  NOTE(review): extraction dropped several original lines here
   (function header, braces, and the per-test accept/reject statements);
   the comments annotate only the visible fragments.  */
7181 reload_adjust_reg_for_temp (rtx
*reload_reg
, rtx alt_reload_reg
,
7182 enum reg_class new_class
,
7183 enum machine_mode new_mode
)
/* First iteration examines *reload_reg; the second (if any) examines
   alt_reload_reg, which is simultaneously cleared so the loop runs at
   most twice.  */
7188 for (reg
= *reload_reg
; reg
; reg
= alt_reload_reg
, alt_reload_reg
= 0)
7190 unsigned regno
= REGNO (reg
);
/* The candidate must belong to the requested register class.  */
7192 if (!TEST_HARD_REG_BIT (reg_class_contents
[(int) new_class
], regno
))
/* If the candidate is not already in NEW_MODE, it can only be used
   after a mode adjustment, subject to the two checks below.  */
7194 if (GET_MODE (reg
) != new_mode
)
/* The hard reg must be usable in NEW_MODE at all...  */
7196 if (!HARD_REGNO_MODE_OK (regno
, new_mode
))
/* ...and NEW_MODE must not require more consecutive hard regs than
   the candidate currently occupies.  */
7198 if (hard_regno_nregs
[regno
][new_mode
]
7199 > hard_regno_nregs
[regno
][GET_MODE (reg
)])
/* All checks passed: rewrap the register in NEW_MODE.  */
7201 reg
= reload_adjust_reg_for_mode (reg
, new_mode
);
7209 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7210 pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7211 nonzero, if that is suitable. On success, change *RELOAD_REG to the
7212 adjusted register, and return true. Otherwise, return false. */
/* Scratch-register variant: derive the required class and mode from the
   reload pattern ICODE, then defer to reload_adjust_reg_for_temp (see
   the comment above for the contract).  NOTE(review): the function
   header line and braces were dropped by extraction; code fragments are
   left exactly as found.  */
7214 reload_adjust_reg_for_icode (rtx
*reload_reg
, rtx alt_reload_reg
,
7215 enum insn_code icode
)
/* Register class the pattern demands for its scratch register.  */
7218 enum reg_class new_class
= scratch_reload_class (icode
);
/* The mode of the pattern's operand 2 -- which supplies the scratch
   here -- is the mode the scratch register must have.  */
7219 enum machine_mode new_mode
= insn_data
[(int) icode
].operand
[2].mode
;
7221 return reload_adjust_reg_for_temp (reload_reg
, alt_reload_reg
,
7222 new_class
, new_mode
);
7225 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
7226 has the number J. OLD contains the value to be used as input. */
7229 emit_input_reload_insns (struct insn_chain
*chain
, struct reload
*rl
,
7232 rtx insn
= chain
->insn
;
7234 rtx oldequiv_reg
= 0;
7237 enum machine_mode mode
;
7240 /* delete_output_reload is only invoked properly if old contains
7241 the original pseudo register. Since this is replaced with a
7242 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
7243 find the pseudo in RELOAD_IN_REG. */
7244 if (reload_override_in
[j
]
7245 && REG_P (rl
->in_reg
))
7252 else if (REG_P (oldequiv
))
7253 oldequiv_reg
= oldequiv
;
7254 else if (GET_CODE (oldequiv
) == SUBREG
)
7255 oldequiv_reg
= SUBREG_REG (oldequiv
);
7257 reloadreg
= reload_reg_rtx_for_input
[j
];
7258 mode
= GET_MODE (reloadreg
);
7260 /* If we are reloading from a register that was recently stored in
7261 with an output-reload, see if we can prove there was
7262 actually no need to store the old value in it. */
7264 if (optimize
&& REG_P (oldequiv
)
7265 && REGNO (oldequiv
) < FIRST_PSEUDO_REGISTER
7266 && spill_reg_store
[REGNO (oldequiv
)]
7268 && (dead_or_set_p (insn
, spill_reg_stored_to
[REGNO (oldequiv
)])
7269 || rtx_equal_p (spill_reg_stored_to
[REGNO (oldequiv
)],
7271 delete_output_reload (insn
, j
, REGNO (oldequiv
), reloadreg
);
7273 /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
7276 while (GET_CODE (oldequiv
) == SUBREG
&& GET_MODE (oldequiv
) != mode
)
7277 oldequiv
= SUBREG_REG (oldequiv
);
7278 if (GET_MODE (oldequiv
) != VOIDmode
7279 && mode
!= GET_MODE (oldequiv
))
7280 oldequiv
= gen_lowpart_SUBREG (mode
, oldequiv
);
7282 /* Switch to the right place to emit the reload insns. */
7283 switch (rl
->when_needed
)
7286 where
= &other_input_reload_insns
;
7288 case RELOAD_FOR_INPUT
:
7289 where
= &input_reload_insns
[rl
->opnum
];
7291 case RELOAD_FOR_INPUT_ADDRESS
:
7292 where
= &input_address_reload_insns
[rl
->opnum
];
7294 case RELOAD_FOR_INPADDR_ADDRESS
:
7295 where
= &inpaddr_address_reload_insns
[rl
->opnum
];
7297 case RELOAD_FOR_OUTPUT_ADDRESS
:
7298 where
= &output_address_reload_insns
[rl
->opnum
];
7300 case RELOAD_FOR_OUTADDR_ADDRESS
:
7301 where
= &outaddr_address_reload_insns
[rl
->opnum
];
7303 case RELOAD_FOR_OPERAND_ADDRESS
:
7304 where
= &operand_reload_insns
;
7306 case RELOAD_FOR_OPADDR_ADDR
:
7307 where
= &other_operand_reload_insns
;
7309 case RELOAD_FOR_OTHER_ADDRESS
:
7310 where
= &other_input_address_reload_insns
;
7316 push_to_sequence (*where
);
7318 /* Auto-increment addresses must be reloaded in a special way. */
7319 if (rl
->out
&& ! rl
->out_reg
)
7321 /* We are not going to bother supporting the case where a
7322 incremented register can't be copied directly from
7323 OLDEQUIV since this seems highly unlikely. */
7324 gcc_assert (rl
->secondary_in_reload
< 0);
7326 if (reload_inherited
[j
])
7327 oldequiv
= reloadreg
;
7329 old
= XEXP (rl
->in_reg
, 0);
7331 if (optimize
&& REG_P (oldequiv
)
7332 && REGNO (oldequiv
) < FIRST_PSEUDO_REGISTER
7333 && spill_reg_store
[REGNO (oldequiv
)]
7335 && (dead_or_set_p (insn
,
7336 spill_reg_stored_to
[REGNO (oldequiv
)])
7337 || rtx_equal_p (spill_reg_stored_to
[REGNO (oldequiv
)],
7339 delete_output_reload (insn
, j
, REGNO (oldequiv
), reloadreg
);
7341 /* Prevent normal processing of this reload. */
7343 /* Output a special code sequence for this case. */
7344 new_spill_reg_store
[REGNO (reloadreg
)]
7345 = inc_for_reload (reloadreg
, oldequiv
, rl
->out
,
7349 /* If we are reloading a pseudo-register that was set by the previous
7350 insn, see if we can get rid of that pseudo-register entirely
7351 by redirecting the previous insn into our reload register. */
7353 else if (optimize
&& REG_P (old
)
7354 && REGNO (old
) >= FIRST_PSEUDO_REGISTER
7355 && dead_or_set_p (insn
, old
)
7356 /* This is unsafe if some other reload
7357 uses the same reg first. */
7358 && ! conflicts_with_override (reloadreg
)
7359 && free_for_value_p (REGNO (reloadreg
), rl
->mode
, rl
->opnum
,
7360 rl
->when_needed
, old
, rl
->out
, j
, 0))
7362 rtx temp
= PREV_INSN (insn
);
7363 while (temp
&& (NOTE_P (temp
) || DEBUG_INSN_P (temp
)))
7364 temp
= PREV_INSN (temp
);
7366 && NONJUMP_INSN_P (temp
)
7367 && GET_CODE (PATTERN (temp
)) == SET
7368 && SET_DEST (PATTERN (temp
)) == old
7369 /* Make sure we can access insn_operand_constraint. */
7370 && asm_noperands (PATTERN (temp
)) < 0
7371 /* This is unsafe if operand occurs more than once in current
7372 insn. Perhaps some occurrences aren't reloaded. */
7373 && count_occurrences (PATTERN (insn
), old
, 0) == 1)
7375 rtx old
= SET_DEST (PATTERN (temp
));
7376 /* Store into the reload register instead of the pseudo. */
7377 SET_DEST (PATTERN (temp
)) = reloadreg
;
7379 /* Verify that resulting insn is valid. */
7380 extract_insn (temp
);
7381 if (constrain_operands (1))
7383 /* If the previous insn is an output reload, the source is
7384 a reload register, and its spill_reg_store entry will
7385 contain the previous destination. This is now
7387 if (REG_P (SET_SRC (PATTERN (temp
)))
7388 && REGNO (SET_SRC (PATTERN (temp
))) < FIRST_PSEUDO_REGISTER
)
7390 spill_reg_store
[REGNO (SET_SRC (PATTERN (temp
)))] = 0;
7391 spill_reg_stored_to
[REGNO (SET_SRC (PATTERN (temp
)))] = 0;
7394 /* If these are the only uses of the pseudo reg,
7395 pretend for GDB it lives in the reload reg we used. */
7396 if (REG_N_DEATHS (REGNO (old
)) == 1
7397 && REG_N_SETS (REGNO (old
)) == 1)
7399 reg_renumber
[REGNO (old
)] = REGNO (reloadreg
);
7400 if (ira_conflicts_p
)
7401 /* Inform IRA about the change. */
7402 ira_mark_allocation_change (REGNO (old
));
7403 alter_reg (REGNO (old
), -1, false);
7407 /* Adjust any debug insns between temp and insn. */
7408 while ((temp
= NEXT_INSN (temp
)) != insn
)
7409 if (DEBUG_INSN_P (temp
))
7410 replace_rtx (PATTERN (temp
), old
, reloadreg
);
7412 gcc_assert (NOTE_P (temp
));
7416 SET_DEST (PATTERN (temp
)) = old
;
7421 /* We can't do that, so output an insn to load RELOADREG. */
7423 /* If we have a secondary reload, pick up the secondary register
7424 and icode, if any. If OLDEQUIV and OLD are different or
7425 if this is an in-out reload, recompute whether or not we
7426 still need a secondary register and what the icode should
7427 be. If we still need a secondary register and the class or
7428 icode is different, go back to reloading from OLD if using
7429 OLDEQUIV means that we got the wrong type of register. We
7430 cannot have different class or icode due to an in-out reload
7431 because we don't make such reloads when both the input and
7432 output need secondary reload registers. */
7434 if (! special
&& rl
->secondary_in_reload
>= 0)
7436 rtx second_reload_reg
= 0;
7437 rtx third_reload_reg
= 0;
7438 int secondary_reload
= rl
->secondary_in_reload
;
7439 rtx real_oldequiv
= oldequiv
;
7442 enum insn_code icode
;
7443 enum insn_code tertiary_icode
= CODE_FOR_nothing
;
7445 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7446 and similarly for OLD.
7447 See comments in get_secondary_reload in reload.c. */
7448 /* If it is a pseudo that cannot be replaced with its
7449 equivalent MEM, we must fall back to reload_in, which
7450 will have all the necessary substitutions registered.
7451 Likewise for a pseudo that can't be replaced with its
7452 equivalent constant.
7454 Take extra care for subregs of such pseudos. Note that
7455 we cannot use reg_equiv_mem in this case because it is
7456 not in the right mode. */
7459 if (GET_CODE (tmp
) == SUBREG
)
7460 tmp
= SUBREG_REG (tmp
);
7462 && REGNO (tmp
) >= FIRST_PSEUDO_REGISTER
7463 && (reg_equiv_memory_loc
[REGNO (tmp
)] != 0
7464 || reg_equiv_constant
[REGNO (tmp
)] != 0))
7466 if (! reg_equiv_mem
[REGNO (tmp
)]
7467 || num_not_at_initial_offset
7468 || GET_CODE (oldequiv
) == SUBREG
)
7469 real_oldequiv
= rl
->in
;
7471 real_oldequiv
= reg_equiv_mem
[REGNO (tmp
)];
7475 if (GET_CODE (tmp
) == SUBREG
)
7476 tmp
= SUBREG_REG (tmp
);
7478 && REGNO (tmp
) >= FIRST_PSEUDO_REGISTER
7479 && (reg_equiv_memory_loc
[REGNO (tmp
)] != 0
7480 || reg_equiv_constant
[REGNO (tmp
)] != 0))
7482 if (! reg_equiv_mem
[REGNO (tmp
)]
7483 || num_not_at_initial_offset
7484 || GET_CODE (old
) == SUBREG
)
7487 real_old
= reg_equiv_mem
[REGNO (tmp
)];
7490 second_reload_reg
= rld
[secondary_reload
].reg_rtx
;
7491 if (rld
[secondary_reload
].secondary_in_reload
>= 0)
7493 int tertiary_reload
= rld
[secondary_reload
].secondary_in_reload
;
7495 third_reload_reg
= rld
[tertiary_reload
].reg_rtx
;
7496 tertiary_icode
= rld
[secondary_reload
].secondary_in_icode
;
7497 /* We'd have to add more code for quartary reloads. */
7498 gcc_assert (rld
[tertiary_reload
].secondary_in_reload
< 0);
7500 icode
= rl
->secondary_in_icode
;
7502 if ((old
!= oldequiv
&& ! rtx_equal_p (old
, oldequiv
))
7503 || (rl
->in
!= 0 && rl
->out
!= 0))
7505 secondary_reload_info sri
, sri2
;
7506 enum reg_class new_class
, new_t_class
;
7508 sri
.icode
= CODE_FOR_nothing
;
7509 sri
.prev_sri
= NULL
;
7510 new_class
= targetm
.secondary_reload (1, real_oldequiv
, rl
->rclass
,
7513 if (new_class
== NO_REGS
&& sri
.icode
== CODE_FOR_nothing
)
7514 second_reload_reg
= 0;
7515 else if (new_class
== NO_REGS
)
7517 if (reload_adjust_reg_for_icode (&second_reload_reg
,
7519 (enum insn_code
) sri
.icode
))
7521 icode
= (enum insn_code
) sri
.icode
;
7522 third_reload_reg
= 0;
7527 real_oldequiv
= real_old
;
7530 else if (sri
.icode
!= CODE_FOR_nothing
)
7531 /* We currently lack a way to express this in reloads. */
7535 sri2
.icode
= CODE_FOR_nothing
;
7536 sri2
.prev_sri
= &sri
;
7537 new_t_class
= targetm
.secondary_reload (1, real_oldequiv
,
7538 new_class
, mode
, &sri
);
7539 if (new_t_class
== NO_REGS
&& sri2
.icode
== CODE_FOR_nothing
)
7541 if (reload_adjust_reg_for_temp (&second_reload_reg
,
7545 third_reload_reg
= 0;
7546 tertiary_icode
= (enum insn_code
) sri2
.icode
;
7551 real_oldequiv
= real_old
;
7554 else if (new_t_class
== NO_REGS
&& sri2
.icode
!= CODE_FOR_nothing
)
7556 rtx intermediate
= second_reload_reg
;
7558 if (reload_adjust_reg_for_temp (&intermediate
, NULL
,
7560 && reload_adjust_reg_for_icode (&third_reload_reg
, NULL
,
7564 second_reload_reg
= intermediate
;
7565 tertiary_icode
= (enum insn_code
) sri2
.icode
;
7570 real_oldequiv
= real_old
;
7573 else if (new_t_class
!= NO_REGS
&& sri2
.icode
== CODE_FOR_nothing
)
7575 rtx intermediate
= second_reload_reg
;
7577 if (reload_adjust_reg_for_temp (&intermediate
, NULL
,
7579 && reload_adjust_reg_for_temp (&third_reload_reg
, NULL
,
7582 second_reload_reg
= intermediate
;
7583 tertiary_icode
= (enum insn_code
) sri2
.icode
;
7588 real_oldequiv
= real_old
;
7593 /* This could be handled more intelligently too. */
7595 real_oldequiv
= real_old
;
7600 /* If we still need a secondary reload register, check
7601 to see if it is being used as a scratch or intermediate
7602 register and generate code appropriately. If we need
7603 a scratch register, use REAL_OLDEQUIV since the form of
7604 the insn may depend on the actual address if it is
7607 if (second_reload_reg
)
7609 if (icode
!= CODE_FOR_nothing
)
7611 /* We'd have to add extra code to handle this case. */
7612 gcc_assert (!third_reload_reg
);
7614 emit_insn (GEN_FCN (icode
) (reloadreg
, real_oldequiv
,
7615 second_reload_reg
));
7620 /* See if we need a scratch register to load the
7621 intermediate register (a tertiary reload). */
7622 if (tertiary_icode
!= CODE_FOR_nothing
)
7624 emit_insn ((GEN_FCN (tertiary_icode
)
7625 (second_reload_reg
, real_oldequiv
,
7626 third_reload_reg
)));
7628 else if (third_reload_reg
)
7630 gen_reload (third_reload_reg
, real_oldequiv
,
7633 gen_reload (second_reload_reg
, third_reload_reg
,
7638 gen_reload (second_reload_reg
, real_oldequiv
,
7642 oldequiv
= second_reload_reg
;
7647 if (! special
&& ! rtx_equal_p (reloadreg
, oldequiv
))
7649 rtx real_oldequiv
= oldequiv
;
7651 if ((REG_P (oldequiv
)
7652 && REGNO (oldequiv
) >= FIRST_PSEUDO_REGISTER
7653 && (reg_equiv_memory_loc
[REGNO (oldequiv
)] != 0
7654 || reg_equiv_constant
[REGNO (oldequiv
)] != 0))
7655 || (GET_CODE (oldequiv
) == SUBREG
7656 && REG_P (SUBREG_REG (oldequiv
))
7657 && (REGNO (SUBREG_REG (oldequiv
))
7658 >= FIRST_PSEUDO_REGISTER
)
7659 && ((reg_equiv_memory_loc
7660 [REGNO (SUBREG_REG (oldequiv
))] != 0)
7661 || (reg_equiv_constant
7662 [REGNO (SUBREG_REG (oldequiv
))] != 0)))
7663 || (CONSTANT_P (oldequiv
)
7664 && (PREFERRED_RELOAD_CLASS (oldequiv
,
7665 REGNO_REG_CLASS (REGNO (reloadreg
)))
7667 real_oldequiv
= rl
->in
;
7668 gen_reload (reloadreg
, real_oldequiv
, rl
->opnum
,
7672 if (cfun
->can_throw_non_call_exceptions
)
7673 copy_reg_eh_region_note_forward (insn
, get_insns (), NULL
);
7675 /* End this sequence. */
7676 *where
= get_insns ();
7679 /* Update reload_override_in so that delete_address_reloads_1
7680 can see the actual register usage. */
7682 reload_override_in
[j
] = oldequiv
;
7685 /* Generate insns to for the output reload RL, which is for the insn described
7686 by CHAIN and has the number J. */
7688 emit_output_reload_insns (struct insn_chain
*chain
, struct reload
*rl
,
7692 rtx insn
= chain
->insn
;
7695 enum machine_mode mode
;
7699 if (rl
->when_needed
== RELOAD_OTHER
)
7702 push_to_sequence (output_reload_insns
[rl
->opnum
]);
7704 rl_reg_rtx
= reload_reg_rtx_for_output
[j
];
7705 mode
= GET_MODE (rl_reg_rtx
);
7707 reloadreg
= rl_reg_rtx
;
7709 /* If we need two reload regs, set RELOADREG to the intermediate
7710 one, since it will be stored into OLD. We might need a secondary
7711 register only for an input reload, so check again here. */
7713 if (rl
->secondary_out_reload
>= 0)
7716 int secondary_reload
= rl
->secondary_out_reload
;
7717 int tertiary_reload
= rld
[secondary_reload
].secondary_out_reload
;
7719 if (REG_P (old
) && REGNO (old
) >= FIRST_PSEUDO_REGISTER
7720 && reg_equiv_mem
[REGNO (old
)] != 0)
7721 real_old
= reg_equiv_mem
[REGNO (old
)];
7723 if (secondary_reload_class (0, rl
->rclass
, mode
, real_old
) != NO_REGS
)
7725 rtx second_reloadreg
= reloadreg
;
7726 reloadreg
= rld
[secondary_reload
].reg_rtx
;
7728 /* See if RELOADREG is to be used as a scratch register
7729 or as an intermediate register. */
7730 if (rl
->secondary_out_icode
!= CODE_FOR_nothing
)
7732 /* We'd have to add extra code to handle this case. */
7733 gcc_assert (tertiary_reload
< 0);
7735 emit_insn ((GEN_FCN (rl
->secondary_out_icode
)
7736 (real_old
, second_reloadreg
, reloadreg
)));
7741 /* See if we need both a scratch and intermediate reload
7744 enum insn_code tertiary_icode
7745 = rld
[secondary_reload
].secondary_out_icode
;
7747 /* We'd have to add more code for quartary reloads. */
7748 gcc_assert (tertiary_reload
< 0
7749 || rld
[tertiary_reload
].secondary_out_reload
< 0);
7751 if (GET_MODE (reloadreg
) != mode
)
7752 reloadreg
= reload_adjust_reg_for_mode (reloadreg
, mode
);
7754 if (tertiary_icode
!= CODE_FOR_nothing
)
7756 rtx third_reloadreg
= rld
[tertiary_reload
].reg_rtx
;
7759 /* Copy primary reload reg to secondary reload reg.
7760 (Note that these have been swapped above, then
7761 secondary reload reg to OLD using our insn.) */
7763 /* If REAL_OLD is a paradoxical SUBREG, remove it
7764 and try to put the opposite SUBREG on
7766 if (GET_CODE (real_old
) == SUBREG
7767 && (GET_MODE_SIZE (GET_MODE (real_old
))
7768 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old
))))
7769 && 0 != (tem
= gen_lowpart_common
7770 (GET_MODE (SUBREG_REG (real_old
)),
7772 real_old
= SUBREG_REG (real_old
), reloadreg
= tem
;
7774 gen_reload (reloadreg
, second_reloadreg
,
7775 rl
->opnum
, rl
->when_needed
);
7776 emit_insn ((GEN_FCN (tertiary_icode
)
7777 (real_old
, reloadreg
, third_reloadreg
)));
7783 /* Copy between the reload regs here and then to
7786 gen_reload (reloadreg
, second_reloadreg
,
7787 rl
->opnum
, rl
->when_needed
);
7788 if (tertiary_reload
>= 0)
7790 rtx third_reloadreg
= rld
[tertiary_reload
].reg_rtx
;
7792 gen_reload (third_reloadreg
, reloadreg
,
7793 rl
->opnum
, rl
->when_needed
);
7794 reloadreg
= third_reloadreg
;
7801 /* Output the last reload insn. */
7806 /* Don't output the last reload if OLD is not the dest of
7807 INSN and is in the src and is clobbered by INSN. */
7808 if (! flag_expensive_optimizations
7810 || !(set
= single_set (insn
))
7811 || rtx_equal_p (old
, SET_DEST (set
))
7812 || !reg_mentioned_p (old
, SET_SRC (set
))
7813 || !((REGNO (old
) < FIRST_PSEUDO_REGISTER
)
7814 && regno_clobbered_p (REGNO (old
), insn
, rl
->mode
, 0)))
7815 gen_reload (old
, reloadreg
, rl
->opnum
,
7819 /* Look at all insns we emitted, just to be safe. */
7820 for (p
= get_insns (); p
; p
= NEXT_INSN (p
))
7823 rtx pat
= PATTERN (p
);
7825 /* If this output reload doesn't come from a spill reg,
7826 clear any memory of reloaded copies of the pseudo reg.
7827 If this output reload comes from a spill reg,
7828 reg_has_output_reload will make this do nothing. */
7829 note_stores (pat
, forget_old_reloads_1
, NULL
);
7831 if (reg_mentioned_p (rl_reg_rtx
, pat
))
7833 rtx set
= single_set (insn
);
7834 if (reload_spill_index
[j
] < 0
7836 && SET_SRC (set
) == rl_reg_rtx
)
7838 int src
= REGNO (SET_SRC (set
));
7840 reload_spill_index
[j
] = src
;
7841 SET_HARD_REG_BIT (reg_is_output_reload
, src
);
7842 if (find_regno_note (insn
, REG_DEAD
, src
))
7843 SET_HARD_REG_BIT (reg_reloaded_died
, src
);
7845 if (HARD_REGISTER_P (rl_reg_rtx
))
7847 int s
= rl
->secondary_out_reload
;
7848 set
= single_set (p
);
7849 /* If this reload copies only to the secondary reload
7850 register, the secondary reload does the actual
7852 if (s
>= 0 && set
== NULL_RTX
)
7853 /* We can't tell what function the secondary reload
7854 has and where the actual store to the pseudo is
7855 made; leave new_spill_reg_store alone. */
7858 && SET_SRC (set
) == rl_reg_rtx
7859 && SET_DEST (set
) == rld
[s
].reg_rtx
)
7861 /* Usually the next instruction will be the
7862 secondary reload insn; if we can confirm
7863 that it is, setting new_spill_reg_store to
7864 that insn will allow an extra optimization. */
7865 rtx s_reg
= rld
[s
].reg_rtx
;
7866 rtx next
= NEXT_INSN (p
);
7867 rld
[s
].out
= rl
->out
;
7868 rld
[s
].out_reg
= rl
->out_reg
;
7869 set
= single_set (next
);
7870 if (set
&& SET_SRC (set
) == s_reg
7871 && ! new_spill_reg_store
[REGNO (s_reg
)])
7873 SET_HARD_REG_BIT (reg_is_output_reload
,
7875 new_spill_reg_store
[REGNO (s_reg
)] = next
;
7879 new_spill_reg_store
[REGNO (rl_reg_rtx
)] = p
;
7884 if (rl
->when_needed
== RELOAD_OTHER
)
7886 emit_insn (other_output_reload_insns
[rl
->opnum
]);
7887 other_output_reload_insns
[rl
->opnum
] = get_insns ();
7890 output_reload_insns
[rl
->opnum
] = get_insns ();
7892 if (cfun
->can_throw_non_call_exceptions
)
7893 copy_reg_eh_region_note_forward (insn
, get_insns (), NULL
);
7898 /* Do input reloading for reload RL, which is for the insn described by CHAIN
7899 and has the number J. */
/* NOTE(review): lossily-extracted text -- the embedded original line
   numbers skip values (e.g. nothing between 7954 and 7957, where a
   condition opener such as an `if (old != 0 ...' guard presumably
   stood, and the function's return-type line before 7901 is absent).
   Restore the dropped lines from the original reload1.c before
   compiling.  Only comments were added here.  */
7901 do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
7903 rtx insn = chain->insn;
/* For a MEM input, reload from the register it stands for when one is
   recorded (rl->in_reg); otherwise reload rl->in itself.  */
7904 rtx old = (rl->in && MEM_P (rl->in)
7905 ? rl->in_reg : rl->in);
7906 rtx reg_rtx = rl->reg_rtx;
7910 enum machine_mode mode;
7912 /* Determine the mode to reload in.
7913 This is very tricky because we have three to choose from.
7914 There is the mode the insn operand wants (rl->inmode).
7915 There is the mode of the reload register RELOADREG.
7916 There is the intrinsic mode of the operand, which we could find
7917 by stripping some SUBREGs.
7918 It turns out that RELOADREG's mode is irrelevant:
7919 we can change that arbitrarily.
7921 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
7922 then the reload reg may not support QImode moves, so use SImode.
7923 If foo is in memory due to spilling a pseudo reg, this is safe,
7924 because the QImode value is in the least significant part of a
7925 slot big enough for a SImode. If foo is some other sort of
7926 memory reference, then it is impossible to reload this case,
7927 so previous passes had better make sure this never happens.
7929 Then consider a one-word union which has SImode and one of its
7930 members is a float, being fetched as (SUBREG:SF union:SI).
7931 We must fetch that as SFmode because we could be loading into
7932 a float-only register. In this case OLD's mode is correct.
7934 Consider an immediate integer: it has VOIDmode. Here we need
7935 to get a mode from something else.
7937 In some cases, there is a fourth mode, the operand's
7938 containing mode. If the insn specifies a containing mode for
7939 this operand, it overrides all others.
7941 I am not sure whether the algorithm here is always right,
7942 but it does the right things in those cases. */
7944 mode = GET_MODE (old);
7945 if (mode == VOIDmode)
7948 /* We cannot use gen_lowpart_common since it can do the wrong thing
7949 when REG_RTX has a multi-word mode. Note that REG_RTX must
7950 always be a REG here. */
7951 if (GET_MODE (reg_rtx) != mode)
7952 reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7954 reload_reg_rtx_for_input[j] = reg_rtx;
7957 /* AUTO_INC reloads need to be handled even if inherited. We got an
7958 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
7959 && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7960 && ! rtx_equal_p (reg_rtx, old)
7962 emit_input_reload_insns (chain, rld + j, old, j);
7964 /* When inheriting a wider reload, we have a MEM in rl->in,
7965 e.g. inheriting a SImode output reload for
7966 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
7967 if (optimize && reload_inherited[j] && rl->in
7969 && MEM_P (rl->in_reg)
7970 && reload_spill_index[j] >= 0
7971 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7972 rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7974 /* If we are reloading a register that was recently stored in with an
7975 output-reload, see if we can prove there was
7976 actually no need to store the old value in it. */
7979 && (reload_inherited[j] || reload_override_in[j])
7982 && spill_reg_store[REGNO (reg_rtx)] != 0
7984 /* There doesn't seem to be any reason to restrict this to pseudos
7985 and doing so loses in the case where we are copying from a
7986 register of the wrong class. */
7987 && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
7989 /* The insn might have already some references to stackslots
7990 replaced by MEMs, while reload_out_reg still names the
7992 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
7993 || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
7994 delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
7997 /* Do output reloading for reload RL, which is for the insn described by
7998 CHAIN and has the number J.
7999 ??? At some point we need to support handling output reloads of
8000 JUMP_INSNs or insns that set cc0. */
/* NOTE(review): lossily-extracted text -- the embedded original line
   numbers skip values (e.g. 8027, 8034-8037 and 8053-8057 are absent,
   so the assignment that repairs MODE after the asm error, the opener
   of the `pseudo && optimize ...' condition, and the condition that
   the dangling `|| rtx_equal_p' clause at 8058 belongs to are all
   missing).  Restore them from the original reload1.c before
   compiling.  Only comments were added here.  */
8002 do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
8005 rtx insn = chain->insn;
8006 /* If this is an output reload that stores something that is
8007 not loaded in this same reload, see if we can eliminate a previous
8009 rtx pseudo = rl->out_reg;
8010 rtx reg_rtx = rl->reg_rtx;
8012 if (rl->out && reg_rtx)
8014 enum machine_mode mode;
8016 /* Determine the mode to reload in.
8017 See comments above (for input reloading). */
8018 mode = GET_MODE (rl->out);
8019 if (mode == VOIDmode)
8021 /* VOIDmode should never happen for an output. */
8022 if (asm_noperands (PATTERN (insn)) < 0)
8023 /* It's the compiler's fault. */
8024 fatal_insn ("VOIDmode on an output", insn);
8025 error_for_asm (insn, "output operand is constant in %<asm%>");
8026 /* Prevent crash--use something we know is valid. */
8028 rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
8030 if (GET_MODE (reg_rtx) != mode)
8031 reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
8033 reload_reg_rtx_for_output[j] = reg_rtx;
/* If the pseudo was recently output-reloaded into a still-valid spill
   register, the previous store may be deletable.  */
8038 && ! rtx_equal_p (rl->in_reg, pseudo)
8039 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
8040 && reg_last_reload_reg[REGNO (pseudo)])
8042 int pseudo_no = REGNO (pseudo);
8043 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
8045 /* We don't need to test full validity of last_regno for
8046 inherit here; we only want to know if the store actually
8047 matches the pseudo. */
8048 if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
8049 && reg_reloaded_contents[last_regno] == pseudo_no
8050 && spill_reg_store[last_regno]
8051 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
8052 delete_output_reload (insn, j, last_regno, reg_rtx);
8058 || rtx_equal_p (old, reg_rtx))
8061 /* An output operand that dies right away does need a reload,
8062 but need not be copied from it. Show the new location in the
8064 if ((REG_P (old) || GET_CODE (old) == SCRATCH)
8065 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
8067 XEXP (note, 0) = reg_rtx;
8070 /* Likewise for a SUBREG of an operand that dies. */
8071 else if (GET_CODE (old) == SUBREG
8072 && REG_P (SUBREG_REG (old))
8073 && 0 != (note = find_reg_note (insn, REG_UNUSED,
8076 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
8079 else if (GET_CODE (old) == SCRATCH)
8080 /* If we aren't optimizing, there won't be a REG_UNUSED note,
8081 but we don't want to make an output reload. */
8084 /* If is a JUMP_INSN, we can't support output reloads yet. */
8085 gcc_assert (NONJUMP_INSN_P (insn));
8087 emit_output_reload_insns (chain, rld + j, j);
8090 /* A reload copies values of MODE from register SRC to register DEST.
8091 Return true if it can be treated for inheritance purposes like a
8092 group of reloads, each one reloading a single hard register. The
8093 caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
8094 occupy the same number of hard registers. */
/* NOTE(review): lossily-extracted text -- the return-type line before
   8097, the opening brace, and the `#else'/`#endif' lines that should
   close the conditional below (presumably returning true when
   CANNOT_CHANGE_MODE_CLASS is undefined -- TODO confirm against the
   original reload1.c) are missing from this extraction.  */
8097 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
8098 int src ATTRIBUTE_UNUSED,
8099 enum machine_mode mode ATTRIBUTE_UNUSED)
8101 #ifdef CANNOT_CHANGE_MODE_CLASS
/* When the target restricts mode changes, piecemeal inheritance is
   allowed only if neither hard register is flagged by
   REG_CANNOT_CHANGE_MODE_P for MODE relative to its raw mode.  */
8102 return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
8103 && !REG_CANNOT_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
8109 /* Output insns to reload values in and out of the chosen reload regs. */
8112 emit_reload_insns (struct insn_chain
*chain
)
8114 rtx insn
= chain
->insn
;
8118 CLEAR_HARD_REG_SET (reg_reloaded_died
);
8120 for (j
= 0; j
< reload_n_operands
; j
++)
8121 input_reload_insns
[j
] = input_address_reload_insns
[j
]
8122 = inpaddr_address_reload_insns
[j
]
8123 = output_reload_insns
[j
] = output_address_reload_insns
[j
]
8124 = outaddr_address_reload_insns
[j
]
8125 = other_output_reload_insns
[j
] = 0;
8126 other_input_address_reload_insns
= 0;
8127 other_input_reload_insns
= 0;
8128 operand_reload_insns
= 0;
8129 other_operand_reload_insns
= 0;
8131 /* Dump reloads into the dump file. */
8134 fprintf (dump_file
, "\nReloads for insn # %d\n", INSN_UID (insn
));
8135 debug_reload_to_stream (dump_file
);
8138 /* Now output the instructions to copy the data into and out of the
8139 reload registers. Do these in the order that the reloads were reported,
8140 since reloads of base and index registers precede reloads of operands
8141 and the operands may need the base and index registers reloaded. */
8143 for (j
= 0; j
< n_reloads
; j
++)
8145 if (rld
[j
].reg_rtx
&& HARD_REGISTER_P (rld
[j
].reg_rtx
))
8149 for (i
= REGNO (rld
[j
].reg_rtx
); i
< END_REGNO (rld
[j
].reg_rtx
); i
++)
8150 new_spill_reg_store
[i
] = 0;
8153 do_input_reload (chain
, rld
+ j
, j
);
8154 do_output_reload (chain
, rld
+ j
, j
);
8157 /* Now write all the insns we made for reloads in the order expected by
8158 the allocation functions. Prior to the insn being reloaded, we write
8159 the following reloads:
8161 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
8163 RELOAD_OTHER reloads.
8165 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
8166 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
8167 RELOAD_FOR_INPUT reload for the operand.
8169 RELOAD_FOR_OPADDR_ADDRS reloads.
8171 RELOAD_FOR_OPERAND_ADDRESS reloads.
8173 After the insn being reloaded, we write the following:
8175 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
8176 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
8177 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
8178 reloads for the operand. The RELOAD_OTHER output reloads are
8179 output in descending order by reload number. */
8181 emit_insn_before (other_input_address_reload_insns
, insn
);
8182 emit_insn_before (other_input_reload_insns
, insn
);
8184 for (j
= 0; j
< reload_n_operands
; j
++)
8186 emit_insn_before (inpaddr_address_reload_insns
[j
], insn
);
8187 emit_insn_before (input_address_reload_insns
[j
], insn
);
8188 emit_insn_before (input_reload_insns
[j
], insn
);
8191 emit_insn_before (other_operand_reload_insns
, insn
);
8192 emit_insn_before (operand_reload_insns
, insn
);
8194 for (j
= 0; j
< reload_n_operands
; j
++)
8196 rtx x
= emit_insn_after (outaddr_address_reload_insns
[j
], insn
);
8197 x
= emit_insn_after (output_address_reload_insns
[j
], x
);
8198 x
= emit_insn_after (output_reload_insns
[j
], x
);
8199 emit_insn_after (other_output_reload_insns
[j
], x
);
8202 /* For all the spill regs newly reloaded in this instruction,
8203 record what they were reloaded from, so subsequent instructions
8204 can inherit the reloads.
8206 Update spill_reg_store for the reloads of this insn.
8207 Copy the elements that were updated in the loop above. */
8209 for (j
= 0; j
< n_reloads
; j
++)
8211 int r
= reload_order
[j
];
8212 int i
= reload_spill_index
[r
];
8214 /* If this is a non-inherited input reload from a pseudo, we must
8215 clear any memory of a previous store to the same pseudo. Only do
8216 something if there will not be an output reload for the pseudo
8218 if (rld
[r
].in_reg
!= 0
8219 && ! (reload_inherited
[r
] || reload_override_in
[r
]))
8221 rtx reg
= rld
[r
].in_reg
;
8223 if (GET_CODE (reg
) == SUBREG
)
8224 reg
= SUBREG_REG (reg
);
8227 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
8228 && !REGNO_REG_SET_P (®_has_output_reload
, REGNO (reg
)))
8230 int nregno
= REGNO (reg
);
8232 if (reg_last_reload_reg
[nregno
])
8234 int last_regno
= REGNO (reg_last_reload_reg
[nregno
]);
8236 if (reg_reloaded_contents
[last_regno
] == nregno
)
8237 spill_reg_store
[last_regno
] = 0;
8242 /* I is nonneg if this reload used a register.
8243 If rld[r].reg_rtx is 0, this is an optional reload
8244 that we opted to ignore. */
8246 if (i
>= 0 && rld
[r
].reg_rtx
!= 0)
8248 int nr
= hard_regno_nregs
[i
][GET_MODE (rld
[r
].reg_rtx
)];
8251 /* For a multi register reload, we need to check if all or part
8252 of the value lives to the end. */
8253 for (k
= 0; k
< nr
; k
++)
8254 if (reload_reg_reaches_end_p (i
+ k
, rld
[r
].opnum
,
8255 rld
[r
].when_needed
))
8256 CLEAR_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
);
8258 /* Maybe the spill reg contains a copy of reload_out. */
8260 && (REG_P (rld
[r
].out
)
8264 || REG_P (rld
[r
].out_reg
)))
8267 enum machine_mode mode
;
8270 reg
= reload_reg_rtx_for_output
[r
];
8271 mode
= GET_MODE (reg
);
8272 regno
= REGNO (reg
);
8273 nregs
= hard_regno_nregs
[regno
][mode
];
8274 if (reload_regs_reach_end_p (regno
, nregs
, rld
[r
].opnum
,
8275 rld
[r
].when_needed
))
8277 rtx out
= (REG_P (rld
[r
].out
)
8281 /* AUTO_INC */ : XEXP (rld
[r
].in_reg
, 0));
8282 int out_regno
= REGNO (out
);
8283 int out_nregs
= (!HARD_REGISTER_NUM_P (out_regno
) ? 1
8284 : hard_regno_nregs
[out_regno
][mode
]);
8287 spill_reg_store
[regno
] = new_spill_reg_store
[regno
];
8288 spill_reg_stored_to
[regno
] = out
;
8289 reg_last_reload_reg
[out_regno
] = reg
;
8291 piecemeal
= (HARD_REGISTER_NUM_P (out_regno
)
8292 && nregs
== out_nregs
8293 && inherit_piecemeal_p (out_regno
, regno
, mode
));
8295 /* If OUT_REGNO is a hard register, it may occupy more than
8296 one register. If it does, say what is in the
8297 rest of the registers assuming that both registers
8298 agree on how many words the object takes. If not,
8299 invalidate the subsequent registers. */
8301 if (HARD_REGISTER_NUM_P (out_regno
))
8302 for (k
= 1; k
< out_nregs
; k
++)
8303 reg_last_reload_reg
[out_regno
+ k
]
8304 = (piecemeal
? regno_reg_rtx
[regno
+ k
] : 0);
8306 /* Now do the inverse operation. */
8307 for (k
= 0; k
< nregs
; k
++)
8309 CLEAR_HARD_REG_BIT (reg_reloaded_dead
, regno
+ k
);
8310 reg_reloaded_contents
[regno
+ k
]
8311 = (!HARD_REGISTER_NUM_P (out_regno
) || !piecemeal
8314 reg_reloaded_insn
[regno
+ k
] = insn
;
8315 SET_HARD_REG_BIT (reg_reloaded_valid
, regno
+ k
);
8316 if (HARD_REGNO_CALL_PART_CLOBBERED (regno
+ k
, mode
))
8317 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered
,
8320 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered
,
8325 /* Maybe the spill reg contains a copy of reload_in. Only do
8326 something if there will not be an output reload for
8327 the register being reloaded. */
8328 else if (rld
[r
].out_reg
== 0
8330 && ((REG_P (rld
[r
].in
)
8331 && !HARD_REGISTER_P (rld
[r
].in
)
8332 && !REGNO_REG_SET_P (®_has_output_reload
,
8334 || (REG_P (rld
[r
].in_reg
)
8335 && !REGNO_REG_SET_P (®_has_output_reload
,
8336 REGNO (rld
[r
].in_reg
))))
8337 && !reg_set_p (reload_reg_rtx_for_input
[r
], PATTERN (insn
)))
8340 enum machine_mode mode
;
8343 reg
= reload_reg_rtx_for_input
[r
];
8344 mode
= GET_MODE (reg
);
8345 regno
= REGNO (reg
);
8346 nregs
= hard_regno_nregs
[regno
][mode
];
8347 if (reload_regs_reach_end_p (regno
, nregs
, rld
[r
].opnum
,
8348 rld
[r
].when_needed
))
8355 if (REG_P (rld
[r
].in
)
8356 && REGNO (rld
[r
].in
) >= FIRST_PSEUDO_REGISTER
)
8358 else if (REG_P (rld
[r
].in_reg
))
8361 in
= XEXP (rld
[r
].in_reg
, 0);
8362 in_regno
= REGNO (in
);
8364 in_nregs
= (!HARD_REGISTER_NUM_P (in_regno
) ? 1
8365 : hard_regno_nregs
[in_regno
][mode
]);
8367 reg_last_reload_reg
[in_regno
] = reg
;
8369 piecemeal
= (HARD_REGISTER_NUM_P (in_regno
)
8370 && nregs
== in_nregs
8371 && inherit_piecemeal_p (regno
, in_regno
, mode
));
8373 if (HARD_REGISTER_NUM_P (in_regno
))
8374 for (k
= 1; k
< in_nregs
; k
++)
8375 reg_last_reload_reg
[in_regno
+ k
]
8376 = (piecemeal
? regno_reg_rtx
[regno
+ k
] : 0);
8378 /* Unless we inherited this reload, show we haven't
8379 recently done a store.
8380 Previous stores of inherited auto_inc expressions
8381 also have to be discarded. */
8382 if (! reload_inherited
[r
]
8383 || (rld
[r
].out
&& ! rld
[r
].out_reg
))
8384 spill_reg_store
[regno
] = 0;
8386 for (k
= 0; k
< nregs
; k
++)
8388 CLEAR_HARD_REG_BIT (reg_reloaded_dead
, regno
+ k
);
8389 reg_reloaded_contents
[regno
+ k
]
8390 = (!HARD_REGISTER_NUM_P (in_regno
) || !piecemeal
8393 reg_reloaded_insn
[regno
+ k
] = insn
;
8394 SET_HARD_REG_BIT (reg_reloaded_valid
, regno
+ k
);
8395 if (HARD_REGNO_CALL_PART_CLOBBERED (regno
+ k
, mode
))
8396 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered
,
8399 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered
,
8406 /* The following if-statement was #if 0'd in 1.34 (or before...).
8407 It's reenabled in 1.35 because supposedly nothing else
8408 deals with this problem. */
8410 /* If a register gets output-reloaded from a non-spill register,
8411 that invalidates any previous reloaded copy of it.
8412 But forget_old_reloads_1 won't get to see it, because
8413 it thinks only about the original insn. So invalidate it here.
8414 Also do the same thing for RELOAD_OTHER constraints where the
8415 output is discarded. */
8417 && ((rld
[r
].out
!= 0
8418 && (REG_P (rld
[r
].out
)
8419 || (MEM_P (rld
[r
].out
)
8420 && REG_P (rld
[r
].out_reg
))))
8421 || (rld
[r
].out
== 0 && rld
[r
].out_reg
8422 && REG_P (rld
[r
].out_reg
))))
8424 rtx out
= ((rld
[r
].out
&& REG_P (rld
[r
].out
))
8425 ? rld
[r
].out
: rld
[r
].out_reg
);
8426 int out_regno
= REGNO (out
);
8427 enum machine_mode mode
= GET_MODE (out
);
8429 /* REG_RTX is now set or clobbered by the main instruction.
8430 As the comment above explains, forget_old_reloads_1 only
8431 sees the original instruction, and there is no guarantee
8432 that the original instruction also clobbered REG_RTX.
8433 For example, if find_reloads sees that the input side of
8434 a matched operand pair dies in this instruction, it may
8435 use the input register as the reload register.
8437 Calling forget_old_reloads_1 is a waste of effort if
8438 REG_RTX is also the output register.
8440 If we know that REG_RTX holds the value of a pseudo
8441 register, the code after the call will record that fact. */
8442 if (rld
[r
].reg_rtx
&& rld
[r
].reg_rtx
!= out
)
8443 forget_old_reloads_1 (rld
[r
].reg_rtx
, NULL_RTX
, NULL
);
8445 if (!HARD_REGISTER_NUM_P (out_regno
))
8447 rtx src_reg
, store_insn
= NULL_RTX
;
8449 reg_last_reload_reg
[out_regno
] = 0;
8451 /* If we can find a hard register that is stored, record
8452 the storing insn so that we may delete this insn with
8453 delete_output_reload. */
8454 src_reg
= reload_reg_rtx_for_output
[r
];
8456 /* If this is an optional reload, try to find the source reg
8457 from an input reload. */
8460 rtx set
= single_set (insn
);
8461 if (set
&& SET_DEST (set
) == rld
[r
].out
)
8465 src_reg
= SET_SRC (set
);
8467 for (k
= 0; k
< n_reloads
; k
++)
8469 if (rld
[k
].in
== src_reg
)
8471 src_reg
= reload_reg_rtx_for_input
[k
];
8478 store_insn
= new_spill_reg_store
[REGNO (src_reg
)];
8479 if (src_reg
&& REG_P (src_reg
)
8480 && REGNO (src_reg
) < FIRST_PSEUDO_REGISTER
)
8482 int src_regno
, src_nregs
, k
;
8485 gcc_assert (GET_MODE (src_reg
) == mode
);
8486 src_regno
= REGNO (src_reg
);
8487 src_nregs
= hard_regno_nregs
[src_regno
][mode
];
8488 /* The place where to find a death note varies with
8489 PRESERVE_DEATH_INFO_REGNO_P . The condition is not
8490 necessarily checked exactly in the code that moves
8491 notes, so just check both locations. */
8492 note
= find_regno_note (insn
, REG_DEAD
, src_regno
);
8493 if (! note
&& store_insn
)
8494 note
= find_regno_note (store_insn
, REG_DEAD
, src_regno
);
8495 for (k
= 0; k
< src_nregs
; k
++)
8497 spill_reg_store
[src_regno
+ k
] = store_insn
;
8498 spill_reg_stored_to
[src_regno
+ k
] = out
;
8499 reg_reloaded_contents
[src_regno
+ k
] = out_regno
;
8500 reg_reloaded_insn
[src_regno
+ k
] = store_insn
;
8501 CLEAR_HARD_REG_BIT (reg_reloaded_dead
, src_regno
+ k
);
8502 SET_HARD_REG_BIT (reg_reloaded_valid
, src_regno
+ k
);
8503 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno
+ k
,
8505 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered
,
8508 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered
,
8510 SET_HARD_REG_BIT (reg_is_output_reload
, src_regno
+ k
);
8512 SET_HARD_REG_BIT (reg_reloaded_died
, src_regno
);
8514 CLEAR_HARD_REG_BIT (reg_reloaded_died
, src_regno
);
8516 reg_last_reload_reg
[out_regno
] = src_reg
;
8517 /* We have to set reg_has_output_reload here, or else
8518 forget_old_reloads_1 will clear reg_last_reload_reg
8520 SET_REGNO_REG_SET (®_has_output_reload
,
8526 int k
, out_nregs
= hard_regno_nregs
[out_regno
][mode
];
8528 for (k
= 0; k
< out_nregs
; k
++)
8529 reg_last_reload_reg
[out_regno
+ k
] = 0;
8533 IOR_HARD_REG_SET (reg_reloaded_dead
, reg_reloaded_died
);
/* Go through the motions to emit INSN and test if it is strictly valid.
   Return the emitted insn if valid, else return NULL.  */

static rtx
emit_insn_if_valid_for_reload (rtx insn)
{
  rtx last = get_last_insn ();
  int code;

  insn = emit_insn (insn);
  code = recog_memoized (insn);

  if (code >= 0)
    {
      extract_insn (insn);
      /* We want constrain operands to treat this insn strictly in its
	 validity determination, i.e., the way it would after reload has
	 completed.  */
      if (constrain_operands (1))
	return insn;
    }

  /* Not strictly valid: undo everything emitted since LAST.  */
  delete_insns_since (last);
  return NULL;
}
/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.

   Returns first insn emitted.  */

static rtx
gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
{
  rtx last = get_last_insn ();
  rtx tem;

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (GET_CODE (in) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (in))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
    in = SUBREG_REG (in), out = tem;
  else if (GET_CODE (out) == SUBREG
	   && (GET_MODE_SIZE (GET_MODE (out))
	       > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
	   && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
    out = SUBREG_REG (out), in = tem;

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Or we can be asked to reload an unary operand that was a fragment of
     an addressing mode, into a register.  If it isn't recognized as-is,
     we try making the unop operand and the reload-register the same:
     (set reg:X (unop:X expr:Y))
     -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `extract_insn' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      int code;

      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (REG_P (XEXP (in, 1))
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);

      insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
      if (insn)
	return insn;

      /* If that failed, we must use a conservative two-insn sequence.

	 Use a move to copy one operand into the reload register.  Prefer
	 to reload a constant, MEM or pseudo since the move patterns can
	 handle an arbitrary operand.  If OP1 is not a constant, MEM or
	 pseudo and OP1 is not a valid operand for an add instruction, then
	 reload it.

	 After reloading one of the operands into the reload register, add
	 the reload register to the output register.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      code = (int) optab_handler (add_optab, GET_MODE (out))->insn_code;

      if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
	  || (REG_P (op1)
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
	  || (code != CODE_FOR_nothing
	      && ! ((*insn_data[code].operand[2].predicate)
		    (op1, insn_data[code].operand[2].mode))))
	tem = op0, op0 = op1, op1 = tem;

      gen_reload (out, op0, opnum, type);

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
      if (insn)
	{
	  /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
	  set_unique_reg_note (insn, REG_EQUIV, in);
	  return insn;
	}

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      gcc_assert (!reg_overlap_mentioned_p (out, op0));
      gen_reload (out, op1, opnum, type);
      insn = emit_insn (gen_add2_insn (out, op0));
      set_unique_reg_note (insn, REG_EQUIV, in);
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if ((REG_P (in)
	    || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
	   && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
	   && (REG_P (out)
	       || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
	   && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
				       REGNO_REG_CLASS (reg_or_subregno (out)),
				       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx_REG (GET_MODE (loc), REGNO (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx_REG (GET_MODE (loc), REGNO (in));

      gen_reload (loc, in, opnum, type);
      gen_reload (out, loc, opnum, type);
    }
#endif
  else if (REG_P (out) && UNARY_P (in))
    {
      rtx insn;
      rtx op1;
      rtx out_moded;
      rtx set;

      op1 = find_replacement (&XEXP (in, 0));
      if (op1 != XEXP (in, 0))
	in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);

      /* First, try a plain SET.  */
      set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
      if (set)
	return set;

      /* If that failed, move the inner operand to the reload
	 register, and try the same unop with the inner expression
	 replaced with the reload register.  */

      if (GET_MODE (op1) != GET_MODE (out))
	out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
      else
	out_moded = out;

      gen_reload (out_moded, op1, opnum, type);

      insn
	= gen_rtx_SET (VOIDmode, out,
		       gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
				      out_moded));
      insn = emit_insn_if_valid_for_reload (insn);
      if (insn)
	{
	  set_unique_reg_note (insn, REG_EQUIV, in);
	  return insn;
	}

      fatal_insn ("Failure trying to reload:", set);
    }
  /* If IN is a simple operand, use gen_move_insn.  */
  else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
    {
      tem = emit_insn (gen_move_insn (out, in));
      /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note.  */
      mark_jump_label (in, tem, 0);
    }

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (out, in));
#endif

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx_SET (VOIDmode, out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
/* Delete a previously made output-reload whose result we now believe
   is not needed.  First we double-check.

   INSN is the insn now being processed.
   LAST_RELOAD_REG is the hard register number for which we want to delete
   the last output reload.
   J is the reload-number that originally used REG.  The caller has made
   certain that reload J doesn't use REG any longer for input.
   NEW_RELOAD_REG is reload register that reload J is using for REG.  */

static void
delete_output_reload (rtx insn, int j, int last_reload_reg, rtx new_reload_reg)
{
  rtx output_reload_insn = spill_reg_store[last_reload_reg];
  rtx reg = spill_reg_stored_to[last_reload_reg];
  int k;
  int n_occurrences;
  int n_inherited = 0;
  rtx i1;
  rtx substed;

  /* It is possible that this reload has been only used to set another reload
     we eliminated earlier and thus deleted this instruction too.  */
  if (INSN_DELETED_P (output_reload_insn))
    return;

  /* Get the raw pseudo-register referred to.  */

  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  substed = reg_equiv_memory_loc[REGNO (reg)];

  /* This is unsafe if the operand occurs more often in the current
     insn than it is inherited.  */
  for (k = n_reloads - 1; k >= 0; k--)
    {
      rtx reg2 = rld[k].in;
      if (! reg2)
	continue;
      if (MEM_P (reg2) || reload_override_in[k])
	reg2 = rld[k].in_reg;
#ifdef AUTO_INC_DEC
      if (rld[k].out && ! rld[k].out_reg)
	reg2 = XEXP (rld[k].in_reg, 0);
#endif
      while (GET_CODE (reg2) == SUBREG)
	reg2 = SUBREG_REG (reg2);
      if (rtx_equal_p (reg2, reg))
	{
	  if (reload_inherited[k] || reload_override_in[k] || k == j)
	    n_inherited++;
	  else
	    return;
	}
    }
  n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
    n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
					reg, 0);
  if (substed)
    n_occurrences += count_occurrences (PATTERN (insn),
					eliminate_regs (substed, VOIDmode,
							NULL_RTX), 0);
  for (i1 = reg_equiv_alt_mem_list[REGNO (reg)]; i1; i1 = XEXP (i1, 1))
    {
      gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
      n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
    }
  if (n_occurrences > n_inherited)
    return;

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and we're within the same basic block, then the value can only
     pass through the reload reg and end up here.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (i1))
	return;
      if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
	  && reg_mentioned_p (reg, PATTERN (i1)))
	{
	  /* If this is USE in front of INSN, we only have to check that
	     there are no more references than accounted for by inheritance.  */
	  while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
	    {
	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
	      i1 = NEXT_INSN (i1);
	    }
	  if (n_occurrences <= n_inherited && i1 == insn)
	    break;
	  return;
	}
    }

  /* We will be deleting the insn.  Remove the spill reg information.  */
  for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
    {
      spill_reg_store[last_reload_reg + k] = 0;
      spill_reg_stored_to[last_reload_reg + k] = 0;
    }

  /* The caller has already checked that REG dies or is set in INSN.
     It has also checked that we are optimizing, and thus some
     inaccuracies in the debugging information are acceptable.
     So we could just delete output_reload_insn.  But in some cases
     we can improve the debugging information without sacrificing
     optimization - maybe even improving the code: See if the pseudo
     reg has been completely replaced with reload regs.  If so, delete
     the store insn and forget we had a stack slot for the pseudo.  */
  if (rld[j].out != rld[j].in
      && REG_N_DEATHS (REGNO (reg)) == 1
      && REG_N_SETS (REGNO (reg)) == 1
      && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here and the beginning of
	 the current basic block.  (We also know that the last use before
	 INSN was the output reload we are thinking of deleting, but never
	 mind that.)  Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (LABEL_P (i2)
	      || JUMP_P (i2))
	    break;
	  if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    {
	      /* Some other ref remains; just delete the output reload we
		 know to be dead.  */
	      delete_address_reloads (output_reload_insn, insn);
	      delete_insn (output_reload_insn);
	      return;
	    }
	}

      /* Delete the now-dead stores into this pseudo.  Note that this
	 loop also takes care of deleting output_reload_insn.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    {
	      delete_address_reloads (i2, insn);
	      delete_insn (i2);
	    }
	  if (LABEL_P (i2)
	      || JUMP_P (i2))
	    break;
	}

      /* For the debugging info, say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
      if (ira_conflicts_p)
	/* Inform IRA about the change.  */
	ira_mark_allocation_change (REGNO (reg));
      alter_reg (REGNO (reg), -1, false);
    }
  else
    {
      delete_address_reloads (output_reload_insn, insn);
      delete_insn (output_reload_insn);
    }
}
/* We are going to delete DEAD_INSN.  Recursively delete loads of
   reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
   CURRENT_INSN is being reloaded, so we have to check its reloads too.  */

static void
delete_address_reloads (rtx dead_insn, rtx current_insn)
{
  rtx set = single_set (dead_insn);
  rtx set2, dst, prev, next;

  if (set)
    {
      rtx dst = SET_DEST (set);
      if (MEM_P (dst))
	delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
    }
  /* If we deleted the store from a reloaded post_{in,de}c expression,
     we can delete the matching adds.  */
  prev = PREV_INSN (dead_insn);
  next = NEXT_INSN (dead_insn);
  if (! prev || ! next)
    return;
  set = single_set (next);
  set2 = single_set (prev);
  if (! set || ! set2
      || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
      || !CONST_INT_P (XEXP (SET_SRC (set), 1))
      || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
    return;
  dst = SET_DEST (set);
  /* The neighbouring insns must be a matching inc/dec pair on the same
     destination for the add elimination to be safe.  */
  if (! rtx_equal_p (dst, SET_DEST (set2))
      || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
      || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
      || (INTVAL (XEXP (SET_SRC (set), 1))
	  != -INTVAL (XEXP (SET_SRC (set2), 1))))
    return;
  delete_related_insns (prev);
  delete_related_insns (next);
}
/* Subfunction of delete_address_reloads: process registers found in X.  */

static void
delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
{
  rtx prev, set, dst, i2;
  int i, j;
  enum rtx_code code = GET_CODE (x);

  if (code != REG)
    {
      const char *fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
	  else if (fmt[i] == 'E')
	    {
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
					  current_insn);
	    }
	}
      return;
    }

  if (spill_reg_order[REGNO (x)] < 0)
    return;

  /* Scan backwards for the insn that sets x.  This might be a way back due
     to inheritance.  */
  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
    {
      code = GET_CODE (prev);
      if (code == CODE_LABEL || code == JUMP_INSN)
	return;
      if (!INSN_P (prev))
	continue;
      if (reg_set_p (x, PATTERN (prev)))
	break;
      if (reg_referenced_p (x, PATTERN (prev)))
	return;
    }
  if (! prev || INSN_UID (prev) < reload_first_uid)
    return;
  /* Check that PREV only sets the reload register.  */
  set = single_set (prev);
  if (! set)
    return;
  dst = SET_DEST (set);
  if (!REG_P (dst)
      || ! rtx_equal_p (dst, x))
    return;
  if (! reg_set_p (dst, PATTERN (dead_insn)))
    {
      /* Check if DST was used in a later insn -
	 it might have been inherited.  */
      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
	{
	  if (LABEL_P (i2))
	    break;
	  if (! INSN_P (i2))
	    continue;
	  if (reg_referenced_p (dst, PATTERN (i2)))
	    {
	      /* If there is a reference to the register in the current insn,
		 it might be loaded in a non-inherited reload.  If no other
		 reload uses it, that means the register is set before
		 referenced.  */
	      if (i2 == current_insn)
		{
		  for (j = n_reloads - 1; j >= 0; j--)
		    if ((rld[j].reg_rtx == dst && reload_inherited[j])
			|| reload_override_in[j] == dst)
		      return;
		  for (j = n_reloads - 1; j >= 0; j--)
		    if (rld[j].in && rld[j].reg_rtx == dst)
		      break;
		  if (j >= 0)
		    break;
		}
	      return;
	    }
	  if (JUMP_P (i2))
	    break;
	  /* If DST is still live at CURRENT_INSN, check if it is used for
	     any reload.  Note that even if CURRENT_INSN sets DST, we still
	     have to check the reloads.  */
	  if (i2 == current_insn)
	    {
	      for (j = n_reloads - 1; j >= 0; j--)
		if ((rld[j].reg_rtx == dst && reload_inherited[j])
		    || reload_override_in[j] == dst)
		  return;
	      /* ??? We can't finish the loop here, because dst might be
		 allocated to a pseudo in this block if no reload in this
		 block needs any of the classes containing DST - see
		 spill_hard_reg.  There is no easy way to tell this, so we
		 have to scan till the end of the basic block.  */
	    }
	  if (reg_set_p (dst, PATTERN (i2)))
	    break;
	}
    }
  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
  reg_reloaded_contents[REGNO (dst)] = -1;
  delete_insn (prev);
}
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload from.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.

   Return the instruction that stores into RELOADREG.  */

static rtx
inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = find_replacement (&XEXP (value, 0));
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;
  rtx store;
  rtx real_in = in == value ? incloc : in;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (REG_P (incloc))
    reg_last_reload_reg[REGNO (incloc)] = 0;

  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
      inc = find_replacement (&XEXP (XEXP (value, 1), 1));
    }
  else
    {
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = GEN_INT (inc_amount);
    }

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post && real_in != reloadreg)
    emit_insn (gen_move_insn (reloadreg, real_in));

  if (in == value)
    {
      /* See if we can directly increment INCLOC.  Use a method similar to
	 that in gen_reload.  */

      last = get_last_insn ();
      add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
					 gen_rtx_PLUS (GET_MODE (incloc),
						       incloc, inc)));

      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  extract_insn (add_insn);
	  if (constrain_operands (1))
	    {
	      /* If this is a pre-increment and we have incremented the value
		 where it lives, copy the incremented value to RELOADREG to
		 be used as an address.  */

	      if (! post)
		emit_insn (gen_move_insn (reloadreg, incloc));

	      return add_insn;
	    }
	}
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      if (in != reloadreg)
	emit_insn (gen_move_insn (reloadreg, real_in));
      emit_insn (gen_add2_insn (reloadreg, inc));
      store = emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied IN to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      store = emit_insn (gen_move_insn (incloc, reloadreg));
      if (CONST_INT_P (inc))
	emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL (inc))));
      else
	emit_insn (gen_sub2_insn (reloadreg, inc));
    }

  return store;
}
/* Add REG_INC notes to INSN for every auto-increment or auto-decrement
   address found anywhere inside the rtl expression X.  */

static void
add_auto_inc_notes (rtx insn, rtx x)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  if (code == MEM && auto_inc_p (XEXP (x, 0)))
    {
      add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
      return;
    }

  /* Scan all the operand sub-expressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	add_auto_inc_notes (insn, XEXP (x, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  add_auto_inc_notes (insn, XVECEXP (x, i, j));
    }
}
9269 /* This is used by reload pass, that does emit some instructions after
9270 abnormal calls moving basic block end, but in fact it wants to emit
9271 them on the edge. Looks for abnormal call edges, find backward the
9272 proper call and fix the damage.
9274 Similar handle instructions throwing exceptions internally. */
9276 fixup_abnormal_edges (void)
9278 bool inserted
= false;
9286 /* Look for cases we are interested in - calls or instructions causing
9288 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
9290 if (e
->flags
& EDGE_ABNORMAL_CALL
)
9292 if ((e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
9293 == (EDGE_ABNORMAL
| EDGE_EH
))
9296 if (e
&& !CALL_P (BB_END (bb
))
9297 && !can_throw_internal (BB_END (bb
)))
9301 /* Get past the new insns generated. Allow notes, as the insns
9302 may be already deleted. */
9304 while ((NONJUMP_INSN_P (insn
) || NOTE_P (insn
))
9305 && !can_throw_internal (insn
)
9306 && insn
!= BB_HEAD (bb
))
9307 insn
= PREV_INSN (insn
);
9309 if (CALL_P (insn
) || can_throw_internal (insn
))
9313 stop
= NEXT_INSN (BB_END (bb
));
9315 insn
= NEXT_INSN (insn
);
9317 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
9318 if (e
->flags
& EDGE_FALLTHRU
)
9321 while (insn
&& insn
!= stop
)
9323 next
= NEXT_INSN (insn
);
9328 /* Sometimes there's still the return value USE.
9329 If it's placed after a trapping call (i.e. that
9330 call is the last insn anyway), we have no fallthru
9331 edge. Simply delete this use and don't try to insert
9332 on the non-existent edge. */
9333 if (GET_CODE (PATTERN (insn
)) != USE
)
9335 /* We're not deleting it, we're moving it. */
9336 INSN_DELETED_P (insn
) = 0;
9337 PREV_INSN (insn
) = NULL_RTX
;
9338 NEXT_INSN (insn
) = NULL_RTX
;
9340 insert_insn_on_edge (insn
, e
);
9344 else if (!BARRIER_P (insn
))
9345 set_block_for_insn (insn
, NULL
);
9350 /* It may be that we don't find any such trapping insn. In this
9351 case we discovered quite late that the insn that had been
9352 marked as can_throw_internal in fact couldn't trap at all.
9353 So we should in fact delete the EH edges out of the block. */
9355 purge_dead_edges (bb
);
9359 /* We've possibly turned single trapping insn into multiple ones. */
9360 if (cfun
->can_throw_non_call_exceptions
)
9363 blocks
= sbitmap_alloc (last_basic_block
);
9364 sbitmap_ones (blocks
);
9365 find_many_sub_basic_blocks (blocks
);
9366 sbitmap_free (blocks
);
9370 commit_edge_insertions ();
9372 #ifdef ENABLE_CHECKING
9373 /* Verify that we didn't turn one trapping insn into many, and that
9374 we found and corrected all of the problems wrt fixups on the
9376 verify_flow_info ();