intrinsic.h (gfc_check_selected_real_kind, [...]): Update prototypes.
[gcc.git] / gcc / reload1.c
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26
27 #include "machmode.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "obstack.h"
32 #include "insn-config.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "regs.h"
38 #include "addresses.h"
39 #include "basic-block.h"
40 #include "df.h"
41 #include "reload.h"
42 #include "recog.h"
43 #include "output.h"
44 #include "toplev.h"
45 #include "except.h"
46 #include "tree.h"
47 #include "ira.h"
48 #include "target.h"
49 #include "emit-rtl.h"
50
51 /* This file contains the reload pass of the compiler, which is
52 run after register allocation has been done. It checks that
53 each insn is valid (operands required to be in registers really
54 are in registers of the proper class) and fixes up invalid ones
55 by copying values temporarily into registers for the insns
56 that need them.
57
58 The results of register allocation are described by the vector
59 reg_renumber; the insns still contain pseudo regs, but reg_renumber
60 can be used to find which hard reg, if any, a pseudo reg is in.
61
62 The technique we always use is to free up a few hard regs that are
63 called ``reload regs'', and for each place where a pseudo reg
64 must be in a hard reg, copy it temporarily into one of the reload regs.
65
66 Reload regs are allocated locally for every instruction that needs
67 reloads. When there are pseudos which are allocated to a register that
68 has been chosen as a reload reg, such pseudos must be ``spilled''.
69 This means that they go to other hard regs, or to stack slots if no other
70 available hard regs can be found. Spilling can invalidate more
71 insns, requiring additional need for reloads, so we must keep checking
72 until the process stabilizes.
73
74 For machines with different classes of registers, we must keep track
75 of the register class needed for each reload, and make sure that
76 we allocate enough reload registers of each class.
77
78 The file reload.c contains the code that checks one insn for
79 validity and reports the reloads that it needs. This file
80 is in charge of scanning the entire rtl code, accumulating the
81 reload needs, spilling, assigning reload registers to use for
82 fixing up each insn, and generating the new insns to copy values
83 into the reload registers. */
84 \f
85 /* During reload_as_needed, element N contains a REG rtx for the hard reg
86 into which reg N has been reloaded (perhaps for a previous insn). */
87 static rtx *reg_last_reload_reg;
88
89 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
90 for an output reload that stores into reg N. */
91 static regset_head reg_has_output_reload;
92
93 /* Indicates which hard regs are reload-registers for an output reload
94 in the current insn. */
95 static HARD_REG_SET reg_is_output_reload;
96
97 /* Element N is the constant value to which pseudo reg N is equivalent,
98 or zero if pseudo reg N is not equivalent to a constant.
99 find_reloads looks at this in order to replace pseudo reg N
100 with the constant it stands for. */
101 rtx *reg_equiv_constant;
102
103 /* Element N is an invariant value to which pseudo reg N is equivalent.
104 eliminate_regs_in_insn uses this to replace pseudos in particular
105 contexts. */
106 rtx *reg_equiv_invariant;
107
108 /* Element N is a memory location to which pseudo reg N is equivalent,
109 prior to any register elimination (such as frame pointer to stack
110 pointer). Depending on whether or not it is a valid address, this value
111 is transferred to either reg_equiv_address or reg_equiv_mem. */
112 rtx *reg_equiv_memory_loc;
113
114 /* We allocate reg_equiv_memory_loc inside a varray so that the garbage
115 collector can keep track of what is inside. */
116 VEC(rtx,gc) *reg_equiv_memory_loc_vec;
117
118 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
119 This is used when the address is not valid as a memory address
120 (because its displacement is too big for the machine.) */
121 rtx *reg_equiv_address;
122
123 /* Element N is the memory slot to which pseudo reg N is equivalent,
124 or zero if pseudo reg N is not equivalent to a memory slot. */
125 rtx *reg_equiv_mem;
126
127 /* Element N is an EXPR_LIST of REG_EQUIVs containing MEMs with
128 alternate representations of the location of pseudo reg N. */
129 rtx *reg_equiv_alt_mem_list;
130
131 /* Widest width in which each pseudo reg is referred to (via subreg). */
132 static unsigned int *reg_max_ref_width;
133
134 /* Element N is the list of insns that initialized reg N from its equivalent
135 constant or memory slot. */
136 rtx *reg_equiv_init;
137 int reg_equiv_init_size;
138
139 /* Vector to remember old contents of reg_renumber before spilling. */
140 static short *reg_old_renumber;
141
142 /* During reload_as_needed, element N contains the last pseudo regno reloaded
143 into hard register N. If that pseudo reg occupied more than one register,
144 reg_reloaded_contents points to that pseudo for each spill register in
145 use; all of these must remain set for an inheritance to occur. */
146 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
147
148 /* During reload_as_needed, element N contains the insn for which
149 hard register N was last used. Its contents are significant only
150 when reg_reloaded_valid is set for this register. */
151 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
152
153 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
154 static HARD_REG_SET reg_reloaded_valid;
155 /* Indicate if the register was dead at the end of the reload.
156 This is only valid if reg_reloaded_contents is set and valid. */
157 static HARD_REG_SET reg_reloaded_dead;
158
159 /* Indicate whether the register's current value is one that is not
160 safe to retain across a call, even for registers that are normally
161 call-saved. This is only meaningful for members of reg_reloaded_valid. */
162 static HARD_REG_SET reg_reloaded_call_part_clobbered;
163
164 /* Number of spill-regs so far; number of valid elements of spill_regs. */
165 static int n_spills;
166
167 /* In parallel with spill_regs, contains REG rtx's for those regs.
168 Holds the last rtx used for any given reg, or 0 if it has never
169 been used for spilling yet. This rtx is reused, provided it has
170 the proper mode. */
171 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
172
173 /* In parallel with spill_regs, contains nonzero for a spill reg
174 that was stored after the last time it was used.
175 The precise value is the insn generated to do the store. */
176 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
177
178 /* This is the register that was stored with spill_reg_store. This is a
179 copy of reload_out / reload_out_reg when the value was stored; if
180 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
181 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
182
183 /* This table is the inverse mapping of spill_regs:
184 indexed by hard reg number,
185 it contains the position of that reg in spill_regs,
186 or -1 for something that is not in spill_regs.
187
188 ?!? This is no longer accurate. */
189 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
190
191 /* This reg set indicates registers that can't be used as spill registers for
192 the currently processed insn. These are the hard registers which are live
193 during the insn, but not allocated to pseudos, as well as fixed
194 registers. */
195 static HARD_REG_SET bad_spill_regs;
196
197 /* These are the hard registers that can't be used as spill register for any
198 insn. This includes registers used for user variables and registers that
199 we can't eliminate. A register that appears in this set also can't be used
200 to retry register allocation. */
201 static HARD_REG_SET bad_spill_regs_global;
202
203 /* Describes order of use of registers for reloading
204 of spilled pseudo-registers. `n_spills' is the number of
205 elements that are actually valid; new ones are added at the end.
206
207 Both spill_regs and spill_reg_order are used on two occasions:
208 once during find_reload_regs, where they keep track of the spill registers
209 for a single insn, but also during reload_as_needed where they show all
210 the registers ever used by reload. For the latter case, the information
211 is calculated during finish_spills. */
212 static short spill_regs[FIRST_PSEUDO_REGISTER];
213
214 /* This vector of reg sets indicates, for each pseudo, which hard registers
215 may not be used for retrying global allocation because the register was
216 formerly spilled from one of them. If we allowed reallocating a pseudo to
217 a register that it was already allocated to, reload might not
218 terminate. */
219 static HARD_REG_SET *pseudo_previous_regs;
220
221 /* This vector of reg sets indicates, for each pseudo, which hard
222 registers may not be used for retrying global allocation because they
223 are used as spill registers during one of the insns in which the
224 pseudo is live. */
225 static HARD_REG_SET *pseudo_forbidden_regs;
226
227 /* All hard regs that have been used as spill registers for any insn are
228 marked in this set. */
229 static HARD_REG_SET used_spill_regs;
230
231 /* Index of last register assigned as a spill register. We allocate in
232 a round-robin fashion. */
233 static int last_spill_reg;
234
235 /* Nonzero if indirect addressing is supported on the machine; this means
236 that spilling (REG n) does not require reloading it into a register in
237 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
238 value indicates the level of indirect addressing supported, e.g., two
239 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
240 a hard register. */
241 static char spill_indirect_levels;
242
243 /* Nonzero if indirect addressing is supported when the innermost MEM is
244 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
245 which these are valid is the same as spill_indirect_levels, above. */
246 char indirect_symref_ok;
247
248 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
249 char double_reg_address_ok;
250
251 /* Record the stack slot for each spilled hard register. */
252 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
253
254 /* Width allocated so far for that stack slot. */
255 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
256
257 /* Record which pseudos needed to be spilled. */
258 static regset_head spilled_pseudos;
259
260 /* Record which pseudos changed their allocation in finish_spills. */
261 static regset_head changed_allocation_pseudos;
262
263 /* Used for communication between order_regs_for_reload and count_pseudo.
264 Used to avoid counting one pseudo twice. */
265 static regset_head pseudos_counted;
266
267 /* First uid used by insns created by reload in this function.
268 Used in find_equiv_reg. */
269 int reload_first_uid;
270
271 /* Flag set by local-alloc or global-alloc if anything is live in
272 a call-clobbered reg across calls. */
273 int caller_save_needed;
274
275 /* Set to 1 while reload_as_needed is operating.
276 Required by some machines to handle any generated moves differently. */
277 int reload_in_progress = 0;
278
279 /* These arrays record the insn_code of insns that may be needed to
280 perform input and output reloads of special objects. They provide a
281 place to pass a scratch register. */
282 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
283 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
284
285 /* This obstack is used for allocation of rtl during register elimination.
286 The allocated storage can be freed once find_reloads has processed the
287 insn. */
288 static struct obstack reload_obstack;
289
290 /* Points to the beginning of the reload_obstack. All insn_chain structures
291 are allocated first. */
292 static char *reload_startobj;
293
294 /* The point after all insn_chain structures. Used to quickly deallocate
295 memory allocated in copy_reloads during calculate_needs_all_insns. */
296 static char *reload_firstobj;
297
298 /* This points before all local rtl generated by register elimination.
299 Used to quickly free all memory after processing one insn. */
300 static char *reload_insn_firstobj;
301
302 /* List of insn_chain instructions, one for every insn that reload needs to
303 examine. */
304 struct insn_chain *reload_insn_chain;
305
306 /* List of all insns needing reloads. */
307 static struct insn_chain *insns_need_reload;
308 \f
309 /* This structure is used to record information about register eliminations.
310 Each array entry describes one possible way of eliminating a register
311 in favor of another. If there is more than one way of eliminating a
312 particular register, the most preferred should be specified first. */
313
/* Describes one possible elimination of register FROM in favor of
   register TO, together with the bookkeeping reload needs while
   scanning insns.  */
struct elim_table
{
  int from;			/* Register number to be eliminated.  */
  int to;			/* Register number used as replacement.  */
  HOST_WIDE_INT initial_offset;	/* Initial difference between values.  */
  int can_eliminate;		/* Nonzero if this elimination can be done.  */
  int can_eliminate_previous;	/* Value returned by TARGET_CAN_ELIMINATE
				   target hook in previous scan over insns
				   made by reload.  */
  HOST_WIDE_INT offset;		/* Current offset between the two regs.  */
  HOST_WIDE_INT previous_offset;/* Offset at end of previous insn.  */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated.  */
  rtx to_rtx;			/* REG rtx for the replacement.  */
};
333
334 static struct elim_table *reg_eliminate = 0;
335
336 /* This is an intermediate structure to initialize the table. It has
337 exactly the members provided by ELIMINABLE_REGS. */
338 static const struct elim_table_1
339 {
340 const int from;
341 const int to;
342 } reg_eliminate_1[] =
343
344 /* If a set of eliminable registers was specified, define the table from it.
345 Otherwise, default to the normal case of the frame pointer being
346 replaced by the stack pointer. */
347
348 #ifdef ELIMINABLE_REGS
349 ELIMINABLE_REGS;
350 #else
351 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
352 #endif
353
354 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
355
356 /* Record the number of pending eliminations that have an offset not equal
357 to their initial offset. If nonzero, we use a new copy of each
358 replacement result in any insns encountered. */
359 int num_not_at_initial_offset;
360
361 /* Count the number of registers that we may be able to eliminate. */
362 static int num_eliminable;
363 /* And the number of registers that are equivalent to a constant that
364 can be eliminated to frame_pointer / arg_pointer + constant. */
365 static int num_eliminable_invariants;
366
367 /* For each label, we record the offset of each elimination. If we reach
368 a label by more than one path and an offset differs, we cannot do the
369 elimination. This information is indexed by the difference of the
370 number of the label and the first label number. We can't offset the
371 pointer itself as this can cause problems on machines with segmented
372 memory. The first table is an array of flags that records whether we
373 have yet encountered a label and the second table is an array of arrays,
374 one entry in the latter array for each elimination. */
375
376 static int first_label_num;
377 static char *offsets_known_at;
378 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
379
380 /* Stack of addresses where an rtx has been changed. We can undo the
381 changes by popping items off the stack and restoring the original
382 value at each location.
383
384 We use this simplistic undo capability rather than copy_rtx as copy_rtx
385 will not make a deep copy of a normally sharable rtx, such as
386 (const (plus (symbol_ref) (const_int))). If such an expression appears
387 as R1 in gen_reload_chain_without_interm_reg_p, then a shared
388 rtx expression would be changed. See PR 42431. */
389
390 typedef rtx *rtx_p;
391 DEF_VEC_P(rtx_p);
392 DEF_VEC_ALLOC_P(rtx_p,heap);
393 static VEC(rtx_p,heap) *substitute_stack;
394
395 /* Number of labels in the current function. */
396
397 static int num_labels;
398 \f
399 static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
400 static void maybe_fix_stack_asms (void);
401 static void copy_reloads (struct insn_chain *);
402 static void calculate_needs_all_insns (int);
403 static int find_reg (struct insn_chain *, int);
404 static void find_reload_regs (struct insn_chain *);
405 static void select_reload_regs (void);
406 static void delete_caller_save_insns (void);
407
408 static void spill_failure (rtx, enum reg_class);
409 static void count_spilled_pseudo (int, int, int);
410 static void delete_dead_insn (rtx);
411 static void alter_reg (int, int, bool);
412 static void set_label_offsets (rtx, rtx, int);
413 static void check_eliminable_occurrences (rtx);
414 static void elimination_effects (rtx, enum machine_mode);
415 static rtx eliminate_regs_1 (rtx, enum machine_mode, rtx, bool, bool);
416 static int eliminate_regs_in_insn (rtx, int);
417 static void update_eliminable_offsets (void);
418 static void mark_not_eliminable (rtx, const_rtx, void *);
419 static void set_initial_elim_offsets (void);
420 static bool verify_initial_elim_offsets (void);
421 static void set_initial_label_offsets (void);
422 static void set_offsets_for_label (rtx);
423 static void init_eliminable_invariants (rtx, bool);
424 static void init_elim_table (void);
425 static void free_reg_equiv (void);
426 static void update_eliminables (HARD_REG_SET *);
427 static void elimination_costs_in_insn (rtx);
428 static void spill_hard_reg (unsigned int, int);
429 static int finish_spills (int);
430 static void scan_paradoxical_subregs (rtx);
431 static void count_pseudo (int);
432 static void order_regs_for_reload (struct insn_chain *);
433 static void reload_as_needed (int);
434 static void forget_old_reloads_1 (rtx, const_rtx, void *);
435 static void forget_marked_reloads (regset);
436 static int reload_reg_class_lower (const void *, const void *);
437 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
438 enum machine_mode);
439 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
440 enum machine_mode);
441 static int reload_reg_free_p (unsigned int, int, enum reload_type);
442 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
443 rtx, rtx, int, int);
444 static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
445 rtx, rtx, int, int);
446 static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type);
447 static int allocate_reload_reg (struct insn_chain *, int, int);
448 static int conflicts_with_override (rtx);
449 static void failed_reload (rtx, int);
450 static int set_reload_reg (int, int);
451 static void choose_reload_regs_init (struct insn_chain *, rtx *);
452 static void choose_reload_regs (struct insn_chain *);
453 static void merge_assigned_reloads (rtx);
454 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
455 rtx, int);
456 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
457 int);
458 static void do_input_reload (struct insn_chain *, struct reload *, int);
459 static void do_output_reload (struct insn_chain *, struct reload *, int);
460 static void emit_reload_insns (struct insn_chain *);
461 static void delete_output_reload (rtx, int, int, rtx);
462 static void delete_address_reloads (rtx, rtx);
463 static void delete_address_reloads_1 (rtx, rtx, rtx);
464 static rtx inc_for_reload (rtx, rtx, rtx, int);
465 #ifdef AUTO_INC_DEC
466 static void add_auto_inc_notes (rtx, rtx);
467 #endif
468 static void substitute (rtx *, const_rtx, rtx);
469 static bool gen_reload_chain_without_interm_reg_p (int, int);
470 static int reloads_conflict (int, int);
471 static rtx gen_reload (rtx, rtx, int, enum reload_type);
472 static rtx emit_insn_if_valid_for_reload (rtx);
473 \f
474 /* Initialize the reload pass. This is called at the beginning of compilation
475 and may be called again if the target is reinitialized. */
476
477 void
478 init_reload (void)
479 {
480 int i;
481
482 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
483 Set spill_indirect_levels to the number of levels such addressing is
484 permitted, zero if it is not permitted at all. */
485
486 rtx tem
487 = gen_rtx_MEM (Pmode,
488 gen_rtx_PLUS (Pmode,
489 gen_rtx_REG (Pmode,
490 LAST_VIRTUAL_REGISTER + 1),
491 GEN_INT (4)));
492 spill_indirect_levels = 0;
493
494 while (memory_address_p (QImode, tem))
495 {
496 spill_indirect_levels++;
497 tem = gen_rtx_MEM (Pmode, tem);
498 }
499
500 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
501
502 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
503 indirect_symref_ok = memory_address_p (QImode, tem);
504
505 /* See if reg+reg is a valid (and offsettable) address. */
506
507 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
508 {
509 tem = gen_rtx_PLUS (Pmode,
510 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
511 gen_rtx_REG (Pmode, i));
512
513 /* This way, we make sure that reg+reg is an offsettable address. */
514 tem = plus_constant (tem, 4);
515
516 if (memory_address_p (QImode, tem))
517 {
518 double_reg_address_ok = 1;
519 break;
520 }
521 }
522
523 /* Initialize obstack for our rtl allocation. */
524 gcc_obstack_init (&reload_obstack);
525 reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
526
527 INIT_REG_SET (&spilled_pseudos);
528 INIT_REG_SET (&changed_allocation_pseudos);
529 INIT_REG_SET (&pseudos_counted);
530 }
531
532 /* List of insn chains that are currently unused. */
533 static struct insn_chain *unused_insn_chains = 0;
534
535 /* Allocate an empty insn_chain structure. */
536 struct insn_chain *
537 new_insn_chain (void)
538 {
539 struct insn_chain *c;
540
541 if (unused_insn_chains == 0)
542 {
543 c = XOBNEW (&reload_obstack, struct insn_chain);
544 INIT_REG_SET (&c->live_throughout);
545 INIT_REG_SET (&c->dead_or_set);
546 }
547 else
548 {
549 c = unused_insn_chains;
550 unused_insn_chains = c->next;
551 }
552 c->is_caller_save_insn = 0;
553 c->need_operand_change = 0;
554 c->need_reload = 0;
555 c->need_elim = 0;
556 return c;
557 }
558
559 /* Small utility function to set all regs in hard reg set TO which are
560 allocated to pseudos in regset FROM. */
561
562 void
563 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
564 {
565 unsigned int regno;
566 reg_set_iterator rsi;
567
568 EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
569 {
570 int r = reg_renumber[regno];
571
572 if (r < 0)
573 {
574 /* reload_combine uses the information from DF_LIVE_IN,
575 which might still contain registers that have not
576 actually been allocated since they have an
577 equivalence. */
578 gcc_assert (ira_conflicts_p || reload_completed);
579 }
580 else
581 add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
582 }
583 }
584
585 /* Replace all pseudos found in LOC with their corresponding
586 equivalences. */
587
588 static void
589 replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
590 {
591 rtx x = *loc;
592 enum rtx_code code;
593 const char *fmt;
594 int i, j;
595
596 if (! x)
597 return;
598
599 code = GET_CODE (x);
600 if (code == REG)
601 {
602 unsigned int regno = REGNO (x);
603
604 if (regno < FIRST_PSEUDO_REGISTER)
605 return;
606
607 x = eliminate_regs (x, mem_mode, usage);
608 if (x != *loc)
609 {
610 *loc = x;
611 replace_pseudos_in (loc, mem_mode, usage);
612 return;
613 }
614
615 if (reg_equiv_constant[regno])
616 *loc = reg_equiv_constant[regno];
617 else if (reg_equiv_mem[regno])
618 *loc = reg_equiv_mem[regno];
619 else if (reg_equiv_address[regno])
620 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
621 else
622 {
623 gcc_assert (!REG_P (regno_reg_rtx[regno])
624 || REGNO (regno_reg_rtx[regno]) != regno);
625 *loc = regno_reg_rtx[regno];
626 }
627
628 return;
629 }
630 else if (code == MEM)
631 {
632 replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
633 return;
634 }
635
636 /* Process each of our operands recursively. */
637 fmt = GET_RTX_FORMAT (code);
638 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
639 if (*fmt == 'e')
640 replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
641 else if (*fmt == 'E')
642 for (j = 0; j < XVECLEN (x, i); j++)
643 replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
644 }
645
646 /* Determine if the current function has an exception receiver block
647 that reaches the exit block via non-exceptional edges */
648
649 static bool
650 has_nonexceptional_receiver (void)
651 {
652 edge e;
653 edge_iterator ei;
654 basic_block *tos, *worklist, bb;
655
656 /* If we're not optimizing, then just err on the safe side. */
657 if (!optimize)
658 return true;
659
660 /* First determine which blocks can reach exit via normal paths. */
661 tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);
662
663 FOR_EACH_BB (bb)
664 bb->flags &= ~BB_REACHABLE;
665
666 /* Place the exit block on our worklist. */
667 EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
668 *tos++ = EXIT_BLOCK_PTR;
669
670 /* Iterate: find everything reachable from what we've already seen. */
671 while (tos != worklist)
672 {
673 bb = *--tos;
674
675 FOR_EACH_EDGE (e, ei, bb->preds)
676 if (!(e->flags & EDGE_ABNORMAL))
677 {
678 basic_block src = e->src;
679
680 if (!(src->flags & BB_REACHABLE))
681 {
682 src->flags |= BB_REACHABLE;
683 *tos++ = src;
684 }
685 }
686 }
687 free (worklist);
688
689 /* Now see if there's a reachable block with an exceptional incoming
690 edge. */
691 FOR_EACH_BB (bb)
692 if (bb->flags & BB_REACHABLE)
693 FOR_EACH_EDGE (e, ei, bb->preds)
694 if (e->flags & EDGE_ABNORMAL)
695 return true;
696
697 /* No exceptional block reached exit unexceptionally. */
698 return false;
699 }
700
701 \f
702 /* Global variables used by reload and its subroutines. */
703
704 /* The current basic block while in calculate_elim_costs_all_insns. */
705 static basic_block elim_bb;
706
707 /* Set during calculate_needs if an insn needs register elimination. */
708 static int something_needs_elimination;
709 /* Set during calculate_needs if an insn needs an operand changed. */
710 static int something_needs_operands_changed;
711 /* Set by alter_regs if we spilled a register to the stack. */
712 static bool something_was_spilled;
713
714 /* Nonzero means we couldn't get enough spill regs. */
715 static int failure;
716
717 /* Temporary array of pseudo-register number. */
718 static int *temp_pseudo_reg_arr;
719
720 /* Main entry point for the reload pass.
721
722 FIRST is the first insn of the function being compiled.
723
724 GLOBAL nonzero means we were called from global_alloc
725 and should attempt to reallocate any pseudoregs that we
726 displace from hard regs we will use for reloads.
727 If GLOBAL is zero, we do not have enough information to do that,
728 so any pseudo reg that is spilled must go to the stack.
729
730 Return value is nonzero if reload failed
731 and we must not do any more for this function. */
732
733 int
734 reload (rtx first, int global)
735 {
736 int i, n;
737 rtx insn;
738 struct elim_table *ep;
739 basic_block bb;
740
741 /* Make sure even insns with volatile mem refs are recognizable. */
742 init_recog ();
743
744 failure = 0;
745
746 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
747
748 /* Make sure that the last insn in the chain
749 is not something that needs reloading. */
750 emit_note (NOTE_INSN_DELETED);
751
752 /* Enable find_equiv_reg to distinguish insns made by reload. */
753 reload_first_uid = get_max_uid ();
754
755 #ifdef SECONDARY_MEMORY_NEEDED
756 /* Initialize the secondary memory table. */
757 clear_secondary_mem ();
758 #endif
759
760 /* We don't have a stack slot for any spill reg yet. */
761 memset (spill_stack_slot, 0, sizeof spill_stack_slot);
762 memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
763
764 /* Initialize the save area information for caller-save, in case some
765 are needed. */
766 init_save_areas ();
767
768 /* Compute which hard registers are now in use
769 as homes for pseudo registers.
770 This is done here rather than (eg) in global_alloc
771 because this point is reached even if not optimizing. */
772 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
773 mark_home_live (i);
774
775 /* A function that has a nonlocal label that can reach the exit
776 block via non-exceptional paths must save all call-saved
777 registers. */
778 if (cfun->has_nonlocal_label
779 && has_nonexceptional_receiver ())
780 crtl->saves_all_registers = 1;
781
782 if (crtl->saves_all_registers)
783 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
784 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
785 df_set_regs_ever_live (i, true);
786
787 reg_old_renumber = XCNEWVEC (short, max_regno);
788 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
789 pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
790 pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
791
792 CLEAR_HARD_REG_SET (bad_spill_regs_global);
793
794 init_eliminable_invariants (first, true);
795 init_elim_table ();
796
797 /* Alter each pseudo-reg rtx to contain its hard reg number. Assign
798 stack slots to the pseudos that lack hard regs or equivalents.
799 Do not touch virtual registers. */
800
801 temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
802 for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
803 temp_pseudo_reg_arr[n++] = i;
804
805 if (ira_conflicts_p)
806 /* Ask IRA to order pseudo-registers for better stack slot
807 sharing. */
808 ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);
809
810 for (i = 0; i < n; i++)
811 alter_reg (temp_pseudo_reg_arr[i], -1, false);
812
813 /* If we have some registers we think can be eliminated, scan all insns to
814 see if there is an insn that sets one of these registers to something
815 other than itself plus a constant. If so, the register cannot be
816 eliminated. Doing this scan here eliminates an extra pass through the
817 main reload loop in the most common case where register elimination
818 cannot be done. */
819 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
820 if (INSN_P (insn))
821 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
822
823 maybe_fix_stack_asms ();
824
825 insns_need_reload = 0;
826 something_needs_elimination = 0;
827
828 /* Initialize to -1, which means take the first spill register. */
829 last_spill_reg = -1;
830
831 /* Spill any hard regs that we know we can't eliminate. */
832 CLEAR_HARD_REG_SET (used_spill_regs);
833 /* There can be multiple ways to eliminate a register;
834 they should be listed adjacently.
835 Elimination for any register fails only if all possible ways fail. */
836 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
837 {
838 int from = ep->from;
839 int can_eliminate = 0;
840 do
841 {
842 can_eliminate |= ep->can_eliminate;
843 ep++;
844 }
845 while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
846 if (! can_eliminate)
847 spill_hard_reg (from, 1);
848 }
849
850 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
851 if (frame_pointer_needed)
852 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
853 #endif
854 finish_spills (global);
855
856 /* From now on, we may need to generate moves differently. We may also
857 allow modifications of insns which cause them to not be recognized.
858 Any such modifications will be cleaned up during reload itself. */
859 reload_in_progress = 1;
860
861 /* This loop scans the entire function each go-round
862 and repeats until one repetition spills no additional hard regs. */
863 for (;;)
864 {
865 int something_changed;
866 int did_spill;
867 HOST_WIDE_INT starting_frame_size;
868
869 starting_frame_size = get_frame_size ();
870 something_was_spilled = false;
871
872 set_initial_elim_offsets ();
873 set_initial_label_offsets ();
874
875 /* For each pseudo register that has an equivalent location defined,
876 try to eliminate any eliminable registers (such as the frame pointer)
877 assuming initial offsets for the replacement register, which
878 is the normal case.
879
880 If the resulting location is directly addressable, substitute
881 the MEM we just got directly for the old REG.
882
883 If it is not addressable but is a constant or the sum of a hard reg
884 and constant, it is probably not addressable because the constant is
885 out of range, in that case record the address; we will generate
886 hairy code to compute the address in a register each time it is
887 needed. Similarly if it is a hard register, but one that is not
888 valid as an address register.
889
890 If the location is not addressable, but does not have one of the
891 above forms, assign a stack slot. We have to do this to avoid the
892 potential of producing lots of reloads if, e.g., a location involves
893 a pseudo that didn't get a hard register and has an equivalent memory
894 location that also involves a pseudo that didn't get a hard register.
895
896 Perhaps at some point we will improve reload_when_needed handling
897 so this problem goes away. But that's very hairy. */
898
899 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
900 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
901 {
902 rtx x = eliminate_regs (reg_equiv_memory_loc[i], VOIDmode,
903 NULL_RTX);
904
905 if (strict_memory_address_addr_space_p
906 (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
907 MEM_ADDR_SPACE (x)))
908 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
909 else if (CONSTANT_P (XEXP (x, 0))
910 || (REG_P (XEXP (x, 0))
911 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
912 || (GET_CODE (XEXP (x, 0)) == PLUS
913 && REG_P (XEXP (XEXP (x, 0), 0))
914 && (REGNO (XEXP (XEXP (x, 0), 0))
915 < FIRST_PSEUDO_REGISTER)
916 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
917 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
918 else
919 {
920 /* Make a new stack slot. Then indicate that something
921 changed so we go back and recompute offsets for
922 eliminable registers because the allocation of memory
923 below might change some offset. reg_equiv_{mem,address}
924 will be set up for this pseudo on the next pass around
925 the loop. */
926 reg_equiv_memory_loc[i] = 0;
927 reg_equiv_init[i] = 0;
928 alter_reg (i, -1, true);
929 }
930 }
931
932 if (caller_save_needed)
933 setup_save_areas ();
934
935 /* If we allocated another stack slot, redo elimination bookkeeping. */
936 if (something_was_spilled || starting_frame_size != get_frame_size ())
937 continue;
938 if (starting_frame_size && crtl->stack_alignment_needed)
939 {
940 /* If we have a stack frame, we must align it now. The
941 stack size may be a part of the offset computation for
942 register elimination. So if this changes the stack size,
943 then repeat the elimination bookkeeping. We don't
944 realign when there is no stack, as that will cause a
945 stack frame when none is needed should
946 STARTING_FRAME_OFFSET not be already aligned to
947 STACK_BOUNDARY. */
948 assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
949 if (starting_frame_size != get_frame_size ())
950 continue;
951 }
952
953 if (caller_save_needed)
954 {
955 save_call_clobbered_regs ();
956 /* That might have allocated new insn_chain structures. */
957 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
958 }
959
960 calculate_needs_all_insns (global);
961
962 if (! ira_conflicts_p)
963 /* Don't do it for IRA. We need this info because we don't
964 change live_throughout and dead_or_set for chains when IRA
965 is used. */
966 CLEAR_REG_SET (&spilled_pseudos);
967
968 did_spill = 0;
969
970 something_changed = 0;
971
972 /* If we allocated any new memory locations, make another pass
973 since it might have changed elimination offsets. */
974 if (something_was_spilled || starting_frame_size != get_frame_size ())
975 something_changed = 1;
976
977 /* Even if the frame size remained the same, we might still have
978 changed elimination offsets, e.g. if find_reloads called
979 force_const_mem requiring the back end to allocate a constant
980 pool base register that needs to be saved on the stack. */
981 else if (!verify_initial_elim_offsets ())
982 something_changed = 1;
983
984 {
985 HARD_REG_SET to_spill;
986 CLEAR_HARD_REG_SET (to_spill);
987 update_eliminables (&to_spill);
988 AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
989
990 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
991 if (TEST_HARD_REG_BIT (to_spill, i))
992 {
993 spill_hard_reg (i, 1);
994 did_spill = 1;
995
996 /* Regardless of the state of spills, if we previously had
997 a register that we thought we could eliminate, but now can
998 not eliminate, we must run another pass.
999
1000 Consider pseudos which have an entry in reg_equiv_* which
1001 reference an eliminable register. We must make another pass
1002 to update reg_equiv_* so that we do not substitute in the
1003 old value from when we thought the elimination could be
1004 performed. */
1005 something_changed = 1;
1006 }
1007 }
1008
1009 select_reload_regs ();
1010 if (failure)
1011 goto failed;
1012
1013 if (insns_need_reload != 0 || did_spill)
1014 something_changed |= finish_spills (global);
1015
1016 if (! something_changed)
1017 break;
1018
1019 if (caller_save_needed)
1020 delete_caller_save_insns ();
1021
1022 obstack_free (&reload_obstack, reload_firstobj);
1023 }
1024
1025 /* If global-alloc was run, notify it of any register eliminations we have
1026 done. */
1027 if (global)
1028 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1029 if (ep->can_eliminate)
1030 mark_elimination (ep->from, ep->to);
1031
1032 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1033 If that insn didn't set the register (i.e., it copied the register to
1034 memory), just delete that insn instead of the equivalencing insn plus
1035 anything now dead. If we call delete_dead_insn on that insn, we may
1036 delete the insn that actually sets the register if the register dies
1037 there and that is incorrect. */
1038
1039 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1040 {
1041 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1042 {
1043 rtx list;
1044 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1045 {
1046 rtx equiv_insn = XEXP (list, 0);
1047
1048 /* If we already deleted the insn or if it may trap, we can't
1049 delete it. The latter case shouldn't happen, but can
1050 if an insn has a variable address, gets a REG_EH_REGION
1051 note added to it, and then gets converted into a load
1052 from a constant address. */
1053 if (NOTE_P (equiv_insn)
1054 || can_throw_internal (equiv_insn))
1055 ;
1056 else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1057 delete_dead_insn (equiv_insn);
1058 else
1059 SET_INSN_DELETED (equiv_insn);
1060 }
1061 }
1062 }
1063
1064 /* Use the reload registers where necessary
1065 by generating move instructions to move the must-be-register
1066 values into or out of the reload registers. */
1067
1068 if (insns_need_reload != 0 || something_needs_elimination
1069 || something_needs_operands_changed)
1070 {
1071 HOST_WIDE_INT old_frame_size = get_frame_size ();
1072
1073 reload_as_needed (global);
1074
1075 gcc_assert (old_frame_size == get_frame_size ());
1076
1077 gcc_assert (verify_initial_elim_offsets ());
1078 }
1079
1080 /* If we were able to eliminate the frame pointer, show that it is no
1081 longer live at the start of any basic block. If it ls live by
1082 virtue of being in a pseudo, that pseudo will be marked live
1083 and hence the frame pointer will be known to be live via that
1084 pseudo. */
1085
1086 if (! frame_pointer_needed)
1087 FOR_EACH_BB (bb)
1088 bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1089
1090 /* Come here (with failure set nonzero) if we can't get enough spill
1091 regs. */
1092 failed:
1093
1094 CLEAR_REG_SET (&changed_allocation_pseudos);
1095 CLEAR_REG_SET (&spilled_pseudos);
1096 reload_in_progress = 0;
1097
1098 /* Now eliminate all pseudo regs by modifying them into
1099 their equivalent memory references.
1100 The REG-rtx's for the pseudos are modified in place,
1101 so all insns that used to refer to them now refer to memory.
1102
1103 For a reg that has a reg_equiv_address, all those insns
1104 were changed by reloading so that no insns refer to it any longer;
1105 but the DECL_RTL of a variable decl may refer to it,
1106 and if so this causes the debugging info to mention the variable. */
1107
1108 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1109 {
1110 rtx addr = 0;
1111
1112 if (reg_equiv_mem[i])
1113 addr = XEXP (reg_equiv_mem[i], 0);
1114
1115 if (reg_equiv_address[i])
1116 addr = reg_equiv_address[i];
1117
1118 if (addr)
1119 {
1120 if (reg_renumber[i] < 0)
1121 {
1122 rtx reg = regno_reg_rtx[i];
1123
1124 REG_USERVAR_P (reg) = 0;
1125 PUT_CODE (reg, MEM);
1126 XEXP (reg, 0) = addr;
1127 if (reg_equiv_memory_loc[i])
1128 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
1129 else
1130 {
1131 MEM_IN_STRUCT_P (reg) = MEM_SCALAR_P (reg) = 0;
1132 MEM_ATTRS (reg) = 0;
1133 }
1134 MEM_NOTRAP_P (reg) = 1;
1135 }
1136 else if (reg_equiv_mem[i])
1137 XEXP (reg_equiv_mem[i], 0) = addr;
1138 }
1139
1140 /* We don't want complex addressing modes in debug insns
1141 if simpler ones will do, so delegitimize equivalences
1142 in debug insns. */
1143 if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0)
1144 {
1145 rtx reg = regno_reg_rtx[i];
1146 rtx equiv = 0;
1147 df_ref use, next;
1148
1149 if (reg_equiv_constant[i])
1150 equiv = reg_equiv_constant[i];
1151 else if (reg_equiv_invariant[i])
1152 equiv = reg_equiv_invariant[i];
1153 else if (reg && MEM_P (reg))
1154 equiv = targetm.delegitimize_address (reg);
1155 else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1156 equiv = reg;
1157
1158 if (equiv == reg)
1159 continue;
1160
1161 for (use = DF_REG_USE_CHAIN (i); use; use = next)
1162 {
1163 insn = DF_REF_INSN (use);
1164
1165 /* Make sure the next ref is for a different instruction,
1166 so that we're not affected by the rescan. */
1167 next = DF_REF_NEXT_REG (use);
1168 while (next && DF_REF_INSN (next) == insn)
1169 next = DF_REF_NEXT_REG (next);
1170
1171 if (DEBUG_INSN_P (insn))
1172 {
1173 if (!equiv)
1174 {
1175 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1176 df_insn_rescan_debug_internal (insn);
1177 }
1178 else
1179 INSN_VAR_LOCATION_LOC (insn)
1180 = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1181 reg, equiv);
1182 }
1183 }
1184 }
1185 }
1186
1187 /* We must set reload_completed now since the cleanup_subreg_operands call
1188 below will re-recognize each insn and reload may have generated insns
1189 which are only valid during and after reload. */
1190 reload_completed = 1;
1191
1192 /* Make a pass over all the insns and delete all USEs which we inserted
1193 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1194 notes. Delete all CLOBBER insns, except those that refer to the return
1195 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1196 from misarranging variable-array code, and simplify (subreg (reg))
1197 operands. Strip and regenerate REG_INC notes that may have been moved
1198 around. */
1199
1200 for (insn = first; insn; insn = NEXT_INSN (insn))
1201 if (INSN_P (insn))
1202 {
1203 rtx *pnote;
1204
1205 if (CALL_P (insn))
1206 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1207 VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1208
1209 if ((GET_CODE (PATTERN (insn)) == USE
1210 /* We mark with QImode USEs introduced by reload itself. */
1211 && (GET_MODE (insn) == QImode
1212 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1213 || (GET_CODE (PATTERN (insn)) == CLOBBER
1214 && (!MEM_P (XEXP (PATTERN (insn), 0))
1215 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1216 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1217 && XEXP (XEXP (PATTERN (insn), 0), 0)
1218 != stack_pointer_rtx))
1219 && (!REG_P (XEXP (PATTERN (insn), 0))
1220 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1221 {
1222 delete_insn (insn);
1223 continue;
1224 }
1225
1226 /* Some CLOBBERs may survive until here and still reference unassigned
1227 pseudos with const equivalent, which may in turn cause ICE in later
1228 passes if the reference remains in place. */
1229 if (GET_CODE (PATTERN (insn)) == CLOBBER)
1230 replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1231 VOIDmode, PATTERN (insn));
1232
1233 /* Discard obvious no-ops, even without -O. This optimization
1234 is fast and doesn't interfere with debugging. */
1235 if (NONJUMP_INSN_P (insn)
1236 && GET_CODE (PATTERN (insn)) == SET
1237 && REG_P (SET_SRC (PATTERN (insn)))
1238 && REG_P (SET_DEST (PATTERN (insn)))
1239 && (REGNO (SET_SRC (PATTERN (insn)))
1240 == REGNO (SET_DEST (PATTERN (insn)))))
1241 {
1242 delete_insn (insn);
1243 continue;
1244 }
1245
1246 pnote = &REG_NOTES (insn);
1247 while (*pnote != 0)
1248 {
1249 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1250 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1251 || REG_NOTE_KIND (*pnote) == REG_INC)
1252 *pnote = XEXP (*pnote, 1);
1253 else
1254 pnote = &XEXP (*pnote, 1);
1255 }
1256
1257 #ifdef AUTO_INC_DEC
1258 add_auto_inc_notes (insn, PATTERN (insn));
1259 #endif
1260
1261 /* Simplify (subreg (reg)) if it appears as an operand. */
1262 cleanup_subreg_operands (insn);
1263
1264 /* Clean up invalid ASMs so that they don't confuse later passes.
1265 See PR 21299. */
1266 if (asm_noperands (PATTERN (insn)) >= 0)
1267 {
1268 extract_insn (insn);
1269 if (!constrain_operands (1))
1270 {
1271 error_for_asm (insn,
1272 "%<asm%> operand has impossible constraints");
1273 delete_insn (insn);
1274 continue;
1275 }
1276 }
1277 }
1278
1279 /* If we are doing generic stack checking, give a warning if this
1280 function's frame size is larger than we expect. */
1281 if (flag_stack_check == GENERIC_STACK_CHECK)
1282 {
1283 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1284 static int verbose_warned = 0;
1285
1286 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1287 if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
1288 size += UNITS_PER_WORD;
1289
1290 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1291 {
1292 warning (0, "frame size too large for reliable stack checking");
1293 if (! verbose_warned)
1294 {
1295 warning (0, "try reducing the number of local variables");
1296 verbose_warned = 1;
1297 }
1298 }
1299 }
1300
1301 free (temp_pseudo_reg_arr);
1302
1303 /* Indicate that we no longer have known memory locations or constants. */
1304 free_reg_equiv ();
1305 reg_equiv_init = 0;
1306 free (reg_max_ref_width);
1307 free (reg_old_renumber);
1308 free (pseudo_previous_regs);
1309 free (pseudo_forbidden_regs);
1310
1311 CLEAR_HARD_REG_SET (used_spill_regs);
1312 for (i = 0; i < n_spills; i++)
1313 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1314
1315 /* Free all the insn_chain structures at once. */
1316 obstack_free (&reload_obstack, reload_startobj);
1317 unused_insn_chains = 0;
1318 fixup_abnormal_edges ();
1319
1320 /* Replacing pseudos with their memory equivalents might have
1321 created shared rtx. Subsequent passes would get confused
1322 by this, so unshare everything here. */
1323 unshare_all_rtl_again (first);
1324
1325 #ifdef STACK_BOUNDARY
1326 /* init_emit has set the alignment of the hard frame pointer
1327 to STACK_BOUNDARY. It is very likely no longer valid if
1328 the hard frame pointer was used for register allocation. */
1329 if (!frame_pointer_needed)
1330 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1331 #endif
1332
1333 VEC_free (rtx_p, heap, substitute_stack);
1334
1335 return failure;
1336 }
1337
/* Yet another special case.  Unfortunately, reg-stack forces people to
   write incorrect clobbers in asm statements.  These clobbers must not
   cause the register to appear in bad_spill_regs, otherwise we'll call
   fatal_insn later.  We clear the corresponding regnos in the live
   register sets to avoid this.
   The whole thing is rather sick, I'm afraid.  */

static void
maybe_fix_stack_asms (void)
{
#ifdef STACK_REGS
  const char *constraints[MAX_RECOG_OPERANDS];
  enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
  struct insn_chain *chain;

  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
    {
      int i, noperands;
      HARD_REG_SET clobbered, allowed;
      rtx pat;

      /* Only asm insns are of interest; asm_noperands returns a
	 negative value for anything else.  The problematic clobbers
	 only occur in PARALLEL patterns.  */
      if (! INSN_P (chain->insn)
	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
	continue;
      pat = PATTERN (chain->insn);
      if (GET_CODE (pat) != PARALLEL)
	continue;

      CLEAR_HARD_REG_SET (clobbered);
      CLEAR_HARD_REG_SET (allowed);

      /* First, make a mask of all stack regs that are clobbered.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx t = XVECEXP (pat, 0, i);
	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
	}

      /* Get the operand values and constraints out of the insn.  */
      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
			   constraints, operand_mode, NULL);

      /* For every operand, see what registers are allowed.  */
      for (i = 0; i < noperands; i++)
	{
	  const char *p = constraints[i];
	  /* For every alternative, we compute the class of registers allowed
	     for reloading in CLS, and merge its contents into the reg set
	     ALLOWED.  */
	  int cls = (int) NO_REGS;

	  for (;;)
	    {
	      char c = *p;

	      if (c == '\0' || c == ',' || c == '#')
		{
		  /* End of one alternative - mark the regs in the current
		     class, and reset the class.  */
		  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
		  cls = NO_REGS;
		  p++;
		  /* '#' starts a disparaged alternative; skip the rest of
		     it up to the next ',' or end of string.  */
		  if (c == '#')
		    do {
		      c = *p++;
		    } while (c != '\0' && c != ',');
		  if (c == '\0')
		    break;
		  continue;
		}

	      switch (c)
		{
		case '=': case '+': case '*': case '%': case '?': case '!':
		case '0': case '1': case '2': case '3': case '4': case '<':
		case '>': case 'V': case 'o': case '&': case 'E': case 'F':
		case 's': case 'i': case 'n': case 'X': case 'I': case 'J':
		case 'K': case 'L': case 'M': case 'N': case 'O': case 'P':
		case TARGET_MEM_CONSTRAINT:
		  /* Modifiers and non-register constraints do not widen the
		     set of allowed registers.  */
		  break;

		case 'p':
		  cls = (int) reg_class_subunion[cls]
		      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
		  break;

		case 'g':
		case 'r':
		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
		  break;

		default:
		  /* Target-specific constraint letters: address constraints
		     allow base registers; anything else maps through
		     REG_CLASS_FROM_CONSTRAINT.  */
		  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
		    cls = (int) reg_class_subunion[cls]
		      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
		  else
		    cls = (int) reg_class_subunion[cls]
		      [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
		}
	      /* Some constraints occupy more than one character.  */
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
      /* Those of the registers which are clobbered, but allowed by the
	 constraints, must be usable as reload registers.  So clear them
	 out of the life information.  */
      AND_HARD_REG_SET (allowed, clobbered);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allowed, i))
	  {
	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
	  }
    }

#endif
}
1455 \f
1456 /* Copy the global variables n_reloads and rld into the corresponding elts
1457 of CHAIN. */
1458 static void
1459 copy_reloads (struct insn_chain *chain)
1460 {
1461 chain->n_reloads = n_reloads;
1462 chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1463 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1464 reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1465 }
1466
/* Walk the chain of insns, and determine for each whether it needs reloads
   and/or eliminations.  Build the corresponding insns_need_reload list, and
   set something_needs_elimination as appropriate.  */
static void
calculate_needs_all_insns (int global)
{
  struct insn_chain **pprev_reload = &insns_need_reload;
  struct insn_chain *chain, *next = 0;

  something_needs_elimination = 0;

  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
  for (chain = reload_insn_chain; chain != 0; chain = next)
    {
      rtx insn = chain->insn;

      /* Fetch the successor now: CHAIN may be unlinked and recycled
	 below when a no-op move is deleted.  */
      next = chain->next;

      /* Clear out the shortcuts.  */
      chain->n_reloads = 0;
      chain->need_elim = 0;
      chain->need_reload = 0;
      chain->need_operand_change = 0;

      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	 what effects this has on the known offsets at labels.  */

      if (LABEL_P (insn) || JUMP_P (insn)
	  || (INSN_P (insn) && REG_NOTES (insn) != 0))
	set_label_offsets (insn, insn, 0);

      if (INSN_P (insn))
	{
	  /* Save the original pattern, code and notes so they can be
	     restored if we decide to discard an elimination below.  */
	  rtx old_body = PATTERN (insn);
	  int old_code = INSN_CODE (insn);
	  rtx old_notes = REG_NOTES (insn);
	  int did_elimination = 0;
	  int operands_changed = 0;
	  rtx set = single_set (insn);

	  /* Skip insns that only set an equivalence.  */
	  if (set && REG_P (SET_DEST (set))
	      && reg_renumber[REGNO (SET_DEST (set))] < 0
	      && (reg_equiv_constant[REGNO (SET_DEST (set))]
		  || (reg_equiv_invariant[REGNO (SET_DEST (set))]))
	      && reg_equiv_init[REGNO (SET_DEST (set))])
	    continue;

	  /* If needed, eliminate any eliminable registers.  */
	  if (num_eliminable || num_eliminable_invariants)
	    did_elimination = eliminate_regs_in_insn (insn, 0);

	  /* Analyze the instruction.  */
	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
					   global, spill_reg_order);

	  /* If a no-op set needs more than one reload, this is likely
	     to be something that needs input address reloads.  We
	     can't get rid of this cleanly later, and it is of no use
	     anyway, so discard it now.
	     We only do this when expensive_optimizations is enabled,
	     since this complements reload inheritance / output
	     reload deletion, and it can make debugging harder.  */
	  if (flag_expensive_optimizations && n_reloads > 1)
	    {
	      rtx set = single_set (insn);
	      /* A move is a no-op either when source and destination are
		 literally the same unallocated pseudo, or when both are
		 spilled pseudos sharing the same memory equivalence.  */
	      if (set
		  &&
		  ((SET_SRC (set) == SET_DEST (set)
		    && REG_P (SET_SRC (set))
		    && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
		   || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
		       && reg_renumber[REGNO (SET_SRC (set))] < 0
		       && reg_renumber[REGNO (SET_DEST (set))] < 0
		       && reg_equiv_memory_loc[REGNO (SET_SRC (set))] != NULL
		       && reg_equiv_memory_loc[REGNO (SET_DEST (set))] != NULL
		       && rtx_equal_p (reg_equiv_memory_loc
				       [REGNO (SET_SRC (set))],
				       reg_equiv_memory_loc
				       [REGNO (SET_DEST (set))]))))
		{
		  if (ira_conflicts_p)
		    /* Inform IRA about the insn deletion.  */
		    ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
						   REGNO (SET_SRC (set)));
		  delete_insn (insn);
		  /* Delete it from the reload chain.  */
		  if (chain->prev)
		    chain->prev->next = next;
		  else
		    reload_insn_chain = next;
		  if (next)
		    next->prev = chain->prev;
		  chain->next = unused_insn_chains;
		  unused_insn_chains = chain;
		  continue;
		}
	    }
	  if (num_eliminable)
	    update_eliminable_offsets ();

	  /* Remember for later shortcuts which insns had any reloads or
	     register eliminations.  */
	  chain->need_elim = did_elimination;
	  chain->need_reload = n_reloads > 0;
	  chain->need_operand_change = operands_changed;

	  /* Discard any register replacements done.  */
	  if (did_elimination)
	    {
	      obstack_free (&reload_obstack, reload_insn_firstobj);
	      PATTERN (insn) = old_body;
	      INSN_CODE (insn) = old_code;
	      REG_NOTES (insn) = old_notes;
	      something_needs_elimination = 1;
	    }

	  something_needs_operands_changed |= operands_changed;

	  /* Thread insns that need reloads onto the insns_need_reload
	     list via next_need_reload.  */
	  if (n_reloads != 0)
	    {
	      copy_reloads (chain);
	      *pprev_reload = chain;
	      pprev_reload = &chain->next_need_reload;
	    }
	}
    }
  *pprev_reload = 0;
}
1597 \f
/* This function is called from the register allocator to set up estimates
   for the cost of eliminating pseudos which have REG_EQUIV equivalences to
   an invariant.  The structure is similar to calculate_needs_all_insns.  */

void
calculate_elim_costs_all_insns (void)
{
  /* Per-regno cost (scaled by block frequency) of keeping the insn that
     initializes the equivalence.  */
  int *reg_equiv_init_cost;
  basic_block bb;
  int i;

  reg_equiv_init_cost = XCNEWVEC (int, max_regno);
  init_elim_table ();
  init_eliminable_invariants (get_insns (), false);

  set_initial_elim_offsets ();
  set_initial_label_offsets ();

  FOR_EACH_BB (bb)
    {
      rtx insn;
      elim_bb = bb;

      FOR_BB_INSNS (bb, insn)
	{
	  /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	     include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	     what effects this has on the known offsets at labels.  */

	  if (LABEL_P (insn) || JUMP_P (insn)
	      || (INSN_P (insn) && REG_NOTES (insn) != 0))
	    set_label_offsets (insn, insn, 0);

	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Skip insns that only set an equivalence.  */
	      if (set && REG_P (SET_DEST (set))
		  && reg_renumber[REGNO (SET_DEST (set))] < 0
		  && (reg_equiv_constant[REGNO (SET_DEST (set))]
		      || (reg_equiv_invariant[REGNO (SET_DEST (set))])))
		{
		  unsigned regno = REGNO (SET_DEST (set));
		  rtx init = reg_equiv_init[regno];
		  if (init)
		    {
		      /* Estimate what the initializing insn would cost
			 after elimination, weighted by block frequency.  */
		      rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn,
						false, true);
		      int cost = rtx_cost (t, SET,
					   optimize_bb_for_speed_p (bb));
		      int freq = REG_FREQ_FROM_BB (bb);

		      reg_equiv_init_cost[regno] = cost * freq;
		      continue;
		    }
		}
	      /* If needed, eliminate any eliminable registers.  */
	      if (num_eliminable || num_eliminable_invariants)
		elimination_costs_in_insn (insn);

	      if (num_eliminable)
		update_eliminable_offsets ();
	    }
	}
    }
  /* Report the accumulated costs to IRA.  A pseudo whose equivalence has
     no initializing insn cannot be eliminated, so its adjustment is 0.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      if (reg_equiv_invariant[i])
	{
	  if (reg_equiv_init[i])
	    {
	      int cost = reg_equiv_init_cost[i];
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d has equivalence, initial gains %d\n", i, cost);
	      if (cost != 0)
		ira_adjust_equiv_reg_cost (i, cost);
	    }
	  else
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d had equivalence, but can't be eliminated\n",
			 i);
	      ira_adjust_equiv_reg_cost (i, 0);
	    }
	}
    }

  free_reg_equiv ();
  free (reg_equiv_init_cost);
}
1691 \f
1692 /* Comparison function for qsort to decide which of two reloads
1693 should be handled first. *P1 and *P2 are the reload numbers. */
1694
1695 static int
1696 reload_reg_class_lower (const void *r1p, const void *r2p)
1697 {
1698 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1699 int t;
1700
1701 /* Consider required reloads before optional ones. */
1702 t = rld[r1].optional - rld[r2].optional;
1703 if (t != 0)
1704 return t;
1705
1706 /* Count all solitary classes before non-solitary ones. */
1707 t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1708 - (reg_class_size[(int) rld[r1].rclass] == 1));
1709 if (t != 0)
1710 return t;
1711
1712 /* Aside from solitaires, consider all multi-reg groups first. */
1713 t = rld[r2].nregs - rld[r1].nregs;
1714 if (t != 0)
1715 return t;
1716
1717 /* Consider reloads in order of increasing reg-class number. */
1718 t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1719 if (t != 0)
1720 return t;
1721
1722 /* If reloads are equally urgent, sort by reload number,
1723 so that the results of qsort leave nothing to chance. */
1724 return r1 - r2;
1725 }
1726 \f
1727 /* The cost of spilling each hard reg. */
1728 static int spill_cost[FIRST_PSEUDO_REGISTER];
1729
/* When spilling multiple hard registers, we use SPILL_COST for the first
   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
   is accumulated only on the first hard reg of a multi-reg pseudo.  */
1733 static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1734
1735 /* Map of hard regno to pseudo regno currently occupying the hard
1736 reg. */
1737 static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1738
1739 /* Update the spill cost arrays, considering that pseudo REG is live. */
1740
1741 static void
1742 count_pseudo (int reg)
1743 {
1744 int freq = REG_FREQ (reg);
1745 int r = reg_renumber[reg];
1746 int nregs;
1747
1748 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1749 || REGNO_REG_SET_P (&spilled_pseudos, reg)
1750 /* Ignore spilled pseudo-registers which can be here only if IRA
1751 is used. */
1752 || (ira_conflicts_p && r < 0))
1753 return;
1754
1755 SET_REGNO_REG_SET (&pseudos_counted, reg);
1756
1757 gcc_assert (r >= 0);
1758
1759 spill_add_cost[r] += freq;
1760 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1761 while (nregs-- > 0)
1762 {
1763 hard_regno_to_pseudo_regno[r + nregs] = reg;
1764 spill_cost[r + nregs] += freq;
1765 }
1766 }
1767
/* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
   contents of BAD_SPILL_REGS for the insn described by CHAIN.  */

static void
order_regs_for_reload (struct insn_chain *chain)
{
  unsigned i;
  HARD_REG_SET used_by_pseudos;
  HARD_REG_SET used_by_pseudos2;
  reg_set_iterator rsi;

  /* Fixed registers are never usable as spill registers.  */
  COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);

  memset (spill_cost, 0, sizeof spill_cost);
  memset (spill_add_cost, 0, sizeof spill_add_cost);
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    hard_regno_to_pseudo_regno[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  First exclude hard registers
     that are live in or across this insn.  */

  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);

  /* Now find out which pseudos are allocated to it, and update
     hard_reg_n_uses.  */
  CLEAR_REG_SET (&pseudos_counted);

  /* count_pseudo records each pseudo in pseudos_counted, so a pseudo in
     both sets below is charged only once.  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  CLEAR_REG_SET (&pseudos_counted);
}
1811 \f
/* Vector of reload-numbers showing the order in which the reloads should
   be processed.  */
static short reload_order[MAX_RELOADS];

/* This is used to keep track of the spill regs used in one insn.  Bits
   are set by find_reg and the whole set is copied into the insn chain
   by find_reload_regs.  */
static HARD_REG_SET used_spill_regs_local;
1818
1819 /* We decided to spill hard register SPILLED, which has a size of
1820 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1821 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1822 update SPILL_COST/SPILL_ADD_COST. */
1823
1824 static void
1825 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1826 {
1827 int freq = REG_FREQ (reg);
1828 int r = reg_renumber[reg];
1829 int nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1830
1831 /* Ignore spilled pseudo-registers which can be here only if IRA is
1832 used. */
1833 if ((ira_conflicts_p && r < 0)
1834 || REGNO_REG_SET_P (&spilled_pseudos, reg)
1835 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1836 return;
1837
1838 SET_REGNO_REG_SET (&spilled_pseudos, reg);
1839
1840 spill_add_cost[r] -= freq;
1841 while (nregs-- > 0)
1842 {
1843 hard_regno_to_pseudo_regno[r + nregs] = -1;
1844 spill_cost[r + nregs] -= freq;
1845 }
1846 }
1847
/* Find reload register to use for reload number ORDER.  Returns 1 and
   fills in the reload's regno/nregs fields on success, 0 if no usable
   register of the required class could be found.  */

static int
find_reg (struct insn_chain *chain, int order)
{
  int rnum = reload_order[order];
  struct reload *rl = rld + rnum;
  int best_cost = INT_MAX;
  int best_reg = -1;
  unsigned int i, j, n;
  int k;
  HARD_REG_SET not_usable;
  HARD_REG_SET used_by_other_reload;
  reg_set_iterator rsi;
  static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
  static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];

  /* A register is unusable if it is globally or locally bad for
     spilling, or outside the reload's register class.  */
  COPY_HARD_REG_SET (not_usable, bad_spill_regs);
  IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
  IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);

  /* Collect the hard regs already claimed by conflicting reloads
     processed earlier in reload_order.  */
  CLEAR_HARD_REG_SET (used_by_other_reload);
  for (k = 0; k < order; k++)
    {
      int other = reload_order[k];

      if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
	for (j = 0; j < rld[other].nregs; j++)
	  SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
    }

  /* Scan the hard registers (in allocation order if the target defines
     one) looking for the cheapest candidate.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
#ifdef REG_ALLOC_ORDER
      unsigned int regno = reg_alloc_order[i];
#else
      unsigned int regno = i;
#endif

      if (! TEST_HARD_REG_BIT (not_usable, regno)
	  && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
	  && HARD_REGNO_MODE_OK (regno, rl->mode))
	{
	  int this_cost = spill_cost[regno];
	  int ok = 1;
	  unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];

	  /* A multi-register value must have every one of its hard
	     registers available; accumulate their costs too.  */
	  for (j = 1; j < this_nregs; j++)
	    {
	      this_cost += spill_add_cost[regno + j];
	      if ((TEST_HARD_REG_BIT (not_usable, regno + j))
		  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
		ok = 0;
	    }
	  if (! ok)
	    continue;

	  if (ira_conflicts_p)
	    {
	      /* Ask IRA to find a better pseudo-register for
		 spilling.  */
	      for (n = j = 0; j < this_nregs; j++)
		{
		  int r = hard_regno_to_pseudo_regno[regno + j];

		  if (r < 0)
		    continue;
		  if (n == 0 || regno_pseudo_regs[n - 1] != r)
		    regno_pseudo_regs[n++] = r;
		}
	      /* Terminate the list with -1 as a sentinel.  */
	      regno_pseudo_regs[n++] = -1;
	      if (best_reg < 0
		  || ira_better_spill_reload_regno_p (regno_pseudo_regs,
						      best_regno_pseudo_regs,
						      rl->in, rl->out,
						      chain->insn))
		{
		  best_reg = regno;
		  for (j = 0;; j++)
		    {
		      best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
		      if (regno_pseudo_regs[j] < 0)
			break;
		    }
		}
	      continue;
	    }

	  /* Prefer a register that already holds the value being
	     reloaded in or out.  */
	  if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
	    this_cost--;
	  if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
	    this_cost--;
	  if (this_cost < best_cost
	      /* Among registers with equal cost, prefer caller-saved ones, or
		 use REG_ALLOC_ORDER if it is defined.  */
	      || (this_cost == best_cost
#ifdef REG_ALLOC_ORDER
		  && (inv_reg_alloc_order[regno]
		      < inv_reg_alloc_order[best_reg])
#else
		  && call_used_regs[regno]
		  && ! call_used_regs[best_reg]
#endif
		  ))
	    {
	      best_reg = regno;
	      best_cost = this_cost;
	    }
	}
    }
  if (best_reg == -1)
    return 0;

  if (dump_file)
    fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);

  rl->nregs = hard_regno_nregs[best_reg][rl->mode];
  rl->regno = best_reg;

  /* Account for the pseudos evicted from the chosen registers.  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  for (i = 0; i < rl->nregs; i++)
    {
      /* count_spilled_pseudo should have cleared all costs for the
	 registers we just claimed.  */
      gcc_assert (spill_cost[best_reg + i] == 0);
      gcc_assert (spill_add_cost[best_reg + i] == 0);
      gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
      SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
    }
  return 1;
}
1988
/* Find more reload regs to satisfy the remaining need of an insn, which
   is given by CHAIN.
   Do it by ascending class number, since otherwise a reg
   might be spilled for a big class and might fail to count
   for a smaller class even though it belongs to that class.

   On failure, sets the global FAILURE flag and returns early; on
   success, records the chosen spill regs in CHAIN->used_spill_regs and
   merges them into the global used_spill_regs set.  */

static void
find_reload_regs (struct insn_chain *chain)
{
  int i;

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.  */
  for (i = 0; i < chain->n_reloads; i++)
    {
      /* Show whether this reload already has a hard reg.  */
      if (chain->rld[i].reg_rtx)
	{
	  int regno = REGNO (chain->rld[i].reg_rtx);
	  chain->rld[i].regno = regno;
	  chain->rld[i].nregs
	    = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
	}
      else
	chain->rld[i].regno = -1;
      reload_order[i] = i;
    }

  /* Work on a copy of the chain's reloads in the global rld array,
     which find_reg and the sort comparator operate on.  */
  n_reloads = chain->n_reloads;
  memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));

  CLEAR_HARD_REG_SET (used_spill_regs_local);

  if (dump_file)
    fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));

  qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  /* Compute the order of preference for hard registers to spill.  */

  order_regs_for_reload (chain);

  for (i = 0; i < n_reloads; i++)
    {
      int r = reload_order[i];

      /* Ignore reloads that got marked inoperative.  */
      if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
	  && ! rld[r].optional
	  && rld[r].regno == -1)
	if (! find_reg (chain, i))
	  {
	    if (dump_file)
	      fprintf (dump_file, "reload failure for reload %d\n", r);
	    spill_failure (chain->insn, rld[r].rclass);
	    failure = 1;
	    return;
	  }
    }

  COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
  IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);

  /* Copy the (possibly updated) reloads back into the chain.  */
  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
}
2056
2057 static void
2058 select_reload_regs (void)
2059 {
2060 struct insn_chain *chain;
2061
2062 /* Try to satisfy the needs for each insn. */
2063 for (chain = insns_need_reload; chain != 0;
2064 chain = chain->next_need_reload)
2065 find_reload_regs (chain);
2066 }
2067 \f
/* Delete all insns that were inserted by emit_caller_save_insns during
   this iteration.  Each deleted chain element is unlinked from the
   doubly-linked reload_insn_chain and returned to unused_insn_chains.  */
static void
delete_caller_save_insns (void)
{
  struct insn_chain *c = reload_insn_chain;

  while (c != 0)
    {
      /* Consume any run of consecutive caller-save elements.  */
      while (c != 0 && c->is_caller_save_insn)
	{
	  struct insn_chain *next = c->next;
	  rtx insn = c->insn;

	  /* Keep the list head valid if we are deleting it.  */
	  if (c == reload_insn_chain)
	    reload_insn_chain = next;
	  delete_insn (insn);

	  /* Unlink C from the doubly-linked chain...  */
	  if (next)
	    next->prev = c->prev;
	  if (c->prev)
	    c->prev->next = next;
	  /* ...and push it onto the free list for reuse.  */
	  c->next = unused_insn_chains;
	  unused_insn_chains = c;
	  c = next;
	}
      if (c != 0)
	c = c->next;
    }
}
2098 \f
2099 /* Handle the failure to find a register to spill.
2100 INSN should be one of the insns which needed this particular spill reg. */
2101
2102 static void
2103 spill_failure (rtx insn, enum reg_class rclass)
2104 {
2105 if (asm_noperands (PATTERN (insn)) >= 0)
2106 error_for_asm (insn, "can't find a register in class %qs while "
2107 "reloading %<asm%>",
2108 reg_class_names[rclass]);
2109 else
2110 {
2111 error ("unable to find a register to spill in class %qs",
2112 reg_class_names[rclass]);
2113
2114 if (dump_file)
2115 {
2116 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2117 debug_reload_to_stream (dump_file);
2118 }
2119 fatal_insn ("this is the insn:", insn);
2120 }
2121 }
2122 \f
2123 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2124 data that is dead in INSN. */
2125
2126 static void
2127 delete_dead_insn (rtx insn)
2128 {
2129 rtx prev = prev_real_insn (insn);
2130 rtx prev_dest;
2131
2132 /* If the previous insn sets a register that dies in our insn, delete it
2133 too. */
2134 if (prev && GET_CODE (PATTERN (prev)) == SET
2135 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2136 && reg_mentioned_p (prev_dest, PATTERN (insn))
2137 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2138 && ! side_effects_p (SET_SRC (PATTERN (prev))))
2139 delete_dead_insn (prev);
2140
2141 SET_INSN_DELETED (insn);
2142 }
2143
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.

   DONT_SHARE_P true forbids reusing or sharing a stack slot via IRA.  */

static void
alter_reg (int i, int from_reg, bool dont_share_p)
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (!REG_P (regno_reg_rtx[i]))
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  SET_REGNO (regno_reg_rtx[i],
	     reg_renumber[i] >= 0 ? reg_renumber[i] : i);

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && REG_N_REFS (i) > 0
      && reg_equiv_constant[i] == 0
      && (reg_equiv_invariant[i] == 0 || reg_equiv_init[i] == 0)
      && reg_equiv_memory_loc[i] == 0)
    {
      rtx x = NULL_RTX;
      enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
      unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
      unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
      unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
      int adjust = 0;

      something_was_spilled = true;

      if (ira_conflicts_p)
	{
	  /* Mark the spill for IRA.  */
	  SET_REGNO_REG_SET (&spilled_pseudos, i);
	  if (!dont_share_p)
	    x = ira_reuse_stack_slot (i, inherent_size, total_size);
	}

      /* If IRA supplied a reusable slot above, keep it as-is.  */
      if (x)
	;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, then the previous slot.  */
      else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
	{
	  rtx stack_slot;

	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || total_size > inherent_size ? -1 : 0);

	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = inherent_size - total_size;
	      if (adjust)
		stack_slot
		  = adjust_address_nv (x, mode_for_size (total_size
							 * BITS_PER_UNIT,
							 MODE_INT, 1),
				       adjust);
	    }

	  if (! dont_share_p && ira_conflicts_p)
	    /* Inform IRA about allocation a new stack slot.  */
	    ira_mark_new_stack_slot (stack_slot, i, total_size);
	}

      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size)
	       && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
	x = spill_stack_slot[from_reg];

      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  rtx stack_slot;

	  /* Grow the request so the new slot can also serve every
	     pseudo that previously shared FROM_REG's slot.  */
	  if (spill_stack_slot[from_reg])
	    {
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	      if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
		min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
	    }

	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || total_size > inherent_size ? -1 : 0);
	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (adjust)
		stack_slot
		  = adjust_address_nv (x, mode_for_size (total_size
							 * BITS_PER_UNIT,
							 MODE_INT, 1),
				       adjust);
	    }

	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
	adjust += (total_size - inherent_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);

      /* Set all of the memory attributes as appropriate for a spill.  */
      set_mem_attrs_for_spill (x);

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc[i] = x;
    }
}
2304
2305 /* Mark the slots in regs_ever_live for the hard regs used by
2306 pseudo-reg number REGNO, accessed in MODE. */
2307
2308 static void
2309 mark_home_live_1 (int regno, enum machine_mode mode)
2310 {
2311 int i, lim;
2312
2313 i = reg_renumber[regno];
2314 if (i < 0)
2315 return;
2316 lim = end_hard_regno (mode, i);
2317 while (i < lim)
2318 df_set_regs_ever_live(i++, true);
2319 }
2320
2321 /* Mark the slots in regs_ever_live for the hard regs
2322 used by pseudo-reg number REGNO. */
2323
2324 void
2325 mark_home_live (int regno)
2326 {
2327 if (reg_renumber[regno] >= 0)
2328 mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2329 }
2330 \f
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is nonzero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.

   An elimination whose offset disagrees between two paths reaching the
   same label is disabled by clearing its can_eliminate flag.  */

static void
set_label_offsets (rtx x, rtx insn, int initial_p)
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  unsigned int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = XEXP (x, 0);

      /* ... fall through ...  */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && BARRIER_P (tem))
	set_offsets_for_label (insn);
      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_INSN:
      /* Process the jump's pattern first, then fall through to handle
	 its notes like any other insn.  */
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ...  */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
	 to indirectly and hence must have all eliminations at their
	 initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case PARALLEL:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the parallel or address vector must be
	 at their initial offsets.  We want the first field for PARALLEL
	 and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */

      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (SET_SRC (x), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Check both arms; a non-label, non-PC/RETURN arm falls out
	     of the switch and disables eliminations below.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;

	default:
	  break;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
      break;

    default:
      break;
    }
}
2479 \f
2480 /* Called through for_each_rtx, this function examines every reg that occurs
2481 in PX and adjusts the costs for its elimination which are gathered by IRA.
2482 DATA is the insn in which PX occurs. We do not recurse into MEM
2483 expressions. */
2484
2485 static int
2486 note_reg_elim_costly (rtx *px, void *data)
2487 {
2488 rtx insn = (rtx)data;
2489 rtx x = *px;
2490
2491 if (MEM_P (x))
2492 return -1;
2493
2494 if (REG_P (x)
2495 && REGNO (x) >= FIRST_PSEUDO_REGISTER
2496 && reg_equiv_init[REGNO (x)]
2497 && reg_equiv_invariant[REGNO (x)])
2498 {
2499 rtx t = reg_equiv_invariant[REGNO (x)];
2500 rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
2501 int cost = rtx_cost (new_rtx, SET, optimize_bb_for_speed_p (elim_bb));
2502 int freq = REG_FREQ_FROM_BB (elim_bb);
2503
2504 if (cost != 0)
2505 ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
2506 }
2507 return 0;
2508 }
2509
2510 /* Scan X and replace any eliminable registers (such as fp) with a
2511 replacement (such as sp), plus an offset.
2512
2513 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2514 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2515 MEM, we are allowed to replace a sum of a register and the constant zero
2516 with the register, which we cannot do outside a MEM. In addition, we need
2517 to record the fact that a register is referenced outside a MEM.
2518
2519 If INSN is an insn, it is the insn containing X. If we replace a REG
2520 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2521 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2522 the REG is being modified.
2523
2524 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2525 That's used when we eliminate in expressions stored in notes.
2526 This means, do not set ref_outside_mem even if the reference
2527 is outside of MEMs.
2528
2529 If FOR_COSTS is true, we are being called before reload in order to
2530 estimate the costs of keeping registers with an equivalence unallocated.
2531
2532 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2533 replacements done assuming all offsets are at their initial values. If
2534 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2535 encounter, return the actual location so that find_reloads will do
2536 the proper thing. */
2537
2538 static rtx
2539 eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
2540 bool may_use_invariant, bool for_costs)
2541 {
2542 enum rtx_code code = GET_CODE (x);
2543 struct elim_table *ep;
2544 int regno;
2545 rtx new_rtx;
2546 int i, j;
2547 const char *fmt;
2548 int copied = 0;
2549
2550 if (! current_function_decl)
2551 return x;
2552
2553 switch (code)
2554 {
2555 case CONST_INT:
2556 case CONST_DOUBLE:
2557 case CONST_FIXED:
2558 case CONST_VECTOR:
2559 case CONST:
2560 case SYMBOL_REF:
2561 case CODE_LABEL:
2562 case PC:
2563 case CC0:
2564 case ASM_INPUT:
2565 case ADDR_VEC:
2566 case ADDR_DIFF_VEC:
2567 case RETURN:
2568 return x;
2569
2570 case REG:
2571 regno = REGNO (x);
2572
2573 /* First handle the case where we encounter a bare register that
2574 is eliminable. Replace it with a PLUS. */
2575 if (regno < FIRST_PSEUDO_REGISTER)
2576 {
2577 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2578 ep++)
2579 if (ep->from_rtx == x && ep->can_eliminate)
2580 return plus_constant (ep->to_rtx, ep->previous_offset);
2581
2582 }
2583 else if (reg_renumber && reg_renumber[regno] < 0
2584 && reg_equiv_invariant && reg_equiv_invariant[regno])
2585 {
2586 if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
2587 return eliminate_regs_1 (copy_rtx (reg_equiv_invariant[regno]),
2588 mem_mode, insn, true, for_costs);
2589 /* There exists at least one use of REGNO that cannot be
2590 eliminated. Prevent the defining insn from being deleted. */
2591 reg_equiv_init[regno] = NULL_RTX;
2592 if (!for_costs)
2593 alter_reg (regno, -1, true);
2594 }
2595 return x;
2596
2597 /* You might think handling MINUS in a manner similar to PLUS is a
2598 good idea. It is not. It has been tried multiple times and every
2599 time the change has had to have been reverted.
2600
2601 Other parts of reload know a PLUS is special (gen_reload for example)
2602 and require special code to handle code a reloaded PLUS operand.
2603
2604 Also consider backends where the flags register is clobbered by a
2605 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2606 lea instruction comes to mind). If we try to reload a MINUS, we
2607 may kill the flags register that was holding a useful value.
2608
2609 So, please before trying to handle MINUS, consider reload as a
2610 whole instead of this little section as well as the backend issues. */
2611 case PLUS:
2612 /* If this is the sum of an eliminable register and a constant, rework
2613 the sum. */
2614 if (REG_P (XEXP (x, 0))
2615 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2616 && CONSTANT_P (XEXP (x, 1)))
2617 {
2618 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2619 ep++)
2620 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2621 {
2622 /* The only time we want to replace a PLUS with a REG (this
2623 occurs when the constant operand of the PLUS is the negative
2624 of the offset) is when we are inside a MEM. We won't want
2625 to do so at other times because that would change the
2626 structure of the insn in a way that reload can't handle.
2627 We special-case the commonest situation in
2628 eliminate_regs_in_insn, so just replace a PLUS with a
2629 PLUS here, unless inside a MEM. */
2630 if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
2631 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2632 return ep->to_rtx;
2633 else
2634 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2635 plus_constant (XEXP (x, 1),
2636 ep->previous_offset));
2637 }
2638
2639 /* If the register is not eliminable, we are done since the other
2640 operand is a constant. */
2641 return x;
2642 }
2643
2644 /* If this is part of an address, we want to bring any constant to the
2645 outermost PLUS. We will do this by doing register replacement in
2646 our operands and seeing if a constant shows up in one of them.
2647
2648 Note that there is no risk of modifying the structure of the insn,
2649 since we only get called for its operands, thus we are either
2650 modifying the address inside a MEM, or something like an address
2651 operand of a load-address insn. */
2652
2653 {
2654 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2655 for_costs);
2656 rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2657 for_costs);
2658
2659 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2660 {
2661 /* If one side is a PLUS and the other side is a pseudo that
2662 didn't get a hard register but has a reg_equiv_constant,
2663 we must replace the constant here since it may no longer
2664 be in the position of any operand. */
2665 if (GET_CODE (new0) == PLUS && REG_P (new1)
2666 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2667 && reg_renumber[REGNO (new1)] < 0
2668 && reg_equiv_constant != 0
2669 && reg_equiv_constant[REGNO (new1)] != 0)
2670 new1 = reg_equiv_constant[REGNO (new1)];
2671 else if (GET_CODE (new1) == PLUS && REG_P (new0)
2672 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2673 && reg_renumber[REGNO (new0)] < 0
2674 && reg_equiv_constant[REGNO (new0)] != 0)
2675 new0 = reg_equiv_constant[REGNO (new0)];
2676
2677 new_rtx = form_sum (GET_MODE (x), new0, new1);
2678
2679 /* As above, if we are not inside a MEM we do not want to
2680 turn a PLUS into something else. We might try to do so here
2681 for an addition of 0 if we aren't optimizing. */
2682 if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2683 return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2684 else
2685 return new_rtx;
2686 }
2687 }
2688 return x;
2689
2690 case MULT:
2691 /* If this is the product of an eliminable register and a
2692 constant, apply the distribute law and move the constant out
2693 so that we have (plus (mult ..) ..). This is needed in order
2694 to keep load-address insns valid. This case is pathological.
2695 We ignore the possibility of overflow here. */
2696 if (REG_P (XEXP (x, 0))
2697 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2698 && CONST_INT_P (XEXP (x, 1)))
2699 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2700 ep++)
2701 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2702 {
2703 if (! mem_mode
2704 /* Refs inside notes or in DEBUG_INSNs don't count for
2705 this purpose. */
2706 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2707 || GET_CODE (insn) == INSN_LIST
2708 || DEBUG_INSN_P (insn))))
2709 ep->ref_outside_mem = 1;
2710
2711 return
2712 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2713 ep->previous_offset * INTVAL (XEXP (x, 1)));
2714 }
2715
2716 /* ... fall through ... */
2717
2718 case CALL:
2719 case COMPARE:
2720 /* See comments before PLUS about handling MINUS. */
2721 case MINUS:
2722 case DIV: case UDIV:
2723 case MOD: case UMOD:
2724 case AND: case IOR: case XOR:
2725 case ROTATERT: case ROTATE:
2726 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2727 case NE: case EQ:
2728 case GE: case GT: case GEU: case GTU:
2729 case LE: case LT: case LEU: case LTU:
2730 {
2731 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2732 for_costs);
2733 rtx new1 = XEXP (x, 1)
2734 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
2735 for_costs) : 0;
2736
2737 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2738 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2739 }
2740 return x;
2741
2742 case EXPR_LIST:
2743 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2744 if (XEXP (x, 0))
2745 {
2746 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2747 for_costs);
2748 if (new_rtx != XEXP (x, 0))
2749 {
2750 /* If this is a REG_DEAD note, it is not valid anymore.
2751 Using the eliminated version could result in creating a
2752 REG_DEAD note for the stack or frame pointer. */
2753 if (REG_NOTE_KIND (x) == REG_DEAD)
2754 return (XEXP (x, 1)
2755 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2756 for_costs)
2757 : NULL_RTX);
2758
2759 x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2760 }
2761 }
2762
2763 /* ... fall through ... */
2764
2765 case INSN_LIST:
2766 /* Now do eliminations in the rest of the chain. If this was
2767 an EXPR_LIST, this might result in allocating more memory than is
2768 strictly needed, but it simplifies the code. */
2769 if (XEXP (x, 1))
2770 {
2771 new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2772 for_costs);
2773 if (new_rtx != XEXP (x, 1))
2774 return
2775 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2776 }
2777 return x;
2778
2779 case PRE_INC:
2780 case POST_INC:
2781 case PRE_DEC:
2782 case POST_DEC:
2783 /* We do not support elimination of a register that is modified.
2784 elimination_effects has already make sure that this does not
2785 happen. */
2786 return x;
2787
2788 case PRE_MODIFY:
2789 case POST_MODIFY:
2790 /* We do not support elimination of a register that is modified.
2791 elimination_effects has already make sure that this does not
2792 happen. The only remaining case we need to consider here is
2793 that the increment value may be an eliminable register. */
2794 if (GET_CODE (XEXP (x, 1)) == PLUS
2795 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2796 {
2797 rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2798 insn, true, for_costs);
2799
2800 if (new_rtx != XEXP (XEXP (x, 1), 1))
2801 return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2802 gen_rtx_PLUS (GET_MODE (x),
2803 XEXP (x, 0), new_rtx));
2804 }
2805 return x;
2806
2807 case STRICT_LOW_PART:
2808 case NEG: case NOT:
2809 case SIGN_EXTEND: case ZERO_EXTEND:
2810 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2811 case FLOAT: case FIX:
2812 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2813 case ABS:
2814 case SQRT:
2815 case FFS:
2816 case CLZ:
2817 case CTZ:
2818 case POPCOUNT:
2819 case PARITY:
2820 case BSWAP:
2821 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2822 for_costs);
2823 if (new_rtx != XEXP (x, 0))
2824 return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2825 return x;
2826
2827 case SUBREG:
2828 /* Similar to above processing, but preserve SUBREG_BYTE.
2829 Convert (subreg (mem)) to (mem) if not paradoxical.
2830 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2831 pseudo didn't get a hard reg, we must replace this with the
2832 eliminated version of the memory location because push_reload
2833 may do the replacement in certain circumstances. */
2834 if (REG_P (SUBREG_REG (x))
2835 && (GET_MODE_SIZE (GET_MODE (x))
2836 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2837 && reg_equiv_memory_loc != 0
2838 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2839 {
2840 new_rtx = SUBREG_REG (x);
2841 }
2842 else
2843 new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false,
2844 for_costs);
2845
2846 if (new_rtx != SUBREG_REG (x))
2847 {
2848 int x_size = GET_MODE_SIZE (GET_MODE (x));
2849 int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2850
2851 if (MEM_P (new_rtx)
2852 && ((x_size < new_size
2853 #ifdef WORD_REGISTER_OPERATIONS
2854 /* On these machines, combine can create rtl of the form
2855 (set (subreg:m1 (reg:m2 R) 0) ...)
2856 where m1 < m2, and expects something interesting to
2857 happen to the entire word. Moreover, it will use the
2858 (reg:m2 R) later, expecting all bits to be preserved.
2859 So if the number of words is the same, preserve the
2860 subreg so that push_reload can see it. */
2861 && ! ((x_size - 1) / UNITS_PER_WORD
2862 == (new_size -1 ) / UNITS_PER_WORD)
2863 #endif
2864 )
2865 || x_size == new_size)
2866 )
2867 return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2868 else
2869 return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2870 }
2871
2872 return x;
2873
2874 case MEM:
2875 /* Our only special processing is to pass the mode of the MEM to our
2876 recursive call and copy the flags. While we are here, handle this
2877 case more efficiently. */
2878
2879 new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
2880 for_costs);
2881 if (for_costs
2882 && memory_address_p (GET_MODE (x), XEXP (x, 0))
2883 && !memory_address_p (GET_MODE (x), new_rtx))
2884 for_each_rtx (&XEXP (x, 0), note_reg_elim_costly, insn);
2885
2886 return replace_equiv_address_nv (x, new_rtx);
2887
2888 case USE:
2889 /* Handle insn_list USE that a call to a pure function may generate. */
2890 new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
2891 for_costs);
2892 if (new_rtx != XEXP (x, 0))
2893 return gen_rtx_USE (GET_MODE (x), new_rtx);
2894 return x;
2895
2896 case CLOBBER:
2897 gcc_assert (insn && DEBUG_INSN_P (insn));
2898 break;
2899
2900 case ASM_OPERANDS:
2901 case SET:
2902 gcc_unreachable ();
2903
2904 default:
2905 break;
2906 }
2907
2908 /* Process each of our operands recursively. If any have changed, make a
2909 copy of the rtx. */
2910 fmt = GET_RTX_FORMAT (code);
2911 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2912 {
2913 if (*fmt == 'e')
2914 {
2915 new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
2916 for_costs);
2917 if (new_rtx != XEXP (x, i) && ! copied)
2918 {
2919 x = shallow_copy_rtx (x);
2920 copied = 1;
2921 }
2922 XEXP (x, i) = new_rtx;
2923 }
2924 else if (*fmt == 'E')
2925 {
2926 int copied_vec = 0;
2927 for (j = 0; j < XVECLEN (x, i); j++)
2928 {
2929 new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
2930 for_costs);
2931 if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2932 {
2933 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2934 XVEC (x, i)->elem);
2935 if (! copied)
2936 {
2937 x = shallow_copy_rtx (x);
2938 copied = 1;
2939 }
2940 XVEC (x, i) = new_v;
2941 copied_vec = 1;
2942 }
2943 XVECEXP (x, i, j) = new_rtx;
2944 }
2945 }
2946 }
2947
2948 return x;
2949 }
2950
/* Eliminate all eliminable registers mentioned in X, returning the
   (possibly shared) result.  MEM_MODE is the mode of an enclosing MEM,
   or VOIDmode if X is not inside a MEM.  This is the external entry
   point; it calls the worker eliminate_regs_1 with both of its trailing
   boolean arguments false (the last being FOR_COSTS -- see
   elimination_costs_in_insn for the cost-estimation variant).  */
rtx
eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
{
  return eliminate_regs_1 (x, mem_mode, insn, false, false);
}
2956
/* Scan rtx X for modifications of elimination target registers.  Update
   the table of eliminables to reflect the changed state.  MEM_MODE is
   the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM.

   This function only updates the reg_eliminate table (offsets,
   can_eliminate and ref_outside_mem flags); it never rewrites X.  */

static void
elimination_effects (rtx x, enum machine_mode mem_mode)
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  int i, j;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      /* Constants and labels contain no registers; nothing to record.  */
      return;

    case REG:
      regno = REGNO (x);

      /* A bare reference to the source of an active elimination: note
	 whether it occurs outside a MEM, since such references constrain
	 the elimination (see the offset check in eliminate_regs_in_insn).
	 Nothing is rewritten here -- substitution is done elsewhere.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      {
		if (! mem_mode)
		  ep->ref_outside_mem = 1;
		return;
	      }

	}
      else if (reg_renumber[regno] < 0 && reg_equiv_constant
	       && reg_equiv_constant[regno]
	       && ! function_invariant_p (reg_equiv_constant[regno]))
	/* An unallocated pseudo with a non-invariant constant equivalence:
	   the equivalence may itself mention eliminable registers.  */
	elimination_effects (reg_equiv_constant[regno], mem_mode);
      return;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_MODIFY:
      /* If we modify the source of an elimination rule, disable it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      /* If we modify the target of an elimination rule by adding a constant,
	 update its offset.  If we modify the target in any other way, we'll
	 have to disable the rule as well.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  {
	    int size = GET_MODE_SIZE (mem_mode);

	    /* If more bytes than MEM_MODE are pushed, account for them.  */
#ifdef PUSH_ROUNDING
	    if (ep->to_rtx == stack_pointer_rtx)
	      size = PUSH_ROUNDING (size);
#endif
	    if (code == PRE_DEC || code == POST_DEC)
	      ep->offset += size;
	    else if (code == PRE_INC || code == POST_INC)
	      ep->offset -= size;
	    else if (code == PRE_MODIFY || code == POST_MODIFY)
	      {
		/* Only (pre|post)_modify of the form (plus TO CST) keeps
		   the elimination valid; anything else disables it.  */
		if (GET_CODE (XEXP (x, 1)) == PLUS
		    && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
		    && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
		  ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
		else
		  ep->can_eliminate = 0;
	      }
	  }

      /* These two aren't unary operators.  */
      if (code == POST_MODIFY || code == PRE_MODIFY)
	break;

      /* Fall through to generic unary operation case.  */
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:
    case CTZ:
    case POPCOUNT:
    case PARITY:
    case BSWAP:
      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SUBREG:
      /* A non-paradoxical subreg of an unallocated pseudo with a memory
	 equivalence needs no scanning; eliminate_regs_1 handles the
	 corresponding substitution case directly (see its SUBREG case).  */
      if (REG_P (SUBREG_REG (x))
	  && (GET_MODE_SIZE (GET_MODE (x))
	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	  && reg_equiv_memory_loc != 0
	  && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
	return;

      elimination_effects (SUBREG_REG (x), mem_mode);
      return;

    case USE:
      /* If using a register that is the source of an elimination we still
	 think can be performed, note it cannot be performed since we don't
	 know how this register is used.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case CLOBBER:
      /* If clobbering a register that is the replacement register for an
	 elimination we still think can be performed, note that it cannot
	 be performed.  Otherwise, we need not be concerned about it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SET:
      /* Check for setting a register that we know about.  */
      if (REG_P (SET_DEST (x)))
	{
	  /* See if this is setting the replacement register for an
	     elimination.

	     If DEST is the hard frame pointer, we do nothing because we
	     assume that all assignments to the frame pointer are for
	     non-local gotos and are being done at a time when they are valid
	     and do not disturb anything else.  Some machines want to
	     eliminate a fake argument pointer (or even a fake frame pointer)
	     with either the real frame or the stack pointer.  Assignments to
	     the hard frame pointer must not prevent this elimination.  */

	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->to_rtx == SET_DEST (x)
		&& SET_DEST (x) != hard_frame_pointer_rtx)
	      {
		/* If it is being incremented, adjust the offset.  Otherwise,
		   this elimination can't be done.  */
		rtx src = SET_SRC (x);

		if (GET_CODE (src) == PLUS
		    && XEXP (src, 0) == SET_DEST (x)
		    && CONST_INT_P (XEXP (src, 1)))
		  ep->offset -= INTVAL (XEXP (src, 1));
		else
		  ep->can_eliminate = 0;
	      }
	}

      elimination_effects (SET_DEST (x), VOIDmode);
      elimination_effects (SET_SRC (x), VOIDmode);
      return;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call.  */
      elimination_effects (XEXP (x, 0), GET_MODE (x));
      return;

    default:
      break;
    }

  /* Generic case: recurse into every rtx and rtvec operand of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	elimination_effects (XEXP (x, i), mem_mode);
      else if (*fmt == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  elimination_effects (XVECEXP (x, i, j), mem_mode);
    }
}
3161
3162 /* Descend through rtx X and verify that no references to eliminable registers
3163 remain. If any do remain, mark the involved register as not
3164 eliminable. */
3165
3166 static void
3167 check_eliminable_occurrences (rtx x)
3168 {
3169 const char *fmt;
3170 int i;
3171 enum rtx_code code;
3172
3173 if (x == 0)
3174 return;
3175
3176 code = GET_CODE (x);
3177
3178 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3179 {
3180 struct elim_table *ep;
3181
3182 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3183 if (ep->from_rtx == x)
3184 ep->can_eliminate = 0;
3185 return;
3186 }
3187
3188 fmt = GET_RTX_FORMAT (code);
3189 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3190 {
3191 if (*fmt == 'e')
3192 check_eliminable_occurrences (XEXP (x, i));
3193 else if (*fmt == 'E')
3194 {
3195 int j;
3196 for (j = 0; j < XVECLEN (x, i); j++)
3197 check_eliminable_occurrences (XVECEXP (x, i, j));
3198 }
3199 }
3200 }
3201 \f
/* Scan INSN and eliminate all eliminable registers in it.

   If REPLACE is nonzero, do the replacement destructively.  Also
   delete the insn as dead if it is setting an eliminable register.

   If REPLACE is zero, do all our allocations in reload_obstack.

   If no eliminations were done and this insn doesn't require any elimination
   processing (these are not identical conditions: it might be updating sp,
   but not referencing fp; this needs to be seen during reload_as_needed so
   that the offset between fp and sp can be taken into consideration), zero
   is returned.  Otherwise, 1 is returned.  */

static int
eliminate_regs_in_insn (rtx insn, int replace)
{
  int icode = recog_memoized (insn);
  rtx old_body = PATTERN (insn);
  int insn_is_asm = asm_noperands (old_body) >= 0;
  rtx old_set = single_set (insn);
  rtx new_body;
  int val = 0;			/* Return value: nonzero if anything changed.  */
  int i;
  /* Per-operand before/after images, so the original body can be
     restored when REPLACE is zero.  */
  rtx substed_operand[MAX_RECOG_OPERANDS];
  rtx orig_operand[MAX_RECOG_OPERANDS];
  struct elim_table *ep;
  rtx plus_src, plus_cst_src;

  /* Unrecognizable non-asm insns are only the harmless kinds asserted
     below; debug insns still get their location expression eliminated.  */
  if (! insn_is_asm && icode < 0)
    {
      gcc_assert (GET_CODE (PATTERN (insn)) == USE
		  || GET_CODE (PATTERN (insn)) == CLOBBER
		  || GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
		  || GET_CODE (PATTERN (insn)) == ASM_INPUT
		  || DEBUG_INSN_P (insn));
      if (DEBUG_INSN_P (insn))
	INSN_VAR_LOCATION_LOC (insn)
	  = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
      return 0;
    }

  if (old_set != 0 && REG_P (SET_DEST (old_set))
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  {
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	    /* If this is setting the frame pointer register to the
	       hardware frame pointer register and this is an elimination
	       that will be done (tested above), this insn is really
	       adjusting the frame pointer downward to compensate for
	       the adjustment done before a nonlocal goto.  */
	    if (ep->from == FRAME_POINTER_REGNUM
		&& ep->to == HARD_FRAME_POINTER_REGNUM)
	      {
		rtx base = SET_SRC (old_set);
		rtx base_insn = insn;
		HOST_WIDE_INT offset = 0;

		/* Walk the source backwards (through PLUS constants and
		   single-set defining insns) trying to express it as
		   TO_RTX plus a constant OFFSET.  */
		while (base != ep->to_rtx)
		  {
		    rtx prev_insn, prev_set;

		    if (GET_CODE (base) == PLUS
			&& CONST_INT_P (XEXP (base, 1)))
		      {
			offset += INTVAL (XEXP (base, 1));
			base = XEXP (base, 0);
		      }
		    else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
			     && (prev_set = single_set (prev_insn)) != 0
			     && rtx_equal_p (SET_DEST (prev_set), base))
		      {
			base = SET_SRC (prev_set);
			base_insn = prev_insn;
		      }
		    else
		      break;
		  }

		if (base == ep->to_rtx)
		  {
		    rtx src
		      = plus_constant (ep->to_rtx, offset - ep->offset);

		    new_body = old_body;
		    if (! replace)
		      {
			new_body = copy_insn (old_body);
			if (REG_NOTES (insn))
			  REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
		      }
		    PATTERN (insn) = new_body;
		    old_set = single_set (insn);

		    /* First see if this insn remains valid when we
		       make the change.  If not, keep the INSN_CODE
		       the same and let reload fix it up.  */
		    validate_change (insn, &SET_SRC (old_set), src, 1);
		    validate_change (insn, &SET_DEST (old_set),
				     ep->to_rtx, 1);
		    if (! apply_change_group ())
		      {
			SET_SRC (old_set) = src;
			SET_DEST (old_set) = ep->to_rtx;
		      }

		    val = 1;
		    goto done;
		  }
	      }
#endif

	    /* In this case this insn isn't serving a useful purpose.  We
	       will delete it in reload_as_needed once we know that this
	       elimination is, in fact, being done.

	       If REPLACE isn't set, we can't delete this insn, but needn't
	       process it since it won't be used unless something changes.  */
	    if (replace)
	      {
		delete_dead_insn (insn);
		return 1;
	      }
	    val = 1;
	    goto done;
	  }
    }

  /* We allow one special case which happens to work on all machines we
     currently support: a single set with the source or a REG_EQUAL
     note being a PLUS of an eliminable register and a constant.  */
  plus_src = plus_cst_src = 0;
  if (old_set && REG_P (SET_DEST (old_set)))
    {
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
	plus_src = SET_SRC (old_set);
      /* First see if the source is of the form (plus (...) CST).  */
      if (plus_src
	  && CONST_INT_P (XEXP (plus_src, 1)))
	plus_cst_src = plus_src;
      else if (REG_P (SET_SRC (old_set))
	       || plus_src)
	{
	  /* Otherwise, see if we have a REG_EQUAL note of the form
	     (plus (...) CST).  */
	  rtx links;
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    {
	      if ((REG_NOTE_KIND (links) == REG_EQUAL
		   || REG_NOTE_KIND (links) == REG_EQUIV)
		  && GET_CODE (XEXP (links, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
		{
		  plus_cst_src = XEXP (links, 0);
		  break;
		}
	    }
	}

      /* Check that the first operand of the PLUS is a hard reg or
	 the lowpart subreg of one.  */
      if (plus_cst_src)
	{
	  rtx reg = XEXP (plus_cst_src, 0);
	  if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
	    reg = SUBREG_REG (reg);

	  if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
	    plus_cst_src = 0;
	}
    }
  if (plus_cst_src)
    {
      rtx reg = XEXP (plus_cst_src, 0);
      HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));

      if (GET_CODE (reg) == SUBREG)
	reg = SUBREG_REG (reg);

      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == reg && ep->can_eliminate)
	  {
	    rtx to_rtx = ep->to_rtx;
	    offset += ep->offset;
	    offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));

	    if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
	      to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
				    to_rtx);
	    /* If we have a nonzero offset, and the source is already
	       a simple REG, the following transformation would
	       increase the cost of the insn by replacing a simple REG
	       with (plus (reg sp) CST).  So try only when we already
	       had a PLUS before.  */
	    if (offset == 0 || plus_src)
	      {
		rtx new_src = plus_constant (to_rtx, offset);

		new_body = old_body;
		if (! replace)
		  {
		    new_body = copy_insn (old_body);
		    if (REG_NOTES (insn))
		      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
		  }
		PATTERN (insn) = new_body;
		old_set = single_set (insn);

		/* First see if this insn remains valid when we make the
		   change.  If not, try to replace the whole pattern with
		   a simple set (this may help if the original insn was a
		   PARALLEL that was only recognized as single_set due to
		   REG_UNUSED notes).  If this isn't valid either, keep
		   the INSN_CODE the same and let reload fix it up.  */
		if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
		  {
		    rtx new_pat = gen_rtx_SET (VOIDmode,
					       SET_DEST (old_set), new_src);

		    if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
		      SET_SRC (old_set) = new_src;
		  }
	      }
	    else
	      break;

	    val = 1;
	    /* This can't have an effect on elimination offsets, so skip right
	       to the end.  */
	    goto done;
	  }
    }

  /* Determine the effects of this insn on elimination offsets.  */
  elimination_effects (old_body, VOIDmode);

  /* Eliminate all eliminable registers occurring in operands that
     can be handled by reload.  */
  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      orig_operand[i] = recog_data.operand[i];
      substed_operand[i] = recog_data.operand[i];

      /* For an asm statement, every operand is eliminable.  */
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
	{
	  bool is_set_src, in_plus;

	  /* Check for setting a register that we know about.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i]))
	    {
	      /* If we are assigning to a register that can be eliminated, it
		 must be as part of a PARALLEL, since the code above handles
		 single SETs.  We must indicate that we can no longer
		 eliminate this reg.  */
	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
		   ep++)
		if (ep->from_rtx == orig_operand[i])
		  ep->can_eliminate = 0;
	    }

	  /* Companion to the above plus substitution, we can allow
	     invariants as the source of a plain move.  */
	  is_set_src = false;
	  if (old_set
	      && recog_data.operand_loc[i] == &SET_SRC (old_set))
	    is_set_src = true;
	  in_plus = false;
	  if (plus_src
	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
	    in_plus = true;

	  substed_operand[i]
	    = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
				replace ? insn : NULL_RTX,
				is_set_src || in_plus, false);
	  if (substed_operand[i] != orig_operand[i])
	    val = 1;
	  /* Terminate the search in check_eliminable_occurrences at
	     this point.  */
	  *recog_data.operand_loc[i] = 0;

	  /* If an output operand changed from a REG to a MEM and INSN is an
	     insn, write a CLOBBER insn.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i])
	      && MEM_P (substed_operand[i])
	      && replace)
	    emit_insn_after (gen_clobber (orig_operand[i]), insn);
	}
    }

  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i]
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];

  /* If any eliminable remain, they aren't eliminable anymore.  */
  check_eliminable_occurrences (old_body);

  /* Substitute the operands; the new values are in the substed_operand
     array.  */
  for (i = 0; i < recog_data.n_operands; i++)
    *recog_data.operand_loc[i] = substed_operand[i];
  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];

  /* If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.
     If re-recognition fails, the old insn code number will still be used,
     and some register operands may have changed into PLUS expressions.
     These will be handled by find_reloads by loading them into a register
     again.  */

  if (val)
    {
      /* If we aren't replacing things permanently and we changed something,
	 make another copy to ensure that all the RTL is new.  Otherwise
	 things can go wrong if find_reloads swaps commutative operands
	 and one is inside RTL that has been copied while the other is not.  */
      new_body = old_body;
      if (! replace)
	{
	  new_body = copy_insn (old_body);
	  if (REG_NOTES (insn))
	    REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
	}
      PATTERN (insn) = new_body;

      /* If we had a move insn but now we don't, rerecognize it.  This will
	 cause spurious re-recognition if the old move had a PARALLEL since
	 the new one still will, but we can't call single_set without
	 having put NEW_BODY into the insn and the re-recognition won't
	 hurt in this rare case.  */
      /* ??? Why this huge if statement - why don't we just rerecognize the
	 thing always?  */
      if (! insn_is_asm
	  && old_set != 0
	  && ((REG_P (SET_SRC (old_set))
	       && (GET_CODE (new_body) != SET
		   || !REG_P (SET_SRC (new_body))))
	      /* If this was a load from or store to memory, compare
		 the MEM in recog_data.operand to the one in the insn.
		 If they are not equal, then rerecognize the insn.  */
	      || (old_set != 0
		  && ((MEM_P (SET_SRC (old_set))
		       && SET_SRC (old_set) != recog_data.operand[1])
		      || (MEM_P (SET_DEST (old_set))
			  && SET_DEST (old_set) != recog_data.operand[0])))
	      /* If this was an add insn before, rerecognize.  */
	      || GET_CODE (SET_SRC (old_set)) == PLUS))
	{
	  int new_icode = recog (PATTERN (insn), insn, 0);
	  if (new_icode >= 0)
	    INSN_CODE (insn) = new_icode;
	}
    }

  /* Restore the old body.  If there were any changes to it, we made a copy
     of it while the changes were still in place, so we'll correctly return
     a modified insn below.  */
  if (! replace)
    {
      /* Restore the old body.  */
      for (i = 0; i < recog_data.n_operands; i++)
	/* Restoring a top-level match_parallel would clobber the new_body
	   we installed in the insn.  */
	if (recog_data.operand_loc[i] != &PATTERN (insn))
	  *recog_data.operand_loc[i] = orig_operand[i];
      for (i = 0; i < recog_data.n_dups; i++)
	*recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
    }

  /* Update all elimination pairs to reflect the status after the current
     insn.  The changes we make were determined by the earlier call to
     elimination_effects.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;

      if (ep->previous_offset != ep->offset)
	val = 1;
    }

 done:
  /* If we changed something, perform elimination in REG_NOTES.  This is
     needed even when REPLACE is zero because a REG_DEAD note might refer
     to a register that we eliminate and could cause a different number
     of spill registers to be needed in the final reload pass than in
     the pre-passes.  */
  if (val && REG_NOTES (insn) != 0)
    REG_NOTES (insn)
      = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true,
			  false);

  return val;
}
3620
/* Like eliminate_regs_in_insn, but only estimate costs for the use of the
   register allocator.  INSN is the instruction we need to examine, we perform
   eliminations in its operands and record cases where eliminating a reg with
   an invariant equivalence would add extra cost.

   Unlike eliminate_regs_in_insn, this never modifies INSN permanently:
   the operand locations are nulled during scanning and restored before
   returning.  Cost recording happens via note_reg_elim_costly and the
   FOR_COSTS argument of eliminate_regs_1.  */

static void
elimination_costs_in_insn (rtx insn)
{
  int icode = recog_memoized (insn);
  rtx old_body = PATTERN (insn);
  int insn_is_asm = asm_noperands (old_body) >= 0;
  rtx old_set = single_set (insn);
  int i;
  /* Saved operand and dup images, restored at the end of the scan.  */
  rtx orig_operand[MAX_RECOG_OPERANDS];
  rtx orig_dup[MAX_RECOG_OPERANDS];
  struct elim_table *ep;
  rtx plus_src, plus_cst_src;
  bool sets_reg_p;

  /* Unrecognizable non-asm insns carry no operand costs; just verify
     they are of the expected harmless kinds.  */
  if (! insn_is_asm && icode < 0)
    {
      gcc_assert (GET_CODE (PATTERN (insn)) == USE
		  || GET_CODE (PATTERN (insn)) == CLOBBER
		  || GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
		  || GET_CODE (PATTERN (insn)) == ASM_INPUT
		  || DEBUG_INSN_P (insn));
      return;
    }

  if (old_set != 0 && REG_P (SET_DEST (old_set))
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  Such an insn would be
	 deleted by eliminate_regs_in_insn, so it contributes no cost.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  return;
    }

  /* We allow one special case which happens to work on all machines we
     currently support: a single set with the source or a REG_EQUAL
     note being a PLUS of an eliminable register and a constant.  */
  plus_src = plus_cst_src = 0;
  sets_reg_p = false;
  if (old_set && REG_P (SET_DEST (old_set)))
    {
      sets_reg_p = true;
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
	plus_src = SET_SRC (old_set);
      /* First see if the source is of the form (plus (...) CST).  */
      if (plus_src
	  && CONST_INT_P (XEXP (plus_src, 1)))
	plus_cst_src = plus_src;
      else if (REG_P (SET_SRC (old_set))
	       || plus_src)
	{
	  /* Otherwise, see if we have a REG_EQUAL note of the form
	     (plus (...) CST).  */
	  rtx links;
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    {
	      if ((REG_NOTE_KIND (links) == REG_EQUAL
		   || REG_NOTE_KIND (links) == REG_EQUIV)
		  && GET_CODE (XEXP (links, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
		{
		  plus_cst_src = XEXP (links, 0);
		  break;
		}
	    }
	}
    }

  /* Determine the effects of this insn on elimination offsets.  */
  elimination_effects (old_body, VOIDmode);

  /* Eliminate all eliminable registers occurring in operands that
     can be handled by reload.  */
  extract_insn (insn);
  for (i = 0; i < recog_data.n_dups; i++)
    orig_dup[i] = *recog_data.dup_loc[i];

  for (i = 0; i < recog_data.n_operands; i++)
    {
      orig_operand[i] = recog_data.operand[i];

      /* For an asm statement, every operand is eliminable.  */
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
	{
	  bool is_set_src, in_plus;

	  /* Check for setting a register that we know about.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i]))
	    {
	      /* If we are assigning to a register that can be eliminated, it
		 must be as part of a PARALLEL, since the code above handles
		 single SETs.  We must indicate that we can no longer
		 eliminate this reg.  */
	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
		   ep++)
		if (ep->from_rtx == orig_operand[i])
		  ep->can_eliminate = 0;
	    }

	  /* Companion to the above plus substitution, we can allow
	     invariants as the source of a plain move.  */
	  is_set_src = false;
	  if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
	    is_set_src = true;
	  /* A set source whose destination is not a plain REG cannot use
	     the invariant-substitution shortcut; record it as costly.  */
	  if (is_set_src && !sets_reg_p)
	    note_reg_elim_costly (&SET_SRC (old_set), insn);
	  in_plus = false;
	  if (plus_src && sets_reg_p
	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
	    in_plus = true;

	  /* Result deliberately discarded: we only want the cost notes
	     recorded via FOR_COSTS, not the substituted rtl.  */
	  eliminate_regs_1 (recog_data.operand[i], VOIDmode,
			    NULL_RTX,
			    is_set_src || in_plus, true);
	  /* Terminate the search in check_eliminable_occurrences at
	     this point.  */
	  *recog_data.operand_loc[i] = 0;
	}
    }

  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i]
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];

  /* If any eliminable remain, they aren't eliminable anymore.  */
  check_eliminable_occurrences (old_body);

  /* Restore the old body.  */
  for (i = 0; i < recog_data.n_operands; i++)
    *recog_data.operand_loc[i] = orig_operand[i];
  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i] = orig_dup[i];

  /* Update all elimination pairs to reflect the status after the current
     insn.  The changes we make were determined by the earlier call to
     elimination_effects.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;
    }

  return;
}
3775
/* Loop through all elimination pairs.
   Record the current offset of each pair as its previous offset, and
   recalculate the number of pairs that are not at their initial
   offset.  */
3781
3782 static void
3783 update_eliminable_offsets (void)
3784 {
3785 struct elim_table *ep;
3786
3787 num_not_at_initial_offset = 0;
3788 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3789 {
3790 ep->previous_offset = ep->offset;
3791 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3792 num_not_at_initial_offset++;
3793 }
3794 }
3795
3796 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3797 replacement we currently believe is valid, mark it as not eliminable if X
3798 modifies DEST in any way other than by adding a constant integer to it.
3799
3800 If DEST is the frame pointer, we do nothing because we assume that
3801 all assignments to the hard frame pointer are nonlocal gotos and are being
3802 done at a time when they are valid and do not disturb anything else.
3803 Some machines want to eliminate a fake argument pointer with either the
3804 frame or stack pointer. Assignments to the hard frame pointer must not
3805 prevent this elimination.
3806
3807 Called via note_stores from reload before starting its passes to scan
3808 the insns of the function. */
3809
3810 static void
3811 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3812 {
3813 unsigned int i;
3814
3815 /* A SUBREG of a hard register here is just changing its mode. We should
3816 not see a SUBREG of an eliminable hard register, but check just in
3817 case. */
3818 if (GET_CODE (dest) == SUBREG)
3819 dest = SUBREG_REG (dest);
3820
3821 if (dest == hard_frame_pointer_rtx)
3822 return;
3823
3824 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3825 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3826 && (GET_CODE (x) != SET
3827 || GET_CODE (SET_SRC (x)) != PLUS
3828 || XEXP (SET_SRC (x), 0) != dest
3829 || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3830 {
3831 reg_eliminate[i].can_eliminate_previous
3832 = reg_eliminate[i].can_eliminate = 0;
3833 num_eliminable--;
3834 }
3835 }
3836
3837 /* Verify that the initial elimination offsets did not change since the
3838 last call to set_initial_elim_offsets. This is used to catch cases
3839 where something illegal happened during reload_as_needed that could
3840 cause incorrect code to be generated if we did not check for it. */
3841
3842 static bool
3843 verify_initial_elim_offsets (void)
3844 {
3845 HOST_WIDE_INT t;
3846
3847 if (!num_eliminable)
3848 return true;
3849
3850 #ifdef ELIMINABLE_REGS
3851 {
3852 struct elim_table *ep;
3853
3854 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3855 {
3856 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3857 if (t != ep->initial_offset)
3858 return false;
3859 }
3860 }
3861 #else
3862 INITIAL_FRAME_POINTER_OFFSET (t);
3863 if (t != reg_eliminate[0].initial_offset)
3864 return false;
3865 #endif
3866
3867 return true;
3868 }
3869
3870 /* Reset all offsets on eliminable registers to their initial values. */
3871
3872 static void
3873 set_initial_elim_offsets (void)
3874 {
3875 struct elim_table *ep = reg_eliminate;
3876
3877 #ifdef ELIMINABLE_REGS
3878 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3879 {
3880 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3881 ep->previous_offset = ep->offset = ep->initial_offset;
3882 }
3883 #else
3884 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3885 ep->previous_offset = ep->offset = ep->initial_offset;
3886 #endif
3887
3888 num_not_at_initial_offset = 0;
3889 }
3890
3891 /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3892
static void
set_initial_eh_label_offset (rtx label)
{
  /* Record the initial elimination offsets at the EH label LABEL, in
     the same way set_initial_label_offsets does for forced labels.  */
  set_label_offsets (label, NULL_RTX, 1);
}
3898
3899 /* Initialize the known label offsets.
3900 Set a known offset for each forced label to be at the initial offset
3901 of each elimination. We do this because we assume that all
3902 computed jumps occur from a location where each elimination is
3903 at its initial offset.
3904 For all other labels, show that we don't know the offsets. */
3905
3906 static void
3907 set_initial_label_offsets (void)
3908 {
3909 rtx x;
3910 memset (offsets_known_at, 0, num_labels);
3911
3912 for (x = forced_labels; x; x = XEXP (x, 1))
3913 if (XEXP (x, 0))
3914 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3915
3916 for_each_eh_label (set_initial_eh_label_offset);
3917 }
3918
3919 /* Set all elimination offsets to the known values for the code label given
3920 by INSN. */
3921
3922 static void
3923 set_offsets_for_label (rtx insn)
3924 {
3925 unsigned int i;
3926 int label_nr = CODE_LABEL_NUMBER (insn);
3927 struct elim_table *ep;
3928
3929 num_not_at_initial_offset = 0;
3930 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3931 {
3932 ep->offset = ep->previous_offset
3933 = offsets_at[label_nr - first_label_num][i];
3934 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3935 num_not_at_initial_offset++;
3936 }
3937 }
3938
3939 /* See if anything that happened changes which eliminations are valid.
3940 For example, on the SPARC, whether or not the frame pointer can
3941 be eliminated can depend on what registers have been used. We need
3942 not check some conditions again (such as flag_omit_frame_pointer)
3943 since they can't have changed. */
3944
static void
update_eliminables (HARD_REG_SET *pset)
{
  int previous_frame_pointer_needed = frame_pointer_needed;
  struct elim_table *ep;

  /* First pass: re-query the target hooks and disable any elimination
     that is no longer allowed.  Hard registers of eliminations that
     became invalid are added to *PSET further below so the caller can
     spill them.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if ((ep->from == HARD_FRAME_POINTER_REGNUM
         && targetm.frame_pointer_required ())
#ifdef ELIMINABLE_REGS
	|| ! targetm.can_eliminate (ep->from, ep->to)
#endif
	)
      ep->can_eliminate = 0;

  /* Look for the case where we have discovered that we can't replace
     register A with register B and that means that we will now be
     trying to replace register A with register C.  This means we can
     no longer replace register C with register B and we need to disable
     such an elimination, if it exists.  This occurs often with A == ap,
     B == sp, and C == fp.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      struct elim_table *op;
      int new_to = -1;

      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  /* Find the current elimination for ep->from, if there is a
	     new one.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == ep->from && op->can_eliminate)
	      {
		new_to = op->to;
		break;
	      }

	  /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
	     disable it.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == new_to && op->to == ep->to)
	      op->can_eliminate = 0;
	}
    }

  /* See if any registers that we thought we could eliminate the previous
     time are no longer eliminable.  If so, something has changed and we
     must spill the register.  Also, recompute the number of eliminable
     registers and see if the frame pointer is needed; it is if there is
     no elimination of the frame pointer that we can perform.  */

  frame_pointer_needed = 1;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      /* The frame pointer is unneeded only if some elimination away from
	 it is possible; when stack realignment is needed we keep it
	 regardless (the hard frame pointer case does not count).  */
      if (ep->can_eliminate
	  && ep->from == FRAME_POINTER_REGNUM
	  && ep->to != HARD_FRAME_POINTER_REGNUM
	  && (! SUPPORTS_STACK_ALIGNMENT
	      || ! crtl->stack_realign_needed))
	frame_pointer_needed = 0;

      /* An elimination valid last time but not now: record its source
	 register in *PSET so the caller spills it.  */
      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  ep->can_eliminate_previous = 0;
	  SET_HARD_REG_BIT (*pset, ep->from);
	  num_eliminable--;
	}
    }

  /* If we didn't need a frame pointer last time, but we do now, spill
     the hard frame pointer.  */
  if (frame_pointer_needed && ! previous_frame_pointer_needed)
    SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
}
4022
4023 /* Return true if X is used as the target register of an elimination. */
4024
4025 bool
4026 elimination_target_reg_p (rtx x)
4027 {
4028 struct elim_table *ep;
4029
4030 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4031 if (ep->to_rtx == x && ep->can_eliminate)
4032 return true;
4033
4034 return false;
4035 }
4036
4037 /* Initialize the table of registers to eliminate.
4038 Pre-condition: global flag frame_pointer_needed has been set before
4039 calling this function. */
4040
static void
init_elim_table (void)
{
  struct elim_table *ep;
#ifdef ELIMINABLE_REGS
  const struct elim_table_1 *ep1;
#endif

  /* Allocate the working table lazily on first use.  */
  if (!reg_eliminate)
    reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);

  num_eliminable = 0;

#ifdef ELIMINABLE_REGS
  /* Seed each entry from the target's static reg_eliminate_1 table,
     asking the target hook whether each pair is currently possible.
     Elimination to the stack pointer is additionally rejected while a
     frame pointer is needed, unless the stack is realigned via the
     frame pointer (stack_realign_fp).  */
  for (ep = reg_eliminate, ep1 = reg_eliminate_1;
       ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
    {
      ep->from = ep1->from;
      ep->to = ep1->to;
      ep->can_eliminate = ep->can_eliminate_previous
	= (targetm.can_eliminate (ep->from, ep->to)
	   && ! (ep->to == STACK_POINTER_REGNUM
		 && frame_pointer_needed
		 && (! SUPPORTS_STACK_ALIGNMENT
		     || ! stack_realign_fp)));
    }
#else
  /* Without ELIMINABLE_REGS only the single frame pointer elimination
     exists, and it is valid exactly when no frame pointer is needed.  */
  reg_eliminate[0].from = reg_eliminate_1[0].from;
  reg_eliminate[0].to = reg_eliminate_1[0].to;
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
    = ! frame_pointer_needed;
#endif

  /* Count the number of eliminable registers and build the FROM and TO
     REG rtx's.  Note that code in gen_rtx_REG will cause, e.g.,
     gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
     We depend on this.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      num_eliminable += ep->can_eliminate;
      ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
      ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
    }
}
4085
4086 /* Find all the pseudo registers that didn't get hard regs
4087 but do have known equivalent constants or memory slots.
4088 These include parameters (known equivalent to parameter slots)
4089 and cse'd or loop-moved constant memory addresses.
4090
4091 Record constant equivalents in reg_equiv_constant
4092 so they will be substituted by find_reloads.
4093 Record memory equivalents in reg_mem_equiv so they can
4094 be substituted eventually by altering the REG-rtx's. */
4095
static void
init_eliminable_invariants (rtx first, bool do_subregs)
{
  int i;
  rtx insn;

  /* Per-pseudo equivalence tables, all indexed by register number.  */
  reg_equiv_constant = XCNEWVEC (rtx, max_regno);
  reg_equiv_invariant = XCNEWVEC (rtx, max_regno);
  reg_equiv_mem = XCNEWVEC (rtx, max_regno);
  reg_equiv_alt_mem_list = XCNEWVEC (rtx, max_regno);
  reg_equiv_address = XCNEWVEC (rtx, max_regno);
  if (do_subregs)
    reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
  else
    reg_max_ref_width = NULL;

  num_eliminable_invariants = 0;

  first_label_num = get_first_label_num ();
  num_labels = max_label_num () - first_label_num;

  /* Allocate the tables used to store offset information at labels.  */
  offsets_known_at = XNEWVEC (char, num_labels);
  offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));

  /* Look for REG_EQUIV notes; record what each pseudo is equivalent
     to.  If DO_SUBREGS is true, also find all paradoxical subregs and
     find largest such for each pseudo.  FIRST is the head of the insn
     list.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      /* We may introduce USEs that we want to remove at the end, so
	 we'll mark them with QImode.  Make sure there are no
	 previously-marked insns left by say regmove.  */
      if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
	  && GET_MODE (insn) != VOIDmode)
	PUT_MODE (insn, VOIDmode);

      if (do_subregs && NONDEBUG_INSN_P (insn))
	scan_paradoxical_subregs (PATTERN (insn));

      if (set != 0 && REG_P (SET_DEST (set)))
	{
	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
	  rtx x;

	  if (! note)
	    continue;

	  i = REGNO (SET_DEST (set));
	  x = XEXP (note, 0);

	  /* Hard and virtual registers are not tracked here.  */
	  if (i <= LAST_VIRTUAL_REGISTER)
	    continue;

	  /* If flag_pic and we have constant, verify it's legitimate.  */
	  if (!CONSTANT_P (x)
	      || !flag_pic || LEGITIMATE_PIC_OPERAND_P (x))
	    {
	      /* It can happen that a REG_EQUIV note contains a MEM
		 that is not a legitimate memory operand.  As later
		 stages of reload assume that all addresses found
		 in the reg_equiv_* arrays were originally legitimate,
		 we ignore such REG_EQUIV notes.  */
	      if (memory_operand (x, VOIDmode))
		{
		  /* Always unshare the equivalence, so we can
		     substitute into this insn without touching the
		     equivalence.  */
		  reg_equiv_memory_loc[i] = copy_rtx (x);
		}
	      else if (function_invariant_p (x))
		{
		  if (GET_CODE (x) == PLUS)
		    {
		      /* This is PLUS of frame pointer and a constant,
			 and might be shared.  Unshare it.  */
		      reg_equiv_invariant[i] = copy_rtx (x);
		      num_eliminable_invariants++;
		    }
		  else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
		    {
		      reg_equiv_invariant[i] = x;
		      num_eliminable_invariants++;
		    }
		  else if (LEGITIMATE_CONSTANT_P (x))
		    reg_equiv_constant[i] = x;
		  else
		    {
		      /* A constant the target cannot accept directly:
			 force it into the constant pool and remember
			 the resulting MEM instead.  */
		      reg_equiv_memory_loc[i]
			= force_const_mem (GET_MODE (SET_DEST (set)), x);
		      if (! reg_equiv_memory_loc[i])
			reg_equiv_init[i] = NULL_RTX;
		    }
		}
	      else
		{
		  /* Not a usable equivalence; forget its init insns.  */
		  reg_equiv_init[i] = NULL_RTX;
		  continue;
		}
	    }
	  else
	    reg_equiv_init[i] = NULL_RTX;
	}
    }

  if (dump_file)
    for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
      if (reg_equiv_init[i])
	{
	  fprintf (dump_file, "init_insns for %u: ", i);
	  print_inline_rtx (dump_file, reg_equiv_init[i], 20);
	  fprintf (dump_file, "\n");
	}
}
4214
4215 /* Indicate that we no longer have known memory locations or constants.
4216 Free all data involved in tracking these. */
4217
4218 static void
4219 free_reg_equiv (void)
4220 {
4221 int i;
4222
4223 if (reg_equiv_constant)
4224 free (reg_equiv_constant);
4225 if (reg_equiv_invariant)
4226 free (reg_equiv_invariant);
4227 reg_equiv_constant = 0;
4228 reg_equiv_invariant = 0;
4229 VEC_free (rtx, gc, reg_equiv_memory_loc_vec);
4230 reg_equiv_memory_loc = 0;
4231
4232 if (offsets_known_at)
4233 free (offsets_known_at);
4234 if (offsets_at)
4235 free (offsets_at);
4236 offsets_at = 0;
4237 offsets_known_at = 0;
4238
4239 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4240 if (reg_equiv_alt_mem_list[i])
4241 free_EXPR_LIST_list (&reg_equiv_alt_mem_list[i]);
4242 free (reg_equiv_alt_mem_list);
4243
4244 free (reg_equiv_mem);
4245 free (reg_equiv_address);
4246 }
4247 \f
4248 /* Kick all pseudos out of hard register REGNO.
4249
   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In this case, no
   pseudos are allowed to be in the register, even if they are only in a
   block that doesn't require spill registers, unlike the case when we
   are spilling this hard reg to produce another spill register.

   Any pseudos that are kicked out are recorded in spilled_pseudos.  */
4257
4258 static void
4259 spill_hard_reg (unsigned int regno, int cant_eliminate)
4260 {
4261 int i;
4262
4263 if (cant_eliminate)
4264 {
4265 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
4266 df_set_regs_ever_live (regno, true);
4267 }
4268
4269 /* Spill every pseudo reg that was allocated to this reg
4270 or to something that overlaps this reg. */
4271
4272 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4273 if (reg_renumber[i] >= 0
4274 && (unsigned int) reg_renumber[i] <= regno
4275 && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
4276 SET_REGNO_REG_SET (&spilled_pseudos, i);
4277 }
4278
4279 /* After find_reload_regs has been run for all insn that need reloads,
4280 and/or spill_hard_regs was called, this function is used to actually
4281 spill pseudo registers and try to reallocate them. It also sets up the
4282 spill_regs array for use by choose_reload_regs. */
4283
static int
finish_spills (int global)
{
  struct insn_chain *chain;
  int something_changed = 0;
  unsigned i;
  reg_set_iterator rsi;

  /* Build the spill_regs array for the function.  */
  /* If there are some registers still to eliminate and one of the spill regs
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */

  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (TEST_HARD_REG_BIT (used_spill_regs, i))
      {
	spill_reg_order[i] = n_spills;
	spill_regs[n_spills++] = i;
	if (num_eliminable && ! df_regs_ever_live_p (i))
	  something_changed = 1;
	df_set_regs_ever_live (i, true);
      }
    else
      spill_reg_order[i] = -1;

  /* Give each spilled pseudo a stack home (below) after noting which
     hard register it must avoid next time.  With IRA conflicts, skip
     pseudos IRA has already taken off a hard register.  */
  EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
    if (! ira_conflicts_p || reg_renumber[i] >= 0)
      {
	/* Record the current hard register the pseudo is allocated to
	   in pseudo_previous_regs so we avoid reallocating it to the
	   same hard reg in a later pass.  */
	gcc_assert (reg_renumber[i] >= 0);

	SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	if (ira_conflicts_p)
	  /* Inform IRA about the change.  */
	  ira_mark_allocation_change (i);
	/* We will need to scan everything again.  */
	something_changed = 1;
      }

  /* Retry global register allocation if possible.  */
  if (global && ira_conflicts_p)
    {
      unsigned int n;

      memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
      /* For every insn that needs reloads, set the registers used as spill
	 regs in pseudo_forbidden_regs for every pseudo live across the
	 insn.  */
      for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
	{
	  EXECUTE_IF_SET_IN_REG_SET
	    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
	    {
	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
				chain->used_spill_regs);
	    }
	  EXECUTE_IF_SET_IN_REG_SET
	    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
	    {
	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
				chain->used_spill_regs);
	    }
	}

      /* Retry allocating the pseudos spilled in IRA and the
	 reload.  For each reg, merge the various reg sets that
	 indicate which hard regs can't be used, and call
	 ira_reassign_pseudos.  */
      for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
	if (reg_old_renumber[i] != reg_renumber[i])
	  {
	    if (reg_renumber[i] < 0)
	      temp_pseudo_reg_arr[n++] = i;
	    else
	      CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
	  }
      if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
				bad_spill_regs_global,
				pseudo_forbidden_regs, pseudo_previous_regs,
				&spilled_pseudos))
	something_changed = 1;
    }
  /* Fix up the register information in the insn chain.
     This involves deleting those of the spilled pseudos which did not get
     a new hard register home from the live_{before,after} sets.  */
  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      HARD_REG_SET used_by_pseudos;
      HARD_REG_SET used_by_pseudos2;

      if (! ira_conflicts_p)
	{
	  /* Don't do it for IRA because IRA and the reload still can
	     assign hard registers to the spilled pseudos on next
	     reload iterations.  */
	  AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
	  AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
	}
      /* Mark any unallocated hard regs as available for spills.  That
	 makes inheritance work somewhat better.  */
      if (chain->need_reload)
	{
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
	  IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);

	  compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
	  compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
	  /* Value of chain->used_spill_regs from previous iteration
	     may be not included in the value calculated here because
	     of possible removing caller-saves insns (see function
	     delete_caller_save_insns).  */
	  COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
	  AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
	}
    }

  CLEAR_REG_SET (&changed_allocation_pseudos);
  /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
  for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (reg_old_renumber[i] == regno)
	continue;

      SET_REGNO_REG_SET (&changed_allocation_pseudos, i);

      alter_reg (i, reg_old_renumber[i], false);
      reg_old_renumber[i] = regno;
      if (dump_file)
	{
	  if (regno == -1)
	    fprintf (dump_file, " Register %d now on stack.\n\n", i);
	  else
	    fprintf (dump_file, " Register %d now in %d.\n\n",
		     i, reg_renumber[i]);
	}
    }

  /* Nonzero means the caller must run another reload pass.  */
  return something_changed;
}
4437 \f
4438 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
4439
4440 static void
4441 scan_paradoxical_subregs (rtx x)
4442 {
4443 int i;
4444 const char *fmt;
4445 enum rtx_code code = GET_CODE (x);
4446
4447 switch (code)
4448 {
4449 case REG:
4450 case CONST_INT:
4451 case CONST:
4452 case SYMBOL_REF:
4453 case LABEL_REF:
4454 case CONST_DOUBLE:
4455 case CONST_FIXED:
4456 case CONST_VECTOR: /* shouldn't happen, but just in case. */
4457 case CC0:
4458 case PC:
4459 case USE:
4460 case CLOBBER:
4461 return;
4462
4463 case SUBREG:
4464 if (REG_P (SUBREG_REG (x))
4465 && (GET_MODE_SIZE (GET_MODE (x))
4466 > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4467 {
4468 reg_max_ref_width[REGNO (SUBREG_REG (x))]
4469 = GET_MODE_SIZE (GET_MODE (x));
4470 mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4471 }
4472 return;
4473
4474 default:
4475 break;
4476 }
4477
4478 fmt = GET_RTX_FORMAT (code);
4479 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4480 {
4481 if (fmt[i] == 'e')
4482 scan_paradoxical_subregs (XEXP (x, i));
4483 else if (fmt[i] == 'E')
4484 {
4485 int j;
4486 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4487 scan_paradoxical_subregs (XVECEXP (x, i, j));
4488 }
4489 }
4490 }
4491 \f
4492 /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
4493 examine all of the reload insns between PREV and NEXT exclusive, and
4494 annotate all that may trap. */
4495
4496 static void
4497 fixup_eh_region_note (rtx insn, rtx prev, rtx next)
4498 {
4499 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4500 if (note == NULL)
4501 return;
4502 if (!insn_could_throw_p (insn))
4503 remove_note (insn, note);
4504 copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4505 }
4506
4507 /* Reload pseudo-registers into hard regs around each insn as needed.
4508 Additional register load insns are output before the insn that needs it
4509 and perhaps store insns after insns that modify the reloaded pseudo reg.
4510
4511 reg_last_reload_reg and reg_reloaded_contents keep track of
4512 which registers are already available in reload registers.
4513 We update these for the reloads that we perform,
4514 as the insns are scanned. */
4515
4516 static void
4517 reload_as_needed (int live_known)
4518 {
4519 struct insn_chain *chain;
4520 #if defined (AUTO_INC_DEC)
4521 int i;
4522 #endif
4523 rtx x;
4524
4525 memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4526 memset (spill_reg_store, 0, sizeof spill_reg_store);
4527 reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4528 INIT_REG_SET (&reg_has_output_reload);
4529 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4530 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
4531
4532 set_initial_elim_offsets ();
4533
4534 for (chain = reload_insn_chain; chain; chain = chain->next)
4535 {
4536 rtx prev = 0;
4537 rtx insn = chain->insn;
4538 rtx old_next = NEXT_INSN (insn);
4539 #ifdef AUTO_INC_DEC
4540 rtx old_prev = PREV_INSN (insn);
4541 #endif
4542
4543 /* If we pass a label, copy the offsets from the label information
4544 into the current offsets of each elimination. */
4545 if (LABEL_P (insn))
4546 set_offsets_for_label (insn);
4547
4548 else if (INSN_P (insn))
4549 {
4550 regset_head regs_to_forget;
4551 INIT_REG_SET (&regs_to_forget);
4552 note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
4553
4554 /* If this is a USE and CLOBBER of a MEM, ensure that any
4555 references to eliminable registers have been removed. */
4556
4557 if ((GET_CODE (PATTERN (insn)) == USE
4558 || GET_CODE (PATTERN (insn)) == CLOBBER)
4559 && MEM_P (XEXP (PATTERN (insn), 0)))
4560 XEXP (XEXP (PATTERN (insn), 0), 0)
4561 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4562 GET_MODE (XEXP (PATTERN (insn), 0)),
4563 NULL_RTX);
4564
4565 /* If we need to do register elimination processing, do so.
4566 This might delete the insn, in which case we are done. */
4567 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4568 {
4569 eliminate_regs_in_insn (insn, 1);
4570 if (NOTE_P (insn))
4571 {
4572 update_eliminable_offsets ();
4573 CLEAR_REG_SET (&regs_to_forget);
4574 continue;
4575 }
4576 }
4577
4578 /* If need_elim is nonzero but need_reload is zero, one might think
4579 that we could simply set n_reloads to 0. However, find_reloads
4580 could have done some manipulation of the insn (such as swapping
4581 commutative operands), and these manipulations are lost during
4582 the first pass for every insn that needs register elimination.
4583 So the actions of find_reloads must be redone here. */
4584
4585 if (! chain->need_elim && ! chain->need_reload
4586 && ! chain->need_operand_change)
4587 n_reloads = 0;
4588 /* First find the pseudo regs that must be reloaded for this insn.
4589 This info is returned in the tables reload_... (see reload.h).
4590 Also modify the body of INSN by substituting RELOAD
4591 rtx's for those pseudo regs. */
4592 else
4593 {
4594 CLEAR_REG_SET (&reg_has_output_reload);
4595 CLEAR_HARD_REG_SET (reg_is_output_reload);
4596
4597 find_reloads (insn, 1, spill_indirect_levels, live_known,
4598 spill_reg_order);
4599 }
4600
4601 if (n_reloads > 0)
4602 {
4603 rtx next = NEXT_INSN (insn);
4604 rtx p;
4605
4606 prev = PREV_INSN (insn);
4607
4608 /* Now compute which reload regs to reload them into. Perhaps
4609 reusing reload regs from previous insns, or else output
4610 load insns to reload them. Maybe output store insns too.
4611 Record the choices of reload reg in reload_reg_rtx. */
4612 choose_reload_regs (chain);
4613
4614 /* Merge any reloads that we didn't combine for fear of
4615 increasing the number of spill registers needed but now
4616 discover can be safely merged. */
4617 if (targetm.small_register_classes_for_mode_p (VOIDmode))
4618 merge_assigned_reloads (insn);
4619
4620 /* Generate the insns to reload operands into or out of
4621 their reload regs. */
4622 emit_reload_insns (chain);
4623
4624 /* Substitute the chosen reload regs from reload_reg_rtx
4625 into the insn's body (or perhaps into the bodies of other
4626 load and store insn that we just made for reloading
4627 and that we moved the structure into). */
4628 subst_reloads (insn);
4629
4630 /* Adjust the exception region notes for loads and stores. */
4631 if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
4632 fixup_eh_region_note (insn, prev, next);
4633
4634 /* If this was an ASM, make sure that all the reload insns
4635 we have generated are valid. If not, give an error
4636 and delete them. */
4637 if (asm_noperands (PATTERN (insn)) >= 0)
4638 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4639 if (p != insn && INSN_P (p)
4640 && GET_CODE (PATTERN (p)) != USE
4641 && (recog_memoized (p) < 0
4642 || (extract_insn (p), ! constrain_operands (1))))
4643 {
4644 error_for_asm (insn,
4645 "%<asm%> operand requires "
4646 "impossible reload");
4647 delete_insn (p);
4648 }
4649 }
4650
4651 if (num_eliminable && chain->need_elim)
4652 update_eliminable_offsets ();
4653
4654 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4655 is no longer validly lying around to save a future reload.
4656 Note that this does not detect pseudos that were reloaded
4657 for this insn in order to be stored in
4658 (obeying register constraints). That is correct; such reload
4659 registers ARE still valid. */
4660 forget_marked_reloads (&regs_to_forget);
4661 CLEAR_REG_SET (&regs_to_forget);
4662
4663 /* There may have been CLOBBER insns placed after INSN. So scan
4664 between INSN and NEXT and use them to forget old reloads. */
4665 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4666 if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4667 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4668
4669 #ifdef AUTO_INC_DEC
4670 /* Likewise for regs altered by auto-increment in this insn.
4671 REG_INC notes have been changed by reloading:
4672 find_reloads_address_1 records substitutions for them,
4673 which have been performed by subst_reloads above. */
4674 for (i = n_reloads - 1; i >= 0; i--)
4675 {
4676 rtx in_reg = rld[i].in_reg;
4677 if (in_reg)
4678 {
4679 enum rtx_code code = GET_CODE (in_reg);
4680 /* PRE_INC / PRE_DEC will have the reload register ending up
4681 with the same value as the stack slot, but that doesn't
4682 hold true for POST_INC / POST_DEC. Either we have to
4683 convert the memory access to a true POST_INC / POST_DEC,
4684 or we can't use the reload register for inheritance. */
4685 if ((code == POST_INC || code == POST_DEC)
4686 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4687 REGNO (rld[i].reg_rtx))
4688 /* Make sure it is the inc/dec pseudo, and not
4689 some other (e.g. output operand) pseudo. */
4690 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4691 == REGNO (XEXP (in_reg, 0))))
4692
4693 {
4694 rtx reload_reg = rld[i].reg_rtx;
4695 enum machine_mode mode = GET_MODE (reload_reg);
4696 int n = 0;
4697 rtx p;
4698
4699 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4700 {
4701 /* We really want to ignore REG_INC notes here, so
4702 use PATTERN (p) as argument to reg_set_p . */
4703 if (reg_set_p (reload_reg, PATTERN (p)))
4704 break;
4705 n = count_occurrences (PATTERN (p), reload_reg, 0);
4706 if (! n)
4707 continue;
4708 if (n == 1)
4709 {
4710 rtx replace_reg
4711 = gen_rtx_fmt_e (code, mode, reload_reg);
4712
4713 validate_replace_rtx_group (reload_reg,
4714 replace_reg, p);
4715 n = verify_changes (0);
4716
4717 /* We must also verify that the constraints
4718 are met after the replacement. Make sure
4719 extract_insn is only called for an insn
4720 where the replacements were found to be
4721 valid so far. */
4722 if (n)
4723 {
4724 extract_insn (p);
4725 n = constrain_operands (1);
4726 }
4727
4728 /* If the constraints were not met, then
4729 undo the replacement, else confirm it. */
4730 if (!n)
4731 cancel_changes (0);
4732 else
4733 confirm_change_group ();
4734 }
4735 break;
4736 }
4737 if (n == 1)
4738 {
4739 add_reg_note (p, REG_INC, reload_reg);
4740 /* Mark this as having an output reload so that the
4741 REG_INC processing code below won't invalidate
4742 the reload for inheritance. */
4743 SET_HARD_REG_BIT (reg_is_output_reload,
4744 REGNO (reload_reg));
4745 SET_REGNO_REG_SET (&reg_has_output_reload,
4746 REGNO (XEXP (in_reg, 0)));
4747 }
4748 else
4749 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4750 NULL);
4751 }
4752 else if ((code == PRE_INC || code == PRE_DEC)
4753 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4754 REGNO (rld[i].reg_rtx))
4755 /* Make sure it is the inc/dec pseudo, and not
4756 some other (e.g. output operand) pseudo. */
4757 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4758 == REGNO (XEXP (in_reg, 0))))
4759 {
4760 SET_HARD_REG_BIT (reg_is_output_reload,
4761 REGNO (rld[i].reg_rtx));
4762 SET_REGNO_REG_SET (&reg_has_output_reload,
4763 REGNO (XEXP (in_reg, 0)));
4764 }
4765 else if (code == PRE_INC || code == PRE_DEC
4766 || code == POST_INC || code == POST_DEC)
4767 {
4768 int in_regno = REGNO (XEXP (in_reg, 0));
4769
4770 if (reg_last_reload_reg[in_regno] != NULL_RTX)
4771 {
4772 int in_hard_regno;
4773 bool forget_p = true;
4774
4775 in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
4776 if (TEST_HARD_REG_BIT (reg_reloaded_valid,
4777 in_hard_regno))
4778 {
4779 for (x = old_prev ? NEXT_INSN (old_prev) : insn;
4780 x != old_next;
4781 x = NEXT_INSN (x))
4782 if (x == reg_reloaded_insn[in_hard_regno])
4783 {
4784 forget_p = false;
4785 break;
4786 }
4787 }
4788 /* If for some reasons, we didn't set up
4789 reg_last_reload_reg in this insn,
4790 invalidate inheritance from previous
4791 insns for the incremented/decremented
4792 register. Such registers will be not in
4793 reg_has_output_reload. Invalidate it
4794 also if the corresponding element in
4795 reg_reloaded_insn is also
4796 invalidated. */
4797 if (forget_p)
4798 forget_old_reloads_1 (XEXP (in_reg, 0),
4799 NULL_RTX, NULL);
4800 }
4801 }
4802 }
4803 }
4804 /* If a pseudo that got a hard register is auto-incremented,
4805 we must purge records of copying it into pseudos without
4806 hard registers. */
4807 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4808 if (REG_NOTE_KIND (x) == REG_INC)
4809 {
4810 /* See if this pseudo reg was reloaded in this insn.
4811 If so, its last-reload info is still valid
4812 because it is based on this insn's reload. */
4813 for (i = 0; i < n_reloads; i++)
4814 if (rld[i].out == XEXP (x, 0))
4815 break;
4816
4817 if (i == n_reloads)
4818 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4819 }
4820 #endif
4821 }
4822 /* A reload reg's contents are unknown after a label. */
4823 if (LABEL_P (insn))
4824 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4825
4826 /* Don't assume a reload reg is still good after a call insn
4827 if it is a call-used reg, or if it contains a value that will
4828 be partially clobbered by the call. */
4829 else if (CALL_P (insn))
4830 {
4831 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4832 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4833 }
4834 }
4835
4836 /* Clean up. */
4837 free (reg_last_reload_reg);
4838 CLEAR_REG_SET (&reg_has_output_reload);
4839 }
4840
4841 /* Discard all record of any value reloaded from X,
4842 or reloaded in X from someplace else;
4843 unless X is an output reload reg of the current insn.
4844
4845 X may be a hard reg (the reload reg)
4846 or it may be a pseudo reg that was reloaded from.
4847
4848 When DATA is non-NULL just mark the registers in regset
4849 to be forgotten later. */
4850
4851 static void
4852 forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4853 void *data)
4854 {
4855 unsigned int regno;
4856 unsigned int nr;
4857 regset regs = (regset) data;
4858
4859 /* note_stores does give us subregs of hard regs,
4860 subreg_regno_offset requires a hard reg. */
4861 while (GET_CODE (x) == SUBREG)
4862 {
4863 /* We ignore the subreg offset when calculating the regno,
4864 because we are using the entire underlying hard register
4865 below. */
4866 x = SUBREG_REG (x);
4867 }
4868
4869 if (!REG_P (x))
4870 return;
4871
4872 regno = REGNO (x);
4873
4874 if (regno >= FIRST_PSEUDO_REGISTER)
4875 nr = 1;
4876 else
4877 {
4878 unsigned int i;
4879
4880 nr = hard_regno_nregs[regno][GET_MODE (x)];
4881 /* Storing into a spilled-reg invalidates its contents.
4882 This can happen if a block-local pseudo is allocated to that reg
4883 and it wasn't spilled because this block's total need is 0.
4884 Then some insn might have an optional reload and use this reg. */
4885 if (!regs)
4886 for (i = 0; i < nr; i++)
4887 /* But don't do this if the reg actually serves as an output
4888 reload reg in the current instruction. */
4889 if (n_reloads == 0
4890 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4891 {
4892 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4893 spill_reg_store[regno + i] = 0;
4894 }
4895 }
4896
4897 if (regs)
4898 while (nr-- > 0)
4899 SET_REGNO_REG_SET (regs, regno + nr);
4900 else
4901 {
4902 /* Since value of X has changed,
4903 forget any value previously copied from it. */
4904
4905 while (nr-- > 0)
4906 /* But don't forget a copy if this is the output reload
4907 that establishes the copy's validity. */
4908 if (n_reloads == 0
4909 || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4910 reg_last_reload_reg[regno + nr] = 0;
4911 }
4912 }
4913
4914 /* Forget the reloads marked in regset by previous function. */
4915 static void
4916 forget_marked_reloads (regset regs)
4917 {
4918 unsigned int reg;
4919 reg_set_iterator rsi;
4920 EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4921 {
4922 if (reg < FIRST_PSEUDO_REGISTER
4923 /* But don't do this if the reg actually serves as an output
4924 reload reg in the current instruction. */
4925 && (n_reloads == 0
4926 || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4927 {
4928 CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4929 spill_reg_store[reg] = 0;
4930 }
4931 if (n_reloads == 0
4932 || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4933 reg_last_reload_reg[reg] = 0;
4934 }
4935 }
4936 \f
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  They are
   (re)computed per insn and consulted by reload_reg_free_p and
   friends below.  */

/* If reg is unavailable for all reloads.  */
static HARD_REG_SET reload_reg_unavailable;
/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is used as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;

/* Records which hard regs are used in any way, either as explicit use or
   by being allocated to a pseudo during any point of the current insn.  */
static HARD_REG_SET reg_used_in_insn;
4975
4976 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4977 TYPE. MODE is used to indicate how many consecutive regs are
4978 actually used. */
4979
4980 static void
4981 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4982 enum machine_mode mode)
4983 {
4984 unsigned int nregs = hard_regno_nregs[regno][mode];
4985 unsigned int i;
4986
4987 for (i = regno; i < nregs + regno; i++)
4988 {
4989 switch (type)
4990 {
4991 case RELOAD_OTHER:
4992 SET_HARD_REG_BIT (reload_reg_used, i);
4993 break;
4994
4995 case RELOAD_FOR_INPUT_ADDRESS:
4996 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4997 break;
4998
4999 case RELOAD_FOR_INPADDR_ADDRESS:
5000 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
5001 break;
5002
5003 case RELOAD_FOR_OUTPUT_ADDRESS:
5004 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
5005 break;
5006
5007 case RELOAD_FOR_OUTADDR_ADDRESS:
5008 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
5009 break;
5010
5011 case RELOAD_FOR_OPERAND_ADDRESS:
5012 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
5013 break;
5014
5015 case RELOAD_FOR_OPADDR_ADDR:
5016 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
5017 break;
5018
5019 case RELOAD_FOR_OTHER_ADDRESS:
5020 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
5021 break;
5022
5023 case RELOAD_FOR_INPUT:
5024 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
5025 break;
5026
5027 case RELOAD_FOR_OUTPUT:
5028 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
5029 break;
5030
5031 case RELOAD_FOR_INSN:
5032 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
5033 break;
5034 }
5035
5036 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
5037 }
5038 }
5039
/* Similarly, but show REGNO is no longer in use for a reload of the
   sort spec'd by OPNUM and TYPE.  MODE indicates how many consecutive
   hard regs are covered, as in mark_reload_reg_in_use.  */

static void
clear_reload_reg_in_use (unsigned int regno, int opnum,
			 enum reload_type type, enum machine_mode mode)
{
  unsigned int nregs = hard_regno_nregs[regno][mode];
  unsigned int start_regno, end_regno, r;
  int i;
  /* A complication is that for some reload types, inheritance might
     allow multiple reloads of the same types to share a reload register.
     We set check_opnum if we have to check only reloads with the same
     operand number, and check_any if we have to check all reloads.  */
  int check_opnum = 0;
  int check_any = 0;
  HARD_REG_SET *used_in_set;

  /* Select the HARD_REG_SET that mark_reload_reg_in_use set bits in
     for this TYPE/OPNUM, and note which other reloads might share it.  */
  switch (type)
    {
    case RELOAD_OTHER:
      used_in_set = &reload_reg_used;
      break;

    case RELOAD_FOR_INPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_input_addr[opnum];
      break;

    case RELOAD_FOR_INPADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
      break;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_output_addr[opnum];
      break;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
      break;

    case RELOAD_FOR_OPERAND_ADDRESS:
      used_in_set = &reload_reg_used_in_op_addr;
      break;

    case RELOAD_FOR_OPADDR_ADDR:
      check_any = 1;
      used_in_set = &reload_reg_used_in_op_addr_reload;
      break;

    case RELOAD_FOR_OTHER_ADDRESS:
      used_in_set = &reload_reg_used_in_other_addr;
      check_any = 1;
      break;

    case RELOAD_FOR_INPUT:
      used_in_set = &reload_reg_used_in_input[opnum];
      break;

    case RELOAD_FOR_OUTPUT:
      used_in_set = &reload_reg_used_in_output[opnum];
      break;

    case RELOAD_FOR_INSN:
      used_in_set = &reload_reg_used_in_insn;
      break;
    default:
      gcc_unreachable ();
    }
  /* We resolve conflicts with remaining reloads of the same type by
     excluding the intervals of reload registers by them from the
     interval of freed reload registers.  Since we only keep track of
     one set of interval bounds, we might have to exclude somewhat
     more than what would be necessary if we used a HARD_REG_SET here.
     But this should only happen very infrequently, so there should
     be no reason to worry about it.  */

  start_regno = regno;
  end_regno = regno + nregs;
  if (check_opnum || check_any)
    {
      /* Shrink [start_regno, end_regno) so it does not cover any
	 register still claimed by another reload of this type.  */
      for (i = n_reloads - 1; i >= 0; i--)
	{
	  if (rld[i].when_needed == type
	      && (check_any || rld[i].opnum == opnum)
	      && rld[i].reg_rtx)
	    {
	      unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
	      unsigned int conflict_end
		= end_hard_regno (rld[i].mode, conflict_start);

	      /* If there is an overlap with the first to-be-freed register,
		 adjust the interval start.  */
	      if (conflict_start <= start_regno && conflict_end > start_regno)
		start_regno = conflict_end;
	      /* Otherwise, if there is a conflict with one of the other
		 to-be-freed registers, adjust the interval end.  */
	      if (conflict_start > start_regno && conflict_start < end_regno)
		end_regno = conflict_start;
	    }
	}
    }

  for (r = start_regno; r < end_regno; r++)
    CLEAR_HARD_REG_BIT (*used_in_set, r);
}
5146
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.

   The per-case logic below encodes the temporal ordering of reload
   types within an insn: a register is free for TYPE if nothing whose
   lifetime overlaps TYPE's already claims it.  reloads_conflict below
   implements the same conflict matrix; keep the two in sync.  */

static int
reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything.  */
  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Can't use a register if it is used for an input address
	 for this operand or used as an input in an earlier
	 one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  Note
	 that multiple output operands are emitted in reverse order, so
	 the conflicting ones are those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Can't use a register if it is used for an output address
	 for this operand or used as an output in this or a
	 later operand.  Note that multiple output operands are
	 emitted in reverse order, so the conflicting ones are
	 those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with higher indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    default:
      gcc_unreachable ();
    }
}
5307
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (unsigned int regno, int opnum, enum reload_type type)
{
  int i;

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      /* Outputs and their addresses all come after the inputs.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      /* Setting opnum here makes the fallthrough loop below check every
	 output address, since none belongs to "this or a later" output.  */
      opnum = reload_n_operands;

      /* ... fall through ...  */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with lower indices.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;

    default:
      gcc_unreachable ();
    }
}
5442
5443 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5444 every register in the range [REGNO, REGNO + NREGS). */
5445
5446 static bool
5447 reload_regs_reach_end_p (unsigned int regno, int nregs,
5448 int opnum, enum reload_type type)
5449 {
5450 int i;
5451
5452 for (i = 0; i < nregs; i++)
5453 if (!reload_reg_reaches_end_p (regno + i, opnum, type))
5454 return false;
5455 return true;
5456 }
5457 \f
5458
5459 /* Returns whether R1 and R2 are uniquely chained: the value of one
5460 is used by the other, and that value is not used by any other
5461 reload for this insn. This is used to partially undo the decision
5462 made in find_reloads when in the case of multiple
5463 RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5464 RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5465 reloads. This code tries to avoid the conflict created by that
5466 change. It might be cleaner to explicitly keep track of which
5467 RELOAD_FOR_OPADDR_ADDR reload is associated with which
5468 RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5469 this after the fact. */
5470 static bool
5471 reloads_unique_chain_p (int r1, int r2)
5472 {
5473 int i;
5474
5475 /* We only check input reloads. */
5476 if (! rld[r1].in || ! rld[r2].in)
5477 return false;
5478
5479 /* Avoid anything with output reloads. */
5480 if (rld[r1].out || rld[r2].out)
5481 return false;
5482
5483 /* "chained" means one reload is a component of the other reload,
5484 not the same as the other reload. */
5485 if (rld[r1].opnum != rld[r2].opnum
5486 || rtx_equal_p (rld[r1].in, rld[r2].in)
5487 || rld[r1].optional || rld[r2].optional
5488 || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5489 || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5490 return false;
5491
5492 for (i = 0; i < n_reloads; i ++)
5493 /* Look for input reloads that aren't our two */
5494 if (i != r1 && i != r2 && rld[i].in)
5495 {
5496 /* If our reload is mentioned at all, it isn't a simple chain. */
5497 if (reg_mentioned_p (rld[r1].in, rld[i].in))
5498 return false;
5499 }
5500 return true;
5501 }
5502
5503 /* The recursive function change all occurrences of WHAT in *WHERE
5504 to REPL. */
5505 static void
5506 substitute (rtx *where, const_rtx what, rtx repl)
5507 {
5508 const char *fmt;
5509 int i;
5510 enum rtx_code code;
5511
5512 if (*where == 0)
5513 return;
5514
5515 if (*where == what || rtx_equal_p (*where, what))
5516 {
5517 /* Record the location of the changed rtx. */
5518 VEC_safe_push (rtx_p, heap, substitute_stack, where);
5519 *where = repl;
5520 return;
5521 }
5522
5523 code = GET_CODE (*where);
5524 fmt = GET_RTX_FORMAT (code);
5525 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5526 {
5527 if (fmt[i] == 'E')
5528 {
5529 int j;
5530
5531 for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5532 substitute (&XVECEXP (*where, i, j), what, repl);
5533 }
5534 else if (fmt[i] == 'e')
5535 substitute (&XEXP (*where, i), what, repl);
5536 }
5537 }
5538
/* The function returns TRUE if chain of reload R1 and R2 (in any
   order) can be evaluated without usage of an intermediate register
   for the reload containing the other reload.  It is important to see
   gen_reload to understand what the function is trying to do.  As an
   example, let us have reload chain

      r2: const
      r1: <something> + const

   and reload R2 got reload reg HR.  The function returns true if
   there is a correct insn HR = HR + <something>.  Otherwise,
   gen_reload will use an intermediate register (and this is the reload
   reg for R1) to reload <something>.

   We need this function to find a conflict for chain reloads.  In our
   example, if HR = HR + <something> is incorrect insn, then we cannot
   use HR as a reload register for R2.  If we do use it then we get a
   wrong code:

      HR = const
      HR = <something>
      HR = HR + HR

   */
static bool
gen_reload_chain_without_interm_reg_p (int r1, int r2)
{
  /* Assume other cases in gen_reload are not possible for
     chain reloads or do need an intermediate hard registers.  */
  bool result = true;
  int regno, n, code;
  rtx out, in, tem, insn;
  rtx last = get_last_insn ();

  /* Make r2 a component of r1.  */
  if (reg_mentioned_p (rld[r1].in, rld[r2].in))
    {
      n = r1;
      r1 = r2;
      r2 = n;
    }
  gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
  regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
  gcc_assert (regno >= 0);
  out = gen_rtx_REG (rld[r1].mode, regno);
  in = rld[r1].in;
  /* Replace R2's value inside R1's input with R2's reload reg, i.e.
     build the HR = HR + <something> candidate.  substitute records
     each changed location on substitute_stack for the undo below.  */
  substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (GET_CODE (in) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (in))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
    in = SUBREG_REG (in), out = tem;

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* Emit the candidate insn and test whether the target
	 recognizes it; it is deleted again before returning.  */
      insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
      code = recog_memoized (insn);
      result = false;

      if (code >= 0)
	{
	  extract_insn (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after
	     reload has completed.  */
	  result = constrain_operands (1);
	}

      delete_insns_since (last);
    }

  /* Restore the original value at each changed address within R1.  */
  while (!VEC_empty (rtx_p, substitute_stack))
    {
      rtx *where = VEC_pop (rtx_p, substitute_stack);
      *where = rld[r2].in;
    }

  return result;
}
5629
5630 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5631 Return 0 otherwise.
5632
5633 This function uses the same algorithm as reload_reg_free_p above. */
5634
5635 static int
5636 reloads_conflict (int r1, int r2)
5637 {
5638 enum reload_type r1_type = rld[r1].when_needed;
5639 enum reload_type r2_type = rld[r2].when_needed;
5640 int r1_opnum = rld[r1].opnum;
5641 int r2_opnum = rld[r2].opnum;
5642
5643 /* RELOAD_OTHER conflicts with everything. */
5644 if (r2_type == RELOAD_OTHER)
5645 return 1;
5646
5647 /* Otherwise, check conflicts differently for each type. */
5648
5649 switch (r1_type)
5650 {
5651 case RELOAD_FOR_INPUT:
5652 return (r2_type == RELOAD_FOR_INSN
5653 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5654 || r2_type == RELOAD_FOR_OPADDR_ADDR
5655 || r2_type == RELOAD_FOR_INPUT
5656 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5657 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5658 && r2_opnum > r1_opnum));
5659
5660 case RELOAD_FOR_INPUT_ADDRESS:
5661 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5662 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5663
5664 case RELOAD_FOR_INPADDR_ADDRESS:
5665 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5666 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5667
5668 case RELOAD_FOR_OUTPUT_ADDRESS:
5669 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5670 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5671
5672 case RELOAD_FOR_OUTADDR_ADDRESS:
5673 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5674 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5675
5676 case RELOAD_FOR_OPERAND_ADDRESS:
5677 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5678 || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5679 && (!reloads_unique_chain_p (r1, r2)
5680 || !gen_reload_chain_without_interm_reg_p (r1, r2))));
5681
5682 case RELOAD_FOR_OPADDR_ADDR:
5683 return (r2_type == RELOAD_FOR_INPUT
5684 || r2_type == RELOAD_FOR_OPADDR_ADDR);
5685
5686 case RELOAD_FOR_OUTPUT:
5687 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5688 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5689 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5690 && r2_opnum >= r1_opnum));
5691
5692 case RELOAD_FOR_INSN:
5693 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5694 || r2_type == RELOAD_FOR_INSN
5695 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5696
5697 case RELOAD_FOR_OTHER_ADDRESS:
5698 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5699
5700 case RELOAD_OTHER:
5701 return 1;
5702
5703 default:
5704 gcc_unreachable ();
5705 }
5706 }
5707 \f
/* Indexed by reload number, 1 if incoming value
   inherited from previous insns.  */
static char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
static rtx reload_inheritance_insn[MAX_RELOADS];

/* If nonzero, this is a place to get the value of the reload,
   rather than using reload_in.  */
static rtx reload_override_in[MAX_RELOADS];

/* For each reload, the hard register number of the register used,
   or -1 if we did not need a register for this reload.  */
static int reload_spill_index[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the input mode.  */
static rtx reload_reg_rtx_for_input[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the output mode.  */
static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5729
/* Subroutine of free_for_value_p, used to check a single register.
   START_REGNO is the starting regno of the full reload register
   (possibly comprising multiple hard registers) that we are considering.
   REGNO is the individual hard register being checked here.  OPNUM and
   TYPE describe the reload for which the register would be used, VALUE
   is the value to be loaded and OUT the output (if any).  RELOADNUM is
   the number of the reload under consideration, and when
   IGNORE_ADDRESS_RELOADS is nonzero, conflicts with reloads that merely
   compute an address for reload RELOADNUM may be disregarded.
   Return 1 if the register is usable, 0 on any conflict.  */

static int
reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
			     enum reload_type type, rtx value, rtx out,
			     int reloadnum, int ignore_address_reloads)
{
  int time1;
  /* Set if we see an input reload that must not share its reload register
     with any new earlyclobber, but might otherwise share the reload
     register with an output or input-output reload.  */
  int check_earlyclobber = 0;
  int i;
  int copy = 0;

  if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  /* OUT == const0_rtx is a special marker: we are testing a copy from
     REGNO into the reload register, not the use of REGNO itself.  */
  if (out == const0_rtx)
    {
      copy = 1;
      out = NULL_RTX;
    }

  /* We use some pseudo 'time' value to check if the lifetimes of the
     new register use would overlap with the one of a previous reload
     that is not read-only or uses a different value.
     The 'time' used doesn't have to be linear in any shape or form, just
     monotonic.
     Some reload types use different 'buckets' for each operand.
     So there are MAX_RECOG_OPERANDS different time values for each
     such reload type.
     We compute TIME1 as the time when the register for the prospective
     new reload ceases to be live, and TIME2 for each existing
     reload as the time when that the reload register of that reload
     becomes live.
     Where there is little to be gained by exact lifetime calculations,
     we just make conservative assumptions, i.e. a longer lifetime;
     this is done in the 'default:' cases.  */
  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
      time1 = copy ? 0 : 1;
      break;
    case RELOAD_OTHER:
      time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
      break;
      /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
	 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
	 respectively, to the time values for these, we get distinct time
	 values.  To get distinct time values for each operand, we have to
	 multiply opnum by at least three.  We round that up to four because
	 multiply by four is often cheaper.  */
    case RELOAD_FOR_INPADDR_ADDRESS:
      time1 = opnum * 4 + 2;
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      time1 = opnum * 4 + 3;
      break;
    case RELOAD_FOR_INPUT:
      /* All RELOAD_FOR_INPUT reloads remain live till the instruction
	 executes (inclusive).  */
      time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      /* opnum * 4 + 4
	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4  */
      time1 = MAX_RECOG_OPERANDS * 4 + 1;
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
	 is executed.  */
      time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
      break;
    default:
      time1 = MAX_RECOG_OPERANDS * 5 + 5;
    }

  /* Now compare against the reload register of every other reload.  */
  for (i = 0; i < n_reloads; i++)
    {
      rtx reg = rld[i].reg_rtx;
      /* The unsigned subtraction below also rejects the case
	 regno < true_regnum (reg), since that wraps to a huge value;
	 it is a range check that REGNO lies within REG's hard regs.  */
      if (reg && REG_P (reg)
	  && ((unsigned) regno - true_regnum (reg)
	      <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
	  && i != reloadnum)
	{
	  rtx other_input = rld[i].in;

	  /* If the other reload loads the same input value, that
	     will not cause a conflict only if it's loading it into
	     the same register.  */
	  if (true_regnum (reg) != start_regno)
	    other_input = NULL_RTX;
	  if (! other_input || ! rtx_equal_p (other_input, value)
	      || rld[i].out || out)
	    {
	      int time2;
	      switch (rld[i].when_needed)
		{
		case RELOAD_FOR_OTHER_ADDRESS:
		  time2 = 0;
		  break;
		case RELOAD_FOR_INPADDR_ADDRESS:
		  /* find_reloads makes sure that a
		     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
		     by at most one - the first -
		     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
		     address reload is inherited, the address address reload
		     goes away, so we can ignore this conflict.  */
		  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
			 Then the address address is still needed to store
			 back the new address.  */
		      && ! rld[reloadnum].out)
		    continue;
		  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
		     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
		     reloads go away.  */
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      /* Unless we are reloading an auto_inc expression.  */
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 2;
		  break;
		case RELOAD_FOR_INPUT_ADDRESS:
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 3;
		  break;
		case RELOAD_FOR_INPUT:
		  time2 = rld[i].opnum * 4 + 4;
		  check_earlyclobber = 1;
		  break;
		  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
		     == MAX_RECOG_OPERAND * 4  */
		case RELOAD_FOR_OPADDR_ADDR:
		  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 1;
		  break;
		case RELOAD_FOR_OPERAND_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 2;
		  check_earlyclobber = 1;
		  break;
		case RELOAD_FOR_INSN:
		  time2 = MAX_RECOG_OPERANDS * 4 + 3;
		  break;
		case RELOAD_FOR_OUTPUT:
		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
		     instruction is executed.  */
		  time2 = MAX_RECOG_OPERANDS * 4 + 4;
		  break;
		  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
		     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
		     value.  */
		case RELOAD_FOR_OUTADDR_ADDRESS:
		  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
		  break;
		case RELOAD_FOR_OUTPUT_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
		  break;
		case RELOAD_OTHER:
		  /* If there is no conflict in the input part, handle this
		     like an output reload.  */
		  if (! rld[i].in || rtx_equal_p (other_input, value))
		    {
		      time2 = MAX_RECOG_OPERANDS * 4 + 4;
		      /* Earlyclobbered outputs must conflict with inputs.  */
		      if (earlyclobber_operand_p (rld[i].out))
			time2 = MAX_RECOG_OPERANDS * 4 + 3;

		      break;
		    }
		  time2 = 1;
		  /* RELOAD_OTHER might be live beyond instruction execution,
		     but this is not obvious when we set time2 = 1.  So check
		     here if there might be a problem with the new reload
		     clobbering the register used by the RELOAD_OTHER.  */
		  if (out)
		    return 0;
		  break;
		default:
		  return 0;
		}
	      /* Conflict if the other reload's register becomes live
		 (TIME2) no later than ours dies (TIME1) and the values
		 cannot be shared, or if our output would clobber a
		 register the other reload still needs at output time.  */
	      if ((time1 >= time2
		   && (! rld[i].in || rld[i].out
		       || ! rtx_equal_p (other_input, value)))
		  || (out && rld[reloadnum].out_reg
		      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
		return 0;
	    }
	}
    }

  /* Earlyclobbered outputs must conflict with inputs.  */
  if (check_earlyclobber && out && earlyclobber_operand_p (out))
    return 0;

  return 1;
}
5949
5950 /* Return 1 if the value in reload reg REGNO, as used by a reload
5951 needed for the part of the insn specified by OPNUM and TYPE,
5952 may be used to load VALUE into it.
5953
5954 MODE is the mode in which the register is used, this is needed to
5955 determine how many hard regs to test.
5956
5957 Other read-only reloads with the same value do not conflict
5958 unless OUT is nonzero and these other reloads have to live while
5959 output reloads live.
5960 If OUT is CONST0_RTX, this is a special case: it means that the
5961 test should not be for using register REGNO as reload register, but
5962 for copying from register REGNO into the reload register.
5963
5964 RELOADNUM is the number of the reload we want to load this value for;
5965 a reload does not conflict with itself.
5966
5967 When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
5968 reloads that load an address for the very reload we are considering.
5969
5970 The caller has to make sure that there is no conflict with the return
5971 register. */
5972
5973 static int
5974 free_for_value_p (int regno, enum machine_mode mode, int opnum,
5975 enum reload_type type, rtx value, rtx out, int reloadnum,
5976 int ignore_address_reloads)
5977 {
5978 int nregs = hard_regno_nregs[regno][mode];
5979 while (nregs-- > 0)
5980 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5981 value, out, reloadnum,
5982 ignore_address_reloads))
5983 return 0;
5984 return 1;
5985 }
5986
5987 /* Return nonzero if the rtx X is invariant over the current function. */
5988 /* ??? Actually, the places where we use this expect exactly what is
5989 tested here, and not everything that is function invariant. In
5990 particular, the frame pointer and arg pointer are special cased;
5991 pic_offset_table_rtx is not, and we must not spill these things to
5992 memory. */
5993
5994 int
5995 function_invariant_p (const_rtx x)
5996 {
5997 if (CONSTANT_P (x))
5998 return 1;
5999 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
6000 return 1;
6001 if (GET_CODE (x) == PLUS
6002 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
6003 && GET_CODE (XEXP (x, 1)) == CONST_INT)
6004 return 1;
6005 return 0;
6006 }
6007
6008 /* Determine whether the reload reg X overlaps any rtx'es used for
6009 overriding inheritance. Return nonzero if so. */
6010
6011 static int
6012 conflicts_with_override (rtx x)
6013 {
6014 int i;
6015 for (i = 0; i < n_reloads; i++)
6016 if (reload_override_in[i]
6017 && reg_overlap_mentioned_p (x, reload_override_in[i]))
6018 return 1;
6019 return 0;
6020 }
6021 \f
6022 /* Give an error message saying we failed to find a reload for INSN,
6023 and clear out reload R. */
6024 static void
6025 failed_reload (rtx insn, int r)
6026 {
6027 if (asm_noperands (PATTERN (insn)) < 0)
6028 /* It's the compiler's fault. */
6029 fatal_insn ("could not find a spill register", insn);
6030
6031 /* It's the user's fault; the operand's mode and constraint
6032 don't match. Disable this reload so we don't crash in final. */
6033 error_for_asm (insn,
6034 "%<asm%> operand constraint incompatible with operand size");
6035 rld[r].in = 0;
6036 rld[r].out = 0;
6037 rld[r].reg_rtx = 0;
6038 rld[r].optional = 1;
6039 rld[r].secondary_p = 1;
6040 }
6041
6042 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
6043 for reload R. If it's valid, get an rtx for it. Return nonzero if
6044 successful. */
6045 static int
6046 set_reload_reg (int i, int r)
6047 {
6048 int regno;
6049 rtx reg = spill_reg_rtx[i];
6050
6051 if (reg == 0 || GET_MODE (reg) != rld[r].mode)
6052 spill_reg_rtx[i] = reg
6053 = gen_rtx_REG (rld[r].mode, spill_regs[i]);
6054
6055 regno = true_regnum (reg);
6056
6057 /* Detect when the reload reg can't hold the reload mode.
6058 This used to be one `if', but Sequent compiler can't handle that. */
6059 if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
6060 {
6061 enum machine_mode test_mode = VOIDmode;
6062 if (rld[r].in)
6063 test_mode = GET_MODE (rld[r].in);
6064 /* If rld[r].in has VOIDmode, it means we will load it
6065 in whatever mode the reload reg has: to wit, rld[r].mode.
6066 We have already tested that for validity. */
6067 /* Aside from that, we need to test that the expressions
6068 to reload from or into have modes which are valid for this
6069 reload register. Otherwise the reload insns would be invalid. */
6070 if (! (rld[r].in != 0 && test_mode != VOIDmode
6071 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
6072 if (! (rld[r].out != 0
6073 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
6074 {
6075 /* The reg is OK. */
6076 last_spill_reg = i;
6077
6078 /* Mark as in use for this insn the reload regs we use
6079 for this. */
6080 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
6081 rld[r].when_needed, rld[r].mode);
6082
6083 rld[r].reg_rtx = reg;
6084 reload_spill_index[r] = spill_regs[i];
6085 return 1;
6086 }
6087 }
6088 return 0;
6089 }
6090
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is nonzero if this is the last reload for the insn being
   processed.

   Set rld[R].reg_rtx to the register allocated.

   We return 1 if successful, or 0 if we couldn't find a spill reg and
   we didn't change anything.  */

static int
allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
		     int last_reload)
{
  int i, pass, count;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = rld[r].nregs > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use three passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers which are not "bad", then
     finally any register.

     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 3; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  */

      i = last_spill_reg;

      for (count = 0; count < n_spills; count++)
	{
	  int rclass = (int) rld[r].rclass;
	  int regnum;

	  /* Step to the next spill reg, wrapping around.  */
	  i++;
	  if (i >= n_spills)
	    i -= n_spills;
	  regnum = spill_regs[i];

	  if ((reload_reg_free_p (regnum, rld[r].opnum,
				  rld[r].when_needed)
	       || (rld[r].in
		   /* We check reload_reg_used to make sure we
		      don't clobber the return register.  */
		   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
		   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
					rld[r].when_needed, rld[r].in,
					rld[r].out, r, 1)))
	      && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
	      && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 regnum)
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      regnum))))
	    {
	      int nr = hard_regno_nregs[regnum][rld[r].mode];

	      /* During the second pass we want to avoid reload registers
		 which are "bad" for this reload.  */
	      if (pass == 1
		  && ira_bad_reload_regno (regnum, rld[r].in, rld[r].out))
		continue;

	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = rld[r].nregs;
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.  NR counts down to 1 only if every
		 register in the group passes.  */
	      while (nr > 1)
		{
		  int regno = regnum + nr - 1;
		  if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
			&& spill_reg_order[regno] >= 0
			&& reload_reg_free_p (regno, rld[r].opnum,
					      rld[r].when_needed)))
		    break;
		  nr--;
		}
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on the current pass, omit later passes.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count >= n_spills)
    return 0;

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  return set_reload_reg (i, r);
}
6226 \f
/* Initialize all the tables needed to allocate reload registers.
   CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
   is the array we use to restore the reg_rtx field for every reload.  */

static void
choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
{
  int i;

  /* Restore each reload's register choice to its saved state.  */
  for (i = 0; i < n_reloads; i++)
    rld[i].reg_rtx = save_reload_reg_rtx[i];

  /* Forget any inheritance information from a previous attempt.  */
  memset (reload_inherited, 0, MAX_RELOADS);
  memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
  memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));

  /* No reload registers are in use yet for any part of the insn.  */
  CLEAR_HARD_REG_SET (reload_reg_used);
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);

  /* Collect the hard registers occupied around this insn: those live
     throughout it, those dead or set by it, and the hard registers
     backing pseudos in either set.  */
  CLEAR_HARD_REG_SET (reg_used_in_insn);
  {
    HARD_REG_SET tmp;
    REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
    REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
    compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
    compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
  }

  for (i = 0; i < reload_n_operands; i++)
    {
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
    }

  /* Registers outside this chain's spill set are off limits.  */
  COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);

  CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);

  for (i = 0; i < n_reloads; i++)
    /* If we have already decided to use a certain register,
       don't use it in another way.  */
    if (rld[i].reg_rtx)
      mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
			      rld[i].when_needed, rld[i].mode);
}
6282
6283 /* Assign hard reg targets for the pseudo-registers we must reload
6284 into hard regs for this insn.
6285 Also output the instructions to copy them in and out of the hard regs.
6286
6287 For machines with register classes, we are responsible for
6288 finding a reload reg in the proper class. */
6289
6290 static void
6291 choose_reload_regs (struct insn_chain *chain)
6292 {
6293 rtx insn = chain->insn;
6294 int i, j;
6295 unsigned int max_group_size = 1;
6296 enum reg_class group_class = NO_REGS;
6297 int pass, win, inheritance;
6298
6299 rtx save_reload_reg_rtx[MAX_RELOADS];
6300
6301 /* In order to be certain of getting the registers we need,
6302 we must sort the reloads into order of increasing register class.
6303 Then our grabbing of reload registers will parallel the process
6304 that provided the reload registers.
6305
6306 Also note whether any of the reloads wants a consecutive group of regs.
6307 If so, record the maximum size of the group desired and what
6308 register class contains all the groups needed by this insn. */
6309
6310 for (j = 0; j < n_reloads; j++)
6311 {
6312 reload_order[j] = j;
6313 if (rld[j].reg_rtx != NULL_RTX)
6314 {
6315 gcc_assert (REG_P (rld[j].reg_rtx)
6316 && HARD_REGISTER_P (rld[j].reg_rtx));
6317 reload_spill_index[j] = REGNO (rld[j].reg_rtx);
6318 }
6319 else
6320 reload_spill_index[j] = -1;
6321
6322 if (rld[j].nregs > 1)
6323 {
6324 max_group_size = MAX (rld[j].nregs, max_group_size);
6325 group_class
6326 = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
6327 }
6328
6329 save_reload_reg_rtx[j] = rld[j].reg_rtx;
6330 }
6331
6332 if (n_reloads > 1)
6333 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
6334
6335 /* If -O, try first with inheritance, then turning it off.
6336 If not -O, don't do inheritance.
6337 Using inheritance when not optimizing leads to paradoxes
6338 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6339 because one side of the comparison might be inherited. */
6340 win = 0;
6341 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
6342 {
6343 choose_reload_regs_init (chain, save_reload_reg_rtx);
6344
6345 /* Process the reloads in order of preference just found.
6346 Beyond this point, subregs can be found in reload_reg_rtx.
6347
6348 This used to look for an existing reloaded home for all of the
6349 reloads, and only then perform any new reloads. But that could lose
6350 if the reloads were done out of reg-class order because a later
6351 reload with a looser constraint might have an old home in a register
6352 needed by an earlier reload with a tighter constraint.
6353
6354 To solve this, we make two passes over the reloads, in the order
6355 described above. In the first pass we try to inherit a reload
6356 from a previous insn. If there is a later reload that needs a
6357 class that is a proper subset of the class being processed, we must
6358 also allocate a spill register during the first pass.
6359
6360 Then make a second pass over the reloads to allocate any reloads
6361 that haven't been given registers yet. */
6362
6363 for (j = 0; j < n_reloads; j++)
6364 {
6365 int r = reload_order[j];
6366 rtx search_equiv = NULL_RTX;
6367
6368 /* Ignore reloads that got marked inoperative. */
6369 if (rld[r].out == 0 && rld[r].in == 0
6370 && ! rld[r].secondary_p)
6371 continue;
6372
6373 /* If find_reloads chose to use reload_in or reload_out as a reload
6374 register, we don't need to chose one. Otherwise, try even if it
6375 found one since we might save an insn if we find the value lying
6376 around.
6377 Try also when reload_in is a pseudo without a hard reg. */
6378 if (rld[r].in != 0 && rld[r].reg_rtx != 0
6379 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6380 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6381 && !MEM_P (rld[r].in)
6382 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6383 continue;
6384
6385 #if 0 /* No longer needed for correct operation.
6386 It might give better code, or might not; worth an experiment? */
6387 /* If this is an optional reload, we can't inherit from earlier insns
6388 until we are sure that any non-optional reloads have been allocated.
6389 The following code takes advantage of the fact that optional reloads
6390 are at the end of reload_order. */
6391 if (rld[r].optional != 0)
6392 for (i = 0; i < j; i++)
6393 if ((rld[reload_order[i]].out != 0
6394 || rld[reload_order[i]].in != 0
6395 || rld[reload_order[i]].secondary_p)
6396 && ! rld[reload_order[i]].optional
6397 && rld[reload_order[i]].reg_rtx == 0)
6398 allocate_reload_reg (chain, reload_order[i], 0);
6399 #endif
6400
6401 /* First see if this pseudo is already available as reloaded
6402 for a previous insn. We cannot try to inherit for reloads
6403 that are smaller than the maximum number of registers needed
6404 for groups unless the register we would allocate cannot be used
6405 for the groups.
6406
6407 We could check here to see if this is a secondary reload for
6408 an object that is already in a register of the desired class.
6409 This would avoid the need for the secondary reload register.
6410 But this is complex because we can't easily determine what
6411 objects might want to be loaded via this reload. So let a
6412 register be allocated here. In `emit_reload_insns' we suppress
6413 one of the loads in the case described above. */
6414
6415 if (inheritance)
6416 {
6417 int byte = 0;
6418 int regno = -1;
6419 enum machine_mode mode = VOIDmode;
6420
6421 if (rld[r].in == 0)
6422 ;
6423 else if (REG_P (rld[r].in))
6424 {
6425 regno = REGNO (rld[r].in);
6426 mode = GET_MODE (rld[r].in);
6427 }
6428 else if (REG_P (rld[r].in_reg))
6429 {
6430 regno = REGNO (rld[r].in_reg);
6431 mode = GET_MODE (rld[r].in_reg);
6432 }
6433 else if (GET_CODE (rld[r].in_reg) == SUBREG
6434 && REG_P (SUBREG_REG (rld[r].in_reg)))
6435 {
6436 regno = REGNO (SUBREG_REG (rld[r].in_reg));
6437 if (regno < FIRST_PSEUDO_REGISTER)
6438 regno = subreg_regno (rld[r].in_reg);
6439 else
6440 byte = SUBREG_BYTE (rld[r].in_reg);
6441 mode = GET_MODE (rld[r].in_reg);
6442 }
6443 #ifdef AUTO_INC_DEC
6444 else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6445 && REG_P (XEXP (rld[r].in_reg, 0)))
6446 {
6447 regno = REGNO (XEXP (rld[r].in_reg, 0));
6448 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6449 rld[r].out = rld[r].in;
6450 }
6451 #endif
6452 #if 0
6453 /* This won't work, since REGNO can be a pseudo reg number.
6454 Also, it takes much more hair to keep track of all the things
6455 that can invalidate an inherited reload of part of a pseudoreg. */
6456 else if (GET_CODE (rld[r].in) == SUBREG
6457 && REG_P (SUBREG_REG (rld[r].in)))
6458 regno = subreg_regno (rld[r].in);
6459 #endif
6460
6461 if (regno >= 0
6462 && reg_last_reload_reg[regno] != 0
6463 #ifdef CANNOT_CHANGE_MODE_CLASS
6464 /* Verify that the register it's in can be used in
6465 mode MODE. */
6466 && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
6467 GET_MODE (reg_last_reload_reg[regno]),
6468 mode)
6469 #endif
6470 )
6471 {
6472 enum reg_class rclass = rld[r].rclass, last_class;
6473 rtx last_reg = reg_last_reload_reg[regno];
6474 enum machine_mode need_mode;
6475
6476 i = REGNO (last_reg);
6477 i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6478 last_class = REGNO_REG_CLASS (i);
6479
6480 if (byte == 0)
6481 need_mode = mode;
6482 else
6483 need_mode
6484 = smallest_mode_for_size
6485 (GET_MODE_BITSIZE (mode) + byte * BITS_PER_UNIT,
6486 GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
6487 ? MODE_INT : GET_MODE_CLASS (mode));
6488
6489 if ((GET_MODE_SIZE (GET_MODE (last_reg))
6490 >= GET_MODE_SIZE (need_mode))
6491 && reg_reloaded_contents[i] == regno
6492 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6493 && HARD_REGNO_MODE_OK (i, rld[r].mode)
6494 && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6495 /* Even if we can't use this register as a reload
6496 register, we might use it for reload_override_in,
6497 if copying it to the desired class is cheap
6498 enough. */
6499 || ((REGISTER_MOVE_COST (mode, last_class, rclass)
6500 < memory_move_cost (mode, rclass, true))
6501 && (secondary_reload_class (1, rclass, mode,
6502 last_reg)
6503 == NO_REGS)
6504 #ifdef SECONDARY_MEMORY_NEEDED
6505 && ! SECONDARY_MEMORY_NEEDED (last_class, rclass,
6506 mode)
6507 #endif
6508 ))
6509
6510 && (rld[r].nregs == max_group_size
6511 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6512 i))
6513 && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6514 rld[r].when_needed, rld[r].in,
6515 const0_rtx, r, 1))
6516 {
6517 /* If a group is needed, verify that all the subsequent
6518 registers still have their values intact. */
6519 int nr = hard_regno_nregs[i][rld[r].mode];
6520 int k;
6521
6522 for (k = 1; k < nr; k++)
6523 if (reg_reloaded_contents[i + k] != regno
6524 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6525 break;
6526
6527 if (k == nr)
6528 {
6529 int i1;
6530 int bad_for_class;
6531
6532 last_reg = (GET_MODE (last_reg) == mode
6533 ? last_reg : gen_rtx_REG (mode, i));
6534
6535 bad_for_class = 0;
6536 for (k = 0; k < nr; k++)
6537 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6538 i+k);
6539
6540 /* We found a register that contains the
6541 value we need. If this register is the
6542 same as an `earlyclobber' operand of the
6543 current insn, just mark it as a place to
6544 reload from since we can't use it as the
6545 reload register itself. */
6546
6547 for (i1 = 0; i1 < n_earlyclobbers; i1++)
6548 if (reg_overlap_mentioned_for_reload_p
6549 (reg_last_reload_reg[regno],
6550 reload_earlyclobbers[i1]))
6551 break;
6552
6553 if (i1 != n_earlyclobbers
6554 || ! (free_for_value_p (i, rld[r].mode,
6555 rld[r].opnum,
6556 rld[r].when_needed, rld[r].in,
6557 rld[r].out, r, 1))
6558 /* Don't use it if we'd clobber a pseudo reg. */
6559 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6560 && rld[r].out
6561 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6562 /* Don't clobber the frame pointer. */
6563 || (i == HARD_FRAME_POINTER_REGNUM
6564 && frame_pointer_needed
6565 && rld[r].out)
6566 /* Don't really use the inherited spill reg
6567 if we need it wider than we've got it. */
6568 || (GET_MODE_SIZE (rld[r].mode)
6569 > GET_MODE_SIZE (mode))
6570 || bad_for_class
6571
6572 /* If find_reloads chose reload_out as reload
6573 register, stay with it - that leaves the
6574 inherited register for subsequent reloads. */
6575 || (rld[r].out && rld[r].reg_rtx
6576 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6577 {
6578 if (! rld[r].optional)
6579 {
6580 reload_override_in[r] = last_reg;
6581 reload_inheritance_insn[r]
6582 = reg_reloaded_insn[i];
6583 }
6584 }
6585 else
6586 {
6587 int k;
6588 /* We can use this as a reload reg. */
6589 /* Mark the register as in use for this part of
6590 the insn. */
6591 mark_reload_reg_in_use (i,
6592 rld[r].opnum,
6593 rld[r].when_needed,
6594 rld[r].mode);
6595 rld[r].reg_rtx = last_reg;
6596 reload_inherited[r] = 1;
6597 reload_inheritance_insn[r]
6598 = reg_reloaded_insn[i];
6599 reload_spill_index[r] = i;
6600 for (k = 0; k < nr; k++)
6601 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6602 i + k);
6603 }
6604 }
6605 }
6606 }
6607 }
6608
6609 /* Here's another way to see if the value is already lying around. */
6610 if (inheritance
6611 && rld[r].in != 0
6612 && ! reload_inherited[r]
6613 && rld[r].out == 0
6614 && (CONSTANT_P (rld[r].in)
6615 || GET_CODE (rld[r].in) == PLUS
6616 || REG_P (rld[r].in)
6617 || MEM_P (rld[r].in))
6618 && (rld[r].nregs == max_group_size
6619 || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6620 search_equiv = rld[r].in;
6621 /* If this is an output reload from a simple move insn, look
6622 if an equivalence for the input is available. */
6623 else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
6624 {
6625 rtx set = single_set (insn);
6626
6627 if (set
6628 && rtx_equal_p (rld[r].out, SET_DEST (set))
6629 && CONSTANT_P (SET_SRC (set)))
6630 search_equiv = SET_SRC (set);
6631 }
6632
6633 if (search_equiv)
6634 {
6635 rtx equiv
6636 = find_equiv_reg (search_equiv, insn, rld[r].rclass,
6637 -1, NULL, 0, rld[r].mode);
6638 int regno = 0;
6639
6640 if (equiv != 0)
6641 {
6642 if (REG_P (equiv))
6643 regno = REGNO (equiv);
6644 else
6645 {
6646 /* This must be a SUBREG of a hard register.
6647 Make a new REG since this might be used in an
6648 address and not all machines support SUBREGs
6649 there. */
6650 gcc_assert (GET_CODE (equiv) == SUBREG);
6651 regno = subreg_regno (equiv);
6652 equiv = gen_rtx_REG (rld[r].mode, regno);
6653 /* If we choose EQUIV as the reload register, but the
6654 loop below decides to cancel the inheritance, we'll
6655 end up reloading EQUIV in rld[r].mode, not the mode
6656 it had originally. That isn't safe when EQUIV isn't
6657 available as a spill register since its value might
6658 still be live at this point. */
6659 for (i = regno; i < regno + (int) rld[r].nregs; i++)
6660 if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6661 equiv = 0;
6662 }
6663 }
6664
6665 /* If we found a spill reg, reject it unless it is free
6666 and of the desired class. */
6667 if (equiv != 0)
6668 {
6669 int regs_used = 0;
6670 int bad_for_class = 0;
6671 int max_regno = regno + rld[r].nregs;
6672
6673 for (i = regno; i < max_regno; i++)
6674 {
6675 regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6676 i);
6677 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6678 i);
6679 }
6680
6681 if ((regs_used
6682 && ! free_for_value_p (regno, rld[r].mode,
6683 rld[r].opnum, rld[r].when_needed,
6684 rld[r].in, rld[r].out, r, 1))
6685 || bad_for_class)
6686 equiv = 0;
6687 }
6688
6689 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6690 equiv = 0;
6691
6692 /* We found a register that contains the value we need.
6693 If this register is the same as an `earlyclobber' operand
6694 of the current insn, just mark it as a place to reload from
6695 since we can't use it as the reload register itself. */
6696
6697 if (equiv != 0)
6698 for (i = 0; i < n_earlyclobbers; i++)
6699 if (reg_overlap_mentioned_for_reload_p (equiv,
6700 reload_earlyclobbers[i]))
6701 {
6702 if (! rld[r].optional)
6703 reload_override_in[r] = equiv;
6704 equiv = 0;
6705 break;
6706 }
6707
6708 /* If the equiv register we have found is explicitly clobbered
6709 in the current insn, it depends on the reload type if we
6710 can use it, use it for reload_override_in, or not at all.
6711 In particular, we then can't use EQUIV for a
6712 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6713
6714 if (equiv != 0)
6715 {
6716 if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6717 switch (rld[r].when_needed)
6718 {
6719 case RELOAD_FOR_OTHER_ADDRESS:
6720 case RELOAD_FOR_INPADDR_ADDRESS:
6721 case RELOAD_FOR_INPUT_ADDRESS:
6722 case RELOAD_FOR_OPADDR_ADDR:
6723 break;
6724 case RELOAD_OTHER:
6725 case RELOAD_FOR_INPUT:
6726 case RELOAD_FOR_OPERAND_ADDRESS:
6727 if (! rld[r].optional)
6728 reload_override_in[r] = equiv;
6729 /* Fall through. */
6730 default:
6731 equiv = 0;
6732 break;
6733 }
6734 else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6735 switch (rld[r].when_needed)
6736 {
6737 case RELOAD_FOR_OTHER_ADDRESS:
6738 case RELOAD_FOR_INPADDR_ADDRESS:
6739 case RELOAD_FOR_INPUT_ADDRESS:
6740 case RELOAD_FOR_OPADDR_ADDR:
6741 case RELOAD_FOR_OPERAND_ADDRESS:
6742 case RELOAD_FOR_INPUT:
6743 break;
6744 case RELOAD_OTHER:
6745 if (! rld[r].optional)
6746 reload_override_in[r] = equiv;
6747 /* Fall through. */
6748 default:
6749 equiv = 0;
6750 break;
6751 }
6752 }
6753
6754 /* If we found an equivalent reg, say no code need be generated
6755 to load it, and use it as our reload reg. */
6756 if (equiv != 0
6757 && (regno != HARD_FRAME_POINTER_REGNUM
6758 || !frame_pointer_needed))
6759 {
6760 int nr = hard_regno_nregs[regno][rld[r].mode];
6761 int k;
6762 rld[r].reg_rtx = equiv;
6763 reload_spill_index[r] = regno;
6764 reload_inherited[r] = 1;
6765
6766 /* If reg_reloaded_valid is not set for this register,
6767 there might be a stale spill_reg_store lying around.
6768 We must clear it, since otherwise emit_reload_insns
6769 might delete the store. */
6770 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6771 spill_reg_store[regno] = NULL_RTX;
6772 /* If any of the hard registers in EQUIV are spill
6773 registers, mark them as in use for this insn. */
6774 for (k = 0; k < nr; k++)
6775 {
6776 i = spill_reg_order[regno + k];
6777 if (i >= 0)
6778 {
6779 mark_reload_reg_in_use (regno, rld[r].opnum,
6780 rld[r].when_needed,
6781 rld[r].mode);
6782 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6783 regno + k);
6784 }
6785 }
6786 }
6787 }
6788
6789 /* If we found a register to use already, or if this is an optional
6790 reload, we are done. */
6791 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6792 continue;
6793
6794 #if 0
6795 /* No longer needed for correct operation. Might or might
6796 not give better code on the average. Want to experiment? */
6797
6798 /* See if there is a later reload that has a class different from our
6799 class that intersects our class or that requires less register
6800 than our reload. If so, we must allocate a register to this
6801 reload now, since that reload might inherit a previous reload
6802 and take the only available register in our class. Don't do this
6803 for optional reloads since they will force all previous reloads
6804 to be allocated. Also don't do this for reloads that have been
6805 turned off. */
6806
6807 for (i = j + 1; i < n_reloads; i++)
6808 {
6809 int s = reload_order[i];
6810
6811 if ((rld[s].in == 0 && rld[s].out == 0
6812 && ! rld[s].secondary_p)
6813 || rld[s].optional)
6814 continue;
6815
6816 if ((rld[s].rclass != rld[r].rclass
6817 && reg_classes_intersect_p (rld[r].rclass,
6818 rld[s].rclass))
6819 || rld[s].nregs < rld[r].nregs)
6820 break;
6821 }
6822
6823 if (i == n_reloads)
6824 continue;
6825
6826 allocate_reload_reg (chain, r, j == n_reloads - 1);
6827 #endif
6828 }
6829
6830 /* Now allocate reload registers for anything non-optional that
6831 didn't get one yet. */
6832 for (j = 0; j < n_reloads; j++)
6833 {
6834 int r = reload_order[j];
6835
6836 /* Ignore reloads that got marked inoperative. */
6837 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6838 continue;
6839
6840 /* Skip reloads that already have a register allocated or are
6841 optional. */
6842 if (rld[r].reg_rtx != 0 || rld[r].optional)
6843 continue;
6844
6845 if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6846 break;
6847 }
6848
6849 /* If that loop got all the way, we have won. */
6850 if (j == n_reloads)
6851 {
6852 win = 1;
6853 break;
6854 }
6855
6856 /* Loop around and try without any inheritance. */
6857 }
6858
6859 if (! win)
6860 {
6861 /* First undo everything done by the failed attempt
6862 to allocate with inheritance. */
6863 choose_reload_regs_init (chain, save_reload_reg_rtx);
6864
6865 /* Some sanity tests to verify that the reloads found in the first
6866 pass are identical to the ones we have now. */
6867 gcc_assert (chain->n_reloads == n_reloads);
6868
6869 for (i = 0; i < n_reloads; i++)
6870 {
6871 if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6872 continue;
6873 gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6874 for (j = 0; j < n_spills; j++)
6875 if (spill_regs[j] == chain->rld[i].regno)
6876 if (! set_reload_reg (j, i))
6877 failed_reload (chain->insn, i);
6878 }
6879 }
6880
6881 /* If we thought we could inherit a reload, because it seemed that
6882 nothing else wanted the same reload register earlier in the insn,
6883 verify that assumption, now that all reloads have been assigned.
6884 Likewise for reloads where reload_override_in has been set. */
6885
6886 /* If doing expensive optimizations, do one preliminary pass that doesn't
6887 cancel any inheritance, but removes reloads that have been needed only
6888 for reloads that we know can be inherited. */
6889 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6890 {
6891 for (j = 0; j < n_reloads; j++)
6892 {
6893 int r = reload_order[j];
6894 rtx check_reg;
6895 if (reload_inherited[r] && rld[r].reg_rtx)
6896 check_reg = rld[r].reg_rtx;
6897 else if (reload_override_in[r]
6898 && (REG_P (reload_override_in[r])
6899 || GET_CODE (reload_override_in[r]) == SUBREG))
6900 check_reg = reload_override_in[r];
6901 else
6902 continue;
6903 if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6904 rld[r].opnum, rld[r].when_needed, rld[r].in,
6905 (reload_inherited[r]
6906 ? rld[r].out : const0_rtx),
6907 r, 1))
6908 {
6909 if (pass)
6910 continue;
6911 reload_inherited[r] = 0;
6912 reload_override_in[r] = 0;
6913 }
6914 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6915 reload_override_in, then we do not need its related
6916 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6917 likewise for other reload types.
6918 We handle this by removing a reload when its only replacement
6919 is mentioned in reload_in of the reload we are going to inherit.
6920 A special case are auto_inc expressions; even if the input is
6921 inherited, we still need the address for the output. We can
6922 recognize them because they have RELOAD_OUT set to RELOAD_IN.
6923 If we succeeded removing some reload and we are doing a preliminary
6924 pass just to remove such reloads, make another pass, since the
6925 removal of one reload might allow us to inherit another one. */
6926 else if (rld[r].in
6927 && rld[r].out != rld[r].in
6928 && remove_address_replacements (rld[r].in) && pass)
6929 pass = 2;
6930 }
6931 }
6932
6933 /* Now that reload_override_in is known valid,
6934 actually override reload_in. */
6935 for (j = 0; j < n_reloads; j++)
6936 if (reload_override_in[j])
6937 rld[j].in = reload_override_in[j];
6938
6939 /* If this reload won't be done because it has been canceled or is
6940 optional and not inherited, clear reload_reg_rtx so other
6941 routines (such as subst_reloads) don't get confused. */
6942 for (j = 0; j < n_reloads; j++)
6943 if (rld[j].reg_rtx != 0
6944 && ((rld[j].optional && ! reload_inherited[j])
6945 || (rld[j].in == 0 && rld[j].out == 0
6946 && ! rld[j].secondary_p)))
6947 {
6948 int regno = true_regnum (rld[j].reg_rtx);
6949
6950 if (spill_reg_order[regno] >= 0)
6951 clear_reload_reg_in_use (regno, rld[j].opnum,
6952 rld[j].when_needed, rld[j].mode);
6953 rld[j].reg_rtx = 0;
6954 reload_spill_index[j] = -1;
6955 }
6956
6957 /* Record which pseudos and which spill regs have output reloads. */
6958 for (j = 0; j < n_reloads; j++)
6959 {
6960 int r = reload_order[j];
6961
6962 i = reload_spill_index[r];
6963
6964 /* I is nonneg if this reload uses a register.
6965 If rld[r].reg_rtx is 0, this is an optional reload
6966 that we opted to ignore. */
6967 if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
6968 && rld[r].reg_rtx != 0)
6969 {
6970 int nregno = REGNO (rld[r].out_reg);
6971 int nr = 1;
6972
6973 if (nregno < FIRST_PSEUDO_REGISTER)
6974 nr = hard_regno_nregs[nregno][rld[r].mode];
6975
6976 while (--nr >= 0)
6977 SET_REGNO_REG_SET (&reg_has_output_reload,
6978 nregno + nr);
6979
6980 if (i >= 0)
6981 {
6982 nr = hard_regno_nregs[i][rld[r].mode];
6983 while (--nr >= 0)
6984 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6985 }
6986
6987 gcc_assert (rld[r].when_needed == RELOAD_OTHER
6988 || rld[r].when_needed == RELOAD_FOR_OUTPUT
6989 || rld[r].when_needed == RELOAD_FOR_INSN);
6990 }
6991 }
6992 }
6993
6994 /* Deallocate the reload register for reload R. This is called from
6995 remove_address_replacements. */
6996
6997 void
6998 deallocate_reload_reg (int r)
6999 {
7000 int regno;
7001
7002 if (! rld[r].reg_rtx)
7003 return;
7004 regno = true_regnum (rld[r].reg_rtx);
7005 rld[r].reg_rtx = 0;
7006 if (spill_reg_order[regno] >= 0)
7007 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
7008 rld[r].mode);
7009 reload_spill_index[r] = -1;
7010 }
7011 \f
/* If the small_register_classes_for_mode_p target hook returns true for
   some machine modes, we may not have merged two reloads of the same item
   for fear that we might not have enough reload registers.  However,
   normally they will get the same reload register and hence actually need
   not be loaded twice.

   Here we check for the most common case of this phenomenon: when we have
   a number of reloads for the same object, each of which were allocated
   the same reload_reg_rtx, that reload_reg_rtx is not used for any other
   reload, and is not modified in the insn itself.  If we find such,
   merge all the reloads and set the resulting reload to RELOAD_OTHER.
   This will not increase the number of spill registers needed and will
   prevent redundant code.  */

static void
merge_assigned_reloads (rtx insn)
{
  int i, j;

  /* Scan all the reloads looking for ones that only load values and
     are not already RELOAD_OTHER and ones whose reload_reg_rtx are
     assigned and not modified by INSN.  */

  for (i = 0; i < n_reloads; i++)
    {
      /* Set if some other reload uses reload I's register for a
	 different value (only tolerated for certain address reloads).  */
      int conflicting_input = 0;
      /* Largest operand number among RELOAD_FOR_INPUT_ADDRESS reloads
	 sharing the register; compared below against the smallest
	 conflicting-input operand number to decide if merging is safe.  */
      int max_input_address_opnum = -1;
      int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;

      /* Skip anything that is not a pure input reload with an assigned
	 reload register that INSN itself leaves unmodified.  */
      if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
	  || rld[i].out != 0 || rld[i].reg_rtx == 0
	  || reg_set_p (rld[i].reg_rtx, insn))
	continue;

      /* Look at all other reloads.  Ensure that the only use of this
	 reload_reg_rtx is in a reload that just loads the same value
	 as we do.  Note that any secondary reloads must be of the identical
	 class since the values, modes, and result registers are the
	 same, so we need not do anything with any secondary reloads.  */

      for (j = 0; j < n_reloads; j++)
	{
	  /* Ignore reload I itself and reloads whose register does not
	     overlap ours.  */
	  if (i == j || rld[j].reg_rtx == 0
	      || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
					    rld[i].reg_rtx))
	    continue;

	  if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
	      && rld[j].opnum > max_input_address_opnum)
	    max_input_address_opnum = rld[j].opnum;

	  /* If the reload regs aren't exactly the same (e.g, different modes)
	     or if the values are different, we can't merge this reload.
	     But if it is an input reload, we might still merge
	     RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads.  */

	  if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
	      || rld[j].out != 0 || rld[j].in == 0
	      || ! rtx_equal_p (rld[i].in, rld[j].in))
	    {
	      /* A mismatching reload is fatal (break) unless reload J is
		 a RELOAD_FOR_INPUT and reload I is an address reload for
		 an earlier operand (or for no particular operand).  */
	      if (rld[j].when_needed != RELOAD_FOR_INPUT
		  || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
		       || rld[i].opnum > rld[j].opnum)
		      && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
		break;
	      conflicting_input = 1;
	      if (min_conflicting_input_opnum > rld[j].opnum)
		min_conflicting_input_opnum = rld[j].opnum;
	    }
	}

      /* If all is OK, merge the reloads.  Only set this to RELOAD_OTHER if
	 we, in fact, found any matching reloads.
	 (J == N_RELOADS means the inner loop completed without finding a
	 disqualifying reload.)  */

      if (j == n_reloads
	  && max_input_address_opnum <= min_conflicting_input_opnum)
	{
	  gcc_assert (rld[i].when_needed != RELOAD_FOR_OUTPUT);

	  /* Fold every reload of the same value in the same register
	     into reload I; the merged reload becomes RELOAD_OTHER and
	     the absorbed reloads are cancelled (in = 0).  */
	  for (j = 0; j < n_reloads; j++)
	    if (i != j && rld[j].reg_rtx != 0
		&& rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
		&& (! conflicting_input
		    || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
		    || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
	      {
		rld[i].when_needed = RELOAD_OTHER;
		rld[j].in = 0;
		reload_spill_index[j] = -1;
		transfer_replacements (i, j);
	      }

	  /* If this is now RELOAD_OTHER, look for any reloads that
	     load parts of this operand and set them to
	     RELOAD_FOR_OTHER_ADDRESS if they were for inputs,
	     RELOAD_OTHER for outputs.  Note that this test is
	     equivalent to looking for reloads for this operand
	     number.

	     We must take special care with RELOAD_FOR_OUTPUT_ADDRESS;
	     it may share registers with a RELOAD_FOR_INPUT, so we can
	     not change it to RELOAD_FOR_OTHER_ADDRESS.  We should
	     never need to, since we do not modify RELOAD_FOR_OUTPUT.

	     It is possible that the RELOAD_FOR_OPERAND_ADDRESS
	     instruction is assigned the same register as the earlier
	     RELOAD_FOR_OTHER_ADDRESS instruction.  Merging these two
	     instructions will cause the RELOAD_FOR_OTHER_ADDRESS
	     instruction to be deleted later on.  */

	  if (rld[i].when_needed == RELOAD_OTHER)
	    for (j = 0; j < n_reloads; j++)
	      if (rld[j].in != 0
		  && rld[j].when_needed != RELOAD_OTHER
		  && rld[j].when_needed != RELOAD_FOR_OTHER_ADDRESS
		  && rld[j].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
		  && rld[j].when_needed != RELOAD_FOR_OPERAND_ADDRESS
		  && (! conflicting_input
		      || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
		      || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
		  && reg_overlap_mentioned_for_reload_p (rld[j].in,
							 rld[i].in))
		{
		  int k;

		  rld[j].when_needed
		    = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
			|| rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
		       ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);

		  /* Check to see if we accidentally converted two
		     reloads that use the same reload register with
		     different inputs to the same type.  If so, the
		     resulting code won't work.  */
		  if (rld[j].reg_rtx)
		    for (k = 0; k < j; k++)
		      gcc_assert (rld[k].in == 0 || rld[k].reg_rtx == 0
				  || rld[k].when_needed != rld[j].when_needed
				  || !rtx_equal_p (rld[k].reg_rtx,
						   rld[j].reg_rtx)
				  || rtx_equal_p (rld[k].in,
						  rld[j].in));
		}
	}
    }
}
7158 \f
/* These arrays are filled by emit_reload_insns and its subroutines.
   Each holds an insn sequence (built via push_to_sequence) for one
   category of reload; emit_input_reload_insns selects among them by
   reload type and operand number.  */
/* RELOAD_FOR_INPUT reloads, indexed by operand number.  */
static rtx input_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_OTHER_ADDRESS reloads.  */
static rtx other_input_address_reload_insns = 0;
/* RELOAD_OTHER input reloads.  */
static rtx other_input_reload_insns = 0;
/* RELOAD_FOR_INPUT_ADDRESS reloads, indexed by operand number.  */
static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_INPADDR_ADDRESS reloads, indexed by operand number.  */
static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
/* Presumably RELOAD_FOR_OUTPUT reloads, indexed by operand number --
   not dispatched in the input-reload switch visible here; confirm in
   emit_output_reload_insns.  */
static rtx output_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_OUTPUT_ADDRESS reloads, indexed by operand number.  */
static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_OUTADDR_ADDRESS reloads, indexed by operand number.  */
static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_OPERAND_ADDRESS reloads.  */
static rtx operand_reload_insns = 0;
/* RELOAD_FOR_OPADDR_ADDR reloads.  */
static rtx other_operand_reload_insns = 0;
/* NOTE(review): not referenced in the visible code; presumably output
   reloads emitted after the insn -- confirm in emit_reload_insns.  */
static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];

/* Values to be put in spill_reg_store are put here first.
   (Written for auto-increment reloads in emit_input_reload_insns.)  */
static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
/* NOTE(review): set of hard regs whose reloaded value dies; maintained
   outside the visible code -- confirm in emit_reload_insns.  */
static HARD_REG_SET reg_reloaded_died;
7175
7176 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7177 of class NEW_CLASS with mode NEW_MODE. Or alternatively, if alt_reload_reg
7178 is nonzero, if that is suitable. On success, change *RELOAD_REG to the
7179 adjusted register, and return true. Otherwise, return false. */
7180 static bool
7181 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
7182 enum reg_class new_class,
7183 enum machine_mode new_mode)
7184
7185 {
7186 rtx reg;
7187
7188 for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
7189 {
7190 unsigned regno = REGNO (reg);
7191
7192 if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
7193 continue;
7194 if (GET_MODE (reg) != new_mode)
7195 {
7196 if (!HARD_REGNO_MODE_OK (regno, new_mode))
7197 continue;
7198 if (hard_regno_nregs[regno][new_mode]
7199 > hard_regno_nregs[regno][GET_MODE (reg)])
7200 continue;
7201 reg = reload_adjust_reg_for_mode (reg, new_mode);
7202 }
7203 *reload_reg = reg;
7204 return true;
7205 }
7206 return false;
7207 }
7208
7209 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7210 pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7211 nonzero, if that is suitable. On success, change *RELOAD_REG to the
7212 adjusted register, and return true. Otherwise, return false. */
7213 static bool
7214 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
7215 enum insn_code icode)
7216
7217 {
7218 enum reg_class new_class = scratch_reload_class (icode);
7219 enum machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
7220
7221 return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
7222 new_class, new_mode);
7223 }
7224
7225 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
7226 has the number J. OLD contains the value to be used as input. */
7227
7228 static void
7229 emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
7230 rtx old, int j)
7231 {
7232 rtx insn = chain->insn;
7233 rtx reloadreg;
7234 rtx oldequiv_reg = 0;
7235 rtx oldequiv = 0;
7236 int special = 0;
7237 enum machine_mode mode;
7238 rtx *where;
7239
7240 /* delete_output_reload is only invoked properly if old contains
7241 the original pseudo register. Since this is replaced with a
7242 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
7243 find the pseudo in RELOAD_IN_REG. */
7244 if (reload_override_in[j]
7245 && REG_P (rl->in_reg))
7246 {
7247 oldequiv = old;
7248 old = rl->in_reg;
7249 }
7250 if (oldequiv == 0)
7251 oldequiv = old;
7252 else if (REG_P (oldequiv))
7253 oldequiv_reg = oldequiv;
7254 else if (GET_CODE (oldequiv) == SUBREG)
7255 oldequiv_reg = SUBREG_REG (oldequiv);
7256
7257 reloadreg = reload_reg_rtx_for_input[j];
7258 mode = GET_MODE (reloadreg);
7259
7260 /* If we are reloading from a register that was recently stored in
7261 with an output-reload, see if we can prove there was
7262 actually no need to store the old value in it. */
7263
7264 if (optimize && REG_P (oldequiv)
7265 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
7266 && spill_reg_store[REGNO (oldequiv)]
7267 && REG_P (old)
7268 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
7269 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
7270 rl->out_reg)))
7271 delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
7272
7273 /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
7274 OLDEQUIV. */
7275
7276 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
7277 oldequiv = SUBREG_REG (oldequiv);
7278 if (GET_MODE (oldequiv) != VOIDmode
7279 && mode != GET_MODE (oldequiv))
7280 oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
7281
7282 /* Switch to the right place to emit the reload insns. */
7283 switch (rl->when_needed)
7284 {
7285 case RELOAD_OTHER:
7286 where = &other_input_reload_insns;
7287 break;
7288 case RELOAD_FOR_INPUT:
7289 where = &input_reload_insns[rl->opnum];
7290 break;
7291 case RELOAD_FOR_INPUT_ADDRESS:
7292 where = &input_address_reload_insns[rl->opnum];
7293 break;
7294 case RELOAD_FOR_INPADDR_ADDRESS:
7295 where = &inpaddr_address_reload_insns[rl->opnum];
7296 break;
7297 case RELOAD_FOR_OUTPUT_ADDRESS:
7298 where = &output_address_reload_insns[rl->opnum];
7299 break;
7300 case RELOAD_FOR_OUTADDR_ADDRESS:
7301 where = &outaddr_address_reload_insns[rl->opnum];
7302 break;
7303 case RELOAD_FOR_OPERAND_ADDRESS:
7304 where = &operand_reload_insns;
7305 break;
7306 case RELOAD_FOR_OPADDR_ADDR:
7307 where = &other_operand_reload_insns;
7308 break;
7309 case RELOAD_FOR_OTHER_ADDRESS:
7310 where = &other_input_address_reload_insns;
7311 break;
7312 default:
7313 gcc_unreachable ();
7314 }
7315
7316 push_to_sequence (*where);
7317
7318 /* Auto-increment addresses must be reloaded in a special way. */
7319 if (rl->out && ! rl->out_reg)
7320 {
7321 /* We are not going to bother supporting the case where a
7322 incremented register can't be copied directly from
7323 OLDEQUIV since this seems highly unlikely. */
7324 gcc_assert (rl->secondary_in_reload < 0);
7325
7326 if (reload_inherited[j])
7327 oldequiv = reloadreg;
7328
7329 old = XEXP (rl->in_reg, 0);
7330
7331 if (optimize && REG_P (oldequiv)
7332 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
7333 && spill_reg_store[REGNO (oldequiv)]
7334 && REG_P (old)
7335 && (dead_or_set_p (insn,
7336 spill_reg_stored_to[REGNO (oldequiv)])
7337 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
7338 old)))
7339 delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
7340
7341 /* Prevent normal processing of this reload. */
7342 special = 1;
7343 /* Output a special code sequence for this case. */
7344 new_spill_reg_store[REGNO (reloadreg)]
7345 = inc_for_reload (reloadreg, oldequiv, rl->out,
7346 rl->inc);
7347 }
7348
7349 /* If we are reloading a pseudo-register that was set by the previous
7350 insn, see if we can get rid of that pseudo-register entirely
7351 by redirecting the previous insn into our reload register. */
7352
7353 else if (optimize && REG_P (old)
7354 && REGNO (old) >= FIRST_PSEUDO_REGISTER
7355 && dead_or_set_p (insn, old)
7356 /* This is unsafe if some other reload
7357 uses the same reg first. */
7358 && ! conflicts_with_override (reloadreg)
7359 && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
7360 rl->when_needed, old, rl->out, j, 0))
7361 {
7362 rtx temp = PREV_INSN (insn);
7363 while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
7364 temp = PREV_INSN (temp);
7365 if (temp
7366 && NONJUMP_INSN_P (temp)
7367 && GET_CODE (PATTERN (temp)) == SET
7368 && SET_DEST (PATTERN (temp)) == old
7369 /* Make sure we can access insn_operand_constraint. */
7370 && asm_noperands (PATTERN (temp)) < 0
7371 /* This is unsafe if operand occurs more than once in current
7372 insn. Perhaps some occurrences aren't reloaded. */
7373 && count_occurrences (PATTERN (insn), old, 0) == 1)
7374 {
7375 rtx old = SET_DEST (PATTERN (temp));
7376 /* Store into the reload register instead of the pseudo. */
7377 SET_DEST (PATTERN (temp)) = reloadreg;
7378
7379 /* Verify that resulting insn is valid. */
7380 extract_insn (temp);
7381 if (constrain_operands (1))
7382 {
7383 /* If the previous insn is an output reload, the source is
7384 a reload register, and its spill_reg_store entry will
7385 contain the previous destination. This is now
7386 invalid. */
7387 if (REG_P (SET_SRC (PATTERN (temp)))
7388 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
7389 {
7390 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7391 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7392 }
7393
7394 /* If these are the only uses of the pseudo reg,
7395 pretend for GDB it lives in the reload reg we used. */
7396 if (REG_N_DEATHS (REGNO (old)) == 1
7397 && REG_N_SETS (REGNO (old)) == 1)
7398 {
7399 reg_renumber[REGNO (old)] = REGNO (reloadreg);
7400 if (ira_conflicts_p)
7401 /* Inform IRA about the change. */
7402 ira_mark_allocation_change (REGNO (old));
7403 alter_reg (REGNO (old), -1, false);
7404 }
7405 special = 1;
7406
7407 /* Adjust any debug insns between temp and insn. */
7408 while ((temp = NEXT_INSN (temp)) != insn)
7409 if (DEBUG_INSN_P (temp))
7410 replace_rtx (PATTERN (temp), old, reloadreg);
7411 else
7412 gcc_assert (NOTE_P (temp));
7413 }
7414 else
7415 {
7416 SET_DEST (PATTERN (temp)) = old;
7417 }
7418 }
7419 }
7420
7421 /* We can't do that, so output an insn to load RELOADREG. */
7422
7423 /* If we have a secondary reload, pick up the secondary register
7424 and icode, if any. If OLDEQUIV and OLD are different or
7425 if this is an in-out reload, recompute whether or not we
7426 still need a secondary register and what the icode should
7427 be. If we still need a secondary register and the class or
7428 icode is different, go back to reloading from OLD if using
7429 OLDEQUIV means that we got the wrong type of register. We
7430 cannot have different class or icode due to an in-out reload
7431 because we don't make such reloads when both the input and
7432 output need secondary reload registers. */
7433
7434 if (! special && rl->secondary_in_reload >= 0)
7435 {
7436 rtx second_reload_reg = 0;
7437 rtx third_reload_reg = 0;
7438 int secondary_reload = rl->secondary_in_reload;
7439 rtx real_oldequiv = oldequiv;
7440 rtx real_old = old;
7441 rtx tmp;
7442 enum insn_code icode;
7443 enum insn_code tertiary_icode = CODE_FOR_nothing;
7444
7445 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7446 and similarly for OLD.
7447 See comments in get_secondary_reload in reload.c. */
7448 /* If it is a pseudo that cannot be replaced with its
7449 equivalent MEM, we must fall back to reload_in, which
7450 will have all the necessary substitutions registered.
7451 Likewise for a pseudo that can't be replaced with its
7452 equivalent constant.
7453
7454 Take extra care for subregs of such pseudos. Note that
7455 we cannot use reg_equiv_mem in this case because it is
7456 not in the right mode. */
7457
7458 tmp = oldequiv;
7459 if (GET_CODE (tmp) == SUBREG)
7460 tmp = SUBREG_REG (tmp);
7461 if (REG_P (tmp)
7462 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7463 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
7464 || reg_equiv_constant[REGNO (tmp)] != 0))
7465 {
7466 if (! reg_equiv_mem[REGNO (tmp)]
7467 || num_not_at_initial_offset
7468 || GET_CODE (oldequiv) == SUBREG)
7469 real_oldequiv = rl->in;
7470 else
7471 real_oldequiv = reg_equiv_mem[REGNO (tmp)];
7472 }
7473
7474 tmp = old;
7475 if (GET_CODE (tmp) == SUBREG)
7476 tmp = SUBREG_REG (tmp);
7477 if (REG_P (tmp)
7478 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7479 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
7480 || reg_equiv_constant[REGNO (tmp)] != 0))
7481 {
7482 if (! reg_equiv_mem[REGNO (tmp)]
7483 || num_not_at_initial_offset
7484 || GET_CODE (old) == SUBREG)
7485 real_old = rl->in;
7486 else
7487 real_old = reg_equiv_mem[REGNO (tmp)];
7488 }
7489
7490 second_reload_reg = rld[secondary_reload].reg_rtx;
7491 if (rld[secondary_reload].secondary_in_reload >= 0)
7492 {
7493 int tertiary_reload = rld[secondary_reload].secondary_in_reload;
7494
7495 third_reload_reg = rld[tertiary_reload].reg_rtx;
7496 tertiary_icode = rld[secondary_reload].secondary_in_icode;
7497 /* We'd have to add more code for quartary reloads. */
7498 gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
7499 }
7500 icode = rl->secondary_in_icode;
7501
7502 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
7503 || (rl->in != 0 && rl->out != 0))
7504 {
7505 secondary_reload_info sri, sri2;
7506 enum reg_class new_class, new_t_class;
7507
7508 sri.icode = CODE_FOR_nothing;
7509 sri.prev_sri = NULL;
7510 new_class = targetm.secondary_reload (1, real_oldequiv, rl->rclass,
7511 mode, &sri);
7512
7513 if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
7514 second_reload_reg = 0;
7515 else if (new_class == NO_REGS)
7516 {
7517 if (reload_adjust_reg_for_icode (&second_reload_reg,
7518 third_reload_reg,
7519 (enum insn_code) sri.icode))
7520 {
7521 icode = (enum insn_code) sri.icode;
7522 third_reload_reg = 0;
7523 }
7524 else
7525 {
7526 oldequiv = old;
7527 real_oldequiv = real_old;
7528 }
7529 }
7530 else if (sri.icode != CODE_FOR_nothing)
7531 /* We currently lack a way to express this in reloads. */
7532 gcc_unreachable ();
7533 else
7534 {
7535 sri2.icode = CODE_FOR_nothing;
7536 sri2.prev_sri = &sri;
7537 new_t_class = targetm.secondary_reload (1, real_oldequiv,
7538 new_class, mode, &sri);
7539 if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
7540 {
7541 if (reload_adjust_reg_for_temp (&second_reload_reg,
7542 third_reload_reg,
7543 new_class, mode))
7544 {
7545 third_reload_reg = 0;
7546 tertiary_icode = (enum insn_code) sri2.icode;
7547 }
7548 else
7549 {
7550 oldequiv = old;
7551 real_oldequiv = real_old;
7552 }
7553 }
7554 else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
7555 {
7556 rtx intermediate = second_reload_reg;
7557
7558 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7559 new_class, mode)
7560 && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
7561 ((enum insn_code)
7562 sri2.icode)))
7563 {
7564 second_reload_reg = intermediate;
7565 tertiary_icode = (enum insn_code) sri2.icode;
7566 }
7567 else
7568 {
7569 oldequiv = old;
7570 real_oldequiv = real_old;
7571 }
7572 }
7573 else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
7574 {
7575 rtx intermediate = second_reload_reg;
7576
7577 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7578 new_class, mode)
7579 && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
7580 new_t_class, mode))
7581 {
7582 second_reload_reg = intermediate;
7583 tertiary_icode = (enum insn_code) sri2.icode;
7584 }
7585 else
7586 {
7587 oldequiv = old;
7588 real_oldequiv = real_old;
7589 }
7590 }
7591 else
7592 {
7593 /* This could be handled more intelligently too. */
7594 oldequiv = old;
7595 real_oldequiv = real_old;
7596 }
7597 }
7598 }
7599
7600 /* If we still need a secondary reload register, check
7601 to see if it is being used as a scratch or intermediate
7602 register and generate code appropriately. If we need
7603 a scratch register, use REAL_OLDEQUIV since the form of
7604 the insn may depend on the actual address if it is
7605 a MEM. */
7606
7607 if (second_reload_reg)
7608 {
7609 if (icode != CODE_FOR_nothing)
7610 {
7611 /* We'd have to add extra code to handle this case. */
7612 gcc_assert (!third_reload_reg);
7613
7614 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7615 second_reload_reg));
7616 special = 1;
7617 }
7618 else
7619 {
7620 /* See if we need a scratch register to load the
7621 intermediate register (a tertiary reload). */
7622 if (tertiary_icode != CODE_FOR_nothing)
7623 {
7624 emit_insn ((GEN_FCN (tertiary_icode)
7625 (second_reload_reg, real_oldequiv,
7626 third_reload_reg)));
7627 }
7628 else if (third_reload_reg)
7629 {
7630 gen_reload (third_reload_reg, real_oldequiv,
7631 rl->opnum,
7632 rl->when_needed);
7633 gen_reload (second_reload_reg, third_reload_reg,
7634 rl->opnum,
7635 rl->when_needed);
7636 }
7637 else
7638 gen_reload (second_reload_reg, real_oldequiv,
7639 rl->opnum,
7640 rl->when_needed);
7641
7642 oldequiv = second_reload_reg;
7643 }
7644 }
7645 }
7646
7647 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7648 {
7649 rtx real_oldequiv = oldequiv;
7650
7651 if ((REG_P (oldequiv)
7652 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7653 && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
7654 || reg_equiv_constant[REGNO (oldequiv)] != 0))
7655 || (GET_CODE (oldequiv) == SUBREG
7656 && REG_P (SUBREG_REG (oldequiv))
7657 && (REGNO (SUBREG_REG (oldequiv))
7658 >= FIRST_PSEUDO_REGISTER)
7659 && ((reg_equiv_memory_loc
7660 [REGNO (SUBREG_REG (oldequiv))] != 0)
7661 || (reg_equiv_constant
7662 [REGNO (SUBREG_REG (oldequiv))] != 0)))
7663 || (CONSTANT_P (oldequiv)
7664 && (PREFERRED_RELOAD_CLASS (oldequiv,
7665 REGNO_REG_CLASS (REGNO (reloadreg)))
7666 == NO_REGS)))
7667 real_oldequiv = rl->in;
7668 gen_reload (reloadreg, real_oldequiv, rl->opnum,
7669 rl->when_needed);
7670 }
7671
7672 if (cfun->can_throw_non_call_exceptions)
7673 copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7674
7675 /* End this sequence. */
7676 *where = get_insns ();
7677 end_sequence ();
7678
7679 /* Update reload_override_in so that delete_address_reloads_1
7680 can see the actual register usage. */
7681 if (oldequiv_reg)
7682 reload_override_in[j] = oldequiv;
7683 }
7684
/* Generate insns for the output reload RL, which is for the insn described
   by CHAIN and has the number J.  The insns are emitted into a sequence
   that is later stored in output_reload_insns[] (or, for RELOAD_OTHER,
   other_output_reload_insns[]) for emission after the reloaded insn.  */
static void
emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
			  int j)
{
  rtx reloadreg;
  rtx insn = chain->insn;
  /* Nonzero once a secondary-reload insn pattern has already performed
     the final store into OLD, so the generic last reload insn below
     must be suppressed.  */
  int special = 0;
  rtx old = rl->out;
  enum machine_mode mode;
  rtx p;
  rtx rl_reg_rtx;

  /* RELOAD_OTHER reloads are collected separately; everything else is
     appended to the per-operand output sequence.  */
  if (rl->when_needed == RELOAD_OTHER)
    start_sequence ();
  else
    push_to_sequence (output_reload_insns[rl->opnum]);

  rl_reg_rtx = reload_reg_rtx_for_output[j];
  mode = GET_MODE (rl_reg_rtx);

  reloadreg = rl_reg_rtx;

  /* If we need two reload regs, set RELOADREG to the intermediate
     one, since it will be stored into OLD.  We might need a secondary
     register only for an input reload, so check again here.  */

  if (rl->secondary_out_reload >= 0)
    {
      rtx real_old = old;
      int secondary_reload = rl->secondary_out_reload;
      int tertiary_reload = rld[secondary_reload].secondary_out_reload;

      /* If OLD is a spilled pseudo with a known memory equivalent,
	 store to the MEM directly.  */
      if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
	  && reg_equiv_mem[REGNO (old)] != 0)
	real_old = reg_equiv_mem[REGNO (old)];

      if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
	{
	  rtx second_reloadreg = reloadreg;
	  reloadreg = rld[secondary_reload].reg_rtx;

	  /* See if RELOADREG is to be used as a scratch register
	     or as an intermediate register.  */
	  if (rl->secondary_out_icode != CODE_FOR_nothing)
	    {
	      /* We'd have to add extra code to handle this case.  */
	      gcc_assert (tertiary_reload < 0);

	      /* Scratch case: the secondary-reload insn pattern does
		 the store itself, with RELOADREG as scratch.  */
	      emit_insn ((GEN_FCN (rl->secondary_out_icode)
			  (real_old, second_reloadreg, reloadreg)));
	      special = 1;
	    }
	  else
	    {
	      /* See if we need both a scratch and intermediate reload
		 register.  */

	      enum insn_code tertiary_icode
		= rld[secondary_reload].secondary_out_icode;

	      /* We'd have to add more code for quartary reloads.  */
	      gcc_assert (tertiary_reload < 0
			  || rld[tertiary_reload].secondary_out_reload < 0);

	      if (GET_MODE (reloadreg) != mode)
		reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);

	      if (tertiary_icode != CODE_FOR_nothing)
		{
		  rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
		  rtx tem;

		  /* Copy primary reload reg to secondary reload reg.
		     (Note that these have been swapped above, then
		     secondary reload reg to OLD using our insn.)  */

		  /* If REAL_OLD is a paradoxical SUBREG, remove it
		     and try to put the opposite SUBREG on
		     RELOADREG.  */
		  if (GET_CODE (real_old) == SUBREG
		      && (GET_MODE_SIZE (GET_MODE (real_old))
			  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
		      && 0 != (tem = gen_lowpart_common
			       (GET_MODE (SUBREG_REG (real_old)),
				reloadreg)))
		    real_old = SUBREG_REG (real_old), reloadreg = tem;

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  emit_insn ((GEN_FCN (tertiary_icode)
			      (real_old, reloadreg, third_reloadreg)));
		  special = 1;
		}

	      else
		{
		  /* Copy between the reload regs here and then to
		     OUT later.  */

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  if (tertiary_reload >= 0)
		    {
		      rtx third_reloadreg = rld[tertiary_reload].reg_rtx;

		      gen_reload (third_reloadreg, reloadreg,
				  rl->opnum, rl->when_needed);
		      reloadreg = third_reloadreg;
		    }
		}
	    }
	}
    }

  /* Output the last reload insn.  */
  if (! special)
    {
      rtx set;

      /* Don't output the last reload if OLD is not the dest of
	 INSN and is in the src and is clobbered by INSN.  */
      if (! flag_expensive_optimizations
	  || !REG_P (old)
	  || !(set = single_set (insn))
	  || rtx_equal_p (old, SET_DEST (set))
	  || !reg_mentioned_p (old, SET_SRC (set))
	  || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
	       && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
	gen_reload (old, reloadreg, rl->opnum,
		    rl->when_needed);
    }

  /* Look at all insns we emitted, just to be safe.  */
  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);

	/* If this output reload doesn't come from a spill reg,
	   clear any memory of reloaded copies of the pseudo reg.
	   If this output reload comes from a spill reg,
	   reg_has_output_reload will make this do nothing.  */
	note_stores (pat, forget_old_reloads_1, NULL);

	if (reg_mentioned_p (rl_reg_rtx, pat))
	  {
	    rtx set = single_set (insn);
	    if (reload_spill_index[j] < 0
		&& set
		&& SET_SRC (set) == rl_reg_rtx)
	      {
		/* The reloaded insn itself reads the reload reg;
		   record its source regno as this reload's spill index
		   so later insns may inherit it.  */
		int src = REGNO (SET_SRC (set));

		reload_spill_index[j] = src;
		SET_HARD_REG_BIT (reg_is_output_reload, src);
		if (find_regno_note (insn, REG_DEAD, src))
		  SET_HARD_REG_BIT (reg_reloaded_died, src);
	      }
	    if (HARD_REGISTER_P (rl_reg_rtx))
	      {
		int s = rl->secondary_out_reload;
		set = single_set (p);
		/* If this reload copies only to the secondary reload
		   register, the secondary reload does the actual
		   store.  */
		if (s >= 0 && set == NULL_RTX)
		  /* We can't tell what function the secondary reload
		     has and where the actual store to the pseudo is
		     made; leave new_spill_reg_store alone.  */
		  ;
		else if (s >= 0
			 && SET_SRC (set) == rl_reg_rtx
			 && SET_DEST (set) == rld[s].reg_rtx)
		  {
		    /* Usually the next instruction will be the
		       secondary reload insn;  if we can confirm
		       that it is, setting new_spill_reg_store to
		       that insn will allow an extra optimization.  */
		    rtx s_reg = rld[s].reg_rtx;
		    rtx next = NEXT_INSN (p);
		    rld[s].out = rl->out;
		    rld[s].out_reg = rl->out_reg;
		    set = single_set (next);
		    if (set && SET_SRC (set) == s_reg
			&& ! new_spill_reg_store[REGNO (s_reg)])
		      {
			SET_HARD_REG_BIT (reg_is_output_reload,
					  REGNO (s_reg));
			new_spill_reg_store[REGNO (s_reg)] = next;
		      }
		  }
		else
		  new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
	      }
	  }
      }

  /* Store the finished sequence back where emit_reload_insns will
     pick it up.  */
  if (rl->when_needed == RELOAD_OTHER)
    {
      emit_insn (other_output_reload_insns[rl->opnum]);
      other_output_reload_insns[rl->opnum] = get_insns ();
    }
  else
    output_reload_insns[rl->opnum] = get_insns ();

  if (cfun->can_throw_non_call_exceptions)
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);

  end_sequence ();
}
7897
/* Do input reloading for reload RL, which is for the insn described by CHAIN
   and has the number J.  Chooses the mode to reload in, records the reload
   register in reload_reg_rtx_for_input[J], emits the input reload insns
   (unless the reload was inherited), and may delete a now-redundant earlier
   output reload via delete_output_reload.  */
static void
do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
{
  rtx insn = chain->insn;
  /* Prefer the pre-substitution pseudo (in_reg) when the reload value
     has already been replaced by a stack-slot MEM.  */
  rtx old = (rl->in && MEM_P (rl->in)
	     ? rl->in_reg : rl->in);
  rtx reg_rtx = rl->reg_rtx;

  if (old && reg_rtx)
    {
      enum machine_mode mode;

      /* Determine the mode to reload in.
	 This is very tricky because we have three to choose from.
	 There is the mode the insn operand wants (rl->inmode).
	 There is the mode of the reload register RELOADREG.
	 There is the intrinsic mode of the operand, which we could find
	 by stripping some SUBREGs.
	 It turns out that RELOADREG's mode is irrelevant:
	 we can change that arbitrarily.

	 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
	 then the reload reg may not support QImode moves, so use SImode.
	 If foo is in memory due to spilling a pseudo reg, this is safe,
	 because the QImode value is in the least significant part of a
	 slot big enough for a SImode.  If foo is some other sort of
	 memory reference, then it is impossible to reload this case,
	 so previous passes had better make sure this never happens.

	 Then consider a one-word union which has SImode and one of its
	 members is a float, being fetched as (SUBREG:SF union:SI).
	 We must fetch that as SFmode because we could be loading into
	 a float-only register.  In this case OLD's mode is correct.

	 Consider an immediate integer: it has VOIDmode.  Here we need
	 to get a mode from something else.

	 In some cases, there is a fourth mode, the operand's
	 containing mode.  If the insn specifies a containing mode for
	 this operand, it overrides all others.

	 I am not sure whether the algorithm here is always right,
	 but it does the right things in those cases.  */

      mode = GET_MODE (old);
      if (mode == VOIDmode)
	mode = rl->inmode;

      /* We cannot use gen_lowpart_common since it can do the wrong thing
	 when REG_RTX has a multi-word mode.  Note that REG_RTX must
	 always be a REG here.  */
      if (GET_MODE (reg_rtx) != mode)
	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
    }
  reload_reg_rtx_for_input[j] = reg_rtx;

  if (old != 0
      /* AUTO_INC reloads need to be handled even if inherited.  We got an
	 AUTO_INC reload if reload_out is set but reload_out_reg isn't.  */
      && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
      && ! rtx_equal_p (reg_rtx, old)
      && reg_rtx != 0)
    emit_input_reload_insns (chain, rld + j, old, j);

  /* When inheriting a wider reload, we have a MEM in rl->in,
     e.g. inheriting a SImode output reload for
     (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
  if (optimize && reload_inherited[j] && rl->in
      && MEM_P (rl->in)
      && MEM_P (rl->in_reg)
      && reload_spill_index[j] >= 0
      && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
    rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];

  /* If we are reloading a register that was recently stored in with an
     output-reload, see if we can prove there was
     actually no need to store the old value in it.  */

  if (optimize
      && (reload_inherited[j] || reload_override_in[j])
      && reg_rtx
      && REG_P (reg_rtx)
      && spill_reg_store[REGNO (reg_rtx)] != 0
#if 0
      /* There doesn't seem to be any reason to restrict this to pseudos
	 and doing so loses in the case where we are copying from a
	 register of the wrong class.  */
      && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
#endif
      /* The insn might have already some references to stackslots
	 replaced by MEMs, while reload_out_reg still names the
	 original pseudo.  */
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
	  || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
    delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
}
7996
/* Do output reloading for reload RL, which is for the insn described by
   CHAIN and has the number J.  Chooses the mode to reload in, records the
   reload register in reload_reg_rtx_for_output[J], possibly deletes a
   previous store that this reload makes redundant, and emits the output
   reload insns unless the reload turns out to be unnecessary.
   ??? At some point we need to support handling output reloads of
   JUMP_INSNs or insns that set cc0.  */
static void
do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
{
  rtx note, old;
  rtx insn = chain->insn;
  /* If this is an output reload that stores something that is
     not loaded in this same reload, see if we can eliminate a previous
     store.  */
  rtx pseudo = rl->out_reg;
  rtx reg_rtx = rl->reg_rtx;

  if (rl->out && reg_rtx)
    {
      enum machine_mode mode;

      /* Determine the mode to reload in.
	 See comments above (for input reloading).  */
      mode = GET_MODE (rl->out);
      if (mode == VOIDmode)
	{
	  /* VOIDmode should never happen for an output.  */
	  if (asm_noperands (PATTERN (insn)) < 0)
	    /* It's the compiler's fault.  */
	    fatal_insn ("VOIDmode on an output", insn);
	  /* A constant asm output is a user error, not an ICE.  */
	  error_for_asm (insn, "output operand is constant in %<asm%>");
	  /* Prevent crash--use something we know is valid.  */
	  mode = word_mode;
	  rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
	}
      if (GET_MODE (reg_rtx) != mode)
	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
    }
  reload_reg_rtx_for_output[j] = reg_rtx;

  if (pseudo
      && optimize
      && REG_P (pseudo)
      && ! rtx_equal_p (rl->in_reg, pseudo)
      && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
      && reg_last_reload_reg[REGNO (pseudo)])
    {
      int pseudo_no = REGNO (pseudo);
      int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);

      /* We don't need to test full validity of last_regno for
	 inherit here; we only want to know if the store actually
	 matches the pseudo.  */
      if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
	  && reg_reloaded_contents[last_regno] == pseudo_no
	  && spill_reg_store[last_regno]
	  && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
	delete_output_reload (insn, j, last_regno, reg_rtx);
    }

  old = rl->out_reg;
  if (old == 0
      || reg_rtx == 0
      || rtx_equal_p (old, reg_rtx))
    return;

  /* An output operand that dies right away does need a reload,
     but need not be copied from it.  Show the new location in the
     REG_UNUSED note.  */
  if ((REG_P (old) || GET_CODE (old) == SCRATCH)
      && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
    {
      XEXP (note, 0) = reg_rtx;
      return;
    }
  /* Likewise for a SUBREG of an operand that dies.  */
  else if (GET_CODE (old) == SUBREG
	   && REG_P (SUBREG_REG (old))
	   && 0 != (note = find_reg_note (insn, REG_UNUSED,
					  SUBREG_REG (old))))
    {
      XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
      return;
    }
  else if (GET_CODE (old) == SCRATCH)
    /* If we aren't optimizing, there won't be a REG_UNUSED note,
       but we don't want to make an output reload.  */
    return;

  /* If is a JUMP_INSN, we can't support output reloads yet.  */
  gcc_assert (NONJUMP_INSN_P (insn));

  emit_output_reload_insns (chain, rld + j, j);
}
8089
8090 /* A reload copies values of MODE from register SRC to register DEST.
8091 Return true if it can be treated for inheritance purposes like a
8092 group of reloads, each one reloading a single hard register. The
8093 caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
8094 occupy the same number of hard registers. */
8095
8096 static bool
8097 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
8098 int src ATTRIBUTE_UNUSED,
8099 enum machine_mode mode ATTRIBUTE_UNUSED)
8100 {
8101 #ifdef CANNOT_CHANGE_MODE_CLASS
8102 return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
8103 && !REG_CANNOT_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
8104 #else
8105 return true;
8106 #endif
8107 }
8108
8109 /* Output insns to reload values in and out of the chosen reload regs. */
8110
8111 static void
8112 emit_reload_insns (struct insn_chain *chain)
8113 {
8114 rtx insn = chain->insn;
8115
8116 int j;
8117
8118 CLEAR_HARD_REG_SET (reg_reloaded_died);
8119
8120 for (j = 0; j < reload_n_operands; j++)
8121 input_reload_insns[j] = input_address_reload_insns[j]
8122 = inpaddr_address_reload_insns[j]
8123 = output_reload_insns[j] = output_address_reload_insns[j]
8124 = outaddr_address_reload_insns[j]
8125 = other_output_reload_insns[j] = 0;
8126 other_input_address_reload_insns = 0;
8127 other_input_reload_insns = 0;
8128 operand_reload_insns = 0;
8129 other_operand_reload_insns = 0;
8130
8131 /* Dump reloads into the dump file. */
8132 if (dump_file)
8133 {
8134 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
8135 debug_reload_to_stream (dump_file);
8136 }
8137
8138 /* Now output the instructions to copy the data into and out of the
8139 reload registers. Do these in the order that the reloads were reported,
8140 since reloads of base and index registers precede reloads of operands
8141 and the operands may need the base and index registers reloaded. */
8142
8143 for (j = 0; j < n_reloads; j++)
8144 {
8145 if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
8146 {
8147 unsigned int i;
8148
8149 for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
8150 new_spill_reg_store[i] = 0;
8151 }
8152
8153 do_input_reload (chain, rld + j, j);
8154 do_output_reload (chain, rld + j, j);
8155 }
8156
8157 /* Now write all the insns we made for reloads in the order expected by
8158 the allocation functions. Prior to the insn being reloaded, we write
8159 the following reloads:
8160
8161 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
8162
8163 RELOAD_OTHER reloads.
8164
8165 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
8166 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
8167 RELOAD_FOR_INPUT reload for the operand.
8168
8169 RELOAD_FOR_OPADDR_ADDRS reloads.
8170
8171 RELOAD_FOR_OPERAND_ADDRESS reloads.
8172
8173 After the insn being reloaded, we write the following:
8174
8175 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
8176 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
8177 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
8178 reloads for the operand. The RELOAD_OTHER output reloads are
8179 output in descending order by reload number. */
8180
8181 emit_insn_before (other_input_address_reload_insns, insn);
8182 emit_insn_before (other_input_reload_insns, insn);
8183
8184 for (j = 0; j < reload_n_operands; j++)
8185 {
8186 emit_insn_before (inpaddr_address_reload_insns[j], insn);
8187 emit_insn_before (input_address_reload_insns[j], insn);
8188 emit_insn_before (input_reload_insns[j], insn);
8189 }
8190
8191 emit_insn_before (other_operand_reload_insns, insn);
8192 emit_insn_before (operand_reload_insns, insn);
8193
8194 for (j = 0; j < reload_n_operands; j++)
8195 {
8196 rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
8197 x = emit_insn_after (output_address_reload_insns[j], x);
8198 x = emit_insn_after (output_reload_insns[j], x);
8199 emit_insn_after (other_output_reload_insns[j], x);
8200 }
8201
8202 /* For all the spill regs newly reloaded in this instruction,
8203 record what they were reloaded from, so subsequent instructions
8204 can inherit the reloads.
8205
8206 Update spill_reg_store for the reloads of this insn.
8207 Copy the elements that were updated in the loop above. */
8208
8209 for (j = 0; j < n_reloads; j++)
8210 {
8211 int r = reload_order[j];
8212 int i = reload_spill_index[r];
8213
8214 /* If this is a non-inherited input reload from a pseudo, we must
8215 clear any memory of a previous store to the same pseudo. Only do
8216 something if there will not be an output reload for the pseudo
8217 being reloaded. */
8218 if (rld[r].in_reg != 0
8219 && ! (reload_inherited[r] || reload_override_in[r]))
8220 {
8221 rtx reg = rld[r].in_reg;
8222
8223 if (GET_CODE (reg) == SUBREG)
8224 reg = SUBREG_REG (reg);
8225
8226 if (REG_P (reg)
8227 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
8228 && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
8229 {
8230 int nregno = REGNO (reg);
8231
8232 if (reg_last_reload_reg[nregno])
8233 {
8234 int last_regno = REGNO (reg_last_reload_reg[nregno]);
8235
8236 if (reg_reloaded_contents[last_regno] == nregno)
8237 spill_reg_store[last_regno] = 0;
8238 }
8239 }
8240 }
8241
8242 /* I is nonneg if this reload used a register.
8243 If rld[r].reg_rtx is 0, this is an optional reload
8244 that we opted to ignore. */
8245
8246 if (i >= 0 && rld[r].reg_rtx != 0)
8247 {
8248 int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
8249 int k;
8250
8251 /* For a multi register reload, we need to check if all or part
8252 of the value lives to the end. */
8253 for (k = 0; k < nr; k++)
8254 if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
8255 rld[r].when_needed))
8256 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
8257
8258 /* Maybe the spill reg contains a copy of reload_out. */
8259 if (rld[r].out != 0
8260 && (REG_P (rld[r].out)
8261 #ifdef AUTO_INC_DEC
8262 || ! rld[r].out_reg
8263 #endif
8264 || REG_P (rld[r].out_reg)))
8265 {
8266 rtx reg;
8267 enum machine_mode mode;
8268 int regno, nregs;
8269
8270 reg = reload_reg_rtx_for_output[r];
8271 mode = GET_MODE (reg);
8272 regno = REGNO (reg);
8273 nregs = hard_regno_nregs[regno][mode];
8274 if (reload_regs_reach_end_p (regno, nregs, rld[r].opnum,
8275 rld[r].when_needed))
8276 {
8277 rtx out = (REG_P (rld[r].out)
8278 ? rld[r].out
8279 : rld[r].out_reg
8280 ? rld[r].out_reg
8281 /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
8282 int out_regno = REGNO (out);
8283 int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
8284 : hard_regno_nregs[out_regno][mode]);
8285 bool piecemeal;
8286
8287 spill_reg_store[regno] = new_spill_reg_store[regno];
8288 spill_reg_stored_to[regno] = out;
8289 reg_last_reload_reg[out_regno] = reg;
8290
8291 piecemeal = (HARD_REGISTER_NUM_P (out_regno)
8292 && nregs == out_nregs
8293 && inherit_piecemeal_p (out_regno, regno, mode));
8294
8295 /* If OUT_REGNO is a hard register, it may occupy more than
8296 one register. If it does, say what is in the
8297 rest of the registers assuming that both registers
8298 agree on how many words the object takes. If not,
8299 invalidate the subsequent registers. */
8300
8301 if (HARD_REGISTER_NUM_P (out_regno))
8302 for (k = 1; k < out_nregs; k++)
8303 reg_last_reload_reg[out_regno + k]
8304 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8305
8306 /* Now do the inverse operation. */
8307 for (k = 0; k < nregs; k++)
8308 {
8309 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8310 reg_reloaded_contents[regno + k]
8311 = (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
8312 ? out_regno
8313 : out_regno + k);
8314 reg_reloaded_insn[regno + k] = insn;
8315 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8316 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8317 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8318 regno + k);
8319 else
8320 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8321 regno + k);
8322 }
8323 }
8324 }
8325 /* Maybe the spill reg contains a copy of reload_in. Only do
8326 something if there will not be an output reload for
8327 the register being reloaded. */
8328 else if (rld[r].out_reg == 0
8329 && rld[r].in != 0
8330 && ((REG_P (rld[r].in)
8331 && !HARD_REGISTER_P (rld[r].in)
8332 && !REGNO_REG_SET_P (&reg_has_output_reload,
8333 REGNO (rld[r].in)))
8334 || (REG_P (rld[r].in_reg)
8335 && !REGNO_REG_SET_P (&reg_has_output_reload,
8336 REGNO (rld[r].in_reg))))
8337 && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
8338 {
8339 rtx reg;
8340 enum machine_mode mode;
8341 int regno, nregs;
8342
8343 reg = reload_reg_rtx_for_input[r];
8344 mode = GET_MODE (reg);
8345 regno = REGNO (reg);
8346 nregs = hard_regno_nregs[regno][mode];
8347 if (reload_regs_reach_end_p (regno, nregs, rld[r].opnum,
8348 rld[r].when_needed))
8349 {
8350 int in_regno;
8351 int in_nregs;
8352 rtx in;
8353 bool piecemeal;
8354
8355 if (REG_P (rld[r].in)
8356 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
8357 in = rld[r].in;
8358 else if (REG_P (rld[r].in_reg))
8359 in = rld[r].in_reg;
8360 else
8361 in = XEXP (rld[r].in_reg, 0);
8362 in_regno = REGNO (in);
8363
8364 in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
8365 : hard_regno_nregs[in_regno][mode]);
8366
8367 reg_last_reload_reg[in_regno] = reg;
8368
8369 piecemeal = (HARD_REGISTER_NUM_P (in_regno)
8370 && nregs == in_nregs
8371 && inherit_piecemeal_p (regno, in_regno, mode));
8372
8373 if (HARD_REGISTER_NUM_P (in_regno))
8374 for (k = 1; k < in_nregs; k++)
8375 reg_last_reload_reg[in_regno + k]
8376 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8377
8378 /* Unless we inherited this reload, show we haven't
8379 recently done a store.
8380 Previous stores of inherited auto_inc expressions
8381 also have to be discarded. */
8382 if (! reload_inherited[r]
8383 || (rld[r].out && ! rld[r].out_reg))
8384 spill_reg_store[regno] = 0;
8385
8386 for (k = 0; k < nregs; k++)
8387 {
8388 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8389 reg_reloaded_contents[regno + k]
8390 = (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
8391 ? in_regno
8392 : in_regno + k);
8393 reg_reloaded_insn[regno + k] = insn;
8394 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8395 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8396 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8397 regno + k);
8398 else
8399 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8400 regno + k);
8401 }
8402 }
8403 }
8404 }
8405
8406 /* The following if-statement was #if 0'd in 1.34 (or before...).
8407 It's reenabled in 1.35 because supposedly nothing else
8408 deals with this problem. */
8409
8410 /* If a register gets output-reloaded from a non-spill register,
8411 that invalidates any previous reloaded copy of it.
8412 But forget_old_reloads_1 won't get to see it, because
8413 it thinks only about the original insn. So invalidate it here.
8414 Also do the same thing for RELOAD_OTHER constraints where the
8415 output is discarded. */
8416 if (i < 0
8417 && ((rld[r].out != 0
8418 && (REG_P (rld[r].out)
8419 || (MEM_P (rld[r].out)
8420 && REG_P (rld[r].out_reg))))
8421 || (rld[r].out == 0 && rld[r].out_reg
8422 && REG_P (rld[r].out_reg))))
8423 {
8424 rtx out = ((rld[r].out && REG_P (rld[r].out))
8425 ? rld[r].out : rld[r].out_reg);
8426 int out_regno = REGNO (out);
8427 enum machine_mode mode = GET_MODE (out);
8428
8429 /* REG_RTX is now set or clobbered by the main instruction.
8430 As the comment above explains, forget_old_reloads_1 only
8431 sees the original instruction, and there is no guarantee
8432 that the original instruction also clobbered REG_RTX.
8433 For example, if find_reloads sees that the input side of
8434 a matched operand pair dies in this instruction, it may
8435 use the input register as the reload register.
8436
8437 Calling forget_old_reloads_1 is a waste of effort if
8438 REG_RTX is also the output register.
8439
8440 If we know that REG_RTX holds the value of a pseudo
8441 register, the code after the call will record that fact. */
8442 if (rld[r].reg_rtx && rld[r].reg_rtx != out)
8443 forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
8444
8445 if (!HARD_REGISTER_NUM_P (out_regno))
8446 {
8447 rtx src_reg, store_insn = NULL_RTX;
8448
8449 reg_last_reload_reg[out_regno] = 0;
8450
8451 /* If we can find a hard register that is stored, record
8452 the storing insn so that we may delete this insn with
8453 delete_output_reload. */
8454 src_reg = reload_reg_rtx_for_output[r];
8455
8456 /* If this is an optional reload, try to find the source reg
8457 from an input reload. */
8458 if (! src_reg)
8459 {
8460 rtx set = single_set (insn);
8461 if (set && SET_DEST (set) == rld[r].out)
8462 {
8463 int k;
8464
8465 src_reg = SET_SRC (set);
8466 store_insn = insn;
8467 for (k = 0; k < n_reloads; k++)
8468 {
8469 if (rld[k].in == src_reg)
8470 {
8471 src_reg = reload_reg_rtx_for_input[k];
8472 break;
8473 }
8474 }
8475 }
8476 }
8477 else
8478 store_insn = new_spill_reg_store[REGNO (src_reg)];
8479 if (src_reg && REG_P (src_reg)
8480 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
8481 {
8482 int src_regno, src_nregs, k;
8483 rtx note;
8484
8485 gcc_assert (GET_MODE (src_reg) == mode);
8486 src_regno = REGNO (src_reg);
8487 src_nregs = hard_regno_nregs[src_regno][mode];
8488 /* The place where to find a death note varies with
8489 PRESERVE_DEATH_INFO_REGNO_P . The condition is not
8490 necessarily checked exactly in the code that moves
8491 notes, so just check both locations. */
8492 note = find_regno_note (insn, REG_DEAD, src_regno);
8493 if (! note && store_insn)
8494 note = find_regno_note (store_insn, REG_DEAD, src_regno);
8495 for (k = 0; k < src_nregs; k++)
8496 {
8497 spill_reg_store[src_regno + k] = store_insn;
8498 spill_reg_stored_to[src_regno + k] = out;
8499 reg_reloaded_contents[src_regno + k] = out_regno;
8500 reg_reloaded_insn[src_regno + k] = store_insn;
8501 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
8502 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
8503 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k,
8504 mode))
8505 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8506 src_regno + k);
8507 else
8508 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8509 src_regno + k);
8510 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
8511 if (note)
8512 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
8513 else
8514 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
8515 }
8516 reg_last_reload_reg[out_regno] = src_reg;
8517 /* We have to set reg_has_output_reload here, or else
8518 forget_old_reloads_1 will clear reg_last_reload_reg
8519 right away. */
8520 SET_REGNO_REG_SET (&reg_has_output_reload,
8521 out_regno);
8522 }
8523 }
8524 else
8525 {
8526 int k, out_nregs = hard_regno_nregs[out_regno][mode];
8527
8528 for (k = 0; k < out_nregs; k++)
8529 reg_last_reload_reg[out_regno + k] = 0;
8530 }
8531 }
8532 }
8533 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
8534 }
8535 \f
8536 /* Go through the motions to emit INSN and test if it is strictly valid.
8537 Return the emitted insn if valid, else return NULL. */
8538
8539 static rtx
8540 emit_insn_if_valid_for_reload (rtx insn)
8541 {
8542 rtx last = get_last_insn ();
8543 int code;
8544
8545 insn = emit_insn (insn);
8546 code = recog_memoized (insn);
8547
8548 if (code >= 0)
8549 {
8550 extract_insn (insn);
8551 /* We want constrain operands to treat this insn strictly in its
8552 validity determination, i.e., the way it would after reload has
8553 completed. */
8554 if (constrain_operands (1))
8555 return insn;
8556 }
8557
8558 delete_insns_since (last);
8559 return NULL;
8560 }
8561
/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.

   Returns first insn emitted.  */

static rtx
gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
{
  rtx last = get_last_insn ();
  rtx tem;

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (GET_CODE (in) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (in))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
    in = SUBREG_REG (in), out = tem;
  else if (GET_CODE (out) == SUBREG
	   && (GET_MODE_SIZE (GET_MODE (out))
	       > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
	   && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
    out = SUBREG_REG (out), in = tem;

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Or we can be asked to reload an unary operand that was a fragment of
     an addressing mode, into a register.  If it isn't recognized as-is,
     we try making the unop operand and the reload-register the same:
     (set reg:X (unop:X expr:Y))
     -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `extract_insn' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      int code;

      /* Apply any pending replacements recorded by find_reloads to the
	 two addends before building the new PLUS.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (REG_P (XEXP (in, 1))
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);

      /* First try a three-operand add as a single insn.  */
      insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
      if (insn)
	return insn;

      /* If that failed, we must use a conservative two-insn sequence.

	 Use a move to copy one operand into the reload register.  Prefer
	 to reload a constant, MEM or pseudo since the move patterns can
	 handle an arbitrary operand.  If OP1 is not a constant, MEM or
	 pseudo and OP1 is not a valid operand for an add instruction, then
	 reload OP1.

	 After reloading one of the operands into the reload register, add
	 the reload register to the output register.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      /* Insn code of a three-operand add in OUT's mode, if the target
	 has one (CODE_FOR_nothing otherwise).  */
      code = (int) optab_handler (add_optab, GET_MODE (out))->insn_code;

      if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
	  || (REG_P (op1)
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
	  || (code != CODE_FOR_nothing
	      && ! ((*insn_data[code].operand[2].predicate)
		    (op1, insn_data[code].operand[2].mode))))
	tem = op0, op0 = op1, op1 = tem;

      /* Recursively reload the first addend into OUT.  */
      gen_reload (out, op0, opnum, type);

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
      if (insn)
	{
	  /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
	  set_unique_reg_note (insn, REG_EQUIV, in);
	  return insn;
	}

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      gcc_assert (!reg_overlap_mentioned_p (out, op0));
      gen_reload (out, op1, opnum, type);
      insn = emit_insn (gen_add2_insn (out, op0));
      set_unique_reg_note (insn, REG_EQUIV, in);
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if ((REG_P (in)
	    || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
	   && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
	   && (REG_P (out)
	       || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
	   && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
				       REGNO_REG_CLASS (reg_or_subregno (out)),
				       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx_REG (GET_MODE (loc), REGNO (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx_REG (GET_MODE (loc), REGNO (in));

      /* Move IN -> LOC -> OUT through the secondary memory slot.  */
      gen_reload (loc, in, opnum, type);
      gen_reload (out, loc, opnum, type);
    }
#endif
  else if (REG_P (out) && UNARY_P (in))
    {
      rtx insn;
      rtx op1;
      rtx out_moded;
      rtx set;

      op1 = find_replacement (&XEXP (in, 0));
      if (op1 != XEXP (in, 0))
	in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);

      /* First, try a plain SET.  */
      set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
      if (set)
	return set;

      /* If that failed, move the inner operand to the reload
	 register, and try the same unop with the inner expression
	 replaced with the reload register.  */

      if (GET_MODE (op1) != GET_MODE (out))
	out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
      else
	out_moded = out;

      gen_reload (out_moded, op1, opnum, type);

      insn
	= gen_rtx_SET (VOIDmode, out,
		       gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
				      out_moded));
      insn = emit_insn_if_valid_for_reload (insn);
      if (insn)
	{
	  set_unique_reg_note (insn, REG_EQUIV, in);
	  return insn;
	}

      fatal_insn ("Failure trying to reload:", set);
    }
  /* If IN is a simple operand, use gen_move_insn.  */
  else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
    {
      tem = emit_insn (gen_move_insn (out, in));
      /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note.  */
      mark_jump_label (in, tem, 0);
    }

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (out, in));
#endif

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx_SET (VOIDmode, out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
8806 \f
/* Delete a previously made output-reload whose result we now believe
   is not needed.  First we double-check.

   INSN is the insn now being processed.
   LAST_RELOAD_REG is the hard register number for which we want to delete
   the last output reload.
   J is the reload-number that originally used REG.  The caller has made
   certain that reload J doesn't use REG any longer for input.
   NEW_RELOAD_REG is reload register that reload J is using for REG.  */

static void
delete_output_reload (rtx insn, int j, int last_reload_reg, rtx new_reload_reg)
{
  rtx output_reload_insn = spill_reg_store[last_reload_reg];
  rtx reg = spill_reg_stored_to[last_reload_reg];
  int k;
  int n_occurrences;
  int n_inherited = 0;
  rtx i1;
  rtx substed;

  /* It is possible that this reload has been only used to set another reload
     we eliminated earlier and thus deleted this instruction too.  */
  if (INSN_DELETED_P (output_reload_insn))
    return;

  /* Get the raw pseudo-register referred to.  */

  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  /* Memory location the pseudo may have been replaced with, if any.  */
  substed = reg_equiv_memory_loc[REGNO (reg)];

  /* This is unsafe if the operand occurs more often in the current
     insn than it is inherited.  Count the inherited uses of REG among
     the reloads; bail out on any use that is not inherited.  */
  for (k = n_reloads - 1; k >= 0; k--)
    {
      rtx reg2 = rld[k].in;
      if (! reg2)
	continue;
      if (MEM_P (reg2) || reload_override_in[k])
	reg2 = rld[k].in_reg;
#ifdef AUTO_INC_DEC
      if (rld[k].out && ! rld[k].out_reg)
	reg2 = XEXP (rld[k].in_reg, 0);
#endif
      while (GET_CODE (reg2) == SUBREG)
	reg2 = SUBREG_REG (reg2);
      if (rtx_equal_p (reg2, reg))
	{
	  if (reload_inherited[k] || reload_override_in[k] || k == j)
	    n_inherited++;
	  else
	    return;
	}
    }
  /* Count how often REG (or its memory equivalents) actually occurs
     in INSN; deleting is only safe if no occurrence is unaccounted for.  */
  n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
    n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
					reg, 0);
  if (substed)
    n_occurrences += count_occurrences (PATTERN (insn),
					eliminate_regs (substed, VOIDmode,
							NULL_RTX), 0);
  for (i1 = reg_equiv_alt_mem_list[REGNO (reg)]; i1; i1 = XEXP (i1, 1))
    {
      gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
      n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
    }
  if (n_occurrences > n_inherited)
    return;

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and we're within the same basic block, then the value can only
     pass through the reload reg and end up here.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (i1))
	return;
      if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
	  && reg_mentioned_p (reg, PATTERN (i1)))
	{
	  /* If this is USE in front of INSN, we only have to check that
	     there are no more references than accounted for by inheritance.  */
	  while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
	    {
	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
	      i1 = NEXT_INSN (i1);
	    }
	  if (n_occurrences <= n_inherited && i1 == insn)
	    break;
	  return;
	}
    }

  /* We will be deleting the insn.  Remove the spill reg information.  */
  for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
    {
      spill_reg_store[last_reload_reg + k] = 0;
      spill_reg_stored_to[last_reload_reg + k] = 0;
    }

  /* The caller has already checked that REG dies or is set in INSN.
     It has also checked that we are optimizing, and thus some
     inaccuracies in the debugging information are acceptable.
     So we could just delete output_reload_insn.  But in some cases
     we can improve the debugging information without sacrificing
     optimization - maybe even improving the code: See if the pseudo
     reg has been completely replaced with reload regs.  If so, delete
     the store insn and forget we had a stack slot for the pseudo.  */
  if (rld[j].out != rld[j].in
      && REG_N_DEATHS (REGNO (reg)) == 1
      && REG_N_SETS (REGNO (reg)) == 1
      && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here and the beginning of
	 the current basic block.  (We also know that the last use before
	 INSN was the output reload we are thinking of deleting, but never
	 mind that.)  Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (LABEL_P (i2)
	      || JUMP_P (i2))
	    break;
	  if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    {
	      /* Some other ref remains; just delete the output reload we
		 know to be dead.  */
	      delete_address_reloads (output_reload_insn, insn);
	      delete_insn (output_reload_insn);
	      return;
	    }
	}

      /* Delete the now-dead stores into this pseudo.  Note that this
	 loop also takes care of deleting output_reload_insn.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    {
	      delete_address_reloads (i2, insn);
	      delete_insn (i2);
	    }
	  if (LABEL_P (i2)
	      || JUMP_P (i2))
	    break;
	}

      /* For the debugging info, say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
      if (ira_conflicts_p)
	/* Inform IRA about the change.  */
	ira_mark_allocation_change (REGNO (reg));
      /* Forget the stack slot that was assigned to the pseudo.  */
      alter_reg (REGNO (reg), -1, false);
    }
  else
    {
      delete_address_reloads (output_reload_insn, insn);
      delete_insn (output_reload_insn);
    }
}
8982
8983 /* We are going to delete DEAD_INSN. Recursively delete loads of
8984 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8985 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
8986 static void
8987 delete_address_reloads (rtx dead_insn, rtx current_insn)
8988 {
8989 rtx set = single_set (dead_insn);
8990 rtx set2, dst, prev, next;
8991 if (set)
8992 {
8993 rtx dst = SET_DEST (set);
8994 if (MEM_P (dst))
8995 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8996 }
8997 /* If we deleted the store from a reloaded post_{in,de}c expression,
8998 we can delete the matching adds. */
8999 prev = PREV_INSN (dead_insn);
9000 next = NEXT_INSN (dead_insn);
9001 if (! prev || ! next)
9002 return;
9003 set = single_set (next);
9004 set2 = single_set (prev);
9005 if (! set || ! set2
9006 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
9007 || !CONST_INT_P (XEXP (SET_SRC (set), 1))
9008 || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
9009 return;
9010 dst = SET_DEST (set);
9011 if (! rtx_equal_p (dst, SET_DEST (set2))
9012 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
9013 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
9014 || (INTVAL (XEXP (SET_SRC (set), 1))
9015 != -INTVAL (XEXP (SET_SRC (set2), 1))))
9016 return;
9017 delete_related_insns (prev);
9018 delete_related_insns (next);
9019 }
9020
/* Subfunction of delete_address_reloads: process registers found in X.
   X is (part of) the address used by DEAD_INSN, which is about to be
   deleted; CURRENT_INSN is the insn being reloaded.  If a spill register
   in X was loaded only to feed DEAD_INSN, delete that load too,
   recursing on its source.  */
static void
delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
{
  rtx prev, set, dst, i2;
  int i, j;
  enum rtx_code code = GET_CODE (x);

  /* Not a register: recurse into all rtx operands and vectors.  */
  if (code != REG)
    {
      const char *fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
	  else if (fmt[i] == 'E')
	    {
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
					  current_insn);
	    }
	}
      return;
    }

  /* Only spill (reload) registers are candidates for deletion.  */
  if (spill_reg_order[REGNO (x)] < 0)
    return;

  /* Scan backwards for the insn that sets x.  This might be a way back due
     to inheritance.  */
  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
    {
      code = GET_CODE (prev);
      if (code == CODE_LABEL || code == JUMP_INSN)
	return;
      if (!INSN_P (prev))
	continue;
      if (reg_set_p (x, PATTERN (prev)))
	break;
      /* Any intervening use of X means the setter is still needed.  */
      if (reg_referenced_p (x, PATTERN (prev)))
	return;
    }
  /* Only consider insns emitted by this reload pass.  */
  if (! prev || INSN_UID (prev) < reload_first_uid)
    return;
  /* Check that PREV only sets the reload register.  */
  set = single_set (prev);
  if (! set)
    return;
  dst = SET_DEST (set);
  if (!REG_P (dst)
      || ! rtx_equal_p (dst, x))
    return;
  if (! reg_set_p (dst, PATTERN (dead_insn)))
    {
      /* Check if DST was used in a later insn -
	 it might have been inherited.  */
      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
	{
	  if (LABEL_P (i2))
	    break;
	  if (! INSN_P (i2))
	    continue;
	  if (reg_referenced_p (dst, PATTERN (i2)))
	    {
	      /* If there is a reference to the register in the current insn,
		 it might be loaded in a non-inherited reload.  If no other
		 reload uses it, that means the register is set before
		 referenced.  */
	      if (i2 == current_insn)
		{
		  for (j = n_reloads - 1; j >= 0; j--)
		    if ((rld[j].reg_rtx == dst && reload_inherited[j])
			|| reload_override_in[j] == dst)
		      return;
		  for (j = n_reloads - 1; j >= 0; j--)
		    if (rld[j].in && rld[j].reg_rtx == dst)
		      break;
		  if (j >= 0)
		    break;
		}
	      return;
	    }
	  if (JUMP_P (i2))
	    break;
	  /* If DST is still live at CURRENT_INSN, check if it is used for
	     any reload.  Note that even if CURRENT_INSN sets DST, we still
	     have to check the reloads.  */
	  if (i2 == current_insn)
	    {
	      for (j = n_reloads - 1; j >= 0; j--)
		if ((rld[j].reg_rtx == dst && reload_inherited[j])
		    || reload_override_in[j] == dst)
		  return;
	      /* ??? We can't finish the loop here, because dst might be
		 allocated to a pseudo in this block if no reload in this
		 block needs any of the classes containing DST - see
		 spill_hard_reg.  There is no easy way to tell this, so we
		 have to scan till the end of the basic block.  */
	    }
	  if (reg_set_p (dst, PATTERN (i2)))
	    break;
	}
    }
  /* PREV is dead: recurse into its source address, invalidate the
     inheritance record for DST and delete PREV itself.  */
  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
  reg_reloaded_contents[REGNO (dst)] = -1;
  delete_insn (prev);
}
9128 \f
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload from.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.

   Return the instruction that stores into RELOADREG.  */

static rtx
inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = find_replacement (&XEXP (value, 0));
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;
  rtx store;
  rtx real_in = in == value ? incloc : in;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (REG_P (incloc))
    reg_last_reload_reg[REGNO (incloc)] = 0;

  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      /* For {PRE,POST}_MODIFY the increment is the second operand of
	 the PLUS inside the rtx.  */
      gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
      inc = find_replacement (&XEXP (XEXP (value, 1), 1));
    }
  else
    {
      /* Plain {PRE,POST}_{INC,DEC}: negate for the decrement forms.  */
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = GEN_INT (inc_amount);
    }

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post && real_in != reloadreg)
    emit_insn (gen_move_insn (reloadreg, real_in));

  if (in == value)
    {
      /* See if we can directly increment INCLOC.  Use a method similar to
	 that in gen_reload.  */

      last = get_last_insn ();
      add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
					 gen_rtx_PLUS (GET_MODE (incloc),
						       incloc, inc)));

      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  extract_insn (add_insn);
	  if (constrain_operands (1))
	    {
	      /* If this is a pre-increment and we have incremented the value
		 where it lives, copy the incremented value to RELOADREG to
		 be used as an address.  */

	      if (! post)
		emit_insn (gen_move_insn (reloadreg, incloc));

	      return add_insn;
	    }
	}
      /* The direct add was not valid; undo it.  */
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      if (in != reloadreg)
	emit_insn (gen_move_insn (reloadreg, real_in));
      emit_insn (gen_add2_insn (reloadreg, inc));
      store = emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied IN to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      store = emit_insn (gen_move_insn (incloc, reloadreg));
      if (CONST_INT_P (inc))
	emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL (inc))));
      else
	emit_insn (gen_sub2_insn (reloadreg, inc));
    }

  return store;
}
9241 \f
9242 #ifdef AUTO_INC_DEC
9243 static void
9244 add_auto_inc_notes (rtx insn, rtx x)
9245 {
9246 enum rtx_code code = GET_CODE (x);
9247 const char *fmt;
9248 int i, j;
9249
9250 if (code == MEM && auto_inc_p (XEXP (x, 0)))
9251 {
9252 add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
9253 return;
9254 }
9255
9256 /* Scan all the operand sub-expressions. */
9257 fmt = GET_RTX_FORMAT (code);
9258 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9259 {
9260 if (fmt[i] == 'e')
9261 add_auto_inc_notes (insn, XEXP (x, i));
9262 else if (fmt[i] == 'E')
9263 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9264 add_auto_inc_notes (insn, XVECEXP (x, i, j));
9265 }
9266 }
9267 #endif
9268
/* This is used by reload pass, that does emit some instructions after
   abnormal calls moving basic block end, but in fact it wants to emit
   them on the edge.  Looks for abnormal call edges, find backward the
   proper call and fix the damage.

   Similar handle instructions throwing exceptions internally.  */
void
fixup_abnormal_edges (void)
{
  bool inserted = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      /* Look for cases we are interested in - calls or instructions causing
	 exceptions.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (e->flags & EDGE_ABNORMAL_CALL)
	    break;
	  if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
	      == (EDGE_ABNORMAL | EDGE_EH))
	    break;
	}
      /* BB has such an edge, but the block no longer ends in the call or
	 trapping insn - reload emitted insns after it.  */
      if (e && !CALL_P (BB_END (bb))
	  && !can_throw_internal (BB_END (bb)))
	{
	  rtx insn;

	  /* Get past the new insns generated.  Allow notes, as the insns
	     may be already deleted.  */
	  insn = BB_END (bb);
	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
		 && !can_throw_internal (insn)
		 && insn != BB_HEAD (bb))
	    insn = PREV_INSN (insn);

	  if (CALL_P (insn) || can_throw_internal (insn))
	    {
	      rtx stop, next;

	      /* Truncate the block right after the call/trapping insn and
		 move every real insn that followed it onto the fallthru
		 edge instead.  */
	      stop = NEXT_INSN (BB_END (bb));
	      BB_END (bb) = insn;
	      insn = NEXT_INSN (insn);

	      FOR_EACH_EDGE (e, ei, bb->succs)
		if (e->flags & EDGE_FALLTHRU)
		  break;

	      while (insn && insn != stop)
		{
		  next = NEXT_INSN (insn);
		  if (INSN_P (insn))
		    {
		      delete_insn (insn);

		      /* Sometimes there's still the return value USE.
			 If it's placed after a trapping call (i.e. that
			 call is the last insn anyway), we have no fallthru
			 edge.  Simply delete this use and don't try to insert
			 on the non-existent edge.  */
		      if (GET_CODE (PATTERN (insn)) != USE)
			{
			  /* We're not deleting it, we're moving it.  */
			  INSN_DELETED_P (insn) = 0;
			  PREV_INSN (insn) = NULL_RTX;
			  NEXT_INSN (insn) = NULL_RTX;

			  insert_insn_on_edge (insn, e);
			  inserted = true;
			}
		    }
		  else if (!BARRIER_P (insn))
		    set_block_for_insn (insn, NULL);
		  insn = next;
		}
	    }

	  /* It may be that we don't find any such trapping insn.  In this
	     case we discovered quite late that the insn that had been
	     marked as can_throw_internal in fact couldn't trap at all.
	     So we should in fact delete the EH edges out of the block.  */
	  else
	    purge_dead_edges (bb);
	}
    }

  /* We've possibly turned single trapping insn into multiple ones.  */
  if (cfun->can_throw_non_call_exceptions)
    {
      sbitmap blocks;
      blocks = sbitmap_alloc (last_basic_block);
      sbitmap_ones (blocks);
      find_many_sub_basic_blocks (blocks);
      sbitmap_free (blocks);
    }

  if (inserted)
    commit_edge_insertions ();

#ifdef ENABLE_CHECKING
  /* Verify that we didn't turn one trapping insn into many, and that
     we found and corrected all of the problems wrt fixups on the
     fallthru edge.  */
  verify_flow_info ();
#endif
}