1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24
25 #include "machmode.h"
26 #include "hard-reg-set.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "obstack.h"
30 #include "insn-config.h"
31 #include "flags.h"
32 #include "function.h"
33 #include "expr.h"
34 #include "optabs.h"
35 #include "regs.h"
36 #include "basic-block.h"
37 #include "reload.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "cselib.h"
41 #include "real.h"
42 #include "toplev.h"
43 #include "except.h"
44
45 /* This file contains the reload pass of the compiler, which is
46 run after register allocation has been done. It checks that
47 each insn is valid (operands required to be in registers really
48 are in registers of the proper class) and fixes up invalid ones
49 by copying values temporarily into registers for the insns
50 that need them.
51
52 The results of register allocation are described by the vector
53 reg_renumber; the insns still contain pseudo regs, but reg_renumber
54 can be used to find which hard reg, if any, a pseudo reg is in.
55
56 The technique we always use is to free up a few hard regs that are
57 called ``reload regs'', and for each place where a pseudo reg
58 must be in a hard reg, copy it temporarily into one of the reload regs.
59
60 Reload regs are allocated locally for every instruction that needs
61 reloads. When there are pseudos which are allocated to a register that
62 has been chosen as a reload reg, such pseudos must be ``spilled''.
63 This means that they go to other hard regs, or to stack slots if no other
64 available hard regs can be found. Spilling can invalidate more
65 insns, creating a need for additional reloads, so we must keep checking
66 until the process stabilizes.
67
68 For machines with different classes of registers, we must keep track
69 of the register class needed for each reload, and make sure that
70 we allocate enough reload registers of each class.
71
72 The file reload.c contains the code that checks one insn for
73 validity and reports the reloads that it needs. This file
74 is in charge of scanning the entire rtl code, accumulating the
75 reload needs, spilling, assigning reload registers to use for
76 fixing up each insn, and generating the new insns to copy values
77 into the reload registers. */
78
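/* A purely illustrative sketch (the pseudo number, hard register numbers
   and stack offset below are hypothetical, not taken from this file or
   from any particular target): suppose pseudo (reg:SI 105) did not get a
   hard register and lives in a stack slot at (plus (reg fp) (const_int -8)),
   and an insn needs it in a register:

       (set (reg:SI 2) (plus:SI (reg:SI 2) (reg:SI 105)))

   Reload chooses a reload register, say hard reg 3, emits an input reload
   in front of the insn and rewrites the insn to use it:

       (set (reg:SI 3) (mem:SI (plus:SI (reg fp) (const_int -8))))
       (set (reg:SI 2) (plus:SI (reg:SI 2) (reg:SI 3)))

   The rest of this file is concerned with choosing such reload registers,
   spilling the pseudos that occupied them, and emitting these moves.  */
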
79 #ifndef REGISTER_MOVE_COST
80 #define REGISTER_MOVE_COST(m, x, y) 2
81 #endif
82
83 #ifndef LOCAL_REGNO
84 #define LOCAL_REGNO(REGNO) 0
85 #endif
86 \f
87 /* During reload_as_needed, element N contains a REG rtx for the hard reg
88 into which reg N has been reloaded (perhaps for a previous insn). */
89 static rtx *reg_last_reload_reg;
90
91 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
92 for an output reload that stores into reg N. */
93 static char *reg_has_output_reload;
94
95 /* Indicates which hard regs are reload-registers for an output reload
96 in the current insn. */
97 static HARD_REG_SET reg_is_output_reload;
98
99 /* Element N is the constant value to which pseudo reg N is equivalent,
100 or zero if pseudo reg N is not equivalent to a constant.
101 find_reloads looks at this in order to replace pseudo reg N
102 with the constant it stands for. */
103 rtx *reg_equiv_constant;
104
105 /* Element N is a memory location to which pseudo reg N is equivalent,
106 prior to any register elimination (such as frame pointer to stack
107 pointer). Depending on whether or not it is a valid address, this value
108 is transferred to either reg_equiv_address or reg_equiv_mem. */
109 rtx *reg_equiv_memory_loc;
110
111 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
112 This is used when the address is not valid as a memory address
113 (because its displacement is too big for the machine).
114 rtx *reg_equiv_address;
115
116 /* Element N is the memory slot to which pseudo reg N is equivalent,
117 or zero if pseudo reg N is not equivalent to a memory slot. */
118 rtx *reg_equiv_mem;
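
/* To illustrate how the reg_equiv_* arrays above relate (the pseudo
   number and offset are hypothetical): if pseudo 200 is equivalent to a
   stack slot, reg_equiv_memory_loc[200] might be
   (mem:SI (plus (reg fp) (const_int -16))).  Once the eliminable registers
   in that address have been replaced, the resulting MEM goes into
   reg_equiv_mem[200] if its address is valid; otherwise only the address
   is kept, in reg_equiv_address[200], and reload computes it into a
   register at each use.  */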
119
120 /* Widest width in which each pseudo reg is referred to (via subreg). */
121 static unsigned int *reg_max_ref_width;
122
123 /* Element N is the list of insns that initialized reg N from its equivalent
124 constant or memory slot. */
125 static rtx *reg_equiv_init;
126
127 /* Vector to remember old contents of reg_renumber before spilling. */
128 static short *reg_old_renumber;
129
130 /* During reload_as_needed, element N contains the last pseudo regno reloaded
131 into hard register N. If that pseudo reg occupied more than one register,
132 reg_reloaded_contents points to that pseudo for each spill register in
133 use; all of these must remain set for an inheritance to occur. */
134 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
135
136 /* During reload_as_needed, element N contains the insn for which
137 hard register N was last used. Its contents are significant only
138 when reg_reloaded_valid is set for this register. */
139 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
140
141 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
142 static HARD_REG_SET reg_reloaded_valid;
143 /* Indicate if the register was dead at the end of the reload.
144 This is only valid if reg_reloaded_contents is set and valid. */
145 static HARD_REG_SET reg_reloaded_dead;
146
147 /* Number of spill-regs so far; number of valid elements of spill_regs. */
148 static int n_spills;
149
150 /* In parallel with spill_regs, contains REG rtx's for those regs.
151 Holds the last rtx used for any given reg, or 0 if it has never
152 been used for spilling yet. This rtx is reused, provided it has
153 the proper mode. */
154 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
155
156 /* In parallel with spill_regs, contains nonzero for a spill reg
157 that was stored after the last time it was used.
158 The precise value is the insn generated to do the store. */
159 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
160
161 /* This is the register that was stored with spill_reg_store. This is a
162 copy of reload_out / reload_out_reg when the value was stored; if
163 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
164 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
165
166 /* This table is the inverse mapping of spill_regs:
167 indexed by hard reg number,
168 it contains the position of that reg in spill_regs,
169 or -1 for something that is not in spill_regs.
170
171 ?!? This is no longer accurate. */
172 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
173
174 /* This reg set indicates registers that can't be used as spill registers for
175 the currently processed insn. These are the hard registers which are live
176 during the insn, but not allocated to pseudos, as well as fixed
177 registers. */
178 static HARD_REG_SET bad_spill_regs;
179
180 /* These are the hard registers that can't be used as spill register for any
181 insn. This includes registers used for user variables and registers that
182 we can't eliminate. A register that appears in this set also can't be used
183 to retry register allocation. */
184 static HARD_REG_SET bad_spill_regs_global;
185
186 /* Describes order of use of registers for reloading
187 of spilled pseudo-registers. `n_spills' is the number of
188 elements that are actually valid; new ones are added at the end.
189
190 Both spill_regs and spill_reg_order are used on two occasions:
191 once during find_reload_regs, where they keep track of the spill registers
192 for a single insn, and again during reload_as_needed, where they show all
193 the registers ever used by reload. For the latter case, the information
194 is calculated during finish_spills. */
195 static short spill_regs[FIRST_PSEUDO_REGISTER];
196
197 /* This vector of reg sets indicates, for each pseudo, which hard registers
198 may not be used for retrying global allocation because the register was
199 formerly spilled from one of them. If we allowed reallocating a pseudo to
200 a register that it was already allocated to, reload might not
201 terminate. */
202 static HARD_REG_SET *pseudo_previous_regs;
203
204 /* This vector of reg sets indicates, for each pseudo, which hard
205 registers may not be used for retrying global allocation because they
206 are used as spill registers during one of the insns in which the
207 pseudo is live. */
208 static HARD_REG_SET *pseudo_forbidden_regs;
209
210 /* All hard regs that have been used as spill registers for any insn are
211 marked in this set. */
212 static HARD_REG_SET used_spill_regs;
213
214 /* Index of last register assigned as a spill register. We allocate in
215 a round-robin fashion. */
216 static int last_spill_reg;
217
218 /* Nonzero if indirect addressing is supported on the machine; this means
219 that spilling (REG n) does not require reloading it into a register in
220 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
221 value indicates the level of indirect addressing supported, e.g., two
222 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
223 a hard register. */
224 static char spill_indirect_levels;
225
226 /* Nonzero if indirect addressing is supported when the innermost MEM is
227 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
228 which these are valid is the same as spill_indirect_levels, above. */
229 char indirect_symref_ok;
230
231 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
232 char double_reg_address_ok;
233
234 /* Record the stack slot for each spilled hard register. */
235 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
236
237 /* Width allocated so far for that stack slot. */
238 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
239
240 /* Record which pseudos needed to be spilled. */
241 static regset_head spilled_pseudos;
242
243 /* Used for communication between order_regs_for_reload and count_pseudo.
244 Used to avoid counting one pseudo twice. */
245 static regset_head pseudos_counted;
246
247 /* First uid used by insns created by reload in this function.
248 Used in find_equiv_reg. */
249 int reload_first_uid;
250
251 /* Flag set by local-alloc or global-alloc if anything is live in
252 a call-clobbered reg across calls. */
253 int caller_save_needed;
254
255 /* Set to 1 while reload_as_needed is operating.
256 Required by some machines to handle any generated moves differently. */
257 int reload_in_progress = 0;
258
259 /* These arrays record the insn_code of insns that may be needed to
260 perform input and output reloads of special objects. They provide a
261 place to pass a scratch register. */
262 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
263 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
264
265 /* This obstack is used for allocation of rtl during register elimination.
266 The allocated storage can be freed once find_reloads has processed the
267 insn. */
268 struct obstack reload_obstack;
269
270 /* Points to the beginning of the reload_obstack. All insn_chain structures
271 are allocated first. */
272 char *reload_startobj;
273
274 /* The point after all insn_chain structures. Used to quickly deallocate
275 memory allocated in copy_reloads during calculate_needs_all_insns. */
276 char *reload_firstobj;
277
278 /* This points before all local rtl generated by register elimination.
279 Used to quickly free all memory after processing one insn. */
280 static char *reload_insn_firstobj;
281
282 #define obstack_chunk_alloc xmalloc
283 #define obstack_chunk_free free
284
285 /* List of insn_chain instructions, one for every insn that reload needs to
286 examine. */
287 struct insn_chain *reload_insn_chain;
288
289 #ifdef TREE_CODE
290 extern tree current_function_decl;
291 #else
292 extern union tree_node *current_function_decl;
293 #endif
294
295 /* List of all insns needing reloads. */
296 static struct insn_chain *insns_need_reload;
297 \f
298 /* This structure is used to record information about register eliminations.
299 Each array entry describes one possible way of eliminating a register
300 in favor of another. If there is more than one way of eliminating a
301 particular register, the most preferred should be specified first. */
302
303 struct elim_table
304 {
305 int from; /* Register number to be eliminated. */
306 int to; /* Register number used as replacement. */
307 int initial_offset; /* Initial difference between values. */
308 int can_eliminate; /* Non-zero if this elimination can be done. */
309 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
310 insns made by reload. */
311 int offset; /* Current offset between the two regs. */
312 int previous_offset; /* Offset at end of previous insn. */
313 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
314 rtx from_rtx; /* REG rtx for the register to be eliminated.
315 We cannot simply compare the number since
316 we might then spuriously replace a hard
317 register corresponding to a pseudo
318 assigned to the reg to be eliminated. */
319 rtx to_rtx; /* REG rtx for the replacement. */
320 };
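
/* A hypothetical example of how an entry is used (the offset is made up):
   with the default FRAME_POINTER_REGNUM -> STACK_POINTER_REGNUM entry and
   a current frame-pointer-to-stack-pointer offset of 16, elimination
   rewrites a reference such as

       (mem:SI (plus:SI (reg fp) (const_int 8)))

   into

       (mem:SI (plus:SI (reg sp) (const_int 24)))

   using the offset field tracked in this structure.  */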
321
322 static struct elim_table *reg_eliminate = 0;
323
324 /* This is an intermediate structure to initialize the table. It has
325 exactly the members provided by ELIMINABLE_REGS. */
326 static struct elim_table_1
327 {
328 int from;
329 int to;
330 } reg_eliminate_1[] =
331
332 /* If a set of eliminable registers was specified, define the table from it.
333 Otherwise, default to the normal case of the frame pointer being
334 replaced by the stack pointer. */
335
336 #ifdef ELIMINABLE_REGS
337 ELIMINABLE_REGS;
338 #else
339 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
340 #endif
341
342 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
343
344 /* Record the number of pending eliminations that have an offset not equal
345 to their initial offset. If non-zero, we use a new copy of each
346 replacement result in any insns encountered. */
347 int num_not_at_initial_offset;
348
349 /* Count the number of registers that we may be able to eliminate. */
350 static int num_eliminable;
351 /* And the number of registers that are equivalent to a constant that
352 can be eliminated to frame_pointer / arg_pointer + constant. */
353 static int num_eliminable_invariants;
354
355 /* For each label, we record the offset of each elimination. If we reach
356 a label by more than one path and an offset differs, we cannot do the
357 elimination. This information is indexed by the number of the label.
358 The first table is an array of flags that records whether we have yet
359 encountered a label and the second table is an array of arrays, one
360 entry in the latter array for each elimination. */
361
362 static char *offsets_known_at;
363 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
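
/* For instance, offsets_at[L][i] holds the offset of the i-th entry of
   reg_eliminate at the label whose number is L, and offsets_known_at[L]
   is nonzero once that label has been reached on some path and its
   offsets recorded.  (L is the label number; the tables are biased by
   get_first_label_num () when they are allocated.)  */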
364
365 /* Number of labels in the current function. */
366
367 static int num_labels;
368 \f
369 static void replace_pseudos_in_call_usage PARAMS((rtx *,
370 enum machine_mode,
371 rtx));
372 static void maybe_fix_stack_asms PARAMS ((void));
373 static void copy_reloads PARAMS ((struct insn_chain *));
374 static void calculate_needs_all_insns PARAMS ((int));
375 static int find_reg PARAMS ((struct insn_chain *, int));
376 static void find_reload_regs PARAMS ((struct insn_chain *));
377 static void select_reload_regs PARAMS ((void));
378 static void delete_caller_save_insns PARAMS ((void));
379
380 static void spill_failure PARAMS ((rtx, enum reg_class));
381 static void count_spilled_pseudo PARAMS ((int, int, int));
382 static void delete_dead_insn PARAMS ((rtx));
383 static void alter_reg PARAMS ((int, int));
384 static void set_label_offsets PARAMS ((rtx, rtx, int));
385 static void check_eliminable_occurrences PARAMS ((rtx));
386 static void elimination_effects PARAMS ((rtx, enum machine_mode));
387 static int eliminate_regs_in_insn PARAMS ((rtx, int));
388 static void update_eliminable_offsets PARAMS ((void));
389 static void mark_not_eliminable PARAMS ((rtx, rtx, void *));
390 static void set_initial_elim_offsets PARAMS ((void));
391 static void verify_initial_elim_offsets PARAMS ((void));
392 static void set_initial_label_offsets PARAMS ((void));
393 static void set_offsets_for_label PARAMS ((rtx));
394 static void init_elim_table PARAMS ((void));
395 static void update_eliminables PARAMS ((HARD_REG_SET *));
396 static void spill_hard_reg PARAMS ((unsigned int, int));
397 static int finish_spills PARAMS ((int));
398 static void ior_hard_reg_set PARAMS ((HARD_REG_SET *, HARD_REG_SET *));
399 static void scan_paradoxical_subregs PARAMS ((rtx));
400 static void count_pseudo PARAMS ((int));
401 static void order_regs_for_reload PARAMS ((struct insn_chain *));
402 static void reload_as_needed PARAMS ((int));
403 static void forget_old_reloads_1 PARAMS ((rtx, rtx, void *));
404 static int reload_reg_class_lower PARAMS ((const PTR, const PTR));
405 static void mark_reload_reg_in_use PARAMS ((unsigned int, int,
406 enum reload_type,
407 enum machine_mode));
408 static void clear_reload_reg_in_use PARAMS ((unsigned int, int,
409 enum reload_type,
410 enum machine_mode));
411 static int reload_reg_free_p PARAMS ((unsigned int, int,
412 enum reload_type));
413 static int reload_reg_free_for_value_p PARAMS ((int, int, int,
414 enum reload_type,
415 rtx, rtx, int, int));
416 static int free_for_value_p PARAMS ((int, enum machine_mode, int,
417 enum reload_type, rtx, rtx,
418 int, int));
419 static int reload_reg_reaches_end_p PARAMS ((unsigned int, int,
420 enum reload_type));
421 static int allocate_reload_reg PARAMS ((struct insn_chain *, int,
422 int));
423 static int conflicts_with_override PARAMS ((rtx));
424 static void failed_reload PARAMS ((rtx, int));
425 static int set_reload_reg PARAMS ((int, int));
426 static void choose_reload_regs_init PARAMS ((struct insn_chain *, rtx *));
427 static void choose_reload_regs PARAMS ((struct insn_chain *));
428 static void merge_assigned_reloads PARAMS ((rtx));
429 static void emit_input_reload_insns PARAMS ((struct insn_chain *,
430 struct reload *, rtx, int));
431 static void emit_output_reload_insns PARAMS ((struct insn_chain *,
432 struct reload *, int));
433 static void do_input_reload PARAMS ((struct insn_chain *,
434 struct reload *, int));
435 static void do_output_reload PARAMS ((struct insn_chain *,
436 struct reload *, int));
437 static void emit_reload_insns PARAMS ((struct insn_chain *));
438 static void delete_output_reload PARAMS ((rtx, int, int));
439 static void delete_address_reloads PARAMS ((rtx, rtx));
440 static void delete_address_reloads_1 PARAMS ((rtx, rtx, rtx));
441 static rtx inc_for_reload PARAMS ((rtx, rtx, rtx, int));
442 static int constraint_accepts_reg_p PARAMS ((const char *, rtx));
443 static void reload_cse_regs_1 PARAMS ((rtx));
444 static int reload_cse_noop_set_p PARAMS ((rtx));
445 static int reload_cse_simplify_set PARAMS ((rtx, rtx));
446 static int reload_cse_simplify_operands PARAMS ((rtx));
447 static void reload_combine PARAMS ((void));
448 static void reload_combine_note_use PARAMS ((rtx *, rtx));
449 static void reload_combine_note_store PARAMS ((rtx, rtx, void *));
450 static void reload_cse_move2add PARAMS ((rtx));
451 static void move2add_note_store PARAMS ((rtx, rtx, void *));
452 #ifdef AUTO_INC_DEC
453 static void add_auto_inc_notes PARAMS ((rtx, rtx));
454 #endif
455 static void copy_eh_notes PARAMS ((rtx, rtx));
456 static HOST_WIDE_INT sext_for_mode PARAMS ((enum machine_mode,
457 HOST_WIDE_INT));
460 static void reload_cse_delete_noop_set PARAMS ((rtx, rtx));
461 static void reload_cse_simplify PARAMS ((rtx));
462 static void fixup_abnormal_edges PARAMS ((void));
463 extern void dump_needs PARAMS ((struct insn_chain *));
464 \f
465 /* Initialize the reload pass once per compilation. */
466
467 void
468 init_reload ()
469 {
470 register int i;
471
472 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
473 Set spill_indirect_levels to the number of levels such addressing is
474 permitted, zero if it is not permitted at all. */
475
476 register rtx tem
477 = gen_rtx_MEM (Pmode,
478 gen_rtx_PLUS (Pmode,
479 gen_rtx_REG (Pmode,
480 LAST_VIRTUAL_REGISTER + 1),
481 GEN_INT (4)));
482 spill_indirect_levels = 0;
483
484 while (memory_address_p (QImode, tem))
485 {
486 spill_indirect_levels++;
487 tem = gen_rtx_MEM (Pmode, tem);
488 }
489
490 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
491
492 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
493 indirect_symref_ok = memory_address_p (QImode, tem);
494
495 /* See if reg+reg is a valid (and offsettable) address. */
496
497 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
498 {
499 tem = gen_rtx_PLUS (Pmode,
500 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
501 gen_rtx_REG (Pmode, i));
502
503 /* This way, we make sure that reg+reg is an offsettable address. */
504 tem = plus_constant (tem, 4);
505
506 if (memory_address_p (QImode, tem))
507 {
508 double_reg_address_ok = 1;
509 break;
510 }
511 }
512
513 /* Initialize obstack for our rtl allocation. */
514 gcc_obstack_init (&reload_obstack);
515 reload_startobj = (char *) obstack_alloc (&reload_obstack, 0);
516
517 INIT_REG_SET (&spilled_pseudos);
518 INIT_REG_SET (&pseudos_counted);
519 }
520
521 /* List of insn chains that are currently unused. */
522 static struct insn_chain *unused_insn_chains = 0;
523
524 /* Allocate an empty insn_chain structure. */
525 struct insn_chain *
526 new_insn_chain ()
527 {
528 struct insn_chain *c;
529
530 if (unused_insn_chains == 0)
531 {
532 c = (struct insn_chain *)
533 obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
534 INIT_REG_SET (&c->live_throughout);
535 INIT_REG_SET (&c->dead_or_set);
536 }
537 else
538 {
539 c = unused_insn_chains;
540 unused_insn_chains = c->next;
541 }
542 c->is_caller_save_insn = 0;
543 c->need_operand_change = 0;
544 c->need_reload = 0;
545 c->need_elim = 0;
546 return c;
547 }
548
549 /* Small utility function to set all regs in hard reg set TO which are
550 allocated to pseudos in regset FROM. */
551
552 void
553 compute_use_by_pseudos (to, from)
554 HARD_REG_SET *to;
555 regset from;
556 {
557 unsigned int regno;
558
559 EXECUTE_IF_SET_IN_REG_SET
560 (from, FIRST_PSEUDO_REGISTER, regno,
561 {
562 int r = reg_renumber[regno];
563 int nregs;
564
565 if (r < 0)
566 {
567 /* reload_combine uses the information from
568 BASIC_BLOCK->global_live_at_start, which might still
569 contain registers that have not actually been allocated
570 since they have an equivalence. */
571 if (! reload_completed)
572 abort ();
573 }
574 else
575 {
576 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (regno));
577 while (nregs-- > 0)
578 SET_HARD_REG_BIT (*to, r + nregs);
579 }
580 });
581 }
582
583 /* Replace all pseudos found in LOC with their corresponding
584 equivalences. */
585
586 static void
587 replace_pseudos_in_call_usage (loc, mem_mode, usage)
588 rtx *loc;
589 enum machine_mode mem_mode;
590 rtx usage;
591 {
592 rtx x = *loc;
593 enum rtx_code code;
594 const char *fmt;
595 int i, j;
596
597 if (! x)
598 return;
599
600 code = GET_CODE (x);
601 if (code == REG)
602 {
603 unsigned int regno = REGNO (x);
604
605 if (regno < FIRST_PSEUDO_REGISTER)
606 return;
607
608 x = eliminate_regs (x, mem_mode, usage);
609 if (x != *loc)
610 {
611 *loc = x;
612 replace_pseudos_in_call_usage (loc, mem_mode, usage);
613 return;
614 }
615
616 if (reg_equiv_constant[regno])
617 *loc = reg_equiv_constant[regno];
618 else if (reg_equiv_mem[regno])
619 *loc = reg_equiv_mem[regno];
620 else if (reg_equiv_address[regno])
621 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
622 else if (GET_CODE (regno_reg_rtx[regno]) != REG
623 || REGNO (regno_reg_rtx[regno]) != regno)
624 *loc = regno_reg_rtx[regno];
625 else
626 abort ();
627
628 return;
629 }
630 else if (code == MEM)
631 {
632 replace_pseudos_in_call_usage (& XEXP (x, 0), GET_MODE (x), usage);
633 return;
634 }
635
636 /* Process each of our operands recursively. */
637 fmt = GET_RTX_FORMAT (code);
638 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
639 if (*fmt == 'e')
640 replace_pseudos_in_call_usage (&XEXP (x, i), mem_mode, usage);
641 else if (*fmt == 'E')
642 for (j = 0; j < XVECLEN (x, i); j++)
643 replace_pseudos_in_call_usage (& XVECEXP (x, i, j), mem_mode, usage);
644 }
645
646 \f
647 /* Global variables used by reload and its subroutines. */
648
649 /* Set during calculate_needs if an insn needs register elimination. */
650 static int something_needs_elimination;
651 /* Set during calculate_needs if an insn needs an operand changed. */
652 int something_needs_operands_changed;
653
654 /* Nonzero means we couldn't get enough spill regs. */
655 static int failure;
656
657 /* Main entry point for the reload pass.
658
659 FIRST is the first insn of the function being compiled.
660
661 GLOBAL nonzero means we were called from global_alloc
662 and should attempt to reallocate any pseudoregs that we
663 displace from hard regs we will use for reloads.
664 If GLOBAL is zero, we do not have enough information to do that,
665 so any pseudo reg that is spilled must go to the stack.
666
667 Return value is nonzero if reload failed
668 and we must not do any more for this function. */
669
670 int
671 reload (first, global)
672 rtx first;
673 int global;
674 {
675 register int i;
676 register rtx insn;
677 register struct elim_table *ep;
678
679 /* The two pointers used to track the true location of the memory used
680 for label offsets. */
681 char *real_known_ptr = NULL;
682 int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
683
684 /* Make sure even insns with volatile mem refs are recognizable. */
685 init_recog ();
686
687 failure = 0;
688
689 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
690
691 /* Make sure that the last insn in the chain
692 is not something that needs reloading. */
693 emit_note (NULL, NOTE_INSN_DELETED);
694
695 /* Enable find_equiv_reg to distinguish insns made by reload. */
696 reload_first_uid = get_max_uid ();
697
698 #ifdef SECONDARY_MEMORY_NEEDED
699 /* Initialize the secondary memory table. */
700 clear_secondary_mem ();
701 #endif
702
703 /* We don't have a stack slot for any spill reg yet. */
704 memset ((char *) spill_stack_slot, 0, sizeof spill_stack_slot);
705 memset ((char *) spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
706
707 /* Initialize the save area information for caller-save, in case some
708 are needed. */
709 init_save_areas ();
710
711 /* Compute which hard registers are now in use
712 as homes for pseudo registers.
713 This is done here rather than (eg) in global_alloc
714 because this point is reached even if not optimizing. */
715 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
716 mark_home_live (i);
717
718 /* A function that receives a nonlocal goto must save all call-saved
719 registers. */
720 if (current_function_has_nonlocal_label)
721 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
722 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
723 regs_ever_live[i] = 1;
724
725 /* Find all the pseudo registers that didn't get hard regs
726 but do have known equivalent constants or memory slots.
727 These include parameters (known equivalent to parameter slots)
728 and cse'd or loop-moved constant memory addresses.
729
730 Record constant equivalents in reg_equiv_constant
731 so they will be substituted by find_reloads.
732 Record memory equivalents in reg_equiv_memory_loc so they can
733 be substituted eventually by altering the REG-rtx's. */
734
735 reg_equiv_constant = (rtx *) xcalloc (max_regno, sizeof (rtx));
736 reg_equiv_memory_loc = (rtx *) xcalloc (max_regno, sizeof (rtx));
737 reg_equiv_mem = (rtx *) xcalloc (max_regno, sizeof (rtx));
738 reg_equiv_init = (rtx *) xcalloc (max_regno, sizeof (rtx));
739 reg_equiv_address = (rtx *) xcalloc (max_regno, sizeof (rtx));
740 reg_max_ref_width = (unsigned int *) xcalloc (max_regno, sizeof (int));
741 reg_old_renumber = (short *) xcalloc (max_regno, sizeof (short));
742 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
743 pseudo_forbidden_regs
744 = (HARD_REG_SET *) xmalloc (max_regno * sizeof (HARD_REG_SET));
745 pseudo_previous_regs
746 = (HARD_REG_SET *) xcalloc (max_regno, sizeof (HARD_REG_SET));
747
748 CLEAR_HARD_REG_SET (bad_spill_regs_global);
749
750 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
751 Also find all paradoxical subregs and find largest such for each pseudo.
752 On machines with small register classes, record hard registers that
753 are used for user variables. These can never be used for spills.
754 Also look for a "constant" REG_SETJMP. This means that all
755 caller-saved registers must be marked live. */
756
757 num_eliminable_invariants = 0;
758 for (insn = first; insn; insn = NEXT_INSN (insn))
759 {
760 rtx set = single_set (insn);
761
762 if (GET_CODE (insn) == CALL_INSN
763 && find_reg_note (insn, REG_SETJMP, NULL))
764 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
765 if (! call_used_regs[i])
766 regs_ever_live[i] = 1;
767
768 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
769 {
770 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
771 if (note
772 #ifdef LEGITIMATE_PIC_OPERAND_P
773 && (! function_invariant_p (XEXP (note, 0))
774 || ! flag_pic
775 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
776 #endif
777 )
778 {
779 rtx x = XEXP (note, 0);
780 i = REGNO (SET_DEST (set));
781 if (i > LAST_VIRTUAL_REGISTER)
782 {
783 if (GET_CODE (x) == MEM)
784 {
785 /* Always unshare the equivalence, so we can
786 substitute into this insn without touching the
787 equivalence. */
788 reg_equiv_memory_loc[i] = copy_rtx (x);
789 }
790 else if (function_invariant_p (x))
791 {
792 if (GET_CODE (x) == PLUS)
793 {
794 /* This is PLUS of frame pointer and a constant,
795 and might be shared. Unshare it. */
796 reg_equiv_constant[i] = copy_rtx (x);
797 num_eliminable_invariants++;
798 }
799 else if (x == frame_pointer_rtx
800 || x == arg_pointer_rtx)
801 {
802 reg_equiv_constant[i] = x;
803 num_eliminable_invariants++;
804 }
805 else if (LEGITIMATE_CONSTANT_P (x))
806 reg_equiv_constant[i] = x;
807 else
808 reg_equiv_memory_loc[i]
809 = force_const_mem (GET_MODE (SET_DEST (set)), x);
810 }
811 else
812 continue;
813
814 /* If this register is being made equivalent to a MEM
815 and the MEM is not SET_SRC, the equivalencing insn
816 is one with the MEM as a SET_DEST and it occurs later.
817 So don't mark this insn now. */
818 if (GET_CODE (x) != MEM
819 || rtx_equal_p (SET_SRC (set), x))
820 reg_equiv_init[i]
821 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[i]);
822 }
823 }
824 }
825
826 /* If this insn is setting a MEM from a register equivalent to it,
827 this is the equivalencing insn. */
828 else if (set && GET_CODE (SET_DEST (set)) == MEM
829 && GET_CODE (SET_SRC (set)) == REG
830 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
831 && rtx_equal_p (SET_DEST (set),
832 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
833 reg_equiv_init[REGNO (SET_SRC (set))]
834 = gen_rtx_INSN_LIST (VOIDmode, insn,
835 reg_equiv_init[REGNO (SET_SRC (set))]);
836
837 if (INSN_P (insn))
838 scan_paradoxical_subregs (PATTERN (insn));
839 }
840
841 init_elim_table ();
842
843 num_labels = max_label_num () - get_first_label_num ();
844
845 /* Allocate the tables used to store offset information at labels. */
846 /* We used to use alloca here, but the size of what it would try to
847 allocate would occasionally cause it to exceed the stack limit and
848 cause a core dump. */
849 real_known_ptr = xmalloc (num_labels);
850 real_at_ptr
851 = (int (*)[NUM_ELIMINABLE_REGS])
852 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
853
854 offsets_known_at = real_known_ptr - get_first_label_num ();
855 offsets_at
856 = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());
857
858 /* Alter each pseudo-reg rtx to contain its hard reg number.
859 Assign stack slots to the pseudos that lack hard regs or equivalents.
860 Do not touch virtual registers. */
861
862 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
863 alter_reg (i, -1);
864
865 /* If we have some registers we think can be eliminated, scan all insns to
866 see if there is an insn that sets one of these registers to something
867 other than itself plus a constant. If so, the register cannot be
868 eliminated. Doing this scan here eliminates an extra pass through the
869 main reload loop in the most common case where register elimination
870 cannot be done. */
871 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
872 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
873 || GET_CODE (insn) == CALL_INSN)
874 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
875
876 maybe_fix_stack_asms ();
877
878 insns_need_reload = 0;
879 something_needs_elimination = 0;
880
881 /* Initialize to -1, which means take the first spill register. */
882 last_spill_reg = -1;
883
884 /* Spill any hard regs that we know we can't eliminate. */
885 CLEAR_HARD_REG_SET (used_spill_regs);
886 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
887 if (! ep->can_eliminate)
888 spill_hard_reg (ep->from, 1);
889
890 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
891 if (frame_pointer_needed)
892 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
893 #endif
894 finish_spills (global);
895
896 /* From now on, we may need to generate moves differently. We may also
897 allow modifications of insns which cause them to not be recognized.
898 Any such modifications will be cleaned up during reload itself. */
899 reload_in_progress = 1;
900
901 /* This loop scans the entire function each go-round
902 and repeats until one repetition spills no additional hard regs. */
903 for (;;)
904 {
905 int something_changed;
906 int did_spill;
907
908 HOST_WIDE_INT starting_frame_size;
909
910 /* Round size of stack frame to stack_alignment_needed. This must be done
911 here because the stack size may be a part of the offset computation
912 for register elimination, and there might have been new stack slots
913 created in the last iteration of this loop. */
914 if (cfun->stack_alignment_needed)
915 assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
916
917 starting_frame_size = get_frame_size ();
918
919 set_initial_elim_offsets ();
920 set_initial_label_offsets ();
921
922 /* For each pseudo register that has an equivalent location defined,
923 try to eliminate any eliminable registers (such as the frame pointer)
924 assuming initial offsets for the replacement register, which
925 is the normal case.
926
927 If the resulting location is directly addressable, substitute
928 the MEM we just got directly for the old REG.
929
930 If it is not addressable but is a constant or the sum of a hard reg
931 and constant, it is probably not addressable because the constant is
932 out of range.  In that case record the address; we will generate
933 hairy code to compute the address in a register each time it is
934 needed. Similarly if it is a hard register, but one that is not
935 valid as an address register.
936
937 If the location is not addressable, but does not have one of the
938 above forms, assign a stack slot. We have to do this to avoid the
939 potential of producing lots of reloads if, e.g., a location involves
940 a pseudo that didn't get a hard register and has an equivalent memory
941 location that also involves a pseudo that didn't get a hard register.
942
943 Perhaps at some point we will improve reload_when_needed handling
944 so this problem goes away. But that's very hairy. */
945
946 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
947 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
948 {
949 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
950
951 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
952 XEXP (x, 0)))
953 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
954 else if (CONSTANT_P (XEXP (x, 0))
955 || (GET_CODE (XEXP (x, 0)) == REG
956 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
957 || (GET_CODE (XEXP (x, 0)) == PLUS
958 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
959 && (REGNO (XEXP (XEXP (x, 0), 0))
960 < FIRST_PSEUDO_REGISTER)
961 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
962 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
963 else
964 {
965 /* Make a new stack slot. Then indicate that something
966 changed so we go back and recompute offsets for
967 eliminable registers because the allocation of memory
968 below might change some offset. reg_equiv_{mem,address}
969 will be set up for this pseudo on the next pass around
970 the loop. */
971 reg_equiv_memory_loc[i] = 0;
972 reg_equiv_init[i] = 0;
973 alter_reg (i, -1);
974 }
975 }
976
977 if (caller_save_needed)
978 setup_save_areas ();
979
980 /* If we allocated another stack slot, redo elimination bookkeeping. */
981 if (starting_frame_size != get_frame_size ())
982 continue;
983
984 if (caller_save_needed)
985 {
986 save_call_clobbered_regs ();
987 /* That might have allocated new insn_chain structures. */
988 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
989 }
990
991 calculate_needs_all_insns (global);
992
993 CLEAR_REG_SET (&spilled_pseudos);
994 did_spill = 0;
995
996 something_changed = 0;
997
998 /* If we allocated any new memory locations, make another pass
999 since it might have changed elimination offsets. */
1000 if (starting_frame_size != get_frame_size ())
1001 something_changed = 1;
1002
1003 {
1004 HARD_REG_SET to_spill;
1005 CLEAR_HARD_REG_SET (to_spill);
1006 update_eliminables (&to_spill);
1007 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1008 if (TEST_HARD_REG_BIT (to_spill, i))
1009 {
1010 spill_hard_reg (i, 1);
1011 did_spill = 1;
1012
1013 /* Regardless of the state of spills, if we previously had
1014 a register that we thought we could eliminate, but now cannot
1015 eliminate, we must run another pass.
1016
1017 Consider pseudos which have an entry in reg_equiv_* which
1018 reference an eliminable register. We must make another pass
1019 to update reg_equiv_* so that we do not substitute in the
1020 old value from when we thought the elimination could be
1021 performed. */
1022 something_changed = 1;
1023 }
1024 }
1025
1026 select_reload_regs ();
1027 if (failure)
1028 goto failed;
1029
1030 if (insns_need_reload != 0 || did_spill)
1031 something_changed |= finish_spills (global);
1032
1033 if (! something_changed)
1034 break;
1035
1036 if (caller_save_needed)
1037 delete_caller_save_insns ();
1038
1039 obstack_free (&reload_obstack, reload_firstobj);
1040 }
1041
1042 /* If global-alloc was run, notify it of any register eliminations we have
1043 done. */
1044 if (global)
1045 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1046 if (ep->can_eliminate)
1047 mark_elimination (ep->from, ep->to);
1048
1049 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1050 If that insn didn't set the register (i.e., it copied the register to
1051 memory), just delete that insn instead of the equivalencing insn plus
1052 anything now dead. If we call delete_dead_insn on that insn, we may
1053 delete the insn that actually sets the register if the register dies
1054 there and that is incorrect. */
1055
1056 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1057 {
1058 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1059 {
1060 rtx list;
1061 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1062 {
1063 rtx equiv_insn = XEXP (list, 0);
1064 if (GET_CODE (equiv_insn) == NOTE)
1065 continue;
1066 if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1067 delete_dead_insn (equiv_insn);
1068 else
1069 {
1070 PUT_CODE (equiv_insn, NOTE);
1071 NOTE_SOURCE_FILE (equiv_insn) = 0;
1072 NOTE_LINE_NUMBER (equiv_insn) = NOTE_INSN_DELETED;
1073 }
1074 }
1075 }
1076 }
1077
1078 /* Use the reload registers where necessary
1079 by generating move instructions to move the must-be-register
1080 values into or out of the reload registers. */
1081
1082 if (insns_need_reload != 0 || something_needs_elimination
1083 || something_needs_operands_changed)
1084 {
1085 HOST_WIDE_INT old_frame_size = get_frame_size ();
1086
1087 reload_as_needed (global);
1088
1089 if (old_frame_size != get_frame_size ())
1090 abort ();
1091
1092 if (num_eliminable)
1093 verify_initial_elim_offsets ();
1094 }
1095
1096 /* If we were able to eliminate the frame pointer, show that it is no
1097 longer live at the start of any basic block. If it is live by
1098 virtue of being in a pseudo, that pseudo will be marked live
1099 and hence the frame pointer will be known to be live via that
1100 pseudo. */
1101
1102 if (! frame_pointer_needed)
1103 for (i = 0; i < n_basic_blocks; i++)
1104 CLEAR_REGNO_REG_SET (BASIC_BLOCK (i)->global_live_at_start,
1105 HARD_FRAME_POINTER_REGNUM);
1106
1107 /* Come here (with failure set nonzero) if we can't get enough spill regs
1108 and we decide not to abort about it. */
1109 failed:
1110
1111 CLEAR_REG_SET (&spilled_pseudos);
1112 reload_in_progress = 0;
1113
1114 /* Now eliminate all pseudo regs by modifying them into
1115 their equivalent memory references.
1116 The REG-rtx's for the pseudos are modified in place,
1117 so all insns that used to refer to them now refer to memory.
1118
1119 For a reg that has a reg_equiv_address, all those insns
1120 were changed by reloading so that no insns refer to it any longer;
1121 but the DECL_RTL of a variable decl may refer to it,
1122 and if so this causes the debugging info to mention the variable. */
1123
1124 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1125 {
1126 rtx addr = 0;
1127 int in_struct = 0;
1128 int is_scalar = 0;
1129 int is_readonly = 0;
1130
1131 if (reg_equiv_memory_loc[i])
1132 {
1133 in_struct = MEM_IN_STRUCT_P (reg_equiv_memory_loc[i]);
1134 is_scalar = MEM_SCALAR_P (reg_equiv_memory_loc[i]);
1135 is_readonly = RTX_UNCHANGING_P (reg_equiv_memory_loc[i]);
1136 }
1137
1138 if (reg_equiv_mem[i])
1139 addr = XEXP (reg_equiv_mem[i], 0);
1140
1141 if (reg_equiv_address[i])
1142 addr = reg_equiv_address[i];
1143
1144 if (addr)
1145 {
1146 if (reg_renumber[i] < 0)
1147 {
1148 rtx reg = regno_reg_rtx[i];
1149 PUT_CODE (reg, MEM);
1150 XEXP (reg, 0) = addr;
1151 REG_USERVAR_P (reg) = 0;
1152 RTX_UNCHANGING_P (reg) = is_readonly;
1153 MEM_IN_STRUCT_P (reg) = in_struct;
1154 MEM_SCALAR_P (reg) = is_scalar;
1155 /* We have no alias information about this newly created
1156 MEM. */
1157 set_mem_alias_set (reg, 0);
1158 }
1159 else if (reg_equiv_mem[i])
1160 XEXP (reg_equiv_mem[i], 0) = addr;
1161 }
1162 }
1163
1164 /* We must set reload_completed now since the cleanup_subreg_operands call
1165 below will re-recognize each insn and reload may have generated insns
1166 which are only valid during and after reload. */
1167 reload_completed = 1;
1168
1169 /* Make a pass over all the insns and delete all USEs which we inserted
1170 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1171 notes. Delete all CLOBBER insns that don't refer to the return value
1172 and simplify (subreg (reg)) operands. Also remove all REG_RETVAL and
1173 REG_LIBCALL notes since they are no longer useful or accurate. Strip
1174 and regenerate REG_INC notes that may have been moved around. */
1175
1176 for (insn = first; insn; insn = NEXT_INSN (insn))
1177 if (INSN_P (insn))
1178 {
1179 rtx *pnote;
1180
1181 if (GET_CODE (insn) == CALL_INSN)
1182 replace_pseudos_in_call_usage (& CALL_INSN_FUNCTION_USAGE (insn),
1183 VOIDmode,
1184 CALL_INSN_FUNCTION_USAGE (insn));
1185
1186 if ((GET_CODE (PATTERN (insn)) == USE
1187 && find_reg_note (insn, REG_EQUAL, NULL_RTX))
1188 || (GET_CODE (PATTERN (insn)) == CLOBBER
1189 && (GET_CODE (XEXP (PATTERN (insn), 0)) != REG
1190 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1191 {
1192 PUT_CODE (insn, NOTE);
1193 NOTE_SOURCE_FILE (insn) = 0;
1194 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1195 continue;
1196 }
1197
1198 pnote = &REG_NOTES (insn);
1199 while (*pnote != 0)
1200 {
1201 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1202 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1203 || REG_NOTE_KIND (*pnote) == REG_INC
1204 || REG_NOTE_KIND (*pnote) == REG_RETVAL
1205 || REG_NOTE_KIND (*pnote) == REG_LIBCALL)
1206 *pnote = XEXP (*pnote, 1);
1207 else
1208 pnote = &XEXP (*pnote, 1);
1209 }
1210
1211 #ifdef AUTO_INC_DEC
1212 add_auto_inc_notes (insn, PATTERN (insn));
1213 #endif
1214
1215 /* And simplify (subreg (reg)) if it appears as an operand. */
1216 cleanup_subreg_operands (insn);
1217 }
1218
1219 /* If we are doing stack checking, give a warning if this function's
1220 frame size is larger than we expect. */
1221 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1222 {
1223 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1224 static int verbose_warned = 0;
1225
1226 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1227 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
1228 size += UNITS_PER_WORD;
1229
1230 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1231 {
1232 warning ("frame size too large for reliable stack checking");
1233 if (! verbose_warned)
1234 {
1235 warning ("try reducing the number of local variables");
1236 verbose_warned = 1;
1237 }
1238 }
1239 }
1240
1241 /* Indicate that we no longer have known memory locations or constants. */
1242 if (reg_equiv_constant)
1243 free (reg_equiv_constant);
1244 reg_equiv_constant = 0;
1245 if (reg_equiv_memory_loc)
1246 free (reg_equiv_memory_loc);
1247 reg_equiv_memory_loc = 0;
1248
1249 if (real_known_ptr)
1250 free (real_known_ptr);
1251 if (real_at_ptr)
1252 free (real_at_ptr);
1253
1254 free (reg_equiv_mem);
1255 free (reg_equiv_init);
1256 free (reg_equiv_address);
1257 free (reg_max_ref_width);
1258 free (reg_old_renumber);
1259 free (pseudo_previous_regs);
1260 free (pseudo_forbidden_regs);
1261
1262 CLEAR_HARD_REG_SET (used_spill_regs);
1263 for (i = 0; i < n_spills; i++)
1264 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1265
1266 /* Free all the insn_chain structures at once. */
1267 obstack_free (&reload_obstack, reload_startobj);
1268 unused_insn_chains = 0;
1269 compute_bb_for_insn (get_max_uid ());
1270 fixup_abnormal_edges ();
1271
1272 return failure;
1273 }
1274
1275 /* Yet another special case. Unfortunately, reg-stack forces people to
1276 write incorrect clobbers in asm statements. These clobbers must not
1277 cause the register to appear in bad_spill_regs, otherwise we'll call
1278 fatal_insn later. We clear the corresponding regnos in the live
1279 register sets to avoid this.
1280 The whole thing is rather sick, I'm afraid. */
1281
1282 static void
1283 maybe_fix_stack_asms ()
1284 {
1285 #ifdef STACK_REGS
1286 const char *constraints[MAX_RECOG_OPERANDS];
1287 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1288 struct insn_chain *chain;
1289
1290 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1291 {
1292 int i, noperands;
1293 HARD_REG_SET clobbered, allowed;
1294 rtx pat;
1295
1296 if (! INSN_P (chain->insn)
1297 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1298 continue;
1299 pat = PATTERN (chain->insn);
1300 if (GET_CODE (pat) != PARALLEL)
1301 continue;
1302
1303 CLEAR_HARD_REG_SET (clobbered);
1304 CLEAR_HARD_REG_SET (allowed);
1305
1306 /* First, make a mask of all stack regs that are clobbered. */
1307 for (i = 0; i < XVECLEN (pat, 0); i++)
1308 {
1309 rtx t = XVECEXP (pat, 0, i);
1310 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1311 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1312 }
1313
1314 /* Get the operand values and constraints out of the insn. */
1315 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1316 constraints, operand_mode);
1317
1318 /* For every operand, see what registers are allowed. */
1319 for (i = 0; i < noperands; i++)
1320 {
1321 const char *p = constraints[i];
1322 /* For every alternative, we compute the class of registers allowed
1323 for reloading in CLS, and merge its contents into the reg set
1324 ALLOWED. */
1325 int cls = (int) NO_REGS;
1326
1327 for (;;)
1328 {
1329 char c = *p++;
1330
1331 if (c == '\0' || c == ',' || c == '#')
1332 {
1333 /* End of one alternative - mark the regs in the current
1334 class, and reset the class. */
1335 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1336 cls = NO_REGS;
1337 if (c == '#')
1338 do {
1339 c = *p++;
1340 } while (c != '\0' && c != ',');
1341 if (c == '\0')
1342 break;
1343 continue;
1344 }
1345
1346 switch (c)
1347 {
1348 case '=': case '+': case '*': case '%': case '?': case '!':
1349 case '0': case '1': case '2': case '3': case '4': case 'm':
1350 case '<': case '>': case 'V': case 'o': case '&': case 'E':
1351 case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1352 case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1353 case 'P':
1354 break;
1355
1356 case 'p':
1357 cls = (int) reg_class_subunion[cls][(int) BASE_REG_CLASS];
1358 break;
1359
1360 case 'g':
1361 case 'r':
1362 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1363 break;
1364
1365 default:
1366 cls = (int) reg_class_subunion[cls][(int) REG_CLASS_FROM_LETTER (c)];
1367
1368 }
1369 }
1370 }
1371 /* Those of the registers which are clobbered, but allowed by the
1372 constraints, must be usable as reload registers. So clear them
1373 out of the life information. */
1374 AND_HARD_REG_SET (allowed, clobbered);
1375 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1376 if (TEST_HARD_REG_BIT (allowed, i))
1377 {
1378 CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1379 CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1380 }
1381 }
1382
1383 #endif
1384 }
1385 \f
1386 /* Copy the global variables n_reloads and rld into the corresponding elts
1387 of CHAIN. */
1388 static void
1389 copy_reloads (chain)
1390 struct insn_chain *chain;
1391 {
1392 chain->n_reloads = n_reloads;
1393 chain->rld
1394 = (struct reload *) obstack_alloc (&reload_obstack,
1395 n_reloads * sizeof (struct reload));
1396 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1397 reload_insn_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
1398 }
1399
1400 /* Walk the chain of insns, and determine for each whether it needs reloads
1401 and/or eliminations. Build the corresponding insns_need_reload list, and
1402 set something_needs_elimination as appropriate. */
1403 static void
1404 calculate_needs_all_insns (global)
1405 int global;
1406 {
1407 struct insn_chain **pprev_reload = &insns_need_reload;
1408 struct insn_chain *chain, *next = 0;
1409
1410 something_needs_elimination = 0;
1411
1412 reload_insn_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
1413 for (chain = reload_insn_chain; chain != 0; chain = next)
1414 {
1415 rtx insn = chain->insn;
1416
1417 next = chain->next;
1418
1419 /* Clear out the shortcuts. */
1420 chain->n_reloads = 0;
1421 chain->need_elim = 0;
1422 chain->need_reload = 0;
1423 chain->need_operand_change = 0;
1424
1425 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1426 include REG_LABEL), we need to see what effects this has on the
1427 known offsets at labels. */
1428
1429 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1430 || (INSN_P (insn) && REG_NOTES (insn) != 0))
1431 set_label_offsets (insn, insn, 0);
1432
1433 if (INSN_P (insn))
1434 {
1435 rtx old_body = PATTERN (insn);
1436 int old_code = INSN_CODE (insn);
1437 rtx old_notes = REG_NOTES (insn);
1438 int did_elimination = 0;
1439 int operands_changed = 0;
1440 rtx set = single_set (insn);
1441
1442 /* Skip insns that only set an equivalence. */
1443 if (set && GET_CODE (SET_DEST (set)) == REG
1444 && reg_renumber[REGNO (SET_DEST (set))] < 0
1445 && reg_equiv_constant[REGNO (SET_DEST (set))])
1446 continue;
1447
1448 /* If needed, eliminate any eliminable registers. */
1449 if (num_eliminable || num_eliminable_invariants)
1450 did_elimination = eliminate_regs_in_insn (insn, 0);
1451
1452 /* Analyze the instruction. */
1453 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1454 global, spill_reg_order);
1455
1456 /* If a no-op set needs more than one reload, this is likely
1457 to be something that needs input address reloads. We
1458 can't get rid of this cleanly later, and it is of no use
1459 anyway, so discard it now.
1460 We only do this when expensive_optimizations is enabled,
1461 since this complements reload inheritance / output
1462 reload deletion, and it can make debugging harder. */
1463 if (flag_expensive_optimizations && n_reloads > 1)
1464 {
1465 rtx set = single_set (insn);
1466 if (set
1467 && SET_SRC (set) == SET_DEST (set)
1468 && GET_CODE (SET_SRC (set)) == REG
1469 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1470 {
1471 PUT_CODE (insn, NOTE);
1472 NOTE_SOURCE_FILE (insn) = 0;
1473 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1474 /* Delete it from the reload chain.  */
1475 if (chain->prev)
1476 chain->prev->next = next;
1477 else
1478 reload_insn_chain = next;
1479 if (next)
1480 next->prev = chain->prev;
1481 chain->next = unused_insn_chains;
1482 unused_insn_chains = chain;
1483 continue;
1484 }
1485 }
1486 if (num_eliminable)
1487 update_eliminable_offsets ();
1488
1489 /* Remember for later shortcuts which insns had any reloads or
1490 register eliminations. */
1491 chain->need_elim = did_elimination;
1492 chain->need_reload = n_reloads > 0;
1493 chain->need_operand_change = operands_changed;
1494
1495 /* Discard any register replacements done. */
1496 if (did_elimination)
1497 {
1498 obstack_free (&reload_obstack, reload_insn_firstobj);
1499 PATTERN (insn) = old_body;
1500 INSN_CODE (insn) = old_code;
1501 REG_NOTES (insn) = old_notes;
1502 something_needs_elimination = 1;
1503 }
1504
1505 something_needs_operands_changed |= operands_changed;
1506
1507 if (n_reloads != 0)
1508 {
1509 copy_reloads (chain);
1510 *pprev_reload = chain;
1511 pprev_reload = &chain->next_need_reload;
1512 }
1513 }
1514 }
1515 *pprev_reload = 0;
1516 }
1517 \f
1518 /* Comparison function for qsort to decide which of two reloads
1519 should be handled first. *P1 and *P2 are the reload numbers. */
1520
1521 static int
1522 reload_reg_class_lower (r1p, r2p)
1523 const PTR r1p;
1524 const PTR r2p;
1525 {
1526 register int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1527 register int t;
1528
1529 /* Consider required reloads before optional ones. */
1530 t = rld[r1].optional - rld[r2].optional;
1531 if (t != 0)
1532 return t;
1533
1534 /* Count all solitary classes before non-solitary ones. */
1535 t = ((reg_class_size[(int) rld[r2].class] == 1)
1536 - (reg_class_size[(int) rld[r1].class] == 1));
1537 if (t != 0)
1538 return t;
1539
1540 /* Aside from solitaires, consider all multi-reg groups first. */
1541 t = rld[r2].nregs - rld[r1].nregs;
1542 if (t != 0)
1543 return t;
1544
1545 /* Consider reloads in order of increasing reg-class number. */
1546 t = (int) rld[r1].class - (int) rld[r2].class;
1547 if (t != 0)
1548 return t;
1549
1550 /* If reloads are equally urgent, sort by reload number,
1551 so that the results of qsort leave nothing to chance. */
1552 return r1 - r2;
1553 }
1554 \f
1555 /* The cost of spilling each hard reg. */
1556 static int spill_cost[FIRST_PSEUDO_REGISTER];
1557
1558 /* When spilling multiple hard registers, we use SPILL_COST for the first
1559 spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
1560 is recorded only against the first hard reg of a multi-reg pseudo. */
1561 static int spill_add_cost[FIRST_PSEUDO_REGISTER];
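
/* For example (the frequency and register numbers are hypothetical): if a
   two-word pseudo with frequency 5 is allocated to hard regs 6 and 7,
   count_pseudo below adds 5 to both spill_cost[6] and spill_cost[7], but
   adds 5 only to spill_add_cost[6].  */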
1562
1563 /* Update the spill cost arrays, considering that pseudo REG is live. */
1564
1565 static void
1566 count_pseudo (reg)
1567 int reg;
1568 {
1569 int freq = REG_FREQ (reg);
1570 int r = reg_renumber[reg];
1571 int nregs;
1572
1573 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1574 || REGNO_REG_SET_P (&spilled_pseudos, reg))
1575 return;
1576
1577 SET_REGNO_REG_SET (&pseudos_counted, reg);
1578
1579 if (r < 0)
1580 abort ();
1581
1582 spill_add_cost[r] += freq;
1583
1584 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
1585 while (nregs-- > 0)
1586 spill_cost[r + nregs] += freq;
1587 }
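/* Worked example (illustrative only, not part of the original source):
   suppose a hypothetical pseudo with REG_FREQ 5 lives in hard regs 3 and 4
   (a two-register mode).  count_pseudo adds 5 to spill_add_cost[3] and 5
   to each of spill_cost[3] and spill_cost[4].  When find_reg later
   evaluates a two-register reload starting at reg 3, its cost is
   spill_cost[3] + spill_add_cost[4] == 5, i.e. evicting both halves of
   the same pseudo is charged its frequency only once.  */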
1588
1589 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1590 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1591
1592 static void
1593 order_regs_for_reload (chain)
1594 struct insn_chain *chain;
1595 {
1596 int i;
1597 HARD_REG_SET used_by_pseudos;
1598 HARD_REG_SET used_by_pseudos2;
1599
1600 COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1601
1602 memset (spill_cost, 0, sizeof spill_cost);
1603 memset (spill_add_cost, 0, sizeof spill_add_cost);
1604
1605 /* Count number of uses of each hard reg by pseudo regs allocated to it
1606 and then order them by decreasing use. First exclude hard registers
1607 that are live in or across this insn. */
1608
1609 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1610 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1611 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1612 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1613
1614 /* Now find out which pseudos are allocated to these hard regs, and update
1615 the spill cost arrays accordingly. */
1616 CLEAR_REG_SET (&pseudos_counted);
1617
1618 EXECUTE_IF_SET_IN_REG_SET
1619 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i,
1620 {
1621 count_pseudo (i);
1622 });
1623 EXECUTE_IF_SET_IN_REG_SET
1624 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i,
1625 {
1626 count_pseudo (i);
1627 });
1628 CLEAR_REG_SET (&pseudos_counted);
1629 }
1630 \f
1631 /* Vector of reload-numbers showing the order in which the reloads should
1632 be processed. */
1633 static short reload_order[MAX_RELOADS];
1634
1635 /* This is used to keep track of the spill regs used in one insn. */
1636 static HARD_REG_SET used_spill_regs_local;
1637
1638 /* We decided to spill hard register SPILLED, which has a size of
1639 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1640 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1641 update SPILL_COST/SPILL_ADD_COST. */
1642
1643 static void
1644 count_spilled_pseudo (spilled, spilled_nregs, reg)
1645 int spilled, spilled_nregs, reg;
1646 {
1647 int r = reg_renumber[reg];
1648 int nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
1649
1650 if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1651 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1652 return;
1653
1654 SET_REGNO_REG_SET (&spilled_pseudos, reg);
1655
1656 spill_add_cost[r] -= REG_FREQ (reg);
1657 while (nregs-- > 0)
1658 spill_cost[r + nregs] -= REG_FREQ (reg);
1659 }
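/* Illustrative sketch (not part of the original source): the test above
   treats hard-reg ranges as half-open intervals.  If regs 3 and 4 are
   being spilled (SPILLED == 3, SPILLED_NREGS == 2), a hypothetical pseudo
   occupying regs 5-6 satisfies SPILLED + SPILLED_NREGS <= R (3 + 2 <= 5)
   and is left alone, while one occupying regs 2-3 overlaps, is added to
   spilled_pseudos, and has its frequency subtracted back out of the cost
   arrays.  */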
1660
1661 /* Find reload register to use for reload number ORDER. */
1662
1663 static int
1664 find_reg (chain, order)
1665 struct insn_chain *chain;
1666 int order;
1667 {
1668 int rnum = reload_order[order];
1669 struct reload *rl = rld + rnum;
1670 int best_cost = INT_MAX;
1671 int best_reg = -1;
1672 unsigned int i, j;
1673 int k;
1674 HARD_REG_SET not_usable;
1675 HARD_REG_SET used_by_other_reload;
1676
1677 COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1678 IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1679 IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->class]);
1680
1681 CLEAR_HARD_REG_SET (used_by_other_reload);
1682 for (k = 0; k < order; k++)
1683 {
1684 int other = reload_order[k];
1685
1686 if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1687 for (j = 0; j < rld[other].nregs; j++)
1688 SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1689 }
1690
1691 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1692 {
1693 unsigned int regno = i;
1694
1695 if (! TEST_HARD_REG_BIT (not_usable, regno)
1696 && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1697 && HARD_REGNO_MODE_OK (regno, rl->mode))
1698 {
1699 int this_cost = spill_cost[regno];
1700 int ok = 1;
1701 unsigned int this_nregs = HARD_REGNO_NREGS (regno, rl->mode);
1702
1703 for (j = 1; j < this_nregs; j++)
1704 {
1705 this_cost += spill_add_cost[regno + j];
1706 if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1707 || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1708 ok = 0;
1709 }
1710 if (! ok)
1711 continue;
1712 if (rl->in && GET_CODE (rl->in) == REG && REGNO (rl->in) == regno)
1713 this_cost--;
1714 if (rl->out && GET_CODE (rl->out) == REG && REGNO (rl->out) == regno)
1715 this_cost--;
1716 if (this_cost < best_cost
1717 /* Among registers with equal cost, prefer caller-saved ones, or
1718 use REG_ALLOC_ORDER if it is defined. */
1719 || (this_cost == best_cost
1720 #ifdef REG_ALLOC_ORDER
1721 && (inv_reg_alloc_order[regno]
1722 < inv_reg_alloc_order[best_reg])
1723 #else
1724 && call_used_regs[regno]
1725 && ! call_used_regs[best_reg]
1726 #endif
1727 ))
1728 {
1729 best_reg = regno;
1730 best_cost = this_cost;
1731 }
1732 }
1733 }
1734 if (best_reg == -1)
1735 return 0;
1736
1737 if (rtl_dump_file)
1738 fprintf (rtl_dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1739
1740 rl->nregs = HARD_REGNO_NREGS (best_reg, rl->mode);
1741 rl->regno = best_reg;
1742
1743 EXECUTE_IF_SET_IN_REG_SET
1744 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j,
1745 {
1746 count_spilled_pseudo (best_reg, rl->nregs, j);
1747 });
1748
1749 EXECUTE_IF_SET_IN_REG_SET
1750 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j,
1751 {
1752 count_spilled_pseudo (best_reg, rl->nregs, j);
1753 });
1754
1755 for (i = 0; i < rl->nregs; i++)
1756 {
1757 if (spill_cost[best_reg + i] != 0
1758 || spill_add_cost[best_reg + i] != 0)
1759 abort ();
1760 SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1761 }
1762 return 1;
1763 }
1764
1765 /* Find more reload regs to satisfy the remaining need of an insn, which
1766 is given by CHAIN.
1767 Do it by ascending class number, since otherwise a reg
1768 might be spilled for a big class and might fail to count
1769 for a smaller class even though it belongs to that class. */
1770
1771 static void
1772 find_reload_regs (chain)
1773 struct insn_chain *chain;
1774 {
1775 int i;
1776
1777 /* In order to be certain of getting the registers we need,
1778 we must sort the reloads into order of increasing register class.
1779 Then our grabbing of reload registers will parallel the process
1780 that provided the reload registers. */
1781 for (i = 0; i < chain->n_reloads; i++)
1782 {
1783 /* Show whether this reload already has a hard reg. */
1784 if (chain->rld[i].reg_rtx)
1785 {
1786 int regno = REGNO (chain->rld[i].reg_rtx);
1787 chain->rld[i].regno = regno;
1788 chain->rld[i].nregs
1789 = HARD_REGNO_NREGS (regno, GET_MODE (chain->rld[i].reg_rtx));
1790 }
1791 else
1792 chain->rld[i].regno = -1;
1793 reload_order[i] = i;
1794 }
1795
1796 n_reloads = chain->n_reloads;
1797 memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
1798
1799 CLEAR_HARD_REG_SET (used_spill_regs_local);
1800
1801 if (rtl_dump_file)
1802 fprintf (rtl_dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1803
1804 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
1805
1806 /* Compute the order of preference for hard registers to spill. */
1807
1808 order_regs_for_reload (chain);
1809
1810 for (i = 0; i < n_reloads; i++)
1811 {
1812 int r = reload_order[i];
1813
1814 /* Ignore reloads that got marked inoperative. */
1815 if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
1816 && ! rld[r].optional
1817 && rld[r].regno == -1)
1818 if (! find_reg (chain, i))
1819 {
1820 spill_failure (chain->insn, rld[r].class);
1821 failure = 1;
1822 return;
1823 }
1824 }
1825
1826 COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
1827 IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
1828
1829 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1830 }
1831
1832 static void
1833 select_reload_regs ()
1834 {
1835 struct insn_chain *chain;
1836
1837 /* Try to satisfy the needs for each insn. */
1838 for (chain = insns_need_reload; chain != 0;
1839 chain = chain->next_need_reload)
1840 find_reload_regs (chain);
1841 }
1842 \f
1843 /* Delete all insns that were inserted by emit_caller_save_insns during
1844 this iteration. */
1845 static void
1846 delete_caller_save_insns ()
1847 {
1848 struct insn_chain *c = reload_insn_chain;
1849
1850 while (c != 0)
1851 {
1852 while (c != 0 && c->is_caller_save_insn)
1853 {
1854 struct insn_chain *next = c->next;
1855 rtx insn = c->insn;
1856
1857 if (insn == BLOCK_HEAD (c->block))
1858 BLOCK_HEAD (c->block) = NEXT_INSN (insn);
1859 if (insn == BLOCK_END (c->block))
1860 BLOCK_END (c->block) = PREV_INSN (insn);
1861 if (c == reload_insn_chain)
1862 reload_insn_chain = next;
1863
1864 if (NEXT_INSN (insn) != 0)
1865 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1866 if (PREV_INSN (insn) != 0)
1867 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1868
1869 if (next)
1870 next->prev = c->prev;
1871 if (c->prev)
1872 c->prev->next = next;
1873 c->next = unused_insn_chains;
1874 unused_insn_chains = c;
1875 c = next;
1876 }
1877 if (c != 0)
1878 c = c->next;
1879 }
1880 }
1881 \f
1882 /* Handle the failure to find a register to spill.
1883 INSN should be one of the insns which needed this particular spill reg. */
1884
1885 static void
1886 spill_failure (insn, class)
1887 rtx insn;
1888 enum reg_class class;
1889 {
1890 static const char *const reg_class_names[] = REG_CLASS_NAMES;
1891 if (asm_noperands (PATTERN (insn)) >= 0)
1892 error_for_asm (insn, "Can't find a register in class `%s' while reloading `asm'.",
1893 reg_class_names[class]);
1894 else
1895 {
1896 error ("Unable to find a register to spill in class `%s'.",
1897 reg_class_names[class]);
1898 fatal_insn ("This is the insn:", insn);
1899 }
1900 }
1901 \f
1902 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
1903 data that is dead in INSN. */
1904
1905 static void
1906 delete_dead_insn (insn)
1907 rtx insn;
1908 {
1909 rtx prev = prev_real_insn (insn);
1910 rtx prev_dest;
1911
1912 /* If the previous insn sets a register that dies in our insn, delete it
1913 too. */
1914 if (prev && GET_CODE (PATTERN (prev)) == SET
1915 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
1916 && reg_mentioned_p (prev_dest, PATTERN (insn))
1917 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
1918 && ! side_effects_p (SET_SRC (PATTERN (prev))))
1919 delete_dead_insn (prev);
1920
1921 PUT_CODE (insn, NOTE);
1922 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1923 NOTE_SOURCE_FILE (insn) = 0;
1924 }
1925
1926 /* Modify the home of pseudo-reg I.
1927 The new home is present in reg_renumber[I].
1928
1929 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
1930 or it may be -1, meaning there is none or it is not relevant.
1931 This is used so that all pseudos spilled from a given hard reg
1932 can share one stack slot. */
1933
1934 static void
1935 alter_reg (i, from_reg)
1936 register int i;
1937 int from_reg;
1938 {
1939 /* When outputting an inline function, this can happen
1940 for a reg that isn't actually used. */
1941 if (regno_reg_rtx[i] == 0)
1942 return;
1943
1944 /* If the reg got changed to a MEM at rtl-generation time,
1945 ignore it. */
1946 if (GET_CODE (regno_reg_rtx[i]) != REG)
1947 return;
1948
1949 /* Modify the reg-rtx to contain the new hard reg
1950 number or else to contain its pseudo reg number. */
1951 REGNO (regno_reg_rtx[i])
1952 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
1953
1954 /* If we have a pseudo that is needed but has no hard reg or equivalent,
1955 allocate a stack slot for it. */
1956
1957 if (reg_renumber[i] < 0
1958 && REG_N_REFS (i) > 0
1959 && reg_equiv_constant[i] == 0
1960 && reg_equiv_memory_loc[i] == 0)
1961 {
1962 register rtx x;
1963 unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
1964 unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
1965 int adjust = 0;
1966
1967 /* Each pseudo reg has an inherent size which comes from its own mode,
1968 and a total size which provides room for paradoxical subregs
1969 which refer to the pseudo reg in wider modes.
1970
1971 We can use a slot already allocated if it provides both
1972 enough inherent space and enough total space.
1973 Otherwise, we allocate a new slot, making sure that it has no less
1974 inherent space, and no less total space, than the previous slot. */
1975 if (from_reg == -1)
1976 {
1977 /* No known place to spill from => no slot to reuse. */
1978 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
1979 inherent_size == total_size ? 0 : -1);
1980 if (BYTES_BIG_ENDIAN)
1981 /* Cancel the big-endian correction done in assign_stack_local.
1982 Get the address of the beginning of the slot.
1983 This is so we can do a big-endian correction unconditionally
1984 below. */
1985 adjust = inherent_size - total_size;
1986
1987 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
1988
1989 /* Nothing can alias this slot except this pseudo. */
1990 set_mem_alias_set (x, new_alias_set ());
1991 }
1992
1993 /* Reuse a stack slot if possible. */
1994 else if (spill_stack_slot[from_reg] != 0
1995 && spill_stack_slot_width[from_reg] >= total_size
1996 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
1997 >= inherent_size))
1998 x = spill_stack_slot[from_reg];
1999
2000 /* Allocate a bigger slot. */
2001 else
2002 {
2003 /* Compute maximum size needed, both for inherent size
2004 and for total size. */
2005 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2006 rtx stack_slot;
2007
2008 if (spill_stack_slot[from_reg])
2009 {
2010 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2011 > inherent_size)
2012 mode = GET_MODE (spill_stack_slot[from_reg]);
2013 if (spill_stack_slot_width[from_reg] > total_size)
2014 total_size = spill_stack_slot_width[from_reg];
2015 }
2016
2017 /* Make a slot with that size. */
2018 x = assign_stack_local (mode, total_size,
2019 inherent_size == total_size ? 0 : -1);
2020 stack_slot = x;
2021
2022 /* All pseudos mapped to this slot can alias each other. */
2023 if (spill_stack_slot[from_reg])
2024 set_mem_alias_set (x, MEM_ALIAS_SET (spill_stack_slot[from_reg]));
2025 else
2026 set_mem_alias_set (x, new_alias_set ());
2027
2028 if (BYTES_BIG_ENDIAN)
2029 {
2030 /* Cancel the big-endian correction done in assign_stack_local.
2031 Get the address of the beginning of the slot.
2032 This is so we can do a big-endian correction unconditionally
2033 below. */
2034 adjust = GET_MODE_SIZE (mode) - total_size;
2035 if (adjust)
2036 stack_slot = gen_rtx_MEM (mode_for_size (total_size
2037 * BITS_PER_UNIT,
2038 MODE_INT, 1),
2039 plus_constant (XEXP (x, 0), adjust));
2040 }
2041
2042 spill_stack_slot[from_reg] = stack_slot;
2043 spill_stack_slot_width[from_reg] = total_size;
2044 }
2045
2046 /* On a big endian machine, the "address" of the slot
2047 is the address of the low part that fits its inherent mode. */
2048 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2049 adjust += (total_size - inherent_size);
2050
2051 /* If we have any adjustment to make, or if the stack slot is the
2052 wrong mode, make a new stack slot. */
2053 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2054 x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2055
2056 /* Save the stack slot for later. */
2057 reg_equiv_memory_loc[i] = x;
2058 }
2059 }
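/* Illustrative example of the big-endian adjustment above (a sketch, not
   part of the original source): take a hypothetical QImode pseudo whose
   widest reference is 4 bytes (inherent_size == 1, total_size == 4) and
   suppose it reuses a 4-byte SImode spill slot.  ADJUST stays 0 when the
   slot is picked, and the final correction adds
   total_size - inherent_size == 3, so reg_equiv_memory_loc ends up
   addressing the last byte of the slot -- the low part that fits the
   pseudo's inherent mode.  */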
2060
2061 /* Mark the slots in regs_ever_live for the hard regs
2062 used by pseudo-reg number REGNO. */
2063
2064 void
2065 mark_home_live (regno)
2066 int regno;
2067 {
2068 register int i, lim;
2069
2070 i = reg_renumber[regno];
2071 if (i < 0)
2072 return;
2073 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2074 while (i < lim)
2075 regs_ever_live[i++] = 1;
2076 }
2077 \f
2078 /* This function handles the tracking of elimination offsets around branches.
2079
2080 X is a piece of RTL being scanned.
2081
2082 INSN is the insn that it came from, if any.
2083
2084 INITIAL_P is non-zero if we are to set the offset to be the initial
2085 offset and zero if we are setting the offset of the label to be the
2086 current offset. */
2087
2088 static void
2089 set_label_offsets (x, insn, initial_p)
2090 rtx x;
2091 rtx insn;
2092 int initial_p;
2093 {
2094 enum rtx_code code = GET_CODE (x);
2095 rtx tem;
2096 unsigned int i;
2097 struct elim_table *p;
2098
2099 switch (code)
2100 {
2101 case LABEL_REF:
2102 if (LABEL_REF_NONLOCAL_P (x))
2103 return;
2104
2105 x = XEXP (x, 0);
2106
2107 /* ... fall through ... */
2108
2109 case CODE_LABEL:
2110 /* If we know nothing about this label, set the desired offsets. Note
2111 that this sets the offset at a label to be the offset before a label
2112 if we don't know anything about the label. This is not correct for
2113 the label after a BARRIER, but is the best guess we can make. If
2114 we guessed wrong, we will suppress an elimination that might have
2115 been possible had we been able to guess correctly. */
2116
2117 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2118 {
2119 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2120 offsets_at[CODE_LABEL_NUMBER (x)][i]
2121 = (initial_p ? reg_eliminate[i].initial_offset
2122 : reg_eliminate[i].offset);
2123 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2124 }
2125
2126 /* Otherwise, if this is the definition of a label and it is
2127 preceded by a BARRIER, set our offsets to the known offset of
2128 that label. */
2129
2130 else if (x == insn
2131 && (tem = prev_nonnote_insn (insn)) != 0
2132 && GET_CODE (tem) == BARRIER)
2133 set_offsets_for_label (insn);
2134 else
2135 /* If neither of the above cases is true, compare each offset
2136 with those previously recorded and suppress any eliminations
2137 where the offsets disagree. */
2138
2139 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2140 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2141 != (initial_p ? reg_eliminate[i].initial_offset
2142 : reg_eliminate[i].offset))
2143 reg_eliminate[i].can_eliminate = 0;
2144
2145 return;
2146
2147 case JUMP_INSN:
2148 set_label_offsets (PATTERN (insn), insn, initial_p);
2149
2150 /* ... fall through ... */
2151
2152 case INSN:
2153 case CALL_INSN:
2154 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2155 and hence must have all eliminations at their initial offsets. */
2156 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2157 if (REG_NOTE_KIND (tem) == REG_LABEL)
2158 set_label_offsets (XEXP (tem, 0), insn, 1);
2159 return;
2160
2161 case PARALLEL:
2162 case ADDR_VEC:
2163 case ADDR_DIFF_VEC:
2164 /* Each of the labels in the parallel or address vector must be
2165 at their initial offsets. We want the first field for PARALLEL
2166 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2167
2168 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2169 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2170 insn, initial_p);
2171 return;
2172
2173 case SET:
2174 /* We only care about setting PC. If the source is not RETURN,
2175 IF_THEN_ELSE, or a label, disable any eliminations not at
2176 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2177 isn't one of those possibilities. For branches to a label,
2178 call ourselves recursively.
2179
2180 Note that this can disable elimination unnecessarily when we have
2181 a non-local goto since it will look like a non-constant jump to
2182 someplace in the current function. This isn't a significant
2183 problem since such jumps will normally occur when all elimination
2184 pairs are back to their initial offsets. */
2185
2186 if (SET_DEST (x) != pc_rtx)
2187 return;
2188
2189 switch (GET_CODE (SET_SRC (x)))
2190 {
2191 case PC:
2192 case RETURN:
2193 return;
2194
2195 case LABEL_REF:
2196 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2197 return;
2198
2199 case IF_THEN_ELSE:
2200 tem = XEXP (SET_SRC (x), 1);
2201 if (GET_CODE (tem) == LABEL_REF)
2202 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2203 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2204 break;
2205
2206 tem = XEXP (SET_SRC (x), 2);
2207 if (GET_CODE (tem) == LABEL_REF)
2208 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2209 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2210 break;
2211 return;
2212
2213 default:
2214 break;
2215 }
2216
2217 /* If we reach here, all eliminations must be at their initial
2218 offset because we are doing a jump to a variable address. */
2219 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2220 if (p->offset != p->initial_offset)
2221 p->can_eliminate = 0;
2222 break;
2223
2224 default:
2225 break;
2226 }
2227 }
2228 \f
2229 /* Scan X and replace any eliminable registers (such as fp) with a
2230 replacement (such as sp), plus an offset.
2231
2232 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2233 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2234 MEM, we are allowed to replace a sum of a register and the constant zero
2235 with the register, which we cannot do outside a MEM. In addition, we need
2236 to record the fact that a register is referenced outside a MEM.
2237
2238 If INSN is an insn, it is the insn containing X. If we replace a REG
2239 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2240 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2241 the REG is being modified.
2242
2243 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2244 That's used when we eliminate in expressions stored in notes.
2245 This means, do not set ref_outside_mem even if the reference
2246 is outside of MEMs.
2247
2248 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2249 replacements done assuming all offsets are at their initial values. If
2250 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2251 encounter, return the actual location so that find_reloads will do
2252 the proper thing. */
2253
2254 rtx
2255 eliminate_regs (x, mem_mode, insn)
2256 rtx x;
2257 enum machine_mode mem_mode;
2258 rtx insn;
2259 {
2260 enum rtx_code code = GET_CODE (x);
2261 struct elim_table *ep;
2262 int regno;
2263 rtx new;
2264 int i, j;
2265 const char *fmt;
2266 int copied = 0;
2267
2268 if (! current_function_decl)
2269 return x;
2270
2271 switch (code)
2272 {
2273 case CONST_INT:
2274 case CONST_DOUBLE:
2275 case CONST:
2276 case SYMBOL_REF:
2277 case CODE_LABEL:
2278 case PC:
2279 case CC0:
2280 case ASM_INPUT:
2281 case ADDR_VEC:
2282 case ADDR_DIFF_VEC:
2283 case RETURN:
2284 return x;
2285
2286 case ADDRESSOF:
2287 /* This is only for the benefit of the debugging backends, which call
2288 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2289 removed after CSE. */
2290 new = eliminate_regs (XEXP (x, 0), 0, insn);
2291 if (GET_CODE (new) == MEM)
2292 return XEXP (new, 0);
2293 return x;
2294
2295 case REG:
2296 regno = REGNO (x);
2297
2298 /* First handle the case where we encounter a bare register that
2299 is eliminable. Replace it with a PLUS. */
2300 if (regno < FIRST_PSEUDO_REGISTER)
2301 {
2302 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2303 ep++)
2304 if (ep->from_rtx == x && ep->can_eliminate)
2305 return plus_constant (ep->to_rtx, ep->previous_offset);
2306
2307 }
2308 else if (reg_renumber[regno] < 0 && reg_equiv_constant
2309 && reg_equiv_constant[regno]
2310 && ! CONSTANT_P (reg_equiv_constant[regno]))
2311 return eliminate_regs (copy_rtx (reg_equiv_constant[regno]),
2312 mem_mode, insn);
2313 return x;
2314
2315 /* You might think handling MINUS in a manner similar to PLUS is a
2316 good idea. It is not. It has been tried multiple times and every
2317 time the change has had to be reverted.
2318
2319 Other parts of reload know a PLUS is special (gen_reload for example)
2320 and require special code to handle a reloaded PLUS operand.
2321
2322 Also consider backends where the flags register is clobbered by a
2323 MINUS, but we can emit a PLUS that does not clobber flags (ia32,
2324 lea instruction comes to mind). If we try to reload a MINUS, we
2325 may kill the flags register that was holding a useful value.
2326
2327 So, please before trying to handle MINUS, consider reload as a
2328 whole instead of this little section as well as the backend issues. */
2329 case PLUS:
2330 /* If this is the sum of an eliminable register and a constant, rework
2331 the sum. */
2332 if (GET_CODE (XEXP (x, 0)) == REG
2333 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2334 && CONSTANT_P (XEXP (x, 1)))
2335 {
2336 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2337 ep++)
2338 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2339 {
2340 /* The only time we want to replace a PLUS with a REG (this
2341 occurs when the constant operand of the PLUS is the negative
2342 of the offset) is when we are inside a MEM. We won't want
2343 to do so at other times because that would change the
2344 structure of the insn in a way that reload can't handle.
2345 We special-case the commonest situation in
2346 eliminate_regs_in_insn, so just replace a PLUS with a
2347 PLUS here, unless inside a MEM. */
2348 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2349 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2350 return ep->to_rtx;
2351 else
2352 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2353 plus_constant (XEXP (x, 1),
2354 ep->previous_offset));
2355 }
2356
2357 /* If the register is not eliminable, we are done since the other
2358 operand is a constant. */
2359 return x;
2360 }
2361
2362 /* If this is part of an address, we want to bring any constant to the
2363 outermost PLUS. We will do this by doing register replacement in
2364 our operands and seeing if a constant shows up in one of them.
2365
2366 Note that there is no risk of modifying the structure of the insn,
2367 since we only get called for its operands, thus we are either
2368 modifying the address inside a MEM, or something like an address
2369 operand of a load-address insn. */
2370
2371 {
2372 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2373 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2374
2375 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2376 {
2377 /* If one side is a PLUS and the other side is a pseudo that
2378 didn't get a hard register but has a reg_equiv_constant,
2379 we must replace the constant here since it may no longer
2380 be in the position of any operand. */
2381 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2382 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2383 && reg_renumber[REGNO (new1)] < 0
2384 && reg_equiv_constant != 0
2385 && reg_equiv_constant[REGNO (new1)] != 0)
2386 new1 = reg_equiv_constant[REGNO (new1)];
2387 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2388 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2389 && reg_renumber[REGNO (new0)] < 0
2390 && reg_equiv_constant[REGNO (new0)] != 0)
2391 new0 = reg_equiv_constant[REGNO (new0)];
2392
2393 new = form_sum (new0, new1);
2394
2395 /* As above, if we are not inside a MEM we do not want to
2396 turn a PLUS into something else. We might try to do so here
2397 for an addition of 0 if we aren't optimizing. */
2398 if (! mem_mode && GET_CODE (new) != PLUS)
2399 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
2400 else
2401 return new;
2402 }
2403 }
2404 return x;
2405
2406 case MULT:
2407 /* If this is the product of an eliminable register and a
2408 constant, apply the distributive law and move the constant out
2409 so that we have (plus (mult ..) ..). This is needed in order
2410 to keep load-address insns valid. This case is pathological.
2411 We ignore the possibility of overflow here. */
2412 if (GET_CODE (XEXP (x, 0)) == REG
2413 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2414 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2415 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2416 ep++)
2417 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2418 {
2419 if (! mem_mode
2420 /* Refs inside notes don't count for this purpose. */
2421 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2422 || GET_CODE (insn) == INSN_LIST)))
2423 ep->ref_outside_mem = 1;
2424
2425 return
2426 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2427 ep->previous_offset * INTVAL (XEXP (x, 1)));
2428 }
2429
2430 /* ... fall through ... */
2431
2432 case CALL:
2433 case COMPARE:
2434 /* See comments before PLUS about handling MINUS. */
2435 case MINUS:
2436 case DIV: case UDIV:
2437 case MOD: case UMOD:
2438 case AND: case IOR: case XOR:
2439 case ROTATERT: case ROTATE:
2440 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2441 case NE: case EQ:
2442 case GE: case GT: case GEU: case GTU:
2443 case LE: case LT: case LEU: case LTU:
2444 {
2445 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2446 rtx new1
2447 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2448
2449 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2450 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2451 }
2452 return x;
2453
2454 case EXPR_LIST:
2455 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2456 if (XEXP (x, 0))
2457 {
2458 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2459 if (new != XEXP (x, 0))
2460 {
2461 /* If this is a REG_DEAD note, it is not valid anymore.
2462 Using the eliminated version could result in creating a
2463 REG_DEAD note for the stack or frame pointer. */
2464 if (GET_MODE (x) == REG_DEAD)
2465 return (XEXP (x, 1)
2466 ? eliminate_regs (XEXP (x, 1), mem_mode, insn)
2467 : NULL_RTX);
2468
2469 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
2470 }
2471 }
2472
2473 /* ... fall through ... */
2474
2475 case INSN_LIST:
2476 /* Now do eliminations in the rest of the chain. If this was
2477 an EXPR_LIST, this might result in allocating more memory than is
2478 strictly needed, but it simplifies the code. */
2479 if (XEXP (x, 1))
2480 {
2481 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2482 if (new != XEXP (x, 1))
2483 return gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2484 }
2485 return x;
2486
2487 case PRE_INC:
2488 case POST_INC:
2489 case PRE_DEC:
2490 case POST_DEC:
2491 case STRICT_LOW_PART:
2492 case NEG: case NOT:
2493 case SIGN_EXTEND: case ZERO_EXTEND:
2494 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2495 case FLOAT: case FIX:
2496 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2497 case ABS:
2498 case SQRT:
2499 case FFS:
2500 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2501 if (new != XEXP (x, 0))
2502 return gen_rtx_fmt_e (code, GET_MODE (x), new);
2503 return x;
2504
2505 case SUBREG:
2506 /* Similar to above processing, but preserve SUBREG_BYTE.
2507 Convert (subreg (mem)) to (mem) if not paradoxical.
2508 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2509 pseudo didn't get a hard reg, we must replace this with the
2510 eliminated version of the memory location because push_reloads
2511 may do the replacement in certain circumstances. */
2512 if (GET_CODE (SUBREG_REG (x)) == REG
2513 && (GET_MODE_SIZE (GET_MODE (x))
2514 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2515 && reg_equiv_memory_loc != 0
2516 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2517 {
2518 new = SUBREG_REG (x);
2519 }
2520 else
2521 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2522
2523 if (new != SUBREG_REG (x))
2524 {
2525 int x_size = GET_MODE_SIZE (GET_MODE (x));
2526 int new_size = GET_MODE_SIZE (GET_MODE (new));
2527
2528 if (GET_CODE (new) == MEM
2529 && ((x_size < new_size
2530 #ifdef WORD_REGISTER_OPERATIONS
2531 /* On these machines, combine can create rtl of the form
2532 (set (subreg:m1 (reg:m2 R) 0) ...)
2533 where m1 < m2, and expects something interesting to
2534 happen to the entire word. Moreover, it will use the
2535 (reg:m2 R) later, expecting all bits to be preserved.
2536 So if the number of words is the same, preserve the
2537 subreg so that push_reloads can see it. */
2538 && ! ((x_size - 1) / UNITS_PER_WORD
2539 == (new_size - 1) / UNITS_PER_WORD)
2540 #endif
2541 )
2542 || x_size == new_size)
2543 )
2544 {
2545 int offset = SUBREG_BYTE (x);
2546 enum machine_mode mode = GET_MODE (x);
2547
2548 PUT_MODE (new, mode);
2549 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2550 return new;
2551 }
2552 else
2553 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_BYTE (x));
2554 }
2555
2556 return x;
2557
2558 case MEM:
2559 /* This is only for the benefit of the debugging backends, which call
2560 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2561 removed after CSE. */
2562 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
2563 return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn);
2564
2565 /* Our only special processing is to pass the mode of the MEM to our
2566 recursive call and copy the flags. While we are here, handle this
2567 case more efficiently. */
2568 return
2569 replace_equiv_address_nv (x,
2570 eliminate_regs (XEXP (x, 0),
2571 GET_MODE (x), insn));
2572
2573 case USE:
2574 /* Handle insn_list USE that a call to a pure function may generate. */
2575 new = eliminate_regs (XEXP (x, 0), 0, insn);
2576 if (new != XEXP (x, 0))
2577 return gen_rtx_USE (GET_MODE (x), new);
2578 return x;
2579
2580 case CLOBBER:
2581 case ASM_OPERANDS:
2582 case SET:
2583 abort ();
2584
2585 default:
2586 break;
2587 }
2588
2589 /* Process each of our operands recursively. If any have changed, make a
2590 copy of the rtx. */
2591 fmt = GET_RTX_FORMAT (code);
2592 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2593 {
2594 if (*fmt == 'e')
2595 {
2596 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
2597 if (new != XEXP (x, i) && ! copied)
2598 {
2599 rtx new_x = rtx_alloc (code);
2600 memcpy (new_x, x,
2601 (sizeof (*new_x) - sizeof (new_x->fld)
2602 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
2603 x = new_x;
2604 copied = 1;
2605 }
2606 XEXP (x, i) = new;
2607 }
2608 else if (*fmt == 'E')
2609 {
2610 int copied_vec = 0;
2611 for (j = 0; j < XVECLEN (x, i); j++)
2612 {
2613 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
2614 if (new != XVECEXP (x, i, j) && ! copied_vec)
2615 {
2616 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2617 XVEC (x, i)->elem);
2618 if (! copied)
2619 {
2620 rtx new_x = rtx_alloc (code);
2621 memcpy (new_x, x,
2622 (sizeof (*new_x) - sizeof (new_x->fld)
2623 + (sizeof (new_x->fld[0])
2624 * GET_RTX_LENGTH (code))));
2625 x = new_x;
2626 copied = 1;
2627 }
2628 XVEC (x, i) = new_v;
2629 copied_vec = 1;
2630 }
2631 XVECEXP (x, i, j) = new;
2632 }
2633 }
2634 }
2635
2636 return x;
2637 }
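/* Illustrative example (a sketch, not part of the original source):
   suppose the frame pointer is being eliminated in favor of the stack
   pointer and the elimination's previous_offset is a hypothetical 16.
   Inside a MEM, eliminate_regs rewrites (plus (reg fp) (const_int 8))
   into (plus (reg sp) (const_int 24)), rewrites a bare (reg fp) into
   (plus (reg sp) (const_int 16)), and collapses
   (plus (reg fp) (const_int -16)) into just (reg sp).  */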
2638
2639 /* Scan rtx X for modifications of elimination target registers. Update
2640 the table of eliminables to reflect the changed state. MEM_MODE is
2641 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2642
2643 static void
2644 elimination_effects (x, mem_mode)
2645 rtx x;
2646 enum machine_mode mem_mode;
2647
2648 {
2649 enum rtx_code code = GET_CODE (x);
2650 struct elim_table *ep;
2651 int regno;
2652 int i, j;
2653 const char *fmt;
2654
2655 switch (code)
2656 {
2657 case CONST_INT:
2658 case CONST_DOUBLE:
2659 case CONST:
2660 case SYMBOL_REF:
2661 case CODE_LABEL:
2662 case PC:
2663 case CC0:
2664 case ASM_INPUT:
2665 case ADDR_VEC:
2666 case ADDR_DIFF_VEC:
2667 case RETURN:
2668 return;
2669
2670 case ADDRESSOF:
2671 abort ();
2672
2673 case REG:
2674 regno = REGNO (x);
2675
2676 /* First handle the case where we encounter a bare register that
2677 is eliminable; if the reference occurs outside a MEM, record that fact. */
2678 if (regno < FIRST_PSEUDO_REGISTER)
2679 {
2680 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2681 ep++)
2682 if (ep->from_rtx == x && ep->can_eliminate)
2683 {
2684 if (! mem_mode)
2685 ep->ref_outside_mem = 1;
2686 return;
2687 }
2688
2689 }
2690 else if (reg_renumber[regno] < 0 && reg_equiv_constant
2691 && reg_equiv_constant[regno]
2692 && ! CONSTANT_P (reg_equiv_constant[regno]))
2693 elimination_effects (reg_equiv_constant[regno], mem_mode);
2694 return;
2695
2696 case PRE_INC:
2697 case POST_INC:
2698 case PRE_DEC:
2699 case POST_DEC:
2700 case POST_MODIFY:
2701 case PRE_MODIFY:
2702 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2703 if (ep->to_rtx == XEXP (x, 0))
2704 {
2705 int size = GET_MODE_SIZE (mem_mode);
2706
2707 /* If more bytes than MEM_MODE are pushed, account for them. */
2708 #ifdef PUSH_ROUNDING
2709 if (ep->to_rtx == stack_pointer_rtx)
2710 size = PUSH_ROUNDING (size);
2711 #endif
2712 if (code == PRE_DEC || code == POST_DEC)
2713 ep->offset += size;
2714 else if (code == PRE_INC || code == POST_INC)
2715 ep->offset -= size;
2716 else if ((code == PRE_MODIFY || code == POST_MODIFY)
2717 && GET_CODE (XEXP (x, 1)) == PLUS
2718 && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
2719 && CONSTANT_P (XEXP (XEXP (x, 1), 1)))
2720 ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
2721 }
2722
2723 /* These two aren't unary operators. */
2724 if (code == POST_MODIFY || code == PRE_MODIFY)
2725 break;
2726
2727 /* Fall through to generic unary operation case. */
2728 case STRICT_LOW_PART:
2729 case NEG: case NOT:
2730 case SIGN_EXTEND: case ZERO_EXTEND:
2731 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2732 case FLOAT: case FIX:
2733 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2734 case ABS:
2735 case SQRT:
2736 case FFS:
2737 elimination_effects (XEXP (x, 0), mem_mode);
2738 return;
2739
2740 case SUBREG:
2741 if (GET_CODE (SUBREG_REG (x)) == REG
2742 && (GET_MODE_SIZE (GET_MODE (x))
2743 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2744 && reg_equiv_memory_loc != 0
2745 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2746 return;
2747
2748 elimination_effects (SUBREG_REG (x), mem_mode);
2749 return;
2750
2751 case USE:
2752 /* If using a register that is the source of an elimination we still
2753 think can be performed, note it cannot be performed since we don't
2754 know how this register is used. */
2755 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2756 if (ep->from_rtx == XEXP (x, 0))
2757 ep->can_eliminate = 0;
2758
2759 elimination_effects (XEXP (x, 0), mem_mode);
2760 return;
2761
2762 case CLOBBER:
2763 /* If clobbering a register that is the replacement register for an
2764 elimination we still think can be performed, note that it cannot
2765 be performed. Otherwise, we need not be concerned about it. */
2766 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2767 if (ep->to_rtx == XEXP (x, 0))
2768 ep->can_eliminate = 0;
2769
2770 elimination_effects (XEXP (x, 0), mem_mode);
2771 return;
2772
2773 case SET:
2774 /* Check for setting a register that we know about. */
2775 if (GET_CODE (SET_DEST (x)) == REG)
2776 {
2777 /* See if this is setting the replacement register for an
2778 elimination.
2779
2780 If DEST is the hard frame pointer, we do nothing because we
2781 assume that all assignments to the frame pointer are for
2782 non-local gotos and are being done at a time when they are valid
2783 and do not disturb anything else. Some machines want to
2784 eliminate a fake argument pointer (or even a fake frame pointer)
2785 with either the real frame or the stack pointer. Assignments to
2786 the hard frame pointer must not prevent this elimination. */
2787
2788 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2789 ep++)
2790 if (ep->to_rtx == SET_DEST (x)
2791 && SET_DEST (x) != hard_frame_pointer_rtx)
2792 {
2793 /* If it is being incremented, adjust the offset. Otherwise,
2794 this elimination can't be done. */
2795 rtx src = SET_SRC (x);
2796
2797 if (GET_CODE (src) == PLUS
2798 && XEXP (src, 0) == SET_DEST (x)
2799 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2800 ep->offset -= INTVAL (XEXP (src, 1));
2801 else
2802 ep->can_eliminate = 0;
2803 }
2804 }
2805
2806 elimination_effects (SET_DEST (x), 0);
2807 elimination_effects (SET_SRC (x), 0);
2808 return;
2809
2810 case MEM:
2811 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
2812 abort ();
2813
2814 /* Our only special processing is to pass the mode of the MEM to our
2815 recursive call. */
2816 elimination_effects (XEXP (x, 0), GET_MODE (x));
2817 return;
2818
2819 default:
2820 break;
2821 }
2822
2823 fmt = GET_RTX_FORMAT (code);
2824 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2825 {
2826 if (*fmt == 'e')
2827 elimination_effects (XEXP (x, i), mem_mode);
2828 else if (*fmt == 'E')
2829 for (j = 0; j < XVECLEN (x, i); j++)
2830 elimination_effects (XVECEXP (x, i, j), mem_mode);
2831 }
2832 }
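/* Illustrative note (a sketch, not part of the original source): for a
   push such as (set (mem:SI (pre_dec (reg sp))) ...), the PRE_DEC case
   above finds every elimination whose to_rtx is the stack pointer and
   increases its offset by GET_MODE_SIZE (SImode) -- rounded by
   PUSH_ROUNDING on targets that define it -- so that later references
   through the eliminated register account for the bytes the push
   consumed.  */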
2833
2834 /* Descend through rtx X and verify that no references to eliminable registers
2835 remain. If any do remain, mark the involved register as not
2836 eliminable. */
2837
2838 static void
2839 check_eliminable_occurrences (x)
2840 rtx x;
2841 {
2842 const char *fmt;
2843 int i;
2844 enum rtx_code code;
2845
2846 if (x == 0)
2847 return;
2848
2849 code = GET_CODE (x);
2850
2851 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2852 {
2853 struct elim_table *ep;
2854
2855 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2856 if (ep->from_rtx == x && ep->can_eliminate)
2857 ep->can_eliminate = 0;
2858 return;
2859 }
2860
2861 fmt = GET_RTX_FORMAT (code);
2862 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2863 {
2864 if (*fmt == 'e')
2865 check_eliminable_occurrences (XEXP (x, i));
2866 else if (*fmt == 'E')
2867 {
2868 int j;
2869 for (j = 0; j < XVECLEN (x, i); j++)
2870 check_eliminable_occurrences (XVECEXP (x, i, j));
2871 }
2872 }
2873 }
2874 \f
2875 /* Scan INSN and eliminate all eliminable registers in it.
2876
2877 If REPLACE is nonzero, do the replacement destructively. Also
2878 delete the insn as dead if it is setting an eliminable register.
2879
2880 If REPLACE is zero, do all our allocations in reload_obstack.
2881
2882 If no eliminations were done and this insn doesn't require any elimination
2883 processing (these are not identical conditions: it might be updating sp,
2884 but not referencing fp; this needs to be seen during reload_as_needed so
2885 that the offset between fp and sp can be taken into consideration), zero
2886 is returned. Otherwise, 1 is returned. */
2887
2888 static int
2889 eliminate_regs_in_insn (insn, replace)
2890 rtx insn;
2891 int replace;
2892 {
2893 int icode = recog_memoized (insn);
2894 rtx old_body = PATTERN (insn);
2895 int insn_is_asm = asm_noperands (old_body) >= 0;
2896 rtx old_set = single_set (insn);
2897 rtx new_body;
2898 int val = 0;
2899 int i, any_changes;
2900 rtx substed_operand[MAX_RECOG_OPERANDS];
2901 rtx orig_operand[MAX_RECOG_OPERANDS];
2902 struct elim_table *ep;
2903
2904 if (! insn_is_asm && icode < 0)
2905 {
2906 if (GET_CODE (PATTERN (insn)) == USE
2907 || GET_CODE (PATTERN (insn)) == CLOBBER
2908 || GET_CODE (PATTERN (insn)) == ADDR_VEC
2909 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
2910 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
2911 return 0;
2912 abort ();
2913 }
2914
2915 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
2916 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
2917 {
2918 /* Check for setting an eliminable register. */
2919 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2920 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
2921 {
2922 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2923 /* If this is setting the frame pointer register to the
2924 hardware frame pointer register and this is an elimination
2925 that will be done (tested above), this insn is really
2926 adjusting the frame pointer downward to compensate for
2927 the adjustment done before a nonlocal goto. */
2928 if (ep->from == FRAME_POINTER_REGNUM
2929 && ep->to == HARD_FRAME_POINTER_REGNUM)
2930 {
2931 rtx src = SET_SRC (old_set);
2932 int offset = 0, ok = 0;
2933 rtx prev_insn, prev_set;
2934
2935 if (src == ep->to_rtx)
2936 offset = 0, ok = 1;
2937 else if (GET_CODE (src) == PLUS
2938 && GET_CODE (XEXP (src, 0)) == CONST_INT
2939 && XEXP (src, 1) == ep->to_rtx)
2940 offset = INTVAL (XEXP (src, 0)), ok = 1;
2941 else if (GET_CODE (src) == PLUS
2942 && GET_CODE (XEXP (src, 1)) == CONST_INT
2943 && XEXP (src, 0) == ep->to_rtx)
2944 offset = INTVAL (XEXP (src, 1)), ok = 1;
2945 else if ((prev_insn = prev_nonnote_insn (insn)) != 0
2946 && (prev_set = single_set (prev_insn)) != 0
2947 && rtx_equal_p (SET_DEST (prev_set), src))
2948 {
2949 src = SET_SRC (prev_set);
2950 if (src == ep->to_rtx)
2951 offset = 0, ok = 1;
2952 else if (GET_CODE (src) == PLUS
2953 && GET_CODE (XEXP (src, 0)) == CONST_INT
2954 && XEXP (src, 1) == ep->to_rtx)
2955 offset = INTVAL (XEXP (src, 0)), ok = 1;
2956 else if (GET_CODE (src) == PLUS
2957 && GET_CODE (XEXP (src, 1)) == CONST_INT
2958 && XEXP (src, 0) == ep->to_rtx)
2959 offset = INTVAL (XEXP (src, 1)), ok = 1;
2960 }
2961
2962 if (ok)
2963 {
2964 rtx src
2965 = plus_constant (ep->to_rtx, offset - ep->offset);
2966
2967 new_body = old_body;
2968 if (! replace)
2969 {
2970 new_body = copy_insn (old_body);
2971 if (REG_NOTES (insn))
2972 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
2973 }
2974 PATTERN (insn) = new_body;
2975 old_set = single_set (insn);
2976
2977 /* First see if this insn remains valid when we
2978 make the change. If not, keep the INSN_CODE
2979 the same and let reload fix it up. */
2980 validate_change (insn, &SET_SRC (old_set), src, 1);
2981 validate_change (insn, &SET_DEST (old_set),
2982 ep->to_rtx, 1);
2983 if (! apply_change_group ())
2984 {
2985 SET_SRC (old_set) = src;
2986 SET_DEST (old_set) = ep->to_rtx;
2987 }
2988
2989 val = 1;
2990 goto done;
2991 }
2992 }
2993 #endif
2994
2995 /* In this case this insn isn't serving a useful purpose. We
2996 will delete it in reload_as_needed once we know that this
2997 elimination is, in fact, being done.
2998
2999 If REPLACE isn't set, we can't delete this insn, but needn't
3000 process it since it won't be used unless something changes. */
3001 if (replace)
3002 {
3003 delete_dead_insn (insn);
3004 return 1;
3005 }
3006 val = 1;
3007 goto done;
3008 }
3009 }
3010
3011 /* We allow one special case which happens to work on all machines we
3012 currently support: a single set with the source being a PLUS of an
3013 eliminable register and a constant. */
3014 if (old_set
3015 && GET_CODE (SET_DEST (old_set)) == REG
3016 && GET_CODE (SET_SRC (old_set)) == PLUS
3017 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3018 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT
3019 && REGNO (XEXP (SET_SRC (old_set), 0)) < FIRST_PSEUDO_REGISTER)
3020 {
3021 rtx reg = XEXP (SET_SRC (old_set), 0);
3022 int offset = INTVAL (XEXP (SET_SRC (old_set), 1));
3023
3024 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3025 if (ep->from_rtx == reg && ep->can_eliminate)
3026 {
3027 offset += ep->offset;
3028
3029 if (offset == 0)
3030 {
3031 int num_clobbers;
3032 /* We assume here that if we need a PARALLEL with
3033 CLOBBERs for this assignment, we can make do with the
3034 MATCH_SCRATCHes that add_clobbers allocates.
3035 There's not much we can do if that doesn't work. */
3036 PATTERN (insn) = gen_rtx_SET (VOIDmode,
3037 SET_DEST (old_set),
3038 ep->to_rtx);
3039 num_clobbers = 0;
3040 INSN_CODE (insn) = recog (PATTERN (insn), insn, &num_clobbers);
3041 if (num_clobbers)
3042 {
3043 rtvec vec = rtvec_alloc (num_clobbers + 1);
3044
3045 vec->elem[0] = PATTERN (insn);
3046 PATTERN (insn) = gen_rtx_PARALLEL (VOIDmode, vec);
3047 add_clobbers (PATTERN (insn), INSN_CODE (insn));
3048 }
3049 if (INSN_CODE (insn) < 0)
3050 abort ();
3051 }
3052 else
3053 {
3054 new_body = old_body;
3055 if (! replace)
3056 {
3057 new_body = copy_insn (old_body);
3058 if (REG_NOTES (insn))
3059 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3060 }
3061 PATTERN (insn) = new_body;
3062 old_set = single_set (insn);
3063
3064 XEXP (SET_SRC (old_set), 0) = ep->to_rtx;
3065 XEXP (SET_SRC (old_set), 1) = GEN_INT (offset);
3066 }
3067 val = 1;
3068 /* This can't have an effect on elimination offsets, so skip right
3069 to the end. */
3070 goto done;
3071 }
3072 }
3073
3074 /* Determine the effects of this insn on elimination offsets. */
3075 elimination_effects (old_body, 0);
3076
3077 /* Eliminate all eliminable registers occurring in operands that
3078 can be handled by reload. */
3079 extract_insn (insn);
3080 any_changes = 0;
3081 for (i = 0; i < recog_data.n_operands; i++)
3082 {
3083 orig_operand[i] = recog_data.operand[i];
3084 substed_operand[i] = recog_data.operand[i];
3085
3086 /* For an asm statement, every operand is eliminable. */
3087 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3088 {
3089 /* Check for setting a register that we know about. */
3090 if (recog_data.operand_type[i] != OP_IN
3091 && GET_CODE (orig_operand[i]) == REG)
3092 {
3093 /* If we are assigning to a register that can be eliminated, it
3094 must be as part of a PARALLEL, since the code above handles
3095 single SETs. We must indicate that we can no longer
3096 eliminate this reg. */
3097 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3098 ep++)
3099 if (ep->from_rtx == orig_operand[i] && ep->can_eliminate)
3100 ep->can_eliminate = 0;
3101 }
3102
3103 substed_operand[i] = eliminate_regs (recog_data.operand[i], 0,
3104 replace ? insn : NULL_RTX);
3105 if (substed_operand[i] != orig_operand[i])
3106 val = any_changes = 1;
3107 /* Terminate the search in check_eliminable_occurrences at
3108 this point. */
3109 *recog_data.operand_loc[i] = 0;
3110
3111 /* If an output operand changed from a REG to a MEM and INSN is an
3112 insn, write a CLOBBER insn. */
3113 if (recog_data.operand_type[i] != OP_IN
3114 && GET_CODE (orig_operand[i]) == REG
3115 && GET_CODE (substed_operand[i]) == MEM
3116 && replace)
3117 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, orig_operand[i]),
3118 insn);
3119 }
3120 }
3121
3122 for (i = 0; i < recog_data.n_dups; i++)
3123 *recog_data.dup_loc[i]
3124 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3125
3126 /* If any eliminable registers remain, they aren't eliminable anymore. */
3127 check_eliminable_occurrences (old_body);
3128
3129 /* Substitute the operands; the new values are in the substed_operand
3130 array. */
3131 for (i = 0; i < recog_data.n_operands; i++)
3132 *recog_data.operand_loc[i] = substed_operand[i];
3133 for (i = 0; i < recog_data.n_dups; i++)
3134 *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3135
3136 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3137 re-recognize the insn. We do this in case we had a simple addition
3138 but now can do this as a load-address. This saves an insn in this
3139 common case.
3140 If re-recognition fails, the old insn code number will still be used,
3141 and some register operands may have changed into PLUS expressions.
3142 These will be handled by find_reloads by loading them into a register
3143 again. */
3144
3145 if (val)
3146 {
3147 /* If we aren't replacing things permanently and we changed something,
3148 make another copy to ensure that all the RTL is new. Otherwise
3149 things can go wrong if find_reloads swaps commutative operands
3150 and one is inside RTL that has been copied while the other is not. */
3151 new_body = old_body;
3152 if (! replace)
3153 {
3154 new_body = copy_insn (old_body);
3155 if (REG_NOTES (insn))
3156 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3157 }
3158 PATTERN (insn) = new_body;
3159
3160 /* If we had a move insn but now we don't, rerecognize it. This will
3161 cause spurious re-recognition if the old move had a PARALLEL since
3162 the new one still will, but we can't call single_set without
3163 having put NEW_BODY into the insn and the re-recognition won't
3164 hurt in this rare case. */
3165 /* ??? Why this huge if statement - why don't we just rerecognize the
3166 thing always? */
3167 if (! insn_is_asm
3168 && old_set != 0
3169 && ((GET_CODE (SET_SRC (old_set)) == REG
3170 && (GET_CODE (new_body) != SET
3171 || GET_CODE (SET_SRC (new_body)) != REG))
3172 /* If this was a load from or store to memory, compare
3173 the MEM in recog_data.operand to the one in the insn.
3174 If they are not equal, then rerecognize the insn. */
3175 || (old_set != 0
3176 && ((GET_CODE (SET_SRC (old_set)) == MEM
3177 && SET_SRC (old_set) != recog_data.operand[1])
3178 || (GET_CODE (SET_DEST (old_set)) == MEM
3179 && SET_DEST (old_set) != recog_data.operand[0])))
3180 /* If this was an add insn before, rerecognize. */
3181 || GET_CODE (SET_SRC (old_set)) == PLUS))
3182 {
3183 int new_icode = recog (PATTERN (insn), insn, 0);
3184 if (new_icode < 0)
3185 INSN_CODE (insn) = icode;
3186 }
3187 }
3188
3189 /* Restore the old body. If there were any changes to it, we made a copy
3190 of it while the changes were still in place, so we'll correctly return
3191 a modified insn below. */
3192 if (! replace)
3193 {
3194 /* Restore the old body. */
3195 for (i = 0; i < recog_data.n_operands; i++)
3196 *recog_data.operand_loc[i] = orig_operand[i];
3197 for (i = 0; i < recog_data.n_dups; i++)
3198 *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3199 }
3200
3201 /* Update all elimination pairs to reflect the status after the current
3202 insn. The changes we make were determined by the earlier call to
3203 elimination_effects.
3204
3205 We also detect cases where register elimination cannot be done,
3206 namely, if a register would be both changed and referenced outside a MEM
3207 in the resulting insn since such an insn is often undefined and, even if
3208 not, we cannot know what meaning will be given to it. Note that it is
3209 valid to have a register used in an address in an insn that changes it
3210 (presumably with a pre- or post-increment or decrement).
3211
3212 If anything changes, return nonzero. */
3213
3214 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3215 {
3216 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3217 ep->can_eliminate = 0;
3218
3219 ep->ref_outside_mem = 0;
3220
3221 if (ep->previous_offset != ep->offset)
3222 val = 1;
3223 }
3224
3225 done:
3226 /* If we changed something, perform elimination in REG_NOTES. This is
3227 needed even when REPLACE is zero because a REG_DEAD note might refer
3228 to a register that we eliminate and could cause a different number
3229 of spill registers to be needed in the final reload pass than in
3230 the pre-passes. */
3231 if (val && REG_NOTES (insn) != 0)
3232 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3233
3234 return val;
3235 }
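/* Illustrative example of the single-set PLUS special case handled in the
   function above (a sketch, not part of the original source): with a
   hypothetical frame-pointer -> stack-pointer elimination whose current
   offset is 16, (set (reg 100) (plus (reg fp) (const_int 8))) is
   rewritten as (set (reg 100) (plus (reg sp) (const_int 24))), while
   (set (reg 100) (plus (reg fp) (const_int -16))) reduces to the plain
   copy (set (reg 100) (reg sp)), which is then re-recognized.  */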
3236
3237 /* Loop through all elimination pairs.
3238 Recalculate the number not at initial offset.
3239
3240 Compute the maximum offset (minimum offset if the stack does not
3241 grow downward) for each elimination pair. */
3242
3243 static void
3244 update_eliminable_offsets ()
3245 {
3246 struct elim_table *ep;
3247
3248 num_not_at_initial_offset = 0;
3249 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3250 {
3251 ep->previous_offset = ep->offset;
3252 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3253 num_not_at_initial_offset++;
3254 }
3255 }
3256
3257 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3258 replacement we currently believe is valid, mark it as not eliminable if X
3259 modifies DEST in any way other than by adding a constant integer to it.
3260
3261 If DEST is the frame pointer, we do nothing because we assume that
3262 all assignments to the hard frame pointer are nonlocal gotos and are being
3263 done at a time when they are valid and do not disturb anything else.
3264 Some machines want to eliminate a fake argument pointer with either the
3265 frame or stack pointer. Assignments to the hard frame pointer must not
3266 prevent this elimination.
3267
3268 Called via note_stores from reload before starting its passes to scan
3269 the insns of the function. */
3270
3271 static void
3272 mark_not_eliminable (dest, x, data)
3273 rtx dest;
3274 rtx x;
3275 void *data ATTRIBUTE_UNUSED;
3276 {
3277 register unsigned int i;
3278
3279 /* A SUBREG of a hard register here is just changing its mode. We should
3280 not see a SUBREG of an eliminable hard register, but check just in
3281 case. */
3282 if (GET_CODE (dest) == SUBREG)
3283 dest = SUBREG_REG (dest);
3284
3285 if (dest == hard_frame_pointer_rtx)
3286 return;
3287
3288 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3289 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3290 && (GET_CODE (x) != SET
3291 || GET_CODE (SET_SRC (x)) != PLUS
3292 || XEXP (SET_SRC (x), 0) != dest
3293 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3294 {
3295 reg_eliminate[i].can_eliminate_previous
3296 = reg_eliminate[i].can_eliminate = 0;
3297 num_eliminable--;
3298 }
3299 }
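/* Illustrative note (a sketch, not part of the original source): a stack
   adjustment such as (set (reg sp) (plus (reg sp) (const_int -64)))
   leaves eliminations targeting the stack pointer intact, because it only
   adds a constant to the replacement register.  Any other kind of store
   to it, e.g. (set (reg sp) (reg 1)), marks every elimination whose
   to_rtx is the stack pointer as no longer possible.  */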
3300
3301 /* Verify that the initial elimination offsets did not change since the
3302 last call to set_initial_elim_offsets. This is used to catch cases
3303 where something illegal happened during reload_as_needed that could
3304 cause incorrect code to be generated if we did not check for it. */
3305
3306 static void
3307 verify_initial_elim_offsets ()
3308 {
3309 int t;
3310
3311 #ifdef ELIMINABLE_REGS
3312 struct elim_table *ep;
3313
3314 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3315 {
3316 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3317 if (t != ep->initial_offset)
3318 abort ();
3319 }
3320 #else
3321 INITIAL_FRAME_POINTER_OFFSET (t);
3322 if (t != reg_eliminate[0].initial_offset)
3323 abort ();
3324 #endif
3325 }
3326
3327 /* Reset all offsets on eliminable registers to their initial values. */
3328
3329 static void
3330 set_initial_elim_offsets ()
3331 {
3332 struct elim_table *ep = reg_eliminate;
3333
3334 #ifdef ELIMINABLE_REGS
3335 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3336 {
3337 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3338 ep->previous_offset = ep->offset = ep->initial_offset;
3339 }
3340 #else
3341 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3342 ep->previous_offset = ep->offset = ep->initial_offset;
3343 #endif
3344
3345 num_not_at_initial_offset = 0;
3346 }
3347
3348 /* Initialize the known label offsets.
3349 Set a known offset for each forced label to be at the initial offset
3350 of each elimination. We do this because we assume that all
3351 computed jumps occur from a location where each elimination is
3352 at its initial offset.
3353 For all other labels, show that we don't know the offsets. */
3354
3355 static void
3356 set_initial_label_offsets ()
3357 {
3358 rtx x;
3359 memset ((char *) &offsets_known_at[get_first_label_num ()], 0, num_labels);
3360
3361 for (x = forced_labels; x; x = XEXP (x, 1))
3362 if (XEXP (x, 0))
3363 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3364 }
3365
3366 /* Set all elimination offsets to the known values for the code label given
3367 by INSN. */
3368
3369 static void
3370 set_offsets_for_label (insn)
3371 rtx insn;
3372 {
3373 unsigned int i;
3374 int label_nr = CODE_LABEL_NUMBER (insn);
3375 struct elim_table *ep;
3376
3377 num_not_at_initial_offset = 0;
3378 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3379 {
3380 ep->offset = ep->previous_offset = offsets_at[label_nr][i];
3381 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3382 num_not_at_initial_offset++;
3383 }
3384 }
3385
3386 /* See if anything that happened changes which eliminations are valid.
3387 For example, on the Sparc, whether or not the frame pointer can
3388 be eliminated can depend on what registers have been used. We need
3389 not check some conditions again (such as flag_omit_frame_pointer)
3390 since they can't have changed. */
3391
3392 static void
3393 update_eliminables (pset)
3394 HARD_REG_SET *pset;
3395 {
3396 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3397 int previous_frame_pointer_needed = frame_pointer_needed;
3398 #endif
3399 struct elim_table *ep;
3400
3401 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3402 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
3403 #ifdef ELIMINABLE_REGS
3404 || ! CAN_ELIMINATE (ep->from, ep->to)
3405 #endif
3406 )
3407 ep->can_eliminate = 0;
3408
3409 /* Look for the case where we have discovered that we can't replace
3410 register A with register B and that means that we will now be
3411 trying to replace register A with register C. This means we can
3412 no longer replace register C with register B and we need to disable
3413 such an elimination, if it exists. This occurs often with A == ap,
3414 B == sp, and C == fp. */
3415
3416 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3417 {
3418 struct elim_table *op;
3419 register int new_to = -1;
3420
3421 if (! ep->can_eliminate && ep->can_eliminate_previous)
3422 {
3423 /* Find the current elimination for ep->from, if there is a
3424 new one. */
3425 for (op = reg_eliminate;
3426 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3427 if (op->from == ep->from && op->can_eliminate)
3428 {
3429 new_to = op->to;
3430 break;
3431 }
3432
3433 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3434 disable it. */
3435 for (op = reg_eliminate;
3436 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3437 if (op->from == new_to && op->to == ep->to)
3438 op->can_eliminate = 0;
3439 }
3440 }
3441
3442 /* See if any registers that we thought we could eliminate the previous
3443 time are no longer eliminable. If so, something has changed and we
3444 must spill the register. Also, recompute the number of eliminable
3445 registers and see if the frame pointer is needed; it is if there is
3446 no elimination of the frame pointer that we can perform. */
3447
3448 frame_pointer_needed = 1;
3449 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3450 {
3451 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
3452 && ep->to != HARD_FRAME_POINTER_REGNUM)
3453 frame_pointer_needed = 0;
3454
3455 if (! ep->can_eliminate && ep->can_eliminate_previous)
3456 {
3457 ep->can_eliminate_previous = 0;
3458 SET_HARD_REG_BIT (*pset, ep->from);
3459 num_eliminable--;
3460 }
3461 }
3462
3463 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3464 /* If we didn't need a frame pointer last time, but we do now, spill
3465 the hard frame pointer. */
3466 if (frame_pointer_needed && ! previous_frame_pointer_needed)
3467 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3468 #endif
3469 }
3470
3471 /* Initialize the table of registers to eliminate. */
3472
3473 static void
3474 init_elim_table ()
3475 {
3476 struct elim_table *ep;
3477 #ifdef ELIMINABLE_REGS
3478 struct elim_table_1 *ep1;
3479 #endif
3480
3481 if (!reg_eliminate)
3482 reg_eliminate = (struct elim_table *)
3483 xcalloc (sizeof (struct elim_table), NUM_ELIMINABLE_REGS);
3484
3485 /* Does this function require a frame pointer? */
3486
3487 frame_pointer_needed = (! flag_omit_frame_pointer
3488 #ifdef EXIT_IGNORE_STACK
3489 /* ?? If EXIT_IGNORE_STACK is set, we will not save
3490 and restore sp for alloca. So we can't eliminate
3491 the frame pointer in that case. At some point,
3492 we should improve this by emitting the
3493 sp-adjusting insns for this case. */
3494 || (current_function_calls_alloca
3495 && EXIT_IGNORE_STACK)
3496 #endif
3497 || FRAME_POINTER_REQUIRED);
3498
3499 num_eliminable = 0;
3500
3501 #ifdef ELIMINABLE_REGS
3502 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3503 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3504 {
3505 ep->from = ep1->from;
3506 ep->to = ep1->to;
3507 ep->can_eliminate = ep->can_eliminate_previous
3508 = (CAN_ELIMINATE (ep->from, ep->to)
3509 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
3510 }
3511 #else
3512 reg_eliminate[0].from = reg_eliminate_1[0].from;
3513 reg_eliminate[0].to = reg_eliminate_1[0].to;
3514 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3515 = ! frame_pointer_needed;
3516 #endif
3517
3518 /* Count the number of eliminable registers and build the FROM and TO
3519 REG rtx's. Note that code in gen_rtx will cause, e.g.,
3520 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3521 We depend on this. */
3522 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3523 {
3524 num_eliminable += ep->can_eliminate;
3525 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3526 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3527 }
3528 }
3529 \f
3530 /* Kick all pseudos out of hard register REGNO.
3531
3532 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3533 because we found we can't eliminate some register. In that case, no pseudos
3534 are allowed to be in the register, even if they are only in a block that
3535 doesn't require spill registers, unlike the case when we are spilling this
3536 hard reg to produce another spill register.
3537
3538 Any pseudos that are kicked out are recorded in spilled_pseudos. */
3539
3540 static void
3541 spill_hard_reg (regno, cant_eliminate)
3542 unsigned int regno;
3543 int cant_eliminate;
3544 {
3545 register int i;
3546
3547 if (cant_eliminate)
3548 {
3549 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3550 regs_ever_live[regno] = 1;
3551 }
3552
3553 /* Spill every pseudo reg that was allocated to this reg
3554 or to something that overlaps this reg. */
3555
3556 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3557 if (reg_renumber[i] >= 0
3558 && (unsigned int) reg_renumber[i] <= regno
3559 && ((unsigned int) reg_renumber[i]
3560 + HARD_REGNO_NREGS ((unsigned int) reg_renumber[i],
3561 PSEUDO_REGNO_MODE (i))
3562 > regno))
3563 SET_REGNO_REG_SET (&spilled_pseudos, i);
3564 }
3565
3566 /* I'm getting weird preprocessor errors if I use IOR_HARD_REG_SET
3567 from within EXECUTE_IF_SET_IN_REG_SET. Hence this awkwardness. */
3568
3569 static void
3570 ior_hard_reg_set (set1, set2)
3571 HARD_REG_SET *set1, *set2;
3572 {
3573 IOR_HARD_REG_SET (*set1, *set2);
3574 }
3575
3576 /* After find_reload_regs has been run for all insns that need reloads,
3577 and/or spill_hard_reg was called, this function is used to actually
3578 spill pseudo registers and try to reallocate them. It also sets up the
3579 spill_regs array for use by choose_reload_regs. */
3580
3581 static int
3582 finish_spills (global)
3583 int global;
3584 {
3585 struct insn_chain *chain;
3586 int something_changed = 0;
3587 int i;
3588
3589 /* Build the spill_regs array for the function. */
3590 /* If there are some registers still to eliminate and one of the spill regs
3591 wasn't ever used before, additional stack space may have to be
3592 allocated to store this register. Thus, we may have changed the offset
3593 between the stack and frame pointers, so mark that something has changed.
3594
3595 One might think that we need only set SOMETHING_CHANGED to 1 if this is a call-used
3596 register. However, the set of registers that must be saved by the
3597 prologue is not identical to the call-used set. For example, the
3598 register used by the call insn for the return PC is a call-used register,
3599 but must be saved by the prologue. */
3600
3601 n_spills = 0;
3602 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3603 if (TEST_HARD_REG_BIT (used_spill_regs, i))
3604 {
3605 spill_reg_order[i] = n_spills;
3606 spill_regs[n_spills++] = i;
3607 if (num_eliminable && ! regs_ever_live[i])
3608 something_changed = 1;
3609 regs_ever_live[i] = 1;
3610 }
3611 else
3612 spill_reg_order[i] = -1;
3613
3614 EXECUTE_IF_SET_IN_REG_SET
3615 (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i,
3616 {
3617 /* Record the current hard register the pseudo is allocated to in
3618 pseudo_previous_regs so we avoid reallocating it to the same
3619 hard reg in a later pass. */
3620 if (reg_renumber[i] < 0)
3621 abort ();
3622
3623 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
3624 /* Mark it as no longer having a hard register home. */
3625 reg_renumber[i] = -1;
3626 /* We will need to scan everything again. */
3627 something_changed = 1;
3628 });
3629
3630 /* Retry global register allocation if possible. */
3631 if (global)
3632 {
3633 memset ((char *) pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
3634 /* For every insn that needs reloads, set the registers used as spill
3635 regs in pseudo_forbidden_regs for every pseudo live across the
3636 insn. */
3637 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
3638 {
3639 EXECUTE_IF_SET_IN_REG_SET
3640 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i,
3641 {
3642 ior_hard_reg_set (pseudo_forbidden_regs + i,
3643 &chain->used_spill_regs);
3644 });
3645 EXECUTE_IF_SET_IN_REG_SET
3646 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i,
3647 {
3648 ior_hard_reg_set (pseudo_forbidden_regs + i,
3649 &chain->used_spill_regs);
3650 });
3651 }
3652
3653 /* Retry allocating the spilled pseudos. For each reg, merge the
3654 various reg sets that indicate which hard regs can't be used,
3655 and call retry_global_alloc.
3656 We change spilled_pseudos here to only contain pseudos that did not
3657 get a new hard register. */
3658 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3659 if (reg_old_renumber[i] != reg_renumber[i])
3660 {
3661 HARD_REG_SET forbidden;
3662 COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
3663 IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
3664 IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
3665 retry_global_alloc (i, forbidden);
3666 if (reg_renumber[i] >= 0)
3667 CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
3668 }
3669 }
3670
3671 /* Fix up the register information in the insn chain.
3672 This involves deleting those of the spilled pseudos which did not get
3673 a new hard register home from the live_{before,after} sets. */
3674 for (chain = reload_insn_chain; chain; chain = chain->next)
3675 {
3676 HARD_REG_SET used_by_pseudos;
3677 HARD_REG_SET used_by_pseudos2;
3678
3679 AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
3680 AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
3681
3682 /* Mark any unallocated hard regs as available for spills. That
3683 makes inheritance work somewhat better. */
3684 if (chain->need_reload)
3685 {
3686 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
3687 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
3688 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
3689
3690 /* Save the old value for the sanity test below. */
3691 COPY_HARD_REG_SET (used_by_pseudos2, chain->used_spill_regs);
3692
3693 compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
3694 compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
3695 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
3696 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
3697
3698 /* Make sure we only enlarge the set. */
3699 GO_IF_HARD_REG_SUBSET (used_by_pseudos2, chain->used_spill_regs, ok);
3700 abort ();
3701 ok:;
3702 }
3703 }
3704
3705 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
3706 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3707 {
3708 int regno = reg_renumber[i];
3709 if (reg_old_renumber[i] == regno)
3710 continue;
3711
3712 alter_reg (i, reg_old_renumber[i]);
3713 reg_old_renumber[i] = regno;
3714 if (rtl_dump_file)
3715 {
3716 if (regno == -1)
3717 fprintf (rtl_dump_file, " Register %d now on stack.\n\n", i);
3718 else
3719 fprintf (rtl_dump_file, " Register %d now in %d.\n\n",
3720 i, reg_renumber[i]);
3721 }
3722 }
3723
3724 return something_changed;
3725 }
3726 \f
3727 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3728 Also mark any hard registers used to store user variables as
3729 forbidden from being used for spill registers. */
3730
3731 static void
3732 scan_paradoxical_subregs (x)
3733 register rtx x;
3734 {
3735 register int i;
3736 register const char *fmt;
3737 register enum rtx_code code = GET_CODE (x);
3738
3739 switch (code)
3740 {
3741 case REG:
3742 #if 0
3743 if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
3744 && REG_USERVAR_P (x))
3745 SET_HARD_REG_BIT (bad_spill_regs_global, REGNO (x));
3746 #endif
3747 return;
3748
3749 case CONST_INT:
3750 case CONST:
3751 case SYMBOL_REF:
3752 case LABEL_REF:
3753 case CONST_DOUBLE:
3754 case CC0:
3755 case PC:
3756 case USE:
3757 case CLOBBER:
3758 return;
3759
3760 case SUBREG:
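/* A paradoxical SUBREG references more bytes than its inner register
   provides; record the widest such reference for the inner register.  */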
3761 if (GET_CODE (SUBREG_REG (x)) == REG
3762 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3763 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3764 = GET_MODE_SIZE (GET_MODE (x));
3765 return;
3766
3767 default:
3768 break;
3769 }
3770
3771 fmt = GET_RTX_FORMAT (code);
3772 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3773 {
3774 if (fmt[i] == 'e')
3775 scan_paradoxical_subregs (XEXP (x, i));
3776 else if (fmt[i] == 'E')
3777 {
3778 register int j;
3779 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3780 scan_paradoxical_subregs (XVECEXP (x, i, j));
3781 }
3782 }
3783 }
3784 \f
3785 /* Reload pseudo-registers into hard regs around each insn as needed.
3786 Additional register load insns are output before the insn that needs it
3787 and perhaps store insns after insns that modify the reloaded pseudo reg.
3788
3789 reg_last_reload_reg and reg_reloaded_contents keep track of
3790 which registers are already available in reload registers.
3791 We update these for the reloads that we perform,
3792 as the insns are scanned. */
3793
3794 static void
3795 reload_as_needed (live_known)
3796 int live_known;
3797 {
3798 struct insn_chain *chain;
3799 #if defined (AUTO_INC_DEC)
3800 register int i;
3801 #endif
3802 rtx x;
3803
3804 memset ((char *) spill_reg_rtx, 0, sizeof spill_reg_rtx);
3805 memset ((char *) spill_reg_store, 0, sizeof spill_reg_store);
3806 reg_last_reload_reg = (rtx *) xcalloc (max_regno, sizeof (rtx));
3807 reg_has_output_reload = (char *) xmalloc (max_regno);
3808 CLEAR_HARD_REG_SET (reg_reloaded_valid);
3809
3810 set_initial_elim_offsets ();
3811
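/* Now scan the insn chain.  For each insn, perform any register
   eliminations it needs, find and assign its reloads, and emit the
   reload insns.  */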
3812 for (chain = reload_insn_chain; chain; chain = chain->next)
3813 {
3814 rtx prev;
3815 rtx insn = chain->insn;
3816 rtx old_next = NEXT_INSN (insn);
3817
3818 /* If we pass a label, copy the offsets from the label information
3819 into the current offsets of each elimination. */
3820 if (GET_CODE (insn) == CODE_LABEL)
3821 set_offsets_for_label (insn);
3822
3823 else if (INSN_P (insn))
3824 {
3825 rtx oldpat = PATTERN (insn);
3826
3827 /* If this is a USE or CLOBBER of a MEM, ensure that any
3828 references to eliminable registers have been removed. */
3829
3830 if ((GET_CODE (PATTERN (insn)) == USE
3831 || GET_CODE (PATTERN (insn)) == CLOBBER)
3832 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3833 XEXP (XEXP (PATTERN (insn), 0), 0)
3834 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3835 GET_MODE (XEXP (PATTERN (insn), 0)),
3836 NULL_RTX);
3837
3838 /* If we need to do register elimination processing, do so.
3839 This might delete the insn, in which case we are done. */
3840 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
3841 {
3842 eliminate_regs_in_insn (insn, 1);
3843 if (GET_CODE (insn) == NOTE)
3844 {
3845 update_eliminable_offsets ();
3846 continue;
3847 }
3848 }
3849
3850 /* If need_elim is nonzero but need_reload is zero, one might think
3851 that we could simply set n_reloads to 0. However, find_reloads
3852 could have done some manipulation of the insn (such as swapping
3853 commutative operands), and these manipulations are lost during
3854 the first pass for every insn that needs register elimination.
3855 So the actions of find_reloads must be redone here. */
3856
3857 if (! chain->need_elim && ! chain->need_reload
3858 && ! chain->need_operand_change)
3859 n_reloads = 0;
3860 /* First find the pseudo regs that must be reloaded for this insn.
3861 This info is returned in the tables reload_... (see reload.h).
3862 Also modify the body of INSN by substituting RELOAD
3863 rtx's for those pseudo regs. */
3864 else
3865 {
3866 memset (reg_has_output_reload, 0, max_regno);
3867 CLEAR_HARD_REG_SET (reg_is_output_reload);
3868
3869 find_reloads (insn, 1, spill_indirect_levels, live_known,
3870 spill_reg_order);
3871 }
3872
3873 if (n_reloads > 0)
3874 {
3875 rtx next = NEXT_INSN (insn);
3876 rtx p;
3877
3878 prev = PREV_INSN (insn);
3879
3880 /* Now compute which reload regs to reload them into. Perhaps
3881 reusing reload regs from previous insns, or else output
3882 load insns to reload them. Maybe output store insns too.
3883 Record the choices of reload reg in reload_reg_rtx. */
3884 choose_reload_regs (chain);
3885
3886 /* Merge any reloads that we didn't combine for fear of
3887 increasing the number of spill registers needed but now
3888 discover can be safely merged. */
3889 if (SMALL_REGISTER_CLASSES)
3890 merge_assigned_reloads (insn);
3891
3892 /* Generate the insns to reload operands into or out of
3893 their reload regs. */
3894 emit_reload_insns (chain);
3895
3896 /* Substitute the chosen reload regs from reload_reg_rtx
3897 into the insn's body (or perhaps into the bodies of other
3898 load and store insns that we just made for reloading
3899 and that we moved the structure into). */
3900 subst_reloads (insn);
3901
3902 /* If this was an ASM, make sure that all the reload insns
3903 we have generated are valid. If not, give an error
3904 and delete them. */
3905
3906 if (asm_noperands (PATTERN (insn)) >= 0)
3907 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3908 if (p != insn && INSN_P (p)
3909 && (recog_memoized (p) < 0
3910 || (extract_insn (p), ! constrain_operands (1))))
3911 {
3912 error_for_asm (insn,
3913 "`asm' operand requires impossible reload");
3914 PUT_CODE (p, NOTE);
3915 NOTE_SOURCE_FILE (p) = 0;
3916 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3917 }
3918 }
3919
3920 if (num_eliminable && chain->need_elim)
3921 update_eliminable_offsets ();
3922
3923 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3924 is no longer validly lying around to save a future reload.
3925 Note that this does not detect pseudos that were reloaded
3926 for this insn in order to be stored into
3927 (obeying register constraints). That is correct; such reload
3928 registers ARE still valid. */
3929 note_stores (oldpat, forget_old_reloads_1, NULL);
3930
3931 /* There may have been CLOBBER insns placed after INSN. So scan
3932 between INSN and OLD_NEXT and use them to forget old reloads. */
3933 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
3934 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3935 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
3936
3937 #ifdef AUTO_INC_DEC
3938 /* Likewise for regs altered by auto-increment in this insn.
3939 REG_INC notes have been changed by reloading:
3940 find_reloads_address_1 records substitutions for them,
3941 which have been performed by subst_reloads above. */
3942 for (i = n_reloads - 1; i >= 0; i--)
3943 {
3944 rtx in_reg = rld[i].in_reg;
3945 if (in_reg)
3946 {
3947 enum rtx_code code = GET_CODE (in_reg);
3948 /* PRE_INC / PRE_DEC will have the reload register ending up
3949 with the same value as the stack slot, but that doesn't
3950 hold true for POST_INC / POST_DEC. Either we have to
3951 convert the memory access to a true POST_INC / POST_DEC,
3952 or we can't use the reload register for inheritance. */
3953 if ((code == POST_INC || code == POST_DEC)
3954 && TEST_HARD_REG_BIT (reg_reloaded_valid,
3955 REGNO (rld[i].reg_rtx))
3956 /* Make sure it is the inc/dec pseudo, and not
3957 some other (e.g. output operand) pseudo. */
3958 && (reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
3959 == REGNO (XEXP (in_reg, 0))))
3960
3961 {
3962 rtx reload_reg = rld[i].reg_rtx;
3963 enum machine_mode mode = GET_MODE (reload_reg);
3964 int n = 0;
3965 rtx p;
3966
3967 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
3968 {
3969 /* We really want to ignore REG_INC notes here, so
3970 use PATTERN (p) as the argument to reg_set_p. */
3971 if (reg_set_p (reload_reg, PATTERN (p)))
3972 break;
3973 n = count_occurrences (PATTERN (p), reload_reg, 0);
3974 if (! n)
3975 continue;
3976 if (n == 1)
3977 {
3978 n = validate_replace_rtx (reload_reg,
3979 gen_rtx (code, mode,
3980 reload_reg),
3981 p);
3982
3983 /* We must also verify that the constraints
3984 are met after the replacement. */
3985 extract_insn (p);
3986 if (n)
3987 n = constrain_operands (1);
3988 else
3989 break;
3990
3991 /* If the constraints were not met, then
3992 undo the replacement. */
3993 if (!n)
3994 {
3995 validate_replace_rtx (gen_rtx (code, mode,
3996 reload_reg),
3997 reload_reg, p);
3998 break;
3999 }
4000
4001 }
4002 break;
4003 }
4004 if (n == 1)
4005 {
4006 REG_NOTES (p)
4007 = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
4008 REG_NOTES (p));
4009 /* Mark this as having an output reload so that the
4010 REG_INC processing code below won't invalidate
4011 the reload for inheritance. */
4012 SET_HARD_REG_BIT (reg_is_output_reload,
4013 REGNO (reload_reg));
4014 reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
4015 }
4016 else
4017 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4018 NULL);
4019 }
4020 else if ((code == PRE_INC || code == PRE_DEC)
4021 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4022 REGNO (rld[i].reg_rtx))
4023 /* Make sure it is the inc/dec pseudo, and not
4024 some other (e.g. output operand) pseudo. */
4025 && (reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4026 == REGNO (XEXP (in_reg, 0))))
4027 {
4028 SET_HARD_REG_BIT (reg_is_output_reload,
4029 REGNO (rld[i].reg_rtx));
4030 reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
4031 }
4032 }
4033 }
4034 /* If a pseudo that got a hard register is auto-incremented,
4035 we must purge records of copying it into pseudos without
4036 hard registers. */
4037 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4038 if (REG_NOTE_KIND (x) == REG_INC)
4039 {
4040 /* See if this pseudo reg was reloaded in this insn.
4041 If so, its last-reload info is still valid
4042 because it is based on this insn's reload. */
4043 for (i = 0; i < n_reloads; i++)
4044 if (rld[i].out == XEXP (x, 0))
4045 break;
4046
4047 if (i == n_reloads)
4048 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4049 }
4050 #endif
4051 }
4052 /* A reload reg's contents are unknown after a label. */
4053 if (GET_CODE (insn) == CODE_LABEL)
4054 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4055
4056 /* Don't assume a reload reg is still good after a call insn
4057 if it is a call-used reg. */
4058 else if (GET_CODE (insn) == CALL_INSN)
4059 AND_COMPL_HARD_REG_SET(reg_reloaded_valid, call_used_reg_set);
4060 }
4061
4062 /* Clean up. */
4063 free (reg_last_reload_reg);
4064 free (reg_has_output_reload);
4065 }
4066
4067 /* Discard all record of any value reloaded from X,
4068 or reloaded in X from someplace else;
4069 unless X is an output reload reg of the current insn.
4070
4071 X may be a hard reg (the reload reg)
4072 or it may be a pseudo reg that was reloaded from. */
4073
4074 static void
4075 forget_old_reloads_1 (x, ignored, data)
4076 rtx x;
4077 rtx ignored ATTRIBUTE_UNUSED;
4078 void *data ATTRIBUTE_UNUSED;
4079 {
4080 unsigned int regno;
4081 unsigned int nr;
4082 int offset = 0;
4083
4084 /* note_stores does give us subregs of hard regs;
4085 subreg_regno_offset will abort if the reg is not a hard reg. */
4086 while (GET_CODE (x) == SUBREG)
4087 {
4088 offset += subreg_regno_offset (REGNO (SUBREG_REG (x)),
4089 GET_MODE (SUBREG_REG (x)),
4090 SUBREG_BYTE (x),
4091 GET_MODE (x));
4092 x = SUBREG_REG (x);
4093 }
4094
4095 if (GET_CODE (x) != REG)
4096 return;
4097
4098 regno = REGNO (x) + offset;
4099
4100 if (regno >= FIRST_PSEUDO_REGISTER)
4101 nr = 1;
4102 else
4103 {
4104 unsigned int i;
4105
4106 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4107 /* Storing into a spilled-reg invalidates its contents.
4108 This can happen if a block-local pseudo is allocated to that reg
4109 and it wasn't spilled because this block's total need is 0.
4110 Then some insn might have an optional reload and use this reg. */
4111 for (i = 0; i < nr; i++)
4112 /* But don't do this if the reg actually serves as an output
4113 reload reg in the current instruction. */
4114 if (n_reloads == 0
4115 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4116 {
4117 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4118 spill_reg_store[regno + i] = 0;
4119 }
4120 }
4121
4122 /* Since value of X has changed,
4123 forget any value previously copied from it. */
4124
4125 while (nr-- > 0)
4126 /* But don't forget a copy if this is the output reload
4127 that establishes the copy's validity. */
4128 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4129 reg_last_reload_reg[regno + nr] = 0;
4130 }
4131 \f
4132 /* The following HARD_REG_SETs indicate when each hard register is
4133 used for a reload of various parts of the current insn. */
4134
4135 /* If reg is unavailable for all reloads. */
4136 static HARD_REG_SET reload_reg_unavailable;
4137 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4138 static HARD_REG_SET reload_reg_used;
4139 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4140 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4141 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4142 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4143 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4144 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4145 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4146 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4147 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4148 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4149 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4150 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4151 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4152 static HARD_REG_SET reload_reg_used_in_op_addr;
4153 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4154 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4155 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4156 static HARD_REG_SET reload_reg_used_in_insn;
4157 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4158 static HARD_REG_SET reload_reg_used_in_other_addr;
4159
4160 /* If reg is in use as a reload reg for any sort of reload. */
4161 static HARD_REG_SET reload_reg_used_at_all;
4162
4163 /* If reg is in use as an inherited reload. We just mark the first register
4164 in the group. */
4165 static HARD_REG_SET reload_reg_used_for_inherit;
4166
4167 /* Records which hard regs are used in any way, either as explicit use or
4168 by being allocated to a pseudo during any point of the current insn. */
4169 static HARD_REG_SET reg_used_in_insn;
4170
4171 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4172 TYPE. MODE is used to indicate how many consecutive regs are
4173 actually used. */
4174
4175 static void
4176 mark_reload_reg_in_use (regno, opnum, type, mode)
4177 unsigned int regno;
4178 int opnum;
4179 enum reload_type type;
4180 enum machine_mode mode;
4181 {
4182 unsigned int nregs = HARD_REGNO_NREGS (regno, mode);
4183 unsigned int i;
4184
4185 for (i = regno; i < nregs + regno; i++)
4186 {
4187 switch (type)
4188 {
4189 case RELOAD_OTHER:
4190 SET_HARD_REG_BIT (reload_reg_used, i);
4191 break;
4192
4193 case RELOAD_FOR_INPUT_ADDRESS:
4194 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4195 break;
4196
4197 case RELOAD_FOR_INPADDR_ADDRESS:
4198 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4199 break;
4200
4201 case RELOAD_FOR_OUTPUT_ADDRESS:
4202 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4203 break;
4204
4205 case RELOAD_FOR_OUTADDR_ADDRESS:
4206 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4207 break;
4208
4209 case RELOAD_FOR_OPERAND_ADDRESS:
4210 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4211 break;
4212
4213 case RELOAD_FOR_OPADDR_ADDR:
4214 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4215 break;
4216
4217 case RELOAD_FOR_OTHER_ADDRESS:
4218 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4219 break;
4220
4221 case RELOAD_FOR_INPUT:
4222 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4223 break;
4224
4225 case RELOAD_FOR_OUTPUT:
4226 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4227 break;
4228
4229 case RELOAD_FOR_INSN:
4230 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4231 break;
4232 }
4233
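/* Whatever the reload type, the register is now in use for some reload
   in this insn.  */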
4234 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4235 }
4236 }
4237
4238 /* Similarly, but show REGNO is no longer in use for a reload. */
4239
4240 static void
4241 clear_reload_reg_in_use (regno, opnum, type, mode)
4242 unsigned int regno;
4243 int opnum;
4244 enum reload_type type;
4245 enum machine_mode mode;
4246 {
4247 unsigned int nregs = HARD_REGNO_NREGS (regno, mode);
4248 unsigned int start_regno, end_regno, r;
4249 int i;
4250 /* A complication is that for some reload types, inheritance might
4251 allow multiple reloads of the same type to share a reload register.
4252 We set check_opnum if we have to check only reloads with the same
4253 operand number, and check_any if we have to check all reloads. */
4254 int check_opnum = 0;
4255 int check_any = 0;
4256 HARD_REG_SET *used_in_set;
4257
4258 switch (type)
4259 {
4260 case RELOAD_OTHER:
4261 used_in_set = &reload_reg_used;
4262 break;
4263
4264 case RELOAD_FOR_INPUT_ADDRESS:
4265 used_in_set = &reload_reg_used_in_input_addr[opnum];
4266 break;
4267
4268 case RELOAD_FOR_INPADDR_ADDRESS:
4269 check_opnum = 1;
4270 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4271 break;
4272
4273 case RELOAD_FOR_OUTPUT_ADDRESS:
4274 used_in_set = &reload_reg_used_in_output_addr[opnum];
4275 break;
4276
4277 case RELOAD_FOR_OUTADDR_ADDRESS:
4278 check_opnum = 1;
4279 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4280 break;
4281
4282 case RELOAD_FOR_OPERAND_ADDRESS:
4283 used_in_set = &reload_reg_used_in_op_addr;
4284 break;
4285
4286 case RELOAD_FOR_OPADDR_ADDR:
4287 check_any = 1;
4288 used_in_set = &reload_reg_used_in_op_addr_reload;
4289 break;
4290
4291 case RELOAD_FOR_OTHER_ADDRESS:
4292 used_in_set = &reload_reg_used_in_other_addr;
4293 check_any = 1;
4294 break;
4295
4296 case RELOAD_FOR_INPUT:
4297 used_in_set = &reload_reg_used_in_input[opnum];
4298 break;
4299
4300 case RELOAD_FOR_OUTPUT:
4301 used_in_set = &reload_reg_used_in_output[opnum];
4302 break;
4303
4304 case RELOAD_FOR_INSN:
4305 used_in_set = &reload_reg_used_in_insn;
4306 break;
4307 default:
4308 abort ();
4309 }
4310 /* We resolve conflicts with remaining reloads of the same type by
4311 excluding the intervals of reload registers used by them from the
4312 interval of freed reload registers. Since we only keep track of
4313 one set of interval bounds, we might have to exclude somewhat
4314 more than what would be necessary if we used a HARD_REG_SET here.
4315 But this should only happen very infrequently, so there should
4316 be no reason to worry about it. */
4317
4318 start_regno = regno;
4319 end_regno = regno + nregs;
4320 if (check_opnum || check_any)
4321 {
4322 for (i = n_reloads - 1; i >= 0; i--)
4323 {
4324 if (rld[i].when_needed == type
4325 && (check_any || rld[i].opnum == opnum)
4326 && rld[i].reg_rtx)
4327 {
4328 unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
4329 unsigned int conflict_end
4330 = (conflict_start
4331 + HARD_REGNO_NREGS (conflict_start, rld[i].mode));
4332
4333 /* If there is an overlap with the first to-be-freed register,
4334 adjust the interval start. */
4335 if (conflict_start <= start_regno && conflict_end > start_regno)
4336 start_regno = conflict_end;
4337 /* Otherwise, if there is a conflict with one of the other
4338 to-be-freed registers, adjust the interval end. */
4339 if (conflict_start > start_regno && conflict_start < end_regno)
4340 end_regno = conflict_start;
4341 }
4342 }
4343 }
4344
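/* Free every register in whatever remains of the original interval.  */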
4345 for (r = start_regno; r < end_regno; r++)
4346 CLEAR_HARD_REG_BIT (*used_in_set, r);
4347 }
4348
4349 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4350 specified by OPNUM and TYPE. */
4351
4352 static int
4353 reload_reg_free_p (regno, opnum, type)
4354 unsigned int regno;
4355 int opnum;
4356 enum reload_type type;
4357 {
4358 int i;
4359
4360 /* In use for a RELOAD_OTHER means it's not available for anything. */
4361 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4362 || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4363 return 0;
4364
4365 switch (type)
4366 {
4367 case RELOAD_OTHER:
4368 /* In use for anything means we can't use it for RELOAD_OTHER. */
4369 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4370 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4371 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4372 return 0;
4373
4374 for (i = 0; i < reload_n_operands; i++)
4375 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4376 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4377 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4378 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4379 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4380 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4381 return 0;
4382
4383 return 1;
4384
4385 case RELOAD_FOR_INPUT:
4386 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4387 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4388 return 0;
4389
4390 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4391 return 0;
4392
4393 /* If it is used for some other input, can't use it. */
4394 for (i = 0; i < reload_n_operands; i++)
4395 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4396 return 0;
4397
4398 /* If it is used in a later operand's address, can't use it. */
4399 for (i = opnum + 1; i < reload_n_operands; i++)
4400 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4401 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4402 return 0;
4403
4404 return 1;
4405
4406 case RELOAD_FOR_INPUT_ADDRESS:
4407 /* Can't use a register if it is used for an input address for this
4408 operand or used as an input in an earlier one. */
4409 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4410 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4411 return 0;
4412
4413 for (i = 0; i < opnum; i++)
4414 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4415 return 0;
4416
4417 return 1;
4418
4419 case RELOAD_FOR_INPADDR_ADDRESS:
4420 /* Can't use a register if it is used for an input address
4421 for this operand or used as an input in an earlier
4422 one. */
4423 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4424 return 0;
4425
4426 for (i = 0; i < opnum; i++)
4427 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4428 return 0;
4429
4430 return 1;
4431
4432 case RELOAD_FOR_OUTPUT_ADDRESS:
4433 /* Can't use a register if it is used for an output address for this
4434 operand or used as an output in this or a later operand. */
4435 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4436 return 0;
4437
4438 for (i = opnum; i < reload_n_operands; i++)
4439 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4440 return 0;
4441
4442 return 1;
4443
4444 case RELOAD_FOR_OUTADDR_ADDRESS:
4445 /* Can't use a register if it is used for an output address
4446 for this operand or used as an output in this or a
4447 later operand. */
4448 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4449 return 0;
4450
4451 for (i = opnum; i < reload_n_operands; i++)
4452 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4453 return 0;
4454
4455 return 1;
4456
4457 case RELOAD_FOR_OPERAND_ADDRESS:
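/* Can't use a register that is used for an input, used in the insn
   itself, or already in use for an operand address.  */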
4458 for (i = 0; i < reload_n_operands; i++)
4459 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4460 return 0;
4461
4462 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4463 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4464
4465 case RELOAD_FOR_OPADDR_ADDR:
4466 for (i = 0; i < reload_n_operands; i++)
4467 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4468 return 0;
4469
4470 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4471
4472 case RELOAD_FOR_OUTPUT:
4473 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4474 outputs, or an operand address for this or an earlier output. */
4475 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4476 return 0;
4477
4478 for (i = 0; i < reload_n_operands; i++)
4479 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4480 return 0;
4481
4482 for (i = 0; i <= opnum; i++)
4483 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4484 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4485 return 0;
4486
4487 return 1;
4488
4489 case RELOAD_FOR_INSN:
4490 for (i = 0; i < reload_n_operands; i++)
4491 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4492 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4493 return 0;
4494
4495 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4496 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4497
4498 case RELOAD_FOR_OTHER_ADDRESS:
4499 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4500 }
4501 abort ();
4502 }
4503
4504 /* Return 1 if the value in reload reg REGNO, as used by a reload
4505 needed for the part of the insn specified by OPNUM and TYPE,
4506 is still available in REGNO at the end of the insn.
4507
4508 We can assume that the reload reg was already tested for availability
4509 at the time it is needed, and we should not check this again,
4510 in case the reg has already been marked in use. */
4511
4512 static int
4513 reload_reg_reaches_end_p (regno, opnum, type)
4514 unsigned int regno;
4515 int opnum;
4516 enum reload_type type;
4517 {
4518 int i;
4519
4520 switch (type)
4521 {
4522 case RELOAD_OTHER:
4523 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4524 its value must reach the end. */
4525 return 1;
4526
4527 /* If this use is for part of the insn,
4528 its value reaches the end if no subsequent part uses the same register.
4529 Just like the above function, don't try to do this with lots
4530 of fallthroughs. */
4531
4532 case RELOAD_FOR_OTHER_ADDRESS:
4533 /* Here we check for everything else, since these don't conflict
4534 with anything else and everything comes later. */
4535
4536 for (i = 0; i < reload_n_operands; i++)
4537 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4538 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4539 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4540 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4541 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4542 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4543 return 0;
4544
4545 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4546 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4547 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4548
4549 case RELOAD_FOR_INPUT_ADDRESS:
4550 case RELOAD_FOR_INPADDR_ADDRESS:
4551 /* Similar, except that we check only for this and subsequent inputs
4552 and the address of only subsequent inputs and we do not need
4553 to check for RELOAD_OTHER objects since they are known not to
4554 conflict. */
4555
4556 for (i = opnum; i < reload_n_operands; i++)
4557 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4558 return 0;
4559
4560 for (i = opnum + 1; i < reload_n_operands; i++)
4561 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4562 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4563 return 0;
4564
4565 for (i = 0; i < reload_n_operands; i++)
4566 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4567 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4568 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4569 return 0;
4570
4571 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4572 return 0;
4573
4574 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4575 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4576 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4577
4578 case RELOAD_FOR_INPUT:
4579 /* Similar to input address, except we start at the next operand for
4580 both input and input address and we do not check for
4581 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4582 would conflict. */
4583
4584 for (i = opnum + 1; i < reload_n_operands; i++)
4585 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4586 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4587 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4588 return 0;
4589
4590 /* ... fall through ... */
4591
4592 case RELOAD_FOR_OPERAND_ADDRESS:
4593 /* Check outputs and their addresses. */
4594
4595 for (i = 0; i < reload_n_operands; i++)
4596 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4597 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4598 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4599 return 0;
4600
4601 return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
4602
4603 case RELOAD_FOR_OPADDR_ADDR:
4604 for (i = 0; i < reload_n_operands; i++)
4605 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4606 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4607 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4608 return 0;
4609
4610 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4611 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4612 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4613
4614 case RELOAD_FOR_INSN:
4615 /* These conflict with other outputs with RELOAD_OTHER. So
4616 we need only check for output addresses. */
4617
4618 opnum = -1;
4619
4620 /* ... fall through ... */
4621
4622 case RELOAD_FOR_OUTPUT:
4623 case RELOAD_FOR_OUTPUT_ADDRESS:
4624 case RELOAD_FOR_OUTADDR_ADDRESS:
4625 /* We already know these can't conflict with a later output. So the
4626 only thing to check are later output addresses. */
4627 for (i = opnum + 1; i < reload_n_operands; i++)
4628 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4629 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4630 return 0;
4631
4632 return 1;
4633 }
4634
4635 abort ();
4636 }
4637 \f
4638 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4639 Return 0 otherwise.
4640
4641 This function uses the same algorithm as reload_reg_free_p above. */
4642
4643 int
4644 reloads_conflict (r1, r2)
4645 int r1, r2;
4646 {
4647 enum reload_type r1_type = rld[r1].when_needed;
4648 enum reload_type r2_type = rld[r2].when_needed;
4649 int r1_opnum = rld[r1].opnum;
4650 int r2_opnum = rld[r2].opnum;
4651
4652 /* RELOAD_OTHER conflicts with everything. */
4653 if (r2_type == RELOAD_OTHER)
4654 return 1;
4655
4656 /* Otherwise, check conflicts differently for each type. */
4657
4658 switch (r1_type)
4659 {
4660 case RELOAD_FOR_INPUT:
4661 return (r2_type == RELOAD_FOR_INSN
4662 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4663 || r2_type == RELOAD_FOR_OPADDR_ADDR
4664 || r2_type == RELOAD_FOR_INPUT
4665 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4666 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4667 && r2_opnum > r1_opnum));
4668
4669 case RELOAD_FOR_INPUT_ADDRESS:
4670 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4671 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4672
4673 case RELOAD_FOR_INPADDR_ADDRESS:
4674 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4675 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4676
4677 case RELOAD_FOR_OUTPUT_ADDRESS:
4678 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4679 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4680
4681 case RELOAD_FOR_OUTADDR_ADDRESS:
4682 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4683 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4684
4685 case RELOAD_FOR_OPERAND_ADDRESS:
4686 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4687 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4688
4689 case RELOAD_FOR_OPADDR_ADDR:
4690 return (r2_type == RELOAD_FOR_INPUT
4691 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4692
4693 case RELOAD_FOR_OUTPUT:
4694 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4695 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4696 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
4697 && r2_opnum <= r1_opnum));
4698
4699 case RELOAD_FOR_INSN:
4700 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4701 || r2_type == RELOAD_FOR_INSN
4702 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4703
4704 case RELOAD_FOR_OTHER_ADDRESS:
4705 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4706
4707 case RELOAD_OTHER:
4708 return 1;
4709
4710 default:
4711 abort ();
4712 }
4713 }
4714 \f
4715 /* Indexed by reload number, 1 if incoming value
4716 inherited from previous insns. */
4717 char reload_inherited[MAX_RELOADS];
4718
4719 /* For an inherited reload, this is the insn the reload was inherited from,
4720 if we know it. Otherwise, this is 0. */
4721 rtx reload_inheritance_insn[MAX_RELOADS];
4722
4723 /* If non-zero, this is a place to get the value of the reload,
4724 rather than using reload_in. */
4725 rtx reload_override_in[MAX_RELOADS];
4726
4727 /* For each reload, the hard register number of the register used,
4728 or -1 if we did not need a register for this reload. */
4729 int reload_spill_index[MAX_RELOADS];
4730
4731 /* Subroutine of free_for_value_p, used to check a single register.
4732 START_REGNO is the starting regno of the full reload register
4733 (possibly comprising multiple hard registers) that we are considering. */
4734
4735 static int
4736 reload_reg_free_for_value_p (start_regno, regno, opnum, type, value, out,
4737 reloadnum, ignore_address_reloads)
4738 int start_regno, regno;
4739 int opnum;
4740 enum reload_type type;
4741 rtx value, out;
4742 int reloadnum;
4743 int ignore_address_reloads;
4744 {
4745 int time1;
4746 /* Set if we see an input reload that must not share its reload register
4747 with any new earlyclobber, but might otherwise share the reload
4748 register with an output or input-output reload. */
4749 int check_earlyclobber = 0;
4750 int i;
4751 int copy = 0;
4752
4753 if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4754 return 0;
4755
4756 if (out == const0_rtx)
4757 {
4758 copy = 1;
4759 out = NULL_RTX;
4760 }
4761
4762 /* We use some pseudo 'time' value to check if the lifetimes of the
4763 new register use would overlap with the one of a previous reload
4764 that is not read-only or uses a different value.
4765 The 'time' used doesn't have to be linear in any shape or form, just
4766 monotonic.
4767 Some reload types use different 'buckets' for each operand.
4768 So there are MAX_RECOG_OPERANDS different time values for each
4769 such reload type.
4770 We compute TIME1 as the time when the register for the prospective
4771 new reload ceases to be live, and TIME2 for each existing
4772 reload as the time when the reload register of that reload
4773 becomes live.
4774 Where there is little to be gained by exact lifetime calculations,
4775 we just make conservative assumptions, i.e. a longer lifetime;
4776 this is done in the 'default:' cases. */
4777 switch (type)
4778 {
4779 case RELOAD_FOR_OTHER_ADDRESS:
4780 /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads. */
4781 time1 = copy ? 0 : 1;
4782 break;
4783 case RELOAD_OTHER:
4784 time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
4785 break;
4786 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
4787 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2,
4788 respectively, to the time values for these, we get distinct time
4789 values. To get distinct time values for each operand, we have to
4790 multiply opnum by at least three. We round that up to four because
4791 multiply by four is often cheaper. */
4792 case RELOAD_FOR_INPADDR_ADDRESS:
4793 time1 = opnum * 4 + 2;
4794 break;
4795 case RELOAD_FOR_INPUT_ADDRESS:
4796 time1 = opnum * 4 + 3;
4797 break;
4798 case RELOAD_FOR_INPUT:
4799 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
4800 executes (inclusive). */
4801 time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
4802 break;
4803 case RELOAD_FOR_OPADDR_ADDR:
4804 /* opnum * 4 + 4
4805 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
4806 time1 = MAX_RECOG_OPERANDS * 4 + 1;
4807 break;
4808 case RELOAD_FOR_OPERAND_ADDRESS:
4809 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
4810 is executed. */
4811 time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
4812 break;
4813 case RELOAD_FOR_OUTADDR_ADDRESS:
4814 time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
4815 break;
4816 case RELOAD_FOR_OUTPUT_ADDRESS:
4817 time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
4818 break;
4819 default:
4820 time1 = MAX_RECOG_OPERANDS * 5 + 5;
4821 }
4822
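/* Check every other reload whose register overlaps REGNO: compute the
   time its register becomes live and reject the proposed use if the
   two lifetimes would overlap.  */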
4823 for (i = 0; i < n_reloads; i++)
4824 {
4825 rtx reg = rld[i].reg_rtx;
4826 if (reg && GET_CODE (reg) == REG
4827 && ((unsigned) regno - true_regnum (reg)
4828 <= HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) - (unsigned)1)
4829 && i != reloadnum)
4830 {
4831 rtx other_input = rld[i].in;
4832
4833 /* If the other reload loads the same input value, that
4834 will avoid a conflict only if it's loading it into
4835 the same register. */
4836 if (true_regnum (reg) != start_regno)
4837 other_input = NULL_RTX;
4838 if (! other_input || ! rtx_equal_p (other_input, value)
4839 || rld[i].out || out)
4840 {
4841 int time2;
4842 switch (rld[i].when_needed)
4843 {
4844 case RELOAD_FOR_OTHER_ADDRESS:
4845 time2 = 0;
4846 break;
4847 case RELOAD_FOR_INPADDR_ADDRESS:
4848 /* find_reloads makes sure that a
4849 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
4850 by at most one - the first -
4851 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS . If the
4852 address reload is inherited, the address address reload
4853 goes away, so we can ignore this conflict. */
4854 if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
4855 && ignore_address_reloads
4856 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
4857 Then the address address is still needed to store
4858 back the new address. */
4859 && ! rld[reloadnum].out)
4860 continue;
4861 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
4862 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
4863 reloads go away. */
4864 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
4865 && ignore_address_reloads
4866 /* Unless we are reloading an auto_inc expression. */
4867 && ! rld[reloadnum].out)
4868 continue;
4869 time2 = rld[i].opnum * 4 + 2;
4870 break;
4871 case RELOAD_FOR_INPUT_ADDRESS:
4872 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
4873 && ignore_address_reloads
4874 && ! rld[reloadnum].out)
4875 continue;
4876 time2 = rld[i].opnum * 4 + 3;
4877 break;
4878 case RELOAD_FOR_INPUT:
4879 time2 = rld[i].opnum * 4 + 4;
4880 check_earlyclobber = 1;
4881 break;
4882 /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
4883 == MAX_RECOG_OPERANDS * 4 */
4884 case RELOAD_FOR_OPADDR_ADDR:
4885 if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
4886 && ignore_address_reloads
4887 && ! rld[reloadnum].out)
4888 continue;
4889 time2 = MAX_RECOG_OPERANDS * 4 + 1;
4890 break;
4891 case RELOAD_FOR_OPERAND_ADDRESS:
4892 time2 = MAX_RECOG_OPERANDS * 4 + 2;
4893 check_earlyclobber = 1;
4894 break;
4895 case RELOAD_FOR_INSN:
4896 time2 = MAX_RECOG_OPERANDS * 4 + 3;
4897 break;
4898 case RELOAD_FOR_OUTPUT:
4899 /* All RELOAD_FOR_OUTPUT reloads become live just after the
4900 instruction is executed. */
4901 time2 = MAX_RECOG_OPERANDS * 4 + 4;
4902 break;
4903 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
4904 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
4905 value. */
4906 case RELOAD_FOR_OUTADDR_ADDRESS:
4907 if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
4908 && ignore_address_reloads
4909 && ! rld[reloadnum].out)
4910 continue;
4911 time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
4912 break;
4913 case RELOAD_FOR_OUTPUT_ADDRESS:
4914 time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
4915 break;
4916 case RELOAD_OTHER:
4917 /* If there is no conflict in the input part, handle this
4918 like an output reload. */
4919 if (! rld[i].in || rtx_equal_p (other_input, value))
4920 {
4921 time2 = MAX_RECOG_OPERANDS * 4 + 4;
4922 /* Earlyclobbered outputs must conflict with inputs. */
4923 if (earlyclobber_operand_p (rld[i].out))
4924 time2 = MAX_RECOG_OPERANDS * 4 + 3;
4925
4926 break;
4927 }
4928 time2 = 1;
4929 /* RELOAD_OTHER might be live beyond instruction execution,
4930 but this is not obvious when we set time2 = 1. So check
4931 here if there might be a problem with the new reload
4932 clobbering the register used by the RELOAD_OTHER. */
4933 if (out)
4934 return 0;
4935 break;
4936 default:
4937 return 0;
4938 }
4939 if ((time1 >= time2
4940 && (! rld[i].in || rld[i].out
4941 || ! rtx_equal_p (other_input, value)))
4942 || (out && rld[reloadnum].out_reg
4943 && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
4944 return 0;
4945 }
4946 }
4947 }
4948
4949 /* Earlyclobbered outputs must conflict with inputs. */
4950 if (check_earlyclobber && out && earlyclobber_operand_p (out))
4951 return 0;
4952
4953 return 1;
4954 }
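
#if 0
/* Illustrative sketch only -- this helper is hypothetical and is not used
   anywhere; it merely restates the TIME2 encoding from the switch above.
   Each reload type is given a "time" at which its register becomes live
   within the insn, so two reloads sharing a register conflict only when
   their live ranges, expressed in these times, overlap.  RELOAD_OTHER and
   the inheritance special cases are handled separately above.  */
static int
sketch_reload_time (type, opnum)
     enum reload_type type;
     int opnum;
{
  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:   return 0;
    case RELOAD_FOR_INPADDR_ADDRESS: return opnum * 4 + 2;
    case RELOAD_FOR_INPUT_ADDRESS:   return opnum * 4 + 3;
    case RELOAD_FOR_INPUT:           return opnum * 4 + 4;
    case RELOAD_FOR_OPADDR_ADDR:     return MAX_RECOG_OPERANDS * 4 + 1;
    case RELOAD_FOR_OPERAND_ADDRESS: return MAX_RECOG_OPERANDS * 4 + 2;
    case RELOAD_FOR_INSN:            return MAX_RECOG_OPERANDS * 4 + 3;
    case RELOAD_FOR_OUTPUT:          return MAX_RECOG_OPERANDS * 4 + 4;
    case RELOAD_FOR_OUTADDR_ADDRESS: return MAX_RECOG_OPERANDS * 4 + 4 + opnum;
    case RELOAD_FOR_OUTPUT_ADDRESS:  return MAX_RECOG_OPERANDS * 4 + 5 + opnum;
    default:                         return -1;
    }
}
#endif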
4955
4956 /* Return 1 if the value in reload reg REGNO, as used by a reload
4957 needed for the part of the insn specified by OPNUM and TYPE,
4958 may be used to load VALUE into it.
4959
4960 MODE is the mode in which the register is used; this is needed to
4961 determine how many hard regs to test.
4962
4963 Other read-only reloads with the same value do not conflict
4964 unless OUT is non-zero and these other reloads have to live while
4965 output reloads live.
4966 If OUT is CONST0_RTX, this is a special case: it means that the
4967 test should not be for using register REGNO as reload register, but
4968 for copying from register REGNO into the reload register.
4969
4970 RELOADNUM is the number of the reload we want to load this value for;
4971 a reload does not conflict with itself.
4972
4973 When IGNORE_ADDRESS_RELOADS is set, we cannot have conflicts with
4974 reloads that load an address for the very reload we are considering.
4975
4976 The caller has to make sure that there is no conflict with the return
4977 register. */
4978
4979 static int
4980 free_for_value_p (regno, mode, opnum, type, value, out, reloadnum,
4981 ignore_address_reloads)
4982 int regno;
4983 enum machine_mode mode;
4984 int opnum;
4985 enum reload_type type;
4986 rtx value, out;
4987 int reloadnum;
4988 int ignore_address_reloads;
4989 {
4990 int nregs = HARD_REGNO_NREGS (regno, mode);
4991 while (nregs-- > 0)
4992 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
4993 value, out, reloadnum,
4994 ignore_address_reloads))
4995 return 0;
4996 return 1;
4997 }
4998
4999 /* Determine whether the reload reg X overlaps any rtx'es used for
5000 overriding inheritance. Return nonzero if so. */
5001
5002 static int
5003 conflicts_with_override (x)
5004 rtx x;
5005 {
5006 int i;
5007 for (i = 0; i < n_reloads; i++)
5008 if (reload_override_in[i]
5009 && reg_overlap_mentioned_p (x, reload_override_in[i]))
5010 return 1;
5011 return 0;
5012 }
5013 \f
5014 /* Give an error message saying we failed to find a reload for INSN,
5015 and clear out reload R. */
5016 static void
5017 failed_reload (insn, r)
5018 rtx insn;
5019 int r;
5020 {
5021 if (asm_noperands (PATTERN (insn)) < 0)
5022 /* It's the compiler's fault. */
5023 fatal_insn ("Could not find a spill register", insn);
5024
5025 /* It's the user's fault; the operand's mode and constraint
5026 don't match. Disable this reload so we don't crash in final. */
5027 error_for_asm (insn,
5028 "`asm' operand constraint incompatible with operand size");
5029 rld[r].in = 0;
5030 rld[r].out = 0;
5031 rld[r].reg_rtx = 0;
5032 rld[r].optional = 1;
5033 rld[r].secondary_p = 1;
5034 }
5035
5036 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
5037 for reload R. If it's valid, get an rtx for it. Return nonzero if
5038 successful. */
5039 static int
5040 set_reload_reg (i, r)
5041 int i, r;
5042 {
5043 int regno;
5044 rtx reg = spill_reg_rtx[i];
5045
5046 if (reg == 0 || GET_MODE (reg) != rld[r].mode)
5047 spill_reg_rtx[i] = reg
5048 = gen_rtx_REG (rld[r].mode, spill_regs[i]);
5049
5050 regno = true_regnum (reg);
5051
5052 /* Detect when the reload reg can't hold the reload mode.
5053 This used to be one `if', but the Sequent compiler can't handle that. */
5054 if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
5055 {
5056 enum machine_mode test_mode = VOIDmode;
5057 if (rld[r].in)
5058 test_mode = GET_MODE (rld[r].in);
5059 /* If rld[r].in has VOIDmode, it means we will load it
5060 in whatever mode the reload reg has: to wit, rld[r].mode.
5061 We have already tested that for validity. */
5062 /* Aside from that, we need to test that the expressions
5063 to reload from or into have modes which are valid for this
5064 reload register. Otherwise the reload insns would be invalid. */
5065 if (! (rld[r].in != 0 && test_mode != VOIDmode
5066 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5067 if (! (rld[r].out != 0
5068 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
5069 {
5070 /* The reg is OK. */
5071 last_spill_reg = i;
5072
5073 /* Mark as in use for this insn the reload regs we use
5074 for this. */
5075 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
5076 rld[r].when_needed, rld[r].mode);
5077
5078 rld[r].reg_rtx = reg;
5079 reload_spill_index[r] = spill_regs[i];
5080 return 1;
5081 }
5082 }
5083 return 0;
5084 }
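
#if 0
/* Illustrative sketch only -- a hypothetical predicate, not used anywhere,
   that restates the mode checks made above: spill reg REGNO can serve
   reload R only if it can hold the reload mode as well as the modes of
   whatever is reloaded in or out.  */
static int
sketch_spill_reg_mode_ok_p (regno, r)
     int regno, r;
{
  return (HARD_REGNO_MODE_OK (regno, rld[r].mode)
	  && (rld[r].in == 0
	      || GET_MODE (rld[r].in) == VOIDmode
	      || HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].in)))
	  && (rld[r].out == 0
	      || HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))));
}
#endif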
5085
5086 /* Find a spill register to use as a reload register for reload R.
5087 LAST_RELOAD is non-zero if this is the last reload for the insn being
5088 processed.
5089
5090 Set rld[R].reg_rtx to the register allocated.
5091
5092 We return 1 if successful, or 0 if we couldn't find a spill reg and
5093 we didn't change anything. */
5094
5095 static int
5096 allocate_reload_reg (chain, r, last_reload)
5097 struct insn_chain *chain ATTRIBUTE_UNUSED;
5098 int r;
5099 int last_reload;
5100 {
5101 int i, pass, count;
5102
5103 /* If we put this reload ahead, thinking it is a group,
5104 then insist on finding a group. Otherwise we can grab a
5105 reg that some other reload needs.
5106 (That can happen when we have a 68000 DATA_OR_FP_REG
5107 which is a group of data regs or one fp reg.)
5108 We need not be so restrictive if there are no more reloads
5109 for this insn.
5110
5111 ??? Really it would be nicer to have smarter handling
5112 for that kind of reg class, where a problem like this is normal.
5113 Perhaps those classes should be avoided for reloading
5114 by use of more alternatives. */
5115
5116 int force_group = rld[r].nregs > 1 && ! last_reload;
5117
5118 /* If we want a single register and haven't yet found one,
5119 take any reg in the right class and not in use.
5120 If we want a consecutive group, here is where we look for it.
5121
5122 We use two passes so we can first look for reload regs to
5123 reuse, which are already in use for other reloads in this insn,
5124 and only then use additional registers.
5125 I think that maximizing reuse is needed to make sure we don't
5126 run out of reload regs. Suppose we have three reloads, and
5127 reloads A and B can share regs. These need two regs.
5128 Suppose A and B are given different regs.
5129 That leaves none for C. */
5130 for (pass = 0; pass < 2; pass++)
5131 {
5132 /* I is the index in spill_regs.
5133 We advance it round-robin between insns to use all spill regs
5134 equally, so that inherited reloads have a chance
5135 of leapfrogging each other. */
5136
5137 i = last_spill_reg;
5138
5139 for (count = 0; count < n_spills; count++)
5140 {
5141 int class = (int) rld[r].class;
5142 int regnum;
5143
5144 i++;
5145 if (i >= n_spills)
5146 i -= n_spills;
5147 regnum = spill_regs[i];
5148
5149 if ((reload_reg_free_p (regnum, rld[r].opnum,
5150 rld[r].when_needed)
5151 || (rld[r].in
5152 /* We check reload_reg_used to make sure we
5153 don't clobber the return register. */
5154 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5155 && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
5156 rld[r].when_needed, rld[r].in,
5157 rld[r].out, r, 1)))
5158 && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
5159 && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
5160 /* Look first for regs to share, then for unshared. But
5161 don't share regs used for inherited reloads; they are
5162 the ones we want to preserve. */
5163 && (pass
5164 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5165 regnum)
5166 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5167 regnum))))
5168 {
5169 int nr = HARD_REGNO_NREGS (regnum, rld[r].mode);
5170 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5171 (on 68000) got us two FP regs. If NR is 1,
5172 we would reject both of them. */
5173 if (force_group)
5174 nr = rld[r].nregs;
5175 /* If we need only one reg, we have already won. */
5176 if (nr == 1)
5177 {
5178 /* But reject a single reg if we demand a group. */
5179 if (force_group)
5180 continue;
5181 break;
5182 }
5183 /* Otherwise check that as many consecutive regs as we need
5184 are available here. */
5185 while (nr > 1)
5186 {
5187 int regno = regnum + nr - 1;
5188 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5189 && spill_reg_order[regno] >= 0
5190 && reload_reg_free_p (regno, rld[r].opnum,
5191 rld[r].when_needed)))
5192 break;
5193 nr--;
5194 }
5195 if (nr == 1)
5196 break;
5197 }
5198 }
5199
5200 /* If we found something on pass 1, omit pass 2. */
5201 if (count < n_spills)
5202 break;
5203 }
5204
5205 /* We should have found a spill register by now. */
5206 if (count >= n_spills)
5207 return 0;
5208
5209 /* I is the index in SPILL_REG_RTX of the reload register we are to
5210 allocate. Get an rtx for it and find its register number. */
5211
5212 return set_reload_reg (i, r);
5213 }
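
#if 0
/* Illustrative sketch only -- a hypothetical helper, not used anywhere,
   showing the skeleton of the scan above: a round-robin walk over the
   spill regs starting just after the last one handed out, accepting a
   register only if it is free for this reload, in the right class, and
   able to hold the reload mode.  On the sharing pass we additionally
   insist that the register is already in use by some other
   (non-inherited) reload of this insn.  Group handling is elided.  */
static int
sketch_scan_spill_regs (r, prefer_shared)
     int r;
     int prefer_shared;
{
  int count, i = last_spill_reg;

  for (count = 0; count < n_spills; count++)
    {
      int regnum;

      if (++i >= n_spills)
	i -= n_spills;
      regnum = spill_regs[i];
      if (reload_reg_free_p (regnum, rld[r].opnum, rld[r].when_needed)
	  && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class], regnum)
	  && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
	  && (! prefer_shared
	      || (TEST_HARD_REG_BIT (reload_reg_used_at_all, regnum)
		  && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					  regnum))))
	return i;
    }
  return -1;
}
#endif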
5214 \f
5215 /* Initialize all the tables needed to allocate reload registers.
5216 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
5217 is the array we use to restore the reg_rtx field for every reload. */
5218
5219 static void
5220 choose_reload_regs_init (chain, save_reload_reg_rtx)
5221 struct insn_chain *chain;
5222 rtx *save_reload_reg_rtx;
5223 {
5224 int i;
5225
5226 for (i = 0; i < n_reloads; i++)
5227 rld[i].reg_rtx = save_reload_reg_rtx[i];
5228
5229 memset (reload_inherited, 0, MAX_RELOADS);
5230 memset ((char *) reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
5231 memset ((char *) reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
5232
5233 CLEAR_HARD_REG_SET (reload_reg_used);
5234 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5235 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5236 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5237 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5238 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5239
5240 CLEAR_HARD_REG_SET (reg_used_in_insn);
5241 {
5242 HARD_REG_SET tmp;
5243 REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
5244 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5245 REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
5246 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5247 compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
5248 compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
5249 }
5250
5251 for (i = 0; i < reload_n_operands; i++)
5252 {
5253 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5254 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5255 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5256 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5257 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5258 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5259 }
5260
5261 COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
5262
5263 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5264
5265 for (i = 0; i < n_reloads; i++)
5266 /* If we have already decided to use a certain register,
5267 don't use it in another way. */
5268 if (rld[i].reg_rtx)
5269 mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
5270 rld[i].when_needed, rld[i].mode);
5271 }
5272
5273 /* Assign hard reg targets for the pseudo-registers we must reload
5274 into hard regs for this insn.
5275 Also output the instructions to copy them in and out of the hard regs.
5276
5277 For machines with register classes, we are responsible for
5278 finding a reload reg in the proper class. */
5279
5280 static void
5281 choose_reload_regs (chain)
5282 struct insn_chain *chain;
5283 {
5284 rtx insn = chain->insn;
5285 register int i, j;
5286 unsigned int max_group_size = 1;
5287 enum reg_class group_class = NO_REGS;
5288 int pass, win, inheritance;
5289
5290 rtx save_reload_reg_rtx[MAX_RELOADS];
5291
5292 /* In order to be certain of getting the registers we need,
5293 we must sort the reloads into order of increasing register class.
5294 Then our grabbing of reload registers will parallel the process
5295 that provided the reload registers.
5296
5297 Also note whether any of the reloads wants a consecutive group of regs.
5298 If so, record the maximum size of the group desired and what
5299 register class contains all the groups needed by this insn. */
5300
5301 for (j = 0; j < n_reloads; j++)
5302 {
5303 reload_order[j] = j;
5304 reload_spill_index[j] = -1;
5305
5306 if (rld[j].nregs > 1)
5307 {
5308 max_group_size = MAX (rld[j].nregs, max_group_size);
5309 group_class
5310 = reg_class_superunion[(int) rld[j].class][(int)group_class];
5311 }
5312
5313 save_reload_reg_rtx[j] = rld[j].reg_rtx;
5314 }
5315
5316 if (n_reloads > 1)
5317 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5318
5319 /* If -O, try first with inheritance, then turning it off.
5320 If not -O, don't do inheritance.
5321 Using inheritance when not optimizing leads to paradoxes
5322 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5323 because one side of the comparison might be inherited. */
5324 win = 0;
5325 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5326 {
5327 choose_reload_regs_init (chain, save_reload_reg_rtx);
5328
5329 /* Process the reloads in order of preference just found.
5330 Beyond this point, subregs can be found in reload_reg_rtx.
5331
5332 This used to look for an existing reloaded home for all of the
5333 reloads, and only then perform any new reloads. But that could lose
5334 if the reloads were done out of reg-class order because a later
5335 reload with a looser constraint might have an old home in a register
5336 needed by an earlier reload with a tighter constraint.
5337
5338 To solve this, we make two passes over the reloads, in the order
5339 described above. In the first pass we try to inherit a reload
5340 from a previous insn. If there is a later reload that needs a
5341 class that is a proper subset of the class being processed, we must
5342 also allocate a spill register during the first pass.
5343
5344 Then make a second pass over the reloads to allocate any reloads
5345 that haven't been given registers yet. */
5346
5347 for (j = 0; j < n_reloads; j++)
5348 {
5349 register int r = reload_order[j];
5350 rtx search_equiv = NULL_RTX;
5351
5352 /* Ignore reloads that got marked inoperative. */
5353 if (rld[r].out == 0 && rld[r].in == 0
5354 && ! rld[r].secondary_p)
5355 continue;
5356
5357 /* If find_reloads chose to use reload_in or reload_out as a reload
5358 register, we don't need to choose one. Otherwise, try even if it
5359 found one since we might save an insn if we find the value lying
5360 around.
5361 Try also when reload_in is a pseudo without a hard reg. */
5362 if (rld[r].in != 0 && rld[r].reg_rtx != 0
5363 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
5364 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
5365 && GET_CODE (rld[r].in) != MEM
5366 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
5367 continue;
5368
5369 #if 0 /* No longer needed for correct operation.
5370 It might give better code, or might not; worth an experiment? */
5371 /* If this is an optional reload, we can't inherit from earlier insns
5372 until we are sure that any non-optional reloads have been allocated.
5373 The following code takes advantage of the fact that optional reloads
5374 are at the end of reload_order. */
5375 if (rld[r].optional != 0)
5376 for (i = 0; i < j; i++)
5377 if ((rld[reload_order[i]].out != 0
5378 || rld[reload_order[i]].in != 0
5379 || rld[reload_order[i]].secondary_p)
5380 && ! rld[reload_order[i]].optional
5381 && rld[reload_order[i]].reg_rtx == 0)
5382 allocate_reload_reg (chain, reload_order[i], 0);
5383 #endif
5384
5385 /* First see if this pseudo is already available as reloaded
5386 for a previous insn. We cannot try to inherit for reloads
5387 that are smaller than the maximum number of registers needed
5388 for groups unless the register we would allocate cannot be used
5389 for the groups.
5390
5391 We could check here to see if this is a secondary reload for
5392 an object that is already in a register of the desired class.
5393 This would avoid the need for the secondary reload register.
5394 But this is complex because we can't easily determine what
5395 objects might want to be loaded via this reload. So let a
5396 register be allocated here. In `emit_reload_insns' we suppress
5397 one of the loads in the case described above. */
5398
5399 if (inheritance)
5400 {
5401 int byte = 0;
5402 register int regno = -1;
5403 enum machine_mode mode = VOIDmode;
5404
5405 if (rld[r].in == 0)
5406 ;
5407 else if (GET_CODE (rld[r].in) == REG)
5408 {
5409 regno = REGNO (rld[r].in);
5410 mode = GET_MODE (rld[r].in);
5411 }
5412 else if (GET_CODE (rld[r].in_reg) == REG)
5413 {
5414 regno = REGNO (rld[r].in_reg);
5415 mode = GET_MODE (rld[r].in_reg);
5416 }
5417 else if (GET_CODE (rld[r].in_reg) == SUBREG
5418 && GET_CODE (SUBREG_REG (rld[r].in_reg)) == REG)
5419 {
5420 byte = SUBREG_BYTE (rld[r].in_reg);
5421 regno = REGNO (SUBREG_REG (rld[r].in_reg));
5422 if (regno < FIRST_PSEUDO_REGISTER)
5423 regno = subreg_regno (rld[r].in_reg);
5424 mode = GET_MODE (rld[r].in_reg);
5425 }
5426 #ifdef AUTO_INC_DEC
5427 else if ((GET_CODE (rld[r].in_reg) == PRE_INC
5428 || GET_CODE (rld[r].in_reg) == PRE_DEC
5429 || GET_CODE (rld[r].in_reg) == POST_INC
5430 || GET_CODE (rld[r].in_reg) == POST_DEC)
5431 && GET_CODE (XEXP (rld[r].in_reg, 0)) == REG)
5432 {
5433 regno = REGNO (XEXP (rld[r].in_reg, 0));
5434 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
5435 rld[r].out = rld[r].in;
5436 }
5437 #endif
5438 #if 0
5439 /* This won't work, since REGNO can be a pseudo reg number.
5440 Also, it takes much more hair to keep track of all the things
5441 that can invalidate an inherited reload of part of a pseudoreg. */
5442 else if (GET_CODE (rld[r].in) == SUBREG
5443 && GET_CODE (SUBREG_REG (rld[r].in)) == REG)
5444 regno = subreg_regno (rld[r].in);
5445 #endif
5446
5447 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5448 {
5449 enum reg_class class = rld[r].class, last_class;
5450 rtx last_reg = reg_last_reload_reg[regno];
5451 enum machine_mode need_mode;
5452
5453 i = REGNO (last_reg);
5454 i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
5455 last_class = REGNO_REG_CLASS (i);
5456
5457 if (byte == 0)
5458 need_mode = mode;
5459 else
5460 need_mode
5461 = smallest_mode_for_size (GET_MODE_SIZE (mode) + byte,
5462 GET_MODE_CLASS (mode));
5463
5464 if (
5465 #ifdef CLASS_CANNOT_CHANGE_MODE
5466 (TEST_HARD_REG_BIT
5467 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE], i)
5468 ? ! CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (last_reg),
5469 need_mode)
5470 : (GET_MODE_SIZE (GET_MODE (last_reg))
5471 >= GET_MODE_SIZE (need_mode)))
5472 #else
5473 (GET_MODE_SIZE (GET_MODE (last_reg))
5474 >= GET_MODE_SIZE (need_mode))
5475 #endif
5476 && reg_reloaded_contents[i] == regno
5477 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
5478 && HARD_REGNO_MODE_OK (i, rld[r].mode)
5479 && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
5480 /* Even if we can't use this register as a reload
5481 register, we might use it for reload_override_in,
5482 if copying it to the desired class is cheap
5483 enough. */
5484 || ((REGISTER_MOVE_COST (mode, last_class, class)
5485 < MEMORY_MOVE_COST (mode, class, 1))
5486 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5487 && (SECONDARY_INPUT_RELOAD_CLASS (class, mode,
5488 last_reg)
5489 == NO_REGS)
5490 #endif
5491 #ifdef SECONDARY_MEMORY_NEEDED
5492 && ! SECONDARY_MEMORY_NEEDED (last_class, class,
5493 mode)
5494 #endif
5495 ))
5496
5497 && (rld[r].nregs == max_group_size
5498 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5499 i))
5500 && free_for_value_p (i, rld[r].mode, rld[r].opnum,
5501 rld[r].when_needed, rld[r].in,
5502 const0_rtx, r, 1))
5503 {
5504 /* If a group is needed, verify that all the subsequent
5505 registers still have their values intact. */
5506 int nr = HARD_REGNO_NREGS (i, rld[r].mode);
5507 int k;
5508
5509 for (k = 1; k < nr; k++)
5510 if (reg_reloaded_contents[i + k] != regno
5511 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
5512 break;
5513
5514 if (k == nr)
5515 {
5516 int i1;
5517 int bad_for_class;
5518
5519 last_reg = (GET_MODE (last_reg) == mode
5520 ? last_reg : gen_rtx_REG (mode, i));
5521
5522 bad_for_class = 0;
5523 for (k = 0; k < nr; k++)
5524 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5525 i+k);
5526
5527 /* We found a register that contains the
5528 value we need. If this register is the
5529 same as an `earlyclobber' operand of the
5530 current insn, just mark it as a place to
5531 reload from since we can't use it as the
5532 reload register itself. */
5533
5534 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5535 if (reg_overlap_mentioned_for_reload_p
5536 (reg_last_reload_reg[regno],
5537 reload_earlyclobbers[i1]))
5538 break;
5539
5540 if (i1 != n_earlyclobbers
5541 || ! (free_for_value_p (i, rld[r].mode,
5542 rld[r].opnum,
5543 rld[r].when_needed, rld[r].in,
5544 rld[r].out, r, 1))
5545 /* Don't use it if we'd clobber a pseudo reg. */
5546 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
5547 && rld[r].out
5548 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
5549 /* Don't clobber the frame pointer. */
5550 || (i == HARD_FRAME_POINTER_REGNUM
5551 && rld[r].out)
5552 /* Don't really use the inherited spill reg
5553 if we need it wider than we've got it. */
5554 || (GET_MODE_SIZE (rld[r].mode)
5555 > GET_MODE_SIZE (mode))
5556 || bad_for_class
5557
5558 /* If find_reloads chose reload_out as reload
5559 register, stay with it - that leaves the
5560 inherited register for subsequent reloads. */
5561 || (rld[r].out && rld[r].reg_rtx
5562 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
5563 {
5564 if (! rld[r].optional)
5565 {
5566 reload_override_in[r] = last_reg;
5567 reload_inheritance_insn[r]
5568 = reg_reloaded_insn[i];
5569 }
5570 }
5571 else
5572 {
5573 int k;
5574 /* We can use this as a reload reg. */
5575 /* Mark the register as in use for this part of
5576 the insn. */
5577 mark_reload_reg_in_use (i,
5578 rld[r].opnum,
5579 rld[r].when_needed,
5580 rld[r].mode);
5581 rld[r].reg_rtx = last_reg;
5582 reload_inherited[r] = 1;
5583 reload_inheritance_insn[r]
5584 = reg_reloaded_insn[i];
5585 reload_spill_index[r] = i;
5586 for (k = 0; k < nr; k++)
5587 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5588 i + k);
5589 }
5590 }
5591 }
5592 }
5593 }
5594
5595 /* Here's another way to see if the value is already lying around. */
5596 if (inheritance
5597 && rld[r].in != 0
5598 && ! reload_inherited[r]
5599 && rld[r].out == 0
5600 && (CONSTANT_P (rld[r].in)
5601 || GET_CODE (rld[r].in) == PLUS
5602 || GET_CODE (rld[r].in) == REG
5603 || GET_CODE (rld[r].in) == MEM)
5604 && (rld[r].nregs == max_group_size
5605 || ! reg_classes_intersect_p (rld[r].class, group_class)))
5606 search_equiv = rld[r].in;
5607 /* If this is an output reload from a simple move insn, see
5608 if an equivalence for the input is available. */
5609 else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
5610 {
5611 rtx set = single_set (insn);
5612
5613 if (set
5614 && rtx_equal_p (rld[r].out, SET_DEST (set))
5615 && CONSTANT_P (SET_SRC (set)))
5616 search_equiv = SET_SRC (set);
5617 }
5618
5619 if (search_equiv)
5620 {
5621 register rtx equiv
5622 = find_equiv_reg (search_equiv, insn, rld[r].class,
5623 -1, NULL, 0, rld[r].mode);
5624 int regno = 0;
5625
5626 if (equiv != 0)
5627 {
5628 if (GET_CODE (equiv) == REG)
5629 regno = REGNO (equiv);
5630 else if (GET_CODE (equiv) == SUBREG)
5631 {
5632 /* This must be a SUBREG of a hard register.
5633 Make a new REG since this might be used in an
5634 address and not all machines support SUBREGs
5635 there. */
5636 regno = subreg_regno (equiv);
5637 equiv = gen_rtx_REG (rld[r].mode, regno);
5638 }
5639 else
5640 abort ();
5641 }
5642
5643 /* If we found a spill reg, reject it unless it is free
5644 and of the desired class. */
5645 if (equiv != 0
5646 && ((TEST_HARD_REG_BIT (reload_reg_used_at_all, regno)
5647 && ! free_for_value_p (regno, rld[r].mode,
5648 rld[r].opnum, rld[r].when_needed,
5649 rld[r].in, rld[r].out, r, 1))
5650 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5651 regno)))
5652 equiv = 0;
5653
5654 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
5655 equiv = 0;
5656
5657 /* We found a register that contains the value we need.
5658 If this register is the same as an `earlyclobber' operand
5659 of the current insn, just mark it as a place to reload from
5660 since we can't use it as the reload register itself. */
5661
5662 if (equiv != 0)
5663 for (i = 0; i < n_earlyclobbers; i++)
5664 if (reg_overlap_mentioned_for_reload_p (equiv,
5665 reload_earlyclobbers[i]))
5666 {
5667 if (! rld[r].optional)
5668 reload_override_in[r] = equiv;
5669 equiv = 0;
5670 break;
5671 }
5672
5673 /* If the equiv register we have found is explicitly clobbered
5674 in the current insn, it depends on the reload type whether we
5675 can use it, use it for reload_override_in, or not at all.
5676 In particular, we then can't use EQUIV for a
5677 RELOAD_FOR_OUTPUT_ADDRESS reload. */
5678
5679 if (equiv != 0)
5680 {
5681 if (regno_clobbered_p (regno, insn, rld[r].mode, 0))
5682 switch (rld[r].when_needed)
5683 {
5684 case RELOAD_FOR_OTHER_ADDRESS:
5685 case RELOAD_FOR_INPADDR_ADDRESS:
5686 case RELOAD_FOR_INPUT_ADDRESS:
5687 case RELOAD_FOR_OPADDR_ADDR:
5688 break;
5689 case RELOAD_OTHER:
5690 case RELOAD_FOR_INPUT:
5691 case RELOAD_FOR_OPERAND_ADDRESS:
5692 if (! rld[r].optional)
5693 reload_override_in[r] = equiv;
5694 /* Fall through. */
5695 default:
5696 equiv = 0;
5697 break;
5698 }
5699 else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
5700 switch (rld[r].when_needed)
5701 {
5702 case RELOAD_FOR_OTHER_ADDRESS:
5703 case RELOAD_FOR_INPADDR_ADDRESS:
5704 case RELOAD_FOR_INPUT_ADDRESS:
5705 case RELOAD_FOR_OPADDR_ADDR:
5706 case RELOAD_FOR_OPERAND_ADDRESS:
5707 case RELOAD_FOR_INPUT:
5708 break;
5709 case RELOAD_OTHER:
5710 if (! rld[r].optional)
5711 reload_override_in[r] = equiv;
5712 /* Fall through. */
5713 default:
5714 equiv = 0;
5715 break;
5716 }
5717 }
5718
5719 /* If we found an equivalent reg, say no code need be generated
5720 to load it, and use it as our reload reg. */
5721 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5722 {
5723 int nr = HARD_REGNO_NREGS (regno, rld[r].mode);
5724 int k;
5725 rld[r].reg_rtx = equiv;
5726 reload_inherited[r] = 1;
5727
5728 /* If reg_reloaded_valid is not set for this register,
5729 there might be a stale spill_reg_store lying around.
5730 We must clear it, since otherwise emit_reload_insns
5731 might delete the store. */
5732 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
5733 spill_reg_store[regno] = NULL_RTX;
5734 /* If any of the hard registers in EQUIV are spill
5735 registers, mark them as in use for this insn. */
5736 for (k = 0; k < nr; k++)
5737 {
5738 i = spill_reg_order[regno + k];
5739 if (i >= 0)
5740 {
5741 mark_reload_reg_in_use (regno, rld[r].opnum,
5742 rld[r].when_needed,
5743 rld[r].mode);
5744 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5745 regno + k);
5746 }
5747 }
5748 }
5749 }
5750
5751 /* If we found a register to use already, or if this is an optional
5752 reload, we are done. */
5753 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
5754 continue;
5755
5756 #if 0
5757 /* No longer needed for correct operation. Might or might
5758 not give better code on the average. Want to experiment? */
5759
5760 /* See if there is a later reload that has a class different from our
5761 class that intersects our class or that requires less register
5762 than our reload. If so, we must allocate a register to this
5763 reload now, since that reload might inherit a previous reload
5764 and take the only available register in our class. Don't do this
5765 for optional reloads since they will force all previous reloads
5766 to be allocated. Also don't do this for reloads that have been
5767 turned off. */
5768
5769 for (i = j + 1; i < n_reloads; i++)
5770 {
5771 int s = reload_order[i];
5772
5773 if ((rld[s].in == 0 && rld[s].out == 0
5774 && ! rld[s].secondary_p)
5775 || rld[s].optional)
5776 continue;
5777
5778 if ((rld[s].class != rld[r].class
5779 && reg_classes_intersect_p (rld[r].class,
5780 rld[s].class))
5781 || rld[s].nregs < rld[r].nregs)
5782 break;
5783 }
5784
5785 if (i == n_reloads)
5786 continue;
5787
5788 allocate_reload_reg (chain, r, j == n_reloads - 1);
5789 #endif
5790 }
5791
5792 /* Now allocate reload registers for anything non-optional that
5793 didn't get one yet. */
5794 for (j = 0; j < n_reloads; j++)
5795 {
5796 register int r = reload_order[j];
5797
5798 /* Ignore reloads that got marked inoperative. */
5799 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
5800 continue;
5801
5802 /* Skip reloads that already have a register allocated or are
5803 optional. */
5804 if (rld[r].reg_rtx != 0 || rld[r].optional)
5805 continue;
5806
5807 if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
5808 break;
5809 }
5810
5811 /* If that loop got all the way, we have won. */
5812 if (j == n_reloads)
5813 {
5814 win = 1;
5815 break;
5816 }
5817
5818 /* Loop around and try without any inheritance. */
5819 }
5820
5821 if (! win)
5822 {
5823 /* First undo everything done by the failed attempt
5824 to allocate with inheritance. */
5825 choose_reload_regs_init (chain, save_reload_reg_rtx);
5826
5827 /* Some sanity tests to verify that the reloads found in the first
5828 pass are identical to the ones we have now. */
5829 if (chain->n_reloads != n_reloads)
5830 abort ();
5831
5832 for (i = 0; i < n_reloads; i++)
5833 {
5834 if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
5835 continue;
5836 if (chain->rld[i].when_needed != rld[i].when_needed)
5837 abort ();
5838 for (j = 0; j < n_spills; j++)
5839 if (spill_regs[j] == chain->rld[i].regno)
5840 if (! set_reload_reg (j, i))
5841 failed_reload (chain->insn, i);
5842 }
5843 }
5844
5845 /* If we thought we could inherit a reload, because it seemed that
5846 nothing else wanted the same reload register earlier in the insn,
5847 verify that assumption, now that all reloads have been assigned.
5848 Likewise for reloads where reload_override_in has been set. */
5849
5850 /* If doing expensive optimizations, do one preliminary pass that doesn't
5851 cancel any inheritance, but removes reloads that have been needed only
5852 for reloads that we know can be inherited. */
5853 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
5854 {
5855 for (j = 0; j < n_reloads; j++)
5856 {
5857 register int r = reload_order[j];
5858 rtx check_reg;
5859 if (reload_inherited[r] && rld[r].reg_rtx)
5860 check_reg = rld[r].reg_rtx;
5861 else if (reload_override_in[r]
5862 && (GET_CODE (reload_override_in[r]) == REG
5863 || GET_CODE (reload_override_in[r]) == SUBREG))
5864 check_reg = reload_override_in[r];
5865 else
5866 continue;
5867 if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
5868 rld[r].opnum, rld[r].when_needed, rld[r].in,
5869 (reload_inherited[r]
5870 ? rld[r].out : const0_rtx),
5871 r, 1))
5872 {
5873 if (pass)
5874 continue;
5875 reload_inherited[r] = 0;
5876 reload_override_in[r] = 0;
5877 }
5878 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
5879 reload_override_in, then we do not need its related
5880 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
5881 likewise for other reload types.
5882 We handle this by removing a reload when its only replacement
5883 is mentioned in reload_in of the reload we are going to inherit.
5884 A special case is auto_inc expressions; even if the input is
5885 inherited, we still need the address for the output. We can
5886 recognize them because they have RELOAD_OUT set to RELOAD_IN.
5887 If we succeeded in removing some reload and we are doing a preliminary
5888 pass just to remove such reloads, make another pass, since the
5889 removal of one reload might allow us to inherit another one. */
5890 else if (rld[r].in
5891 && rld[r].out != rld[r].in
5892 && remove_address_replacements (rld[r].in) && pass)
5893 pass = 2;
5894 }
5895 }
5896
5897 /* Now that reload_override_in is known valid,
5898 actually override reload_in. */
5899 for (j = 0; j < n_reloads; j++)
5900 if (reload_override_in[j])
5901 rld[j].in = reload_override_in[j];
5902
5903 /* If this reload won't be done because it has been cancelled or is
5904 optional and not inherited, clear reload_reg_rtx so other
5905 routines (such as subst_reloads) don't get confused. */
5906 for (j = 0; j < n_reloads; j++)
5907 if (rld[j].reg_rtx != 0
5908 && ((rld[j].optional && ! reload_inherited[j])
5909 || (rld[j].in == 0 && rld[j].out == 0
5910 && ! rld[j].secondary_p)))
5911 {
5912 int regno = true_regnum (rld[j].reg_rtx);
5913
5914 if (spill_reg_order[regno] >= 0)
5915 clear_reload_reg_in_use (regno, rld[j].opnum,
5916 rld[j].when_needed, rld[j].mode);
5917 rld[j].reg_rtx = 0;
5918 reload_spill_index[j] = -1;
5919 }
5920
5921 /* Record which pseudos and which spill regs have output reloads. */
5922 for (j = 0; j < n_reloads; j++)
5923 {
5924 register int r = reload_order[j];
5925
5926 i = reload_spill_index[r];
5927
5928 /* I is nonneg if this reload uses a register.
5929 If rld[r].reg_rtx is 0, this is an optional reload
5930 that we opted to ignore. */
5931 if (rld[r].out_reg != 0 && GET_CODE (rld[r].out_reg) == REG
5932 && rld[r].reg_rtx != 0)
5933 {
5934 register int nregno = REGNO (rld[r].out_reg);
5935 int nr = 1;
5936
5937 if (nregno < FIRST_PSEUDO_REGISTER)
5938 nr = HARD_REGNO_NREGS (nregno, rld[r].mode);
5939
5940 while (--nr >= 0)
5941 reg_has_output_reload[nregno + nr] = 1;
5942
5943 if (i >= 0)
5944 {
5945 nr = HARD_REGNO_NREGS (i, rld[r].mode);
5946 while (--nr >= 0)
5947 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
5948 }
5949
5950 if (rld[r].when_needed != RELOAD_OTHER
5951 && rld[r].when_needed != RELOAD_FOR_OUTPUT
5952 && rld[r].when_needed != RELOAD_FOR_INSN)
5953 abort ();
5954 }
5955 }
5956 }
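
#if 0
/* Illustrative sketch only -- a hypothetical outline, not used anywhere,
   of the control structure of choose_reload_regs above: when optimizing
   we first try to inherit reload registers from earlier insns, and if
   some non-optional reload is then left without a register we discard
   the whole assignment and retry with inheritance turned off.  */
static void
sketch_choose_reload_regs_shape (chain, save_reload_reg_rtx)
     struct insn_chain *chain;
     rtx *save_reload_reg_rtx;
{
  int inheritance;

  for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
    {
      choose_reload_regs_init (chain, save_reload_reg_rtx);

      /* ... walk the reloads in order of increasing register class,
	 inheriting or allocating; then allocate anything still missing.
	 If every non-optional reload got a register, stop here;
	 otherwise loop around and try again without inheritance.  */
    }
}
#endif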
5957
5958 /* Deallocate the reload register for reload R. This is called from
5959 remove_address_replacements. */
5960
5961 void
5962 deallocate_reload_reg (r)
5963 int r;
5964 {
5965 int regno;
5966
5967 if (! rld[r].reg_rtx)
5968 return;
5969 regno = true_regnum (rld[r].reg_rtx);
5970 rld[r].reg_rtx = 0;
5971 if (spill_reg_order[regno] >= 0)
5972 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
5973 rld[r].mode);
5974 reload_spill_index[r] = -1;
5975 }
5976 \f
5977 /* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
5978 reloads of the same item for fear that we might not have enough reload
5979 registers. However, normally they will get the same reload register
5980 and hence actually need not be loaded twice.
5981
5982 Here we check for the most common case of this phenomenon: when we have
5983 a number of reloads for the same object, each of which was allocated
5984 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5985 reload, and is not modified in the insn itself. If we find such,
5986 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5987 This will not increase the number of spill registers needed and will
5988 prevent redundant code. */
5989
5990 static void
5991 merge_assigned_reloads (insn)
5992 rtx insn;
5993 {
5994 int i, j;
5995
5996 /* Scan all the reloads looking for ones that only load values and
5997 are not already RELOAD_OTHER and ones whose reload_reg_rtx is
5998 assigned and not modified by INSN. */
5999
6000 for (i = 0; i < n_reloads; i++)
6001 {
6002 int conflicting_input = 0;
6003 int max_input_address_opnum = -1;
6004 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6005
6006 if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
6007 || rld[i].out != 0 || rld[i].reg_rtx == 0
6008 || reg_set_p (rld[i].reg_rtx, insn))
6009 continue;
6010
6011 /* Look at all other reloads. Ensure that the only use of this
6012 reload_reg_rtx is in a reload that just loads the same value
6013 as we do. Note that any secondary reloads must be of the identical
6014 class since the values, modes, and result registers are the
6015 same, so we need not do anything with any secondary reloads. */
6016
6017 for (j = 0; j < n_reloads; j++)
6018 {
6019 if (i == j || rld[j].reg_rtx == 0
6020 || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
6021 rld[i].reg_rtx))
6022 continue;
6023
6024 if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6025 && rld[j].opnum > max_input_address_opnum)
6026 max_input_address_opnum = rld[j].opnum;
6027
6028 /* If the reload regs aren't exactly the same (e.g., different modes)
6029 or if the values are different, we can't merge this reload.
6030 But if it is an input reload, we might still merge
6031 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
6032
6033 if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6034 || rld[j].out != 0 || rld[j].in == 0
6035 || ! rtx_equal_p (rld[i].in, rld[j].in))
6036 {
6037 if (rld[j].when_needed != RELOAD_FOR_INPUT
6038 || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
6039 || rld[i].opnum > rld[j].opnum)
6040 && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
6041 break;
6042 conflicting_input = 1;
6043 if (min_conflicting_input_opnum > rld[j].opnum)
6044 min_conflicting_input_opnum = rld[j].opnum;
6045 }
6046 }
6047
6048 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6049 we, in fact, found any matching reloads. */
6050
6051 if (j == n_reloads
6052 && max_input_address_opnum <= min_conflicting_input_opnum)
6053 {
6054 for (j = 0; j < n_reloads; j++)
6055 if (i != j && rld[j].reg_rtx != 0
6056 && rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6057 && (! conflicting_input
6058 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6059 || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
6060 {
6061 rld[i].when_needed = RELOAD_OTHER;
6062 rld[j].in = 0;
6063 reload_spill_index[j] = -1;
6064 transfer_replacements (i, j);
6065 }
6066
6067 /* If this is now RELOAD_OTHER, look for any reloads that load
6068 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6069 if they were for inputs, RELOAD_OTHER for outputs. Note that
6070 this test is equivalent to looking for reloads for this operand
6071 number. */
6072
6073 if (rld[i].when_needed == RELOAD_OTHER)
6074 for (j = 0; j < n_reloads; j++)
6075 if (rld[j].in != 0
6076 && rld[j].when_needed != RELOAD_OTHER
6077 && reg_overlap_mentioned_for_reload_p (rld[j].in,
6078 rld[i].in))
6079 rld[j].when_needed
6080 = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6081 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6082 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6083 }
6084 }
6085 }
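
#if 0
/* Illustrative sketch only -- a hypothetical predicate, not used anywhere,
   restating the simple case of the merge test above: reload J may be
   merged into reload I when it is a pure input reload of the very same
   value into the very same register.  The RELOAD_FOR_INPUT_ADDRESS /
   RELOAD_FOR_OTHER_ADDRESS exceptions are elided.  */
static int
sketch_mergeable_p (i, j)
     int i, j;
{
  return (i != j
	  && rld[j].reg_rtx != 0
	  && rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
	  && rld[j].out == 0
	  && rld[j].in != 0
	  && rtx_equal_p (rld[i].in, rld[j].in));
}
#endif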
6086 \f
6087 /* These arrays are filled by emit_reload_insns and its subroutines. */
6088 static rtx input_reload_insns[MAX_RECOG_OPERANDS];
6089 static rtx other_input_address_reload_insns = 0;
6090 static rtx other_input_reload_insns = 0;
6091 static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6092 static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6093 static rtx output_reload_insns[MAX_RECOG_OPERANDS];
6094 static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6095 static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6096 static rtx operand_reload_insns = 0;
6097 static rtx other_operand_reload_insns = 0;
6098 static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6099
6100 /* Values to be put in spill_reg_store are put here first. */
6101 static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6102 static HARD_REG_SET reg_reloaded_died;
6103
6104 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
6105 has the number J. OLD contains the value to be used as input. */
6106
6107 static void
6108 emit_input_reload_insns (chain, rl, old, j)
6109 struct insn_chain *chain;
6110 struct reload *rl;
6111 rtx old;
6112 int j;
6113 {
6114 rtx insn = chain->insn;
6115 register rtx reloadreg = rl->reg_rtx;
6116 rtx oldequiv_reg = 0;
6117 rtx oldequiv = 0;
6118 int special = 0;
6119 enum machine_mode mode;
6120 rtx *where;
6121
6122 /* Determine the mode to reload in.
6123 This is very tricky because we have three to choose from.
6124 There is the mode the insn operand wants (rl->inmode).
6125 There is the mode of the reload register RELOADREG.
6126 There is the intrinsic mode of the operand, which we could find
6127 by stripping some SUBREGs.
6128 It turns out that RELOADREG's mode is irrelevant:
6129 we can change that arbitrarily.
6130
6131 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6132 then the reload reg may not support QImode moves, so use SImode.
6133 If foo is in memory due to spilling a pseudo reg, this is safe,
6134 because the QImode value is in the least significant part of a
6135 slot big enough for a SImode. If foo is some other sort of
6136 memory reference, then it is impossible to reload this case,
6137 so previous passes had better make sure this never happens.
6138
6139 Then consider a one-word union which has SImode and one of its
6140 members is a float, being fetched as (SUBREG:SF union:SI).
6141 We must fetch that as SFmode because we could be loading into
6142 a float-only register. In this case OLD's mode is correct.
6143
6144 Consider an immediate integer: it has VOIDmode. Here we need
6145 to get a mode from something else.
6146
6147 In some cases, there is a fourth mode, the operand's
6148 containing mode. If the insn specifies a containing mode for
6149 this operand, it overrides all others.
6150
6151 I am not sure whether the algorithm here is always right,
6152 but it does the right things in those cases. */
6153
6154 mode = GET_MODE (old);
6155 if (mode == VOIDmode)
6156 mode = rl->inmode;
6157
6158 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6159 /* If we need a secondary register for this operation, see if
6160 the value is already in a register in that class. Don't
6161 do this if the secondary register will be used as a scratch
6162 register. */
6163
6164 if (rl->secondary_in_reload >= 0
6165 && rl->secondary_in_icode == CODE_FOR_nothing
6166 && optimize)
6167 oldequiv
6168 = find_equiv_reg (old, insn,
6169 rld[rl->secondary_in_reload].class,
6170 -1, NULL, 0, mode);
6171 #endif
6172
6173 /* If reloading from memory, see if there is a register
6174 that already holds the same value. If so, reload from there.
6175 We can pass 0 as the reload_reg_p argument because
6176 any other reload has either already been emitted,
6177 in which case find_equiv_reg will see the reload-insn,
6178 or has yet to be emitted, in which case it doesn't matter
6179 because we will use this equiv reg right away. */
6180
6181 if (oldequiv == 0 && optimize
6182 && (GET_CODE (old) == MEM
6183 || (GET_CODE (old) == REG
6184 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6185 && reg_renumber[REGNO (old)] < 0)))
6186 oldequiv = find_equiv_reg (old, insn, ALL_REGS, -1, NULL, 0, mode);
6187
6188 if (oldequiv)
6189 {
6190 unsigned int regno = true_regnum (oldequiv);
6191
6192 /* Don't use OLDEQUIV if any other reload changes it at an
6193 earlier stage of this insn or at this stage. */
6194 if (! free_for_value_p (regno, rl->mode, rl->opnum, rl->when_needed,
6195 rl->in, const0_rtx, j, 0))
6196 oldequiv = 0;
6197
6198 /* If it is no cheaper to copy from OLDEQUIV into the
6199 reload register than it would be to move from memory,
6200 don't use it. Likewise, if we need a secondary register
6201 or memory. */
6202
6203 if (oldequiv != 0
6204 && ((REGNO_REG_CLASS (regno) != rl->class
6205 && (REGISTER_MOVE_COST (mode, REGNO_REG_CLASS (regno),
6206 rl->class)
6207 >= MEMORY_MOVE_COST (mode, rl->class, 1)))
6208 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6209 || (SECONDARY_INPUT_RELOAD_CLASS (rl->class,
6210 mode, oldequiv)
6211 != NO_REGS)
6212 #endif
6213 #ifdef SECONDARY_MEMORY_NEEDED
6214 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (regno),
6215 rl->class,
6216 mode)
6217 #endif
6218 ))
6219 oldequiv = 0;
6220 }
6221
6222 /* delete_output_reload is only invoked properly if old contains
6223 the original pseudo register. Since this is replaced with a
6224 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6225 find the pseudo in RELOAD_IN_REG. */
6226 if (oldequiv == 0
6227 && reload_override_in[j]
6228 && GET_CODE (rl->in_reg) == REG)
6229 {
6230 oldequiv = old;
6231 old = rl->in_reg;
6232 }
6233 if (oldequiv == 0)
6234 oldequiv = old;
6235 else if (GET_CODE (oldequiv) == REG)
6236 oldequiv_reg = oldequiv;
6237 else if (GET_CODE (oldequiv) == SUBREG)
6238 oldequiv_reg = SUBREG_REG (oldequiv);
6239
6240 /* If we are reloading from a register that was recently stored in
6241 with an output-reload, see if we can prove there was
6242 actually no need to store the old value in it. */
6243
6244 if (optimize && GET_CODE (oldequiv) == REG
6245 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6246 && spill_reg_store[REGNO (oldequiv)]
6247 && GET_CODE (old) == REG
6248 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6249 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6250 rl->out_reg)))
6251 delete_output_reload (insn, j, REGNO (oldequiv));
6252
6253 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6254 then load RELOADREG from OLDEQUIV. Note that we cannot use
6255 gen_lowpart_common since it can do the wrong thing when
6256 RELOADREG has a multi-word mode. Note that RELOADREG
6257 must always be a REG here. */
6258
6259 if (GET_MODE (reloadreg) != mode)
6260 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6261 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6262 oldequiv = SUBREG_REG (oldequiv);
6263 if (GET_MODE (oldequiv) != VOIDmode
6264 && mode != GET_MODE (oldequiv))
6265 oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
6266
6267 /* Switch to the right place to emit the reload insns. */
6268 switch (rl->when_needed)
6269 {
6270 case RELOAD_OTHER:
6271 where = &other_input_reload_insns;
6272 break;
6273 case RELOAD_FOR_INPUT:
6274 where = &input_reload_insns[rl->opnum];
6275 break;
6276 case RELOAD_FOR_INPUT_ADDRESS:
6277 where = &input_address_reload_insns[rl->opnum];
6278 break;
6279 case RELOAD_FOR_INPADDR_ADDRESS:
6280 where = &inpaddr_address_reload_insns[rl->opnum];
6281 break;
6282 case RELOAD_FOR_OUTPUT_ADDRESS:
6283 where = &output_address_reload_insns[rl->opnum];
6284 break;
6285 case RELOAD_FOR_OUTADDR_ADDRESS:
6286 where = &outaddr_address_reload_insns[rl->opnum];
6287 break;
6288 case RELOAD_FOR_OPERAND_ADDRESS:
6289 where = &operand_reload_insns;
6290 break;
6291 case RELOAD_FOR_OPADDR_ADDR:
6292 where = &other_operand_reload_insns;
6293 break;
6294 case RELOAD_FOR_OTHER_ADDRESS:
6295 where = &other_input_address_reload_insns;
6296 break;
6297 default:
6298 abort ();
6299 }
6300
6301 push_to_sequence (*where);
6302
6303 /* Auto-increment addresses must be reloaded in a special way. */
6304 if (rl->out && ! rl->out_reg)
6305 {
6306 /* We are not going to bother supporting the case where an
6307 incremented register can't be copied directly from
6308 OLDEQUIV since this seems highly unlikely. */
6309 if (rl->secondary_in_reload >= 0)
6310 abort ();
6311
6312 if (reload_inherited[j])
6313 oldequiv = reloadreg;
6314
6315 old = XEXP (rl->in_reg, 0);
6316
6317 if (optimize && GET_CODE (oldequiv) == REG
6318 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6319 && spill_reg_store[REGNO (oldequiv)]
6320 && GET_CODE (old) == REG
6321 && (dead_or_set_p (insn,
6322 spill_reg_stored_to[REGNO (oldequiv)])
6323 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6324 old)))
6325 delete_output_reload (insn, j, REGNO (oldequiv));
6326
6327 /* Prevent normal processing of this reload. */
6328 special = 1;
6329 /* Output a special code sequence for this case. */
6330 new_spill_reg_store[REGNO (reloadreg)]
6331 = inc_for_reload (reloadreg, oldequiv, rl->out,
6332 rl->inc);
6333 }
6334
6335 /* If we are reloading a pseudo-register that was set by the previous
6336 insn, see if we can get rid of that pseudo-register entirely
6337 by redirecting the previous insn into our reload register. */
6338
6339 else if (optimize && GET_CODE (old) == REG
6340 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6341 && dead_or_set_p (insn, old)
6342 /* This is unsafe if some other reload
6343 uses the same reg first. */
6344 && ! conflicts_with_override (reloadreg)
6345 && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
6346 rl->when_needed, old, rl->out, j, 0))
6347 {
6348 rtx temp = PREV_INSN (insn);
6349 while (temp && GET_CODE (temp) == NOTE)
6350 temp = PREV_INSN (temp);
6351 if (temp
6352 && GET_CODE (temp) == INSN
6353 && GET_CODE (PATTERN (temp)) == SET
6354 && SET_DEST (PATTERN (temp)) == old
6355 /* Make sure we can access insn_operand_constraint. */
6356 && asm_noperands (PATTERN (temp)) < 0
6357 /* This is unsafe if prev insn rejects our reload reg. */
6358 && constraint_accepts_reg_p (insn_data[recog_memoized (temp)].operand[0].constraint,
6359 reloadreg)
6360 /* This is unsafe if operand occurs more than once in current
6361 insn. Perhaps some occurrences aren't reloaded. */
6362 && count_occurrences (PATTERN (insn), old, 0) == 1
6363 /* Don't risk splitting a matching pair of operands. */
6364 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6365 {
6366 /* Store into the reload register instead of the pseudo. */
6367 SET_DEST (PATTERN (temp)) = reloadreg;
6368
6369 /* If the previous insn is an output reload, the source is
6370 a reload register, and its spill_reg_store entry will
6371 contain the previous destination. This is now
6372 invalid. */
6373 if (GET_CODE (SET_SRC (PATTERN (temp))) == REG
6374 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
6375 {
6376 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6377 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6378 }
6379
6380 /* If these are the only uses of the pseudo reg,
6381 pretend for GDB it lives in the reload reg we used. */
6382 if (REG_N_DEATHS (REGNO (old)) == 1
6383 && REG_N_SETS (REGNO (old)) == 1)
6384 {
6385 reg_renumber[REGNO (old)] = REGNO (rl->reg_rtx);
6386 alter_reg (REGNO (old), -1);
6387 }
6388 special = 1;
6389 }
6390 }
6391
6392 /* We can't do that, so output an insn to load RELOADREG. */
6393
6394 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6395 /* If we have a secondary reload, pick up the secondary register
6396 and icode, if any. If OLDEQUIV and OLD are different or
6397 if this is an in-out reload, recompute whether or not we
6398 still need a secondary register and what the icode should
6399 be. If we still need a secondary register and the class or
6400 icode is different, go back to reloading from OLD if using
6401 OLDEQUIV means that we got the wrong type of register. We
6402 cannot have different class or icode due to an in-out reload
6403 because we don't make such reloads when both the input and
6404 output need secondary reload registers. */
6405
6406 if (! special && rl->secondary_in_reload >= 0)
6407 {
6408 rtx second_reload_reg = 0;
6409 int secondary_reload = rl->secondary_in_reload;
6410 rtx real_oldequiv = oldequiv;
6411 rtx real_old = old;
6412 rtx tmp;
6413 enum insn_code icode;
6414
6415 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6416 and similarly for OLD.
6417 See comments in get_secondary_reload in reload.c. */
6418 /* If it is a pseudo that cannot be replaced with its
6419 equivalent MEM, we must fall back to reload_in, which
6420 will have all the necessary substitutions registered.
6421 Likewise for a pseudo that can't be replaced with its
6422 equivalent constant.
6423
6424 Take extra care for subregs of such pseudos. Note that
6425 we cannot use reg_equiv_mem in this case because it is
6426 not in the right mode. */
6427
6428 tmp = oldequiv;
6429 if (GET_CODE (tmp) == SUBREG)
6430 tmp = SUBREG_REG (tmp);
6431 if (GET_CODE (tmp) == REG
6432 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6433 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6434 || reg_equiv_constant[REGNO (tmp)] != 0))
6435 {
6436 if (! reg_equiv_mem[REGNO (tmp)]
6437 || num_not_at_initial_offset
6438 || GET_CODE (oldequiv) == SUBREG)
6439 real_oldequiv = rl->in;
6440 else
6441 real_oldequiv = reg_equiv_mem[REGNO (tmp)];
6442 }
6443
6444 tmp = old;
6445 if (GET_CODE (tmp) == SUBREG)
6446 tmp = SUBREG_REG (tmp);
6447 if (GET_CODE (tmp) == REG
6448 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6449 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6450 || reg_equiv_constant[REGNO (tmp)] != 0))
6451 {
6452 if (! reg_equiv_mem[REGNO (tmp)]
6453 || num_not_at_initial_offset
6454 || GET_CODE (old) == SUBREG)
6455 real_old = rl->in;
6456 else
6457 real_old = reg_equiv_mem[REGNO (tmp)];
6458 }
6459
6460 second_reload_reg = rld[secondary_reload].reg_rtx;
6461 icode = rl->secondary_in_icode;
6462
6463 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6464 || (rl->in != 0 && rl->out != 0))
6465 {
6466 enum reg_class new_class
6467 = SECONDARY_INPUT_RELOAD_CLASS (rl->class,
6468 mode, real_oldequiv);
6469
6470 if (new_class == NO_REGS)
6471 second_reload_reg = 0;
6472 else
6473 {
6474 enum insn_code new_icode;
6475 enum machine_mode new_mode;
6476
6477 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6478 REGNO (second_reload_reg)))
6479 oldequiv = old, real_oldequiv = real_old;
6480 else
6481 {
6482 new_icode = reload_in_optab[(int) mode];
6483 if (new_icode != CODE_FOR_nothing
6484 && ((insn_data[(int) new_icode].operand[0].predicate
6485 && ! ((*insn_data[(int) new_icode].operand[0].predicate)
6486 (reloadreg, mode)))
6487 || (insn_data[(int) new_icode].operand[1].predicate
6488 && ! ((*insn_data[(int) new_icode].operand[1].predicate)
6489 (real_oldequiv, mode)))))
6490 new_icode = CODE_FOR_nothing;
6491
6492 if (new_icode == CODE_FOR_nothing)
6493 new_mode = mode;
6494 else
6495 new_mode = insn_data[(int) new_icode].operand[2].mode;
6496
6497 if (GET_MODE (second_reload_reg) != new_mode)
6498 {
6499 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6500 new_mode))
6501 oldequiv = old, real_oldequiv = real_old;
6502 else
6503 second_reload_reg
6504 = gen_rtx_REG (new_mode,
6505 REGNO (second_reload_reg));
6506 }
6507 }
6508 }
6509 }
6510
6511 /* If we still need a secondary reload register, check
6512 to see if it is being used as a scratch or intermediate
6513 register and generate code appropriately. If we need
6514 a scratch register, use REAL_OLDEQUIV since the form of
6515 the insn may depend on the actual address if it is
6516 a MEM. */
6517
6518 if (second_reload_reg)
6519 {
6520 if (icode != CODE_FOR_nothing)
6521 {
6522 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6523 second_reload_reg));
6524 special = 1;
6525 }
6526 else
6527 {
6528 /* See if we need a scratch register to load the
6529 intermediate register (a tertiary reload). */
6530 enum insn_code tertiary_icode
6531 = rld[secondary_reload].secondary_in_icode;
6532
6533 if (tertiary_icode != CODE_FOR_nothing)
6534 {
6535 rtx third_reload_reg
6536 = rld[rld[secondary_reload].secondary_in_reload].reg_rtx;
6537
6538 emit_insn ((GEN_FCN (tertiary_icode)
6539 (second_reload_reg, real_oldequiv,
6540 third_reload_reg)));
6541 }
6542 else
6543 gen_reload (second_reload_reg, real_oldequiv,
6544 rl->opnum,
6545 rl->when_needed);
6546
6547 oldequiv = second_reload_reg;
6548 }
6549 }
6550 }
6551 #endif
6552
6553 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6554 {
6555 rtx real_oldequiv = oldequiv;
6556
6557 if ((GET_CODE (oldequiv) == REG
6558 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6559 && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
6560 || reg_equiv_constant[REGNO (oldequiv)] != 0))
6561 || (GET_CODE (oldequiv) == SUBREG
6562 && GET_CODE (SUBREG_REG (oldequiv)) == REG
6563 && (REGNO (SUBREG_REG (oldequiv))
6564 >= FIRST_PSEUDO_REGISTER)
6565 && ((reg_equiv_memory_loc
6566 [REGNO (SUBREG_REG (oldequiv))] != 0)
6567 || (reg_equiv_constant
6568 [REGNO (SUBREG_REG (oldequiv))] != 0)))
6569 || (CONSTANT_P (oldequiv)
6570 && PREFERRED_RELOAD_CLASS (oldequiv,
6571 REGNO_REG_CLASS (REGNO (reloadreg))) == NO_REGS))
6572 real_oldequiv = rl->in;
6573 gen_reload (reloadreg, real_oldequiv, rl->opnum,
6574 rl->when_needed);
6575 }
6576
6577 if (flag_non_call_exceptions)
6578 copy_eh_notes (insn, get_insns ());
6579
6580 /* End this sequence. */
6581 *where = get_insns ();
6582 end_sequence ();
6583
6584 /* Update reload_override_in so that delete_address_reloads_1
6585 can see the actual register usage. */
6586 if (oldequiv_reg)
6587 reload_override_in[j] = oldequiv;
6588 }
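
#if 0
/* Illustrative sketch only -- a hypothetical helper, not used anywhere,
   restating the initial mode choice made at the top of
   emit_input_reload_insns: reload in the operand's own mode, falling
   back to the reload's declared input mode for VOIDmode values such as
   constants.  Secondary-reload adjustments are elided.  */
static enum machine_mode
sketch_input_reload_mode (rl, old)
     struct reload *rl;
     rtx old;
{
  enum machine_mode mode = GET_MODE (old);

  return mode == VOIDmode ? rl->inmode : mode;
}
#endif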
6589
6590 /* Generate insns for the output reload RL, which is for the insn described
6591 by CHAIN and has the number J. */
6592 static void
6593 emit_output_reload_insns (chain, rl, j)
6594 struct insn_chain *chain;
6595 struct reload *rl;
6596 int j;
6597 {
6598 rtx reloadreg = rl->reg_rtx;
6599 rtx insn = chain->insn;
6600 int special = 0;
6601 rtx old = rl->out;
6602 enum machine_mode mode = GET_MODE (old);
6603 rtx p;
6604
6605 if (rl->when_needed == RELOAD_OTHER)
6606 start_sequence ();
6607 else
6608 push_to_sequence (output_reload_insns[rl->opnum]);
6609
6610 /* Determine the mode to reload in.
6611 See comments above (for input reloading). */
6612
6613 if (mode == VOIDmode)
6614 {
6615 /* VOIDmode should never happen for an output. */
6616 if (asm_noperands (PATTERN (insn)) < 0)
6617 /* It's the compiler's fault. */
6618 fatal_insn ("VOIDmode on an output", insn);
6619 error_for_asm (insn, "output operand is constant in `asm'");
6620 /* Prevent crash--use something we know is valid. */
6621 mode = word_mode;
6622 old = gen_rtx_REG (mode, REGNO (reloadreg));
6623 }
6624
6625 if (GET_MODE (reloadreg) != mode)
6626 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6627
6628 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6629
6630 /* If we need two reload regs, set RELOADREG to the intermediate
6631 one, since it will be stored into OLD. We might need a secondary
6632 register only for an input reload, so check again here. */
6633
6634 if (rl->secondary_out_reload >= 0)
6635 {
6636 rtx real_old = old;
6637
6638 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6639 && reg_equiv_mem[REGNO (old)] != 0)
6640 real_old = reg_equiv_mem[REGNO (old)];
6641
6642 if ((SECONDARY_OUTPUT_RELOAD_CLASS (rl->class,
6643 mode, real_old)
6644 != NO_REGS))
6645 {
6646 rtx second_reloadreg = reloadreg;
6647 reloadreg = rld[rl->secondary_out_reload].reg_rtx;
6648
6649 /* See if RELOADREG is to be used as a scratch register
6650 or as an intermediate register. */
6651 if (rl->secondary_out_icode != CODE_FOR_nothing)
6652 {
6653 emit_insn ((GEN_FCN (rl->secondary_out_icode)
6654 (real_old, second_reloadreg, reloadreg)));
6655 special = 1;
6656 }
6657 else
6658 {
6659 /* See if we need both a scratch and intermediate reload
6660 register. */
6661
6662 int secondary_reload = rl->secondary_out_reload;
6663 enum insn_code tertiary_icode
6664 = rld[secondary_reload].secondary_out_icode;
6665
6666 if (GET_MODE (reloadreg) != mode)
6667 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6668
6669 if (tertiary_icode != CODE_FOR_nothing)
6670 {
6671 rtx third_reloadreg
6672 = rld[rld[secondary_reload].secondary_out_reload].reg_rtx;
6673 rtx tem;
6674
6675 /* Copy the primary reload reg to the secondary reload reg, then
6676 copy the secondary reload reg to OLD using our insn.  (Note
6677 that these regs have been swapped above.)  */
6678
6679 /* If REAL_OLD is a paradoxical SUBREG, remove it
6680 and try to put the opposite SUBREG on
6681 RELOADREG. */
6682 if (GET_CODE (real_old) == SUBREG
6683 && (GET_MODE_SIZE (GET_MODE (real_old))
6684 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6685 && 0 != (tem = gen_lowpart_common
6686 (GET_MODE (SUBREG_REG (real_old)),
6687 reloadreg)))
6688 real_old = SUBREG_REG (real_old), reloadreg = tem;
6689
6690 gen_reload (reloadreg, second_reloadreg,
6691 rl->opnum, rl->when_needed);
6692 emit_insn ((GEN_FCN (tertiary_icode)
6693 (real_old, reloadreg, third_reloadreg)));
6694 special = 1;
6695 }
6696
6697 else
6698 /* Copy between the reload regs here and then to
6699 OUT later. */
6700
6701 gen_reload (reloadreg, second_reloadreg,
6702 rl->opnum, rl->when_needed);
6703 }
6704 }
6705 }
6706 #endif
6707
6708 /* Output the last reload insn. */
6709 if (! special)
6710 {
6711 rtx set;
6712
6713 /* Don't output the last reload if OLD is not the destination of
6714 INSN, appears in INSN's source, and is clobbered by INSN. */
6715 if (! flag_expensive_optimizations
6716 || GET_CODE (old) != REG
6717 || !(set = single_set (insn))
6718 || rtx_equal_p (old, SET_DEST (set))
6719 || !reg_mentioned_p (old, SET_SRC (set))
6720 || !regno_clobbered_p (REGNO (old), insn, rl->mode, 0))
6721 gen_reload (old, reloadreg, rl->opnum,
6722 rl->when_needed);
6723 }
6724
6725 /* Look at all insns we emitted, just to be safe. */
6726 for (p = get_insns (); p; p = NEXT_INSN (p))
6727 if (INSN_P (p))
6728 {
6729 rtx pat = PATTERN (p);
6730
6731 /* If this output reload doesn't come from a spill reg,
6732 clear any memory of reloaded copies of the pseudo reg.
6733 If this output reload comes from a spill reg,
6734 reg_has_output_reload will make this do nothing. */
6735 note_stores (pat, forget_old_reloads_1, NULL);
6736
6737 if (reg_mentioned_p (rl->reg_rtx, pat))
6738 {
6739 rtx set = single_set (insn);
6740 if (reload_spill_index[j] < 0
6741 && set
6742 && SET_SRC (set) == rl->reg_rtx)
6743 {
6744 int src = REGNO (SET_SRC (set));
6745
6746 reload_spill_index[j] = src;
6747 SET_HARD_REG_BIT (reg_is_output_reload, src);
6748 if (find_regno_note (insn, REG_DEAD, src))
6749 SET_HARD_REG_BIT (reg_reloaded_died, src);
6750 }
6751 if (REGNO (rl->reg_rtx) < FIRST_PSEUDO_REGISTER)
6752 {
6753 int s = rl->secondary_out_reload;
6754 set = single_set (p);
6755 /* If this reload copies only to the secondary reload
6756 register, the secondary reload does the actual
6757 store. */
6758 if (s >= 0 && set == NULL_RTX)
6759 /* We can't tell what function the secondary reload
6760 has and where the actual store to the pseudo is
6761 made; leave new_spill_reg_store alone. */
6762 ;
6763 else if (s >= 0
6764 && SET_SRC (set) == rl->reg_rtx
6765 && SET_DEST (set) == rld[s].reg_rtx)
6766 {
6767 /* Usually the next instruction will be the
6768 secondary reload insn; if we can confirm
6769 that it is, setting new_spill_reg_store to
6770 that insn will allow an extra optimization. */
6771 rtx s_reg = rld[s].reg_rtx;
6772 rtx next = NEXT_INSN (p);
6773 rld[s].out = rl->out;
6774 rld[s].out_reg = rl->out_reg;
6775 set = single_set (next);
6776 if (set && SET_SRC (set) == s_reg
6777 && ! new_spill_reg_store[REGNO (s_reg)])
6778 {
6779 SET_HARD_REG_BIT (reg_is_output_reload,
6780 REGNO (s_reg));
6781 new_spill_reg_store[REGNO (s_reg)] = next;
6782 }
6783 }
6784 else
6785 new_spill_reg_store[REGNO (rl->reg_rtx)] = p;
6786 }
6787 }
6788 }
6789
6790 if (rl->when_needed == RELOAD_OTHER)
6791 {
6792 emit_insns (other_output_reload_insns[rl->opnum]);
6793 other_output_reload_insns[rl->opnum] = get_insns ();
6794 }
6795 else
6796 output_reload_insns[rl->opnum] = get_insns ();
6797
6798 if (flag_non_call_exceptions)
6799 copy_eh_notes (insn, get_insns ());
6800
6801 end_sequence ();
6802 }
6803
6804 /* Do input reloading for reload RL, which is for the insn described by CHAIN
6805 and has the number J. */
6806 static void
6807 do_input_reload (chain, rl, j)
6808 struct insn_chain *chain;
6809 struct reload *rl;
6810 int j;
6811 {
6812 int expect_occurrences = 1;
6813 rtx insn = chain->insn;
6814 rtx old = (rl->in && GET_CODE (rl->in) == MEM
6815 ? rl->in_reg : rl->in);
6816
6817 if (old != 0
6818 /* AUTO_INC reloads need to be handled even if inherited. We got an
6819 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
6820 && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
6821 && ! rtx_equal_p (rl->reg_rtx, old)
6822 && rl->reg_rtx != 0)
6823 emit_input_reload_insns (chain, rld + j, old, j);
6824
6825 /* When inheriting a wider reload, we have a MEM in rl->in,
6826 e.g. inheriting a SImode output reload for
6827 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
6828 if (optimize && reload_inherited[j] && rl->in
6829 && GET_CODE (rl->in) == MEM
6830 && GET_CODE (rl->in_reg) == MEM
6831 && reload_spill_index[j] >= 0
6832 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
6833 {
6834 expect_occurrences
6835 = count_occurrences (PATTERN (insn), rl->in, 0) == 1 ? 0 : -1;
6836 rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
6837 }
6838
6839 /* If we are reloading a register that was recently stored into
6840 by an output reload, see if we can prove that there was
6841 actually no need to store the old value in it. */
6842
6843 if (optimize
6844 && (reload_inherited[j] || reload_override_in[j])
6845 && rl->reg_rtx
6846 && GET_CODE (rl->reg_rtx) == REG
6847 && spill_reg_store[REGNO (rl->reg_rtx)] != 0
6848 #if 0
6849 /* There doesn't seem to be any reason to restrict this to pseudos
6850 and doing so loses in the case where we are copying from a
6851 register of the wrong class. */
6852 && (REGNO (spill_reg_stored_to[REGNO (rl->reg_rtx)])
6853 >= FIRST_PSEUDO_REGISTER)
6854 #endif
6855 /* The insn might already have some references to stack slots
6856 replaced by MEMs, while reload_out_reg still names the
6857 original pseudo. */
6858 && (dead_or_set_p (insn,
6859 spill_reg_stored_to[REGNO (rl->reg_rtx)])
6860 || rtx_equal_p (spill_reg_stored_to[REGNO (rl->reg_rtx)],
6861 rl->out_reg)))
6862 delete_output_reload (insn, j, REGNO (rl->reg_rtx));
6863 }
6864
6865 /* Do output reloading for reload RL, which is for the insn described by
6866 CHAIN and has the number J.
6867 ??? At some point we need to support handling output reloads of
6868 JUMP_INSNs or insns that set cc0. */
6869 static void
6870 do_output_reload (chain, rl, j)
6871 struct insn_chain *chain;
6872 struct reload *rl;
6873 int j;
6874 {
6875 rtx note, old;
6876 rtx insn = chain->insn;
6877 /* If this is an output reload that stores something that is
6878 not loaded in this same reload, see if we can eliminate a previous
6879 store. */
6880 rtx pseudo = rl->out_reg;
6881
6882 if (pseudo
6883 && GET_CODE (pseudo) == REG
6884 && ! rtx_equal_p (rl->in_reg, pseudo)
6885 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
6886 && reg_last_reload_reg[REGNO (pseudo)])
6887 {
6888 int pseudo_no = REGNO (pseudo);
6889 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
6890
6891 /* We don't need to test full validity of last_regno for
6892 inherit here; we only want to know if the store actually
6893 matches the pseudo. */
6894 if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
6895 && reg_reloaded_contents[last_regno] == pseudo_no
6896 && spill_reg_store[last_regno]
6897 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
6898 delete_output_reload (insn, j, last_regno);
6899 }
6900
6901 old = rl->out_reg;
6902 if (old == 0
6903 || rl->reg_rtx == old
6904 || rl->reg_rtx == 0)
6905 return;
6906
6907 /* An output operand that dies right away does need a reload,
6908 but need not be copied from it. Show the new location in the
6909 REG_UNUSED note. */
6910 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6911 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6912 {
6913 XEXP (note, 0) = rl->reg_rtx;
6914 return;
6915 }
6916 /* Likewise for a SUBREG of an operand that dies. */
6917 else if (GET_CODE (old) == SUBREG
6918 && GET_CODE (SUBREG_REG (old)) == REG
6919 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6920 SUBREG_REG (old))))
6921 {
6922 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6923 rl->reg_rtx);
6924 return;
6925 }
6926 else if (GET_CODE (old) == SCRATCH)
6927 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6928 but we don't want to make an output reload. */
6929 return;
6930
6931 /* If this is a JUMP_INSN, we can't support output reloads yet. */
6932 if (GET_CODE (insn) == JUMP_INSN)
6933 abort ();
6934
6935 emit_output_reload_insns (chain, rld + j, j);
6936 }
6937
6938 /* Output insns to reload values in and out of the chosen reload regs. */
6939
6940 static void
6941 emit_reload_insns (chain)
6942 struct insn_chain *chain;
6943 {
6944 rtx insn = chain->insn;
6945
6946 register int j;
6947 rtx following_insn = NEXT_INSN (insn);
6948 rtx before_insn = PREV_INSN (insn);
6949
6950 CLEAR_HARD_REG_SET (reg_reloaded_died);
6951
6952 for (j = 0; j < reload_n_operands; j++)
6953 input_reload_insns[j] = input_address_reload_insns[j]
6954 = inpaddr_address_reload_insns[j]
6955 = output_reload_insns[j] = output_address_reload_insns[j]
6956 = outaddr_address_reload_insns[j]
6957 = other_output_reload_insns[j] = 0;
6958 other_input_address_reload_insns = 0;
6959 other_input_reload_insns = 0;
6960 operand_reload_insns = 0;
6961 other_operand_reload_insns = 0;
6962
6963 /* Dump reloads into the dump file. */
6964 if (rtl_dump_file)
6965 {
6966 fprintf (rtl_dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
6967 debug_reload_to_stream (rtl_dump_file);
6968 }
6969
6970 /* Now output the instructions to copy the data into and out of the
6971 reload registers. Do these in the order that the reloads were reported,
6972 since reloads of base and index registers precede reloads of operands
6973 and the operands may need the base and index registers reloaded. */
6974
6975 for (j = 0; j < n_reloads; j++)
6976 {
6977 if (rld[j].reg_rtx
6978 && REGNO (rld[j].reg_rtx) < FIRST_PSEUDO_REGISTER)
6979 new_spill_reg_store[REGNO (rld[j].reg_rtx)] = 0;
6980
6981 do_input_reload (chain, rld + j, j);
6982 do_output_reload (chain, rld + j, j);
6983 }
6984
6985 /* Now write all the insns we made for reloads in the order expected by
6986 the allocation functions. Prior to the insn being reloaded, we write
6987 the following reloads:
6988
6989 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6990
6991 RELOAD_OTHER reloads.
6992
6993 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
6994 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
6995 RELOAD_FOR_INPUT reload for the operand.
6996
6997 RELOAD_FOR_OPADDR_ADDRS reloads.
6998
6999 RELOAD_FOR_OPERAND_ADDRESS reloads.
7000
7001 After the insn being reloaded, we write the following:
7002
7003 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7004 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7005 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7006 reloads for the operand. The RELOAD_OTHER output reloads are
7007 output in descending order by reload number. */
7008
7009 emit_insns_before (other_input_address_reload_insns, insn);
7010 emit_insns_before (other_input_reload_insns, insn);
7011
7012 for (j = 0; j < reload_n_operands; j++)
7013 {
7014 emit_insns_before (inpaddr_address_reload_insns[j], insn);
7015 emit_insns_before (input_address_reload_insns[j], insn);
7016 emit_insns_before (input_reload_insns[j], insn);
7017 }
7018
7019 emit_insns_before (other_operand_reload_insns, insn);
7020 emit_insns_before (operand_reload_insns, insn);
7021
7022 for (j = 0; j < reload_n_operands; j++)
7023 {
7024 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
7025 emit_insns_before (output_address_reload_insns[j], following_insn);
7026 emit_insns_before (output_reload_insns[j], following_insn);
7027 emit_insns_before (other_output_reload_insns[j], following_insn);
7028 }
7029
7030 /* Keep basic block info up to date. */
7031 if (n_basic_blocks)
7032 {
7033 if (BLOCK_HEAD (chain->block) == insn)
7034 BLOCK_HEAD (chain->block) = NEXT_INSN (before_insn);
7035 if (BLOCK_END (chain->block) == insn)
7036 BLOCK_END (chain->block) = PREV_INSN (following_insn);
7037 }
7038
7039 /* For all the spill regs newly reloaded in this instruction,
7040 record what they were reloaded from, so subsequent instructions
7041 can inherit the reloads.
7042
7043 Update spill_reg_store for the reloads of this insn.
7044 Copy the elements that were updated in the loop above. */
7045
7046 for (j = 0; j < n_reloads; j++)
7047 {
7048 register int r = reload_order[j];
7049 register int i = reload_spill_index[r];
7050
7051 /* If this is a non-inherited input reload from a pseudo, we must
7052 clear any memory of a previous store to the same pseudo. Only do
7053 something if there will not be an output reload for the pseudo
7054 being reloaded. */
7055 if (rld[r].in_reg != 0
7056 && ! (reload_inherited[r] || reload_override_in[r]))
7057 {
7058 rtx reg = rld[r].in_reg;
7059
7060 if (GET_CODE (reg) == SUBREG)
7061 reg = SUBREG_REG (reg);
7062
7063 if (GET_CODE (reg) == REG
7064 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7065 && ! reg_has_output_reload[REGNO (reg)])
7066 {
7067 int nregno = REGNO (reg);
7068
7069 if (reg_last_reload_reg[nregno])
7070 {
7071 int last_regno = REGNO (reg_last_reload_reg[nregno]);
7072
7073 if (reg_reloaded_contents[last_regno] == nregno)
7074 spill_reg_store[last_regno] = 0;
7075 }
7076 }
7077 }
7078
7079 /* I is nonnegative if this reload used a register.
7080 If rld[r].reg_rtx is 0, this is an optional reload
7081 that we opted to ignore. */
7082
7083 if (i >= 0 && rld[r].reg_rtx != 0)
7084 {
7085 int nr = HARD_REGNO_NREGS (i, GET_MODE (rld[r].reg_rtx));
7086 int k;
7087 int part_reaches_end = 0;
7088 int all_reaches_end = 1;
7089
7090 /* For a multi register reload, we need to check if all or part
7091 of the value lives to the end. */
7092 for (k = 0; k < nr; k++)
7093 {
7094 if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
7095 rld[r].when_needed))
7096 part_reaches_end = 1;
7097 else
7098 all_reaches_end = 0;
7099 }
7100
7101 /* Ignore reloads that don't reach the end of the insn in their
7102 entirety. */
7103 if (all_reaches_end)
7104 {
7105 /* First, clear out memory of what used to be in this spill reg.
7106 If consecutive registers are used, clear them all. */
7107
7108 for (k = 0; k < nr; k++)
7109 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7110
7111 /* Maybe the spill reg contains a copy of reload_out. */
7112 if (rld[r].out != 0
7113 && (GET_CODE (rld[r].out) == REG
7114 #ifdef AUTO_INC_DEC
7115 || ! rld[r].out_reg
7116 #endif
7117 || GET_CODE (rld[r].out_reg) == REG))
7118 {
7119 rtx out = (GET_CODE (rld[r].out) == REG
7120 ? rld[r].out
7121 : rld[r].out_reg
7122 ? rld[r].out_reg
7123 /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
7124 register int nregno = REGNO (out);
7125 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7126 : HARD_REGNO_NREGS (nregno,
7127 GET_MODE (rld[r].reg_rtx)));
7128
7129 spill_reg_store[i] = new_spill_reg_store[i];
7130 spill_reg_stored_to[i] = out;
7131 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7132
7133 /* If NREGNO is a hard register, it may occupy more than
7134 one register. If it does, say what is in the
7135 rest of the registers assuming that both registers
7136 agree on how many words the object takes. If not,
7137 invalidate the subsequent registers. */
7138
7139 if (nregno < FIRST_PSEUDO_REGISTER)
7140 for (k = 1; k < nnr; k++)
7141 reg_last_reload_reg[nregno + k]
7142 = (nr == nnr
7143 ? gen_rtx_REG (reg_raw_mode[REGNO (rld[r].reg_rtx) + k],
7144 REGNO (rld[r].reg_rtx) + k)
7145 : 0);
7146
7147 /* Now do the inverse operation. */
7148 for (k = 0; k < nr; k++)
7149 {
7150 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7151 reg_reloaded_contents[i + k]
7152 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7153 ? nregno
7154 : nregno + k);
7155 reg_reloaded_insn[i + k] = insn;
7156 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7157 }
7158 }
7159
7160 /* Maybe the spill reg contains a copy of reload_in. Only do
7161 something if there will not be an output reload for
7162 the register being reloaded. */
7163 else if (rld[r].out_reg == 0
7164 && rld[r].in != 0
7165 && ((GET_CODE (rld[r].in) == REG
7166 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER
7167 && ! reg_has_output_reload[REGNO (rld[r].in)])
7168 || (GET_CODE (rld[r].in_reg) == REG
7169 && ! reg_has_output_reload[REGNO (rld[r].in_reg)]))
7170 && ! reg_set_p (rld[r].reg_rtx, PATTERN (insn)))
7171 {
7172 register int nregno;
7173 int nnr;
7174
7175 if (GET_CODE (rld[r].in) == REG
7176 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
7177 nregno = REGNO (rld[r].in);
7178 else if (GET_CODE (rld[r].in_reg) == REG)
7179 nregno = REGNO (rld[r].in_reg);
7180 else
7181 nregno = REGNO (XEXP (rld[r].in_reg, 0));
7182
7183 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7184 : HARD_REGNO_NREGS (nregno,
7185 GET_MODE (rld[r].reg_rtx)));
7186
7187 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7188
7189 if (nregno < FIRST_PSEUDO_REGISTER)
7190 for (k = 1; k < nnr; k++)
7191 reg_last_reload_reg[nregno + k]
7192 = (nr == nnr
7193 ? gen_rtx_REG (reg_raw_mode[REGNO (rld[r].reg_rtx) + k],
7194 REGNO (rld[r].reg_rtx) + k)
7195 : 0);
7196
7197 /* Unless we inherited this reload, show we haven't
7198 recently done a store.
7199 Previous stores of inherited auto_inc expressions
7200 also have to be discarded. */
7201 if (! reload_inherited[r]
7202 || (rld[r].out && ! rld[r].out_reg))
7203 spill_reg_store[i] = 0;
7204
7205 for (k = 0; k < nr; k++)
7206 {
7207 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7208 reg_reloaded_contents[i + k]
7209 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7210 ? nregno
7211 : nregno + k);
7212 reg_reloaded_insn[i + k] = insn;
7213 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7214 }
7215 }
7216 }
7217
7218 /* However, if part of the reload reaches the end, then we must
7219 invalidate the old info for the part that survives to the end. */
7220 else if (part_reaches_end)
7221 {
7222 for (k = 0; k < nr; k++)
7223 if (reload_reg_reaches_end_p (i + k,
7224 rld[r].opnum,
7225 rld[r].when_needed))
7226 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7227 }
7228 }
7229
7230 /* The following if-statement was #if 0'd in 1.34 (or before...).
7231 It's reenabled in 1.35 because supposedly nothing else
7232 deals with this problem. */
7233
7234 /* If a register gets output-reloaded from a non-spill register,
7235 that invalidates any previous reloaded copy of it.
7236 But forget_old_reloads_1 won't get to see it, because
7237 it thinks only about the original insn. So invalidate it here. */
7238 if (i < 0 && rld[r].out != 0
7239 && (GET_CODE (rld[r].out) == REG
7240 || (GET_CODE (rld[r].out) == MEM
7241 && GET_CODE (rld[r].out_reg) == REG)))
7242 {
7243 rtx out = (GET_CODE (rld[r].out) == REG
7244 ? rld[r].out : rld[r].out_reg);
7245 register int nregno = REGNO (out);
7246 if (nregno >= FIRST_PSEUDO_REGISTER)
7247 {
7248 rtx src_reg, store_insn = NULL_RTX;
7249
7250 reg_last_reload_reg[nregno] = 0;
7251
7252 /* If we can find a hard register that is stored, record
7253 the storing insn so that we may delete this insn with
7254 delete_output_reload. */
7255 src_reg = rld[r].reg_rtx;
7256
7257 /* If this is an optional reload, try to find the source reg
7258 from an input reload. */
7259 if (! src_reg)
7260 {
7261 rtx set = single_set (insn);
7262 if (set && SET_DEST (set) == rld[r].out)
7263 {
7264 int k;
7265
7266 src_reg = SET_SRC (set);
7267 store_insn = insn;
7268 for (k = 0; k < n_reloads; k++)
7269 {
7270 if (rld[k].in == src_reg)
7271 {
7272 src_reg = rld[k].reg_rtx;
7273 break;
7274 }
7275 }
7276 }
7277 }
7278 else
7279 store_insn = new_spill_reg_store[REGNO (src_reg)];
7280 if (src_reg && GET_CODE (src_reg) == REG
7281 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
7282 {
7283 int src_regno = REGNO (src_reg);
7284 int nr = HARD_REGNO_NREGS (src_regno, rld[r].mode);
7285 /* Where to find a death note varies with
7286 PRESERVE_DEATH_INFO_REGNO_P.  The condition is not
7287 necessarily checked exactly in the code that moves
7288 notes, so just check both locations. */
7289 rtx note = find_regno_note (insn, REG_DEAD, src_regno);
7290 if (! note && store_insn)
7291 note = find_regno_note (store_insn, REG_DEAD, src_regno);
7292 while (nr-- > 0)
7293 {
7294 spill_reg_store[src_regno + nr] = store_insn;
7295 spill_reg_stored_to[src_regno + nr] = out;
7296 reg_reloaded_contents[src_regno + nr] = nregno;
7297 reg_reloaded_insn[src_regno + nr] = store_insn;
7298 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + nr);
7299 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + nr);
7300 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + nr);
7301 if (note)
7302 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
7303 else
7304 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
7305 }
7306 reg_last_reload_reg[nregno] = src_reg;
7307 }
7308 }
7309 else
7310 {
7311 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (rld[r].out));
7312
7313 while (num_regs-- > 0)
7314 reg_last_reload_reg[nregno + num_regs] = 0;
7315 }
7316 }
7317 }
7318 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
7319 }
7320 \f
7321 /* Emit code to perform a reload from IN (which may be a reload register) to
7322 OUT (which may also be a reload register). IN or OUT is from operand
7323 OPNUM with reload type TYPE.
7324
7325 Returns first insn emitted. */
7326
7327 rtx
7328 gen_reload (out, in, opnum, type)
7329 rtx out;
7330 rtx in;
7331 int opnum;
7332 enum reload_type type;
7333 {
7334 rtx last = get_last_insn ();
7335 rtx tem;
7336
7337 /* If IN is a paradoxical SUBREG, remove it and try to put the
7338 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7339 if (GET_CODE (in) == SUBREG
7340 && (GET_MODE_SIZE (GET_MODE (in))
7341 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7342 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7343 in = SUBREG_REG (in), out = tem;
7344 else if (GET_CODE (out) == SUBREG
7345 && (GET_MODE_SIZE (GET_MODE (out))
7346 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7347 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7348 out = SUBREG_REG (out), in = tem;
7349
7350 /* How to do this reload can get quite tricky. Normally, we are being
7351 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7352 register that didn't get a hard register. In that case we can just
7353 call emit_move_insn.
7354
7355 We can also be asked to reload a PLUS that adds a register or a MEM to
7356 another register, constant or MEM. This can occur during frame pointer
7357 elimination and while reloading addresses. This case is handled by
7358 trying to emit a single insn to perform the add. If it is not valid,
7359 we use a two insn sequence.
7360
7361 Finally, we could be called to handle an 'o' constraint by putting
7362 an address into a register. In that case, we first try to do this
7363 with a named pattern of "reload_load_address". If no such pattern
7364 exists, we just emit a SET insn and hope for the best (it will normally
7365 be valid on machines that use 'o').
7366
7367 This entire process is made complex because reload will never
7368 process the insns we generate here and so we must ensure that
7369 they will fit their constraints and also by the fact that parts of
7370 IN might be being reloaded separately and replaced with spill registers.
7371 Because of this, we are, in some sense, just guessing the right approach
7372 here. The one listed above seems to work.
7373
7374 ??? At some point, this whole thing needs to be rethought. */
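/* A hedged illustration of the PLUS case handled below (register names
   and constants are invented, and the exact RTL is target-dependent):
   reloading (plus:SI (reg:SI fp) (const_int 64)) into hard reg r0 on a
   machine whose add insn does not accept those operands directly falls
   back to the two-insn sequence

       (set (reg:SI r0) (const_int 64))
       (set (reg:SI r0) (plus:SI (reg:SI r0) (reg:SI fp)))

   with a REG_EQUIV note for the original PLUS attached to the add so
   that find_equiv_reg can still recognize the value.  */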
7375
7376 if (GET_CODE (in) == PLUS
7377 && (GET_CODE (XEXP (in, 0)) == REG
7378 || GET_CODE (XEXP (in, 0)) == SUBREG
7379 || GET_CODE (XEXP (in, 0)) == MEM)
7380 && (GET_CODE (XEXP (in, 1)) == REG
7381 || GET_CODE (XEXP (in, 1)) == SUBREG
7382 || CONSTANT_P (XEXP (in, 1))
7383 || GET_CODE (XEXP (in, 1)) == MEM))
7384 {
7385 /* We need to compute the sum of a register or a MEM and another
7386 register, constant, or MEM, and put it into the reload
7387 register. The best possible way of doing this is if the machine
7388 has a three-operand ADD insn that accepts the required operands.
7389
7390 The simplest approach is to try to generate such an insn and see if it
7391 is recognized and matches its constraints. If so, it can be used.
7392
7393 It might be better not to actually emit the insn unless it is valid,
7394 but we need to pass the insn as an operand to `recog' and
7395 `extract_insn' and it is simpler to emit and then delete the insn if
7396 not valid than to dummy things up. */
7397
7398 rtx op0, op1, tem, insn;
7399 int code;
7400
7401 op0 = find_replacement (&XEXP (in, 0));
7402 op1 = find_replacement (&XEXP (in, 1));
7403
7404 /* Since constraint checking is strict, commutativity won't be
7405 checked, so we need to do that here to avoid spurious failure
7406 if the add instruction is two-address and the second operand
7407 of the add is the same as the reload reg, which is frequently
7408 the case. If the insn would be A = B + A, rearrange it so
7409 it will be A = A + B as constrain_operands expects. */
7410
7411 if (GET_CODE (XEXP (in, 1)) == REG
7412 && REGNO (out) == REGNO (XEXP (in, 1)))
7413 tem = op0, op0 = op1, op1 = tem;
7414
7415 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7416 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
7417
7418 insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
7419 code = recog_memoized (insn);
7420
7421 if (code >= 0)
7422 {
7423 extract_insn (insn);
7424 /* We want constrain operands to treat this insn strictly in
7425 its validity determination, i.e., the way it would after reload
7426 has completed. */
7427 if (constrain_operands (1))
7428 return insn;
7429 }
7430
7431 delete_insns_since (last);
7432
7433 /* If that failed, we must use a conservative two-insn sequence.
7434
7435 Use a move to copy one operand into the reload register. Prefer
7436 to reload a constant, MEM or pseudo since the move patterns can
7437 handle an arbitrary operand. If OP1 is not a constant, MEM or
7438 pseudo and OP1 is not a valid operand for an add instruction, then
7439 reload OP1.
7440
7441 After reloading one of the operands into the reload register, add
7442 the reload register to the output register.
7443
7444 If there is another way to do this for a specific machine, a
7445 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7446 we emit below. */
7447
7448 code = (int) add_optab->handlers[(int) GET_MODE (out)].insn_code;
7449
7450 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
7451 || (GET_CODE (op1) == REG
7452 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
7453 || (code != CODE_FOR_nothing
7454 && ! ((*insn_data[code].operand[2].predicate)
7455 (op1, insn_data[code].operand[2].mode))))
7456 tem = op0, op0 = op1, op1 = tem;
7457
7458 gen_reload (out, op0, opnum, type);
7459
7460 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7461 This fixes a problem on the 32K where the stack pointer cannot
7462 be used as an operand of an add insn. */
7463
7464 if (rtx_equal_p (op0, op1))
7465 op1 = out;
7466
7467 insn = emit_insn (gen_add2_insn (out, op1));
7468
7469 /* If that failed, copy the address register to the reload register.
7470 Then add the constant to the reload register. */
7471
7472 code = recog_memoized (insn);
7473
7474 if (code >= 0)
7475 {
7476 extract_insn (insn);
7477 /* We want constrain operands to treat this insn strictly in
7478 its validity determination, i.e., the way it would after reload
7479 has completed. */
7480 if (constrain_operands (1))
7481 {
7482 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
7483 REG_NOTES (insn)
7484 = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7485 return insn;
7486 }
7487 }
7488
7489 delete_insns_since (last);
7490
7491 gen_reload (out, op1, opnum, type);
7492 insn = emit_insn (gen_add2_insn (out, op0));
7493 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7494 }
7495
7496 #ifdef SECONDARY_MEMORY_NEEDED
7497 /* If we need a memory location to do the move, do it that way. */
7498 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
7499 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
7500 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
7501 REGNO_REG_CLASS (REGNO (out)),
7502 GET_MODE (out)))
7503 {
7504 /* Get the memory to use and rewrite both registers to its mode. */
7505 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7506
7507 if (GET_MODE (loc) != GET_MODE (out))
7508 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
7509
7510 if (GET_MODE (loc) != GET_MODE (in))
7511 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
7512
7513 gen_reload (loc, in, opnum, type);
7514 gen_reload (out, loc, opnum, type);
7515 }
7516 #endif
7517
7518 /* If IN is a simple operand, use gen_move_insn. */
7519 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
7520 emit_insn (gen_move_insn (out, in));
7521
7522 #ifdef HAVE_reload_load_address
7523 else if (HAVE_reload_load_address)
7524 emit_insn (gen_reload_load_address (out, in));
7525 #endif
7526
7527 /* Otherwise, just write (set OUT IN) and hope for the best. */
7528 else
7529 emit_insn (gen_rtx_SET (VOIDmode, out, in));
7530
7531 /* Return the first insn emitted.
7532 We cannot just return get_last_insn, because there may have
7533 been multiple instructions emitted.  Also note that gen_move_insn may
7534 emit more than one insn itself, so we cannot assume that there is one
7535 insn emitted per emit_insn_before call. */
7536
7537 return last ? NEXT_INSN (last) : get_insns ();
7538 }
7539 \f
7540 /* Delete a previously made output-reload
7541 whose result we now believe is not needed.
7542 First we double-check.
7543
7544 INSN is the insn now being processed.
7545 LAST_RELOAD_REG is the hard register number for which we want to delete
7546 the last output reload.
7547 J is the reload-number that originally used REG. The caller has made
7548 certain that reload J doesn't use REG any longer for input. */
7549
7550 static void
7551 delete_output_reload (insn, j, last_reload_reg)
7552 rtx insn;
7553 int j;
7554 int last_reload_reg;
7555 {
7556 rtx output_reload_insn = spill_reg_store[last_reload_reg];
7557 rtx reg = spill_reg_stored_to[last_reload_reg];
7558 int k;
7559 int n_occurrences;
7560 int n_inherited = 0;
7561 register rtx i1;
7562 rtx substed;
7563
7564 /* Get the raw pseudo-register referred to. */
7565
7566 while (GET_CODE (reg) == SUBREG)
7567 reg = SUBREG_REG (reg);
7568 substed = reg_equiv_memory_loc[REGNO (reg)];
7569
7570 /* This is unsafe if the operand occurs more often in the current
7571 insn than it is inherited. */
7572 for (k = n_reloads - 1; k >= 0; k--)
7573 {
7574 rtx reg2 = rld[k].in;
7575 if (! reg2)
7576 continue;
7577 if (GET_CODE (reg2) == MEM || reload_override_in[k])
7578 reg2 = rld[k].in_reg;
7579 #ifdef AUTO_INC_DEC
7580 if (rld[k].out && ! rld[k].out_reg)
7581 reg2 = XEXP (rld[k].in_reg, 0);
7582 #endif
7583 while (GET_CODE (reg2) == SUBREG)
7584 reg2 = SUBREG_REG (reg2);
7585 if (rtx_equal_p (reg2, reg))
7586 {
7587 if (reload_inherited[k] || reload_override_in[k] || k == j)
7588 {
7589 n_inherited++;
7590 reg2 = rld[k].out_reg;
7591 if (! reg2)
7592 continue;
7593 while (GET_CODE (reg2) == SUBREG)
7594 reg2 = XEXP (reg2, 0);
7595 if (rtx_equal_p (reg2, reg))
7596 n_inherited++;
7597 }
7598 else
7599 return;
7600 }
7601 }
7602 n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
7603 if (substed)
7604 n_occurrences += count_occurrences (PATTERN (insn),
7605 eliminate_regs (substed, 0,
7606 NULL_RTX), 0);
7607 if (n_occurrences > n_inherited)
7608 return;
7609
7610 /* If the pseudo-reg we are reloading is no longer referenced
7611 anywhere between the store into it and here,
7612 and no jumps or labels intervene, then the value can get
7613 here through the reload reg alone.
7614 Otherwise, give up--return. */
7615 for (i1 = NEXT_INSN (output_reload_insn);
7616 i1 != insn; i1 = NEXT_INSN (i1))
7617 {
7618 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7619 return;
7620 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7621 && reg_mentioned_p (reg, PATTERN (i1)))
7622 {
7623 /* If this is a USE in front of INSN, we only have to check that
7624 there are no more references than accounted for by inheritance. */
7625 while (GET_CODE (i1) == INSN && GET_CODE (PATTERN (i1)) == USE)
7626 {
7627 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
7628 i1 = NEXT_INSN (i1);
7629 }
7630 if (n_occurrences <= n_inherited && i1 == insn)
7631 break;
7632 return;
7633 }
7634 }
7635
7636 /* The caller has already checked that REG dies or is set in INSN.
7637 It has also checked that we are optimizing, and thus some inaccuracies
7638 in the debugging information are acceptable.
7639 So we could just delete output_reload_insn.
7640 But in some cases we can improve the debugging information without
7641 sacrificing optimization - maybe even improving the code:
7642 See if the pseudo reg has been completely replaced
7643 with reload regs. If so, delete the store insn
7644 and forget we had a stack slot for the pseudo. */
7645 if (rld[j].out != rld[j].in
7646 && REG_N_DEATHS (REGNO (reg)) == 1
7647 && REG_N_SETS (REGNO (reg)) == 1
7648 && REG_BASIC_BLOCK (REGNO (reg)) >= 0
7649 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7650 {
7651 rtx i2;
7652
7653 /* We know that it was used only between here
7654 and the beginning of the current basic block.
7655 (We also know that the last use before INSN was
7656 the output reload we are thinking of deleting, but never mind that.)
7657 Search that range; see if any ref remains. */
7658 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7659 {
7660 rtx set = single_set (i2);
7661
7662 /* Uses which just store in the pseudo don't count,
7663 since if they are the only uses, they are dead. */
7664 if (set != 0 && SET_DEST (set) == reg)
7665 continue;
7666 if (GET_CODE (i2) == CODE_LABEL
7667 || GET_CODE (i2) == JUMP_INSN)
7668 break;
7669 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7670 && reg_mentioned_p (reg, PATTERN (i2)))
7671 {
7672 /* Some other ref remains; just delete the output reload we
7673 know to be dead. */
7674 delete_address_reloads (output_reload_insn, insn);
7675 PUT_CODE (output_reload_insn, NOTE);
7676 NOTE_SOURCE_FILE (output_reload_insn) = 0;
7677 NOTE_LINE_NUMBER (output_reload_insn) = NOTE_INSN_DELETED;
7678 return;
7679 }
7680 }
7681
7682 /* Delete the now-dead stores into this pseudo. */
7683 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7684 {
7685 rtx set = single_set (i2);
7686
7687 if (set != 0 && SET_DEST (set) == reg)
7688 {
7689 delete_address_reloads (i2, insn);
7690 /* This might be a basic block head,
7691 thus don't use delete_insn. */
7692 PUT_CODE (i2, NOTE);
7693 NOTE_SOURCE_FILE (i2) = 0;
7694 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
7695 }
7696 if (GET_CODE (i2) == CODE_LABEL
7697 || GET_CODE (i2) == JUMP_INSN)
7698 break;
7699 }
7700
7701 /* For the debugging info,
7702 say the pseudo lives in this reload reg. */
7703 reg_renumber[REGNO (reg)] = REGNO (rld[j].reg_rtx);
7704 alter_reg (REGNO (reg), -1);
7705 }
7706 delete_address_reloads (output_reload_insn, insn);
7707 PUT_CODE (output_reload_insn, NOTE);
7708 NOTE_SOURCE_FILE (output_reload_insn) = 0;
7709 NOTE_LINE_NUMBER (output_reload_insn) = NOTE_INSN_DELETED;
7710
7711 }
7712
7713 /* We are going to delete DEAD_INSN. Recursively delete loads of
7714 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
7715 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
7716 static void
7717 delete_address_reloads (dead_insn, current_insn)
7718 rtx dead_insn, current_insn;
7719 {
7720 rtx set = single_set (dead_insn);
7721 rtx set2, dst, prev, next;
7722 if (set)
7723 {
7724 rtx dst = SET_DEST (set);
7725 if (GET_CODE (dst) == MEM)
7726 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
7727 }
7728 /* If we deleted the store from a reloaded post_{in,de}c expression,
7729 we can delete the matching adds. */
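/* A hedged sketch of the pattern matched below (the register number is
   invented): if the deleted store was bracketed by

       (set (reg:SI 3) (plus:SI (reg:SI 3) (const_int 4)))
       ... the deleted store ...
       (set (reg:SI 3) (plus:SI (reg:SI 3) (const_int -4)))

   the two adds only existed to realize the post-increment around the
   store, so once the store is gone they cancel and both can be deleted
   as well.  */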
7730 prev = PREV_INSN (dead_insn);
7731 next = NEXT_INSN (dead_insn);
7732 if (! prev || ! next)
7733 return;
7734 set = single_set (next);
7735 set2 = single_set (prev);
7736 if (! set || ! set2
7737 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
7738 || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
7739 || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
7740 return;
7741 dst = SET_DEST (set);
7742 if (! rtx_equal_p (dst, SET_DEST (set2))
7743 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
7744 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
7745 || (INTVAL (XEXP (SET_SRC (set), 1))
7746 != -INTVAL (XEXP (SET_SRC (set2), 1))))
7747 return;
7748 delete_insn (prev);
7749 delete_insn (next);
7750 }
7751
7752 /* Subfunction of delete_address_reloads: process registers found in X. */
7753 static void
7754 delete_address_reloads_1 (dead_insn, x, current_insn)
7755 rtx dead_insn, x, current_insn;
7756 {
7757 rtx prev, set, dst, i2;
7758 int i, j;
7759 enum rtx_code code = GET_CODE (x);
7760
7761 if (code != REG)
7762 {
7763 const char *fmt = GET_RTX_FORMAT (code);
7764 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7765 {
7766 if (fmt[i] == 'e')
7767 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
7768 else if (fmt[i] == 'E')
7769 {
7770 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7771 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
7772 current_insn);
7773 }
7774 }
7775 return;
7776 }
7777
7778 if (spill_reg_order[REGNO (x)] < 0)
7779 return;
7780
7781 /* Scan backwards for the insn that sets x.  This might be far back,
7782 due to inheritance. */
7783 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
7784 {
7785 code = GET_CODE (prev);
7786 if (code == CODE_LABEL || code == JUMP_INSN)
7787 return;
7788 if (GET_RTX_CLASS (code) != 'i')
7789 continue;
7790 if (reg_set_p (x, PATTERN (prev)))
7791 break;
7792 if (reg_referenced_p (x, PATTERN (prev)))
7793 return;
7794 }
7795 if (! prev || INSN_UID (prev) < reload_first_uid)
7796 return;
7797 /* Check that PREV only sets the reload register. */
7798 set = single_set (prev);
7799 if (! set)
7800 return;
7801 dst = SET_DEST (set);
7802 if (GET_CODE (dst) != REG
7803 || ! rtx_equal_p (dst, x))
7804 return;
7805 if (! reg_set_p (dst, PATTERN (dead_insn)))
7806 {
7807 /* Check if DST was used in a later insn -
7808 it might have been inherited. */
7809 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
7810 {
7811 if (GET_CODE (i2) == CODE_LABEL)
7812 break;
7813 if (! INSN_P (i2))
7814 continue;
7815 if (reg_referenced_p (dst, PATTERN (i2)))
7816 {
7817 /* If there is a reference to the register in the current insn,
7818 it might be loaded in a non-inherited reload. If no other
7819 reload uses it, that means the register is set before
7820 referenced. */
7821 if (i2 == current_insn)
7822 {
7823 for (j = n_reloads - 1; j >= 0; j--)
7824 if ((rld[j].reg_rtx == dst && reload_inherited[j])
7825 || reload_override_in[j] == dst)
7826 return;
7827 for (j = n_reloads - 1; j >= 0; j--)
7828 if (rld[j].in && rld[j].reg_rtx == dst)
7829 break;
7830 if (j >= 0)
7831 break;
7832 }
7833 return;
7834 }
7835 if (GET_CODE (i2) == JUMP_INSN)
7836 break;
7837 /* If DST is still live at CURRENT_INSN, check if it is used for
7838 any reload. Note that even if CURRENT_INSN sets DST, we still
7839 have to check the reloads. */
7840 if (i2 == current_insn)
7841 {
7842 for (j = n_reloads - 1; j >= 0; j--)
7843 if ((rld[j].reg_rtx == dst && reload_inherited[j])
7844 || reload_override_in[j] == dst)
7845 return;
7846 /* ??? We can't finish the loop here, because dst might be
7847 allocated to a pseudo in this block if no reload in this
7848 block needs any of the classes containing DST - see
7849 spill_hard_reg. There is no easy way to tell this, so we
7850 have to scan till the end of the basic block. */
7851 }
7852 if (reg_set_p (dst, PATTERN (i2)))
7853 break;
7854 }
7855 }
7856 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
7857 reg_reloaded_contents[REGNO (dst)] = -1;
7858 /* Can't use delete_insn here because PREV might be a basic block head. */
7859 PUT_CODE (prev, NOTE);
7860 NOTE_LINE_NUMBER (prev) = NOTE_INSN_DELETED;
7861 NOTE_SOURCE_FILE (prev) = 0;
7862 }
7863 \f
7864 /* Output reload-insns to reload VALUE into RELOADREG.
7865 VALUE is an autoincrement or autodecrement RTX whose operand
7866 is a register or memory location;
7867 so reloading involves incrementing that location.
7868 IN is either identical to VALUE, or some cheaper place to reload from.
7869
7870 INC_AMOUNT is the number to increment or decrement by (always positive).
7871 This cannot be deduced from VALUE.
7872
7873 Return the instruction that stores into RELOADREG. */
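/* A hedged sketch of the post-increment path below (hard register
   numbers are invented, and gen_add2_insn may produce a different form
   on a given target): for VALUE = (post_inc:SI (reg:SI 2)) with
   INC_AMOUNT 4, when the location cannot be incremented in place, the
   emitted sequence is roughly

       (set (reg:SI 0) (reg:SI 2))                            ; copy old value
       (set (reg:SI 0) (plus:SI (reg:SI 0) (const_int 4)))
       (set (reg:SI 2) (reg:SI 0))                            ; store incremented value
       (set (reg:SI 0) (plus:SI (reg:SI 0) (const_int -4)))   ; restore old value

   so RELOADREG still holds the pre-increment value afterwards.  */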
7874
7875 static rtx
7876 inc_for_reload (reloadreg, in, value, inc_amount)
7877 rtx reloadreg;
7878 rtx in, value;
7879 int inc_amount;
7880 {
7881 /* REG or MEM to be copied and incremented. */
7882 rtx incloc = XEXP (value, 0);
7883 /* Nonzero if increment after copying. */
7884 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
7885 rtx last;
7886 rtx inc;
7887 rtx add_insn;
7888 int code;
7889 rtx store;
7890 rtx real_in = in == value ? XEXP (in, 0) : in;
7891
7892 /* No hard register is equivalent to this register after
7893 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7894 we could inc/dec that register as well (maybe even using it for
7895 the source), but I'm not sure it's worth worrying about. */
7896 if (GET_CODE (incloc) == REG)
7897 reg_last_reload_reg[REGNO (incloc)] = 0;
7898
7899 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7900 inc_amount = -inc_amount;
7901
7902 inc = GEN_INT (inc_amount);
7903
7904 /* If this is post-increment, first copy the location to the reload reg. */
7905 if (post && real_in != reloadreg)
7906 emit_insn (gen_move_insn (reloadreg, real_in));
7907
7908 if (in == value)
7909 {
7910 /* See if we can directly increment INCLOC. Use a method similar to
7911 that in gen_reload. */
7912
7913 last = get_last_insn ();
7914 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
7915 gen_rtx_PLUS (GET_MODE (incloc),
7916 incloc, inc)));
7917
7918 code = recog_memoized (add_insn);
7919 if (code >= 0)
7920 {
7921 extract_insn (add_insn);
7922 if (constrain_operands (1))
7923 {
7924 /* If this is a pre-increment and we have incremented the value
7925 where it lives, copy the incremented value to RELOADREG to
7926 be used as an address. */
7927
7928 if (! post)
7929 emit_insn (gen_move_insn (reloadreg, incloc));
7930
7931 return add_insn;
7932 }
7933 }
7934 delete_insns_since (last);
7935 }
7936
7937 /* If we couldn't do the increment directly, we must increment in RELOADREG.
7938 The way we do this depends on whether this is pre- or post-increment.
7939 For pre-increment, copy INCLOC to the reload register, increment it
7940 there, then save back. */
7941
7942 if (! post)
7943 {
7944 if (in != reloadreg)
7945 emit_insn (gen_move_insn (reloadreg, real_in));
7946 emit_insn (gen_add2_insn (reloadreg, inc));
7947 store = emit_insn (gen_move_insn (incloc, reloadreg));
7948 }
7949 else
7950 {
7951 /* Postincrement.
7952 Because this might be a jump insn or a compare, and because RELOADREG
7953 may not be available after the insn in an input reload, we must do
7954 the incrementation before the insn being reloaded for.
7955
7956 We have already copied IN to RELOADREG. Increment the copy in
7957 RELOADREG, save that back, then decrement RELOADREG so it has
7958 the original value. */
7959
7960 emit_insn (gen_add2_insn (reloadreg, inc));
7961 store = emit_insn (gen_move_insn (incloc, reloadreg));
7962 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7963 }
7964
7965 return store;
7966 }
7967 \f
7968 /* Return 1 if we are certain that the constraint-string STRING allows
7969 the hard register REG. Return 0 if we can't be sure of this. */
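/* Hedged examples (what a letter other than `g' or `r' maps to comes
   from the target's REG_CLASS_FROM_LETTER): for STRING "r,g" and a
   general hard register, every alternative accepts the register, so the
   loop below returns 1; for STRING "r,m" the `m' alternative gives no
   such guarantee, so the result is 0.  */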
7970
7971 static int
7972 constraint_accepts_reg_p (string, reg)
7973 const char *string;
7974 rtx reg;
7975 {
7976 int value = 0;
7977 int regno = true_regnum (reg);
7978 int c;
7979
7980 /* Initialize for first alternative. */
7981 value = 0;
7982 /* Check that each alternative contains `g' or `r'. */
7983 while (1)
7984 switch (c = *string++)
7985 {
7986 case 0:
7987 /* If an alternative lacks `g' or `r', we lose. */
7988 return value;
7989 case ',':
7990 /* If an alternative lacks `g' or `r', we lose. */
7991 if (value == 0)
7992 return 0;
7993 /* Initialize for next alternative. */
7994 value = 0;
7995 break;
7996 case 'g':
7997 case 'r':
7998 /* Any general reg wins for this alternative. */
7999 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
8000 value = 1;
8001 break;
8002 default:
8003 /* Any reg in specified class wins for this alternative. */
8004 {
8005 enum reg_class class = REG_CLASS_FROM_LETTER (c);
8006
8007 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
8008 value = 1;
8009 }
8010 }
8011 }
8012 \f
8013 /* INSN is a no-op; delete it.
8014 If this sets the return value of the function, we must keep a USE around,
8015 in case this is in a different basic block than the final USE. Otherwise,
8016 we could lose important register liveness information on
8017 SMALL_REGISTER_CLASSES machines, where return registers might be used as
8018 spills: subsequent passes assume that spill registers are dead at the end
8019 of a basic block.
8020 VALUE must be the return value in such a case, NULL otherwise. */
8021 static void
8022 reload_cse_delete_noop_set (insn, value)
8023 rtx insn, value;
8024 {
8025 if (value)
8026 {
8027 PATTERN (insn) = gen_rtx_USE (VOIDmode, value);
8028 INSN_CODE (insn) = -1;
8029 REG_NOTES (insn) = NULL_RTX;
8030 }
8031 else
8032 {
8033 PUT_CODE (insn, NOTE);
8034 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8035 NOTE_SOURCE_FILE (insn) = 0;
8036 }
8037 }
8038
8039 /* See whether a single set SET is a noop. */
8040 static int
8041 reload_cse_noop_set_p (set)
8042 rtx set;
8043 {
8044 return rtx_equal_for_cselib_p (SET_DEST (set), SET_SRC (set));
8045 }
8046
8047 /* Try to simplify INSN. */
8048 static void
8049 reload_cse_simplify (insn)
8050 rtx insn;
8051 {
8052 rtx body = PATTERN (insn);
8053
8054 if (GET_CODE (body) == SET)
8055 {
8056 int count = 0;
8057
8058 /* Simplify even if we may think it is a no-op.
8059 We may think a memory load of a value smaller than WORD_SIZE
8060 is redundant because we haven't taken into account possible
8061 implicit extension. reload_cse_simplify_set() will bring
8062 this out, so it's safer to simplify before we delete. */
8063 count += reload_cse_simplify_set (body, insn);
8064
8065 if (!count && reload_cse_noop_set_p (body))
8066 {
8067 rtx value = SET_DEST (body);
8068 if (! REG_FUNCTION_VALUE_P (SET_DEST (body)))
8069 value = 0;
8070 reload_cse_delete_noop_set (insn, value);
8071 return;
8072 }
8073
8074 if (count > 0)
8075 apply_change_group ();
8076 else
8077 reload_cse_simplify_operands (insn);
8078 }
8079 else if (GET_CODE (body) == PARALLEL)
8080 {
8081 int i;
8082 int count = 0;
8083 rtx value = NULL_RTX;
8084
8085 /* If every action in a PARALLEL is a noop, we can delete
8086 the entire PARALLEL. */
8087 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8088 {
8089 rtx part = XVECEXP (body, 0, i);
8090 if (GET_CODE (part) == SET)
8091 {
8092 if (! reload_cse_noop_set_p (part))
8093 break;
8094 if (REG_FUNCTION_VALUE_P (SET_DEST (part)))
8095 {
8096 if (value)
8097 break;
8098 value = SET_DEST (part);
8099 }
8100 }
8101 else if (GET_CODE (part) != CLOBBER)
8102 break;
8103 }
8104
8105 if (i < 0)
8106 {
8107 reload_cse_delete_noop_set (insn, value);
8108 /* We're done with this insn. */
8109 return;
8110 }
8111
8112 /* It's not a no-op, but we can try to simplify it. */
8113 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8114 if (GET_CODE (XVECEXP (body, 0, i)) == SET)
8115 count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
8116
8117 if (count > 0)
8118 apply_change_group ();
8119 else
8120 reload_cse_simplify_operands (insn);
8121 }
8122 }
8123
8124 /* Do a very simple CSE pass over the hard registers.
8125
8126 This function detects no-op moves where we happened to assign two
8127 different pseudo-registers to the same hard register, and then
8128 copied one to the other. Reload will generate a useless
8129 instruction copying a register to itself.
8130
8131 This function also detects cases where we load a value from memory
8132 into two different registers, and (if memory is more expensive than
8133 registers) changes it to simply copy the first register into the
8134 second register.
8135
8136 Another optimization is performed that scans the operands of each
8137 instruction to see whether the value is already available in a
8138 hard register. It then replaces the operand with the hard register
8139 if possible, much like an optional reload would. */
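/* A hedged illustration of the no-op case (pseudo and hard register
   numbers are invented): if pseudos 100 and 101 were both assigned hard
   reg 3, an insn that was (set (reg 101) (reg 100)) before reload is now
   (set (reg:SI 3) (reg:SI 3)); reload_cse_simplify below recognizes the
   SET as a no-op and deletes the insn.  */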
8140
8141 static void
8142 reload_cse_regs_1 (first)
8143 rtx first;
8144 {
8145 rtx insn;
8146
8147 cselib_init ();
8148 init_alias_analysis ();
8149
8150 for (insn = first; insn; insn = NEXT_INSN (insn))
8151 {
8152 if (INSN_P (insn))
8153 reload_cse_simplify (insn);
8154
8155 cselib_process_insn (insn);
8156 }
8157
8158 /* Clean up. */
8159 end_alias_analysis ();
8160 cselib_finish ();
8161 }
8162
8163 /* Call cse / combine like post-reload optimization phases.
8164 FIRST is the first instruction. */
8165 void
8166 reload_cse_regs (first)
8167 rtx first;
8168 {
8169 reload_cse_regs_1 (first);
8170 reload_combine ();
8171 reload_cse_move2add (first);
8172 if (flag_expensive_optimizations)
8173 reload_cse_regs_1 (first);
8174 }
8175
8176 /* Try to simplify a single SET instruction. SET is the set pattern.
8177 INSN is the instruction it came from.
8178 This function only handles one case: if we set a register to a value
8179 which is not a register, we try to find that value in some other register
8180 and change the set into a register copy. */
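/* A hedged example (register numbers invented): for
   (set (reg:SI 0) (mem:SI (reg:SI 7))), if cselib records that hard reg 4
   already holds the loaded value and REGISTER_MOVE_COST makes the copy
   cheaper than MEMORY_MOVE_COST makes the load, the source is replaced
   and the insn becomes (set (reg:SI 0) (reg:SI 4)).  */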
8181
8182 static int
8183 reload_cse_simplify_set (set, insn)
8184 rtx set;
8185 rtx insn;
8186 {
8187 int did_change = 0;
8188 int dreg;
8189 rtx src;
8190 enum reg_class dclass;
8191 int old_cost;
8192 cselib_val *val;
8193 struct elt_loc_list *l;
8194 #ifdef LOAD_EXTEND_OP
8195 enum rtx_code extend_op = NIL;
8196 #endif
8197
8198 dreg = true_regnum (SET_DEST (set));
8199 if (dreg < 0)
8200 return 0;
8201
8202 src = SET_SRC (set);
8203 if (side_effects_p (src) || true_regnum (src) >= 0)
8204 return 0;
8205
8206 dclass = REGNO_REG_CLASS (dreg);
8207
8208 #ifdef LOAD_EXTEND_OP
8209 /* When replacing a memory with a register, we need to honor assumptions
8210 that combine made wrt the contents of sign bits. We'll do this by
8211 generating an extend instruction instead of a reg->reg copy. Thus
8212 the destination must be a register that we can widen. */
8213 if (GET_CODE (src) == MEM
8214 && GET_MODE_BITSIZE (GET_MODE (src)) < BITS_PER_WORD
8215 && (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != NIL
8216 && GET_CODE (SET_DEST (set)) != REG)
8217 return 0;
8218 #endif
8219
8220 /* If memory loads are cheaper than register copies, don't change them. */
8221 if (GET_CODE (src) == MEM)
8222 old_cost = MEMORY_MOVE_COST (GET_MODE (src), dclass, 1);
8223 else if (CONSTANT_P (src))
8224 old_cost = rtx_cost (src, SET);
8225 else if (GET_CODE (src) == REG)
8226 old_cost = REGISTER_MOVE_COST (GET_MODE (src),
8227 REGNO_REG_CLASS (REGNO (src)), dclass);
8228 else
8229 /* ??? */
8230 old_cost = rtx_cost (src, SET);
8231
8232 val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0);
8233 if (! val)
8234 return 0;
8235 for (l = val->locs; l; l = l->next)
8236 {
8237 rtx this_rtx = l->loc;
8238 int this_cost;
8239
8240 if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
8241 {
8242 #ifdef LOAD_EXTEND_OP
8243 if (extend_op != NIL)
8244 {
8245 HOST_WIDE_INT this_val;
8246
8247 /* ??? I'm lazy and don't wish to handle CONST_DOUBLE. Other
8248 constants, such as SYMBOL_REF, cannot be extended. */
8249 if (GET_CODE (this_rtx) != CONST_INT)
8250 continue;
8251
8252 this_val = INTVAL (this_rtx);
8253 switch (extend_op)
8254 {
8255 case ZERO_EXTEND:
8256 this_val &= GET_MODE_MASK (GET_MODE (src));
8257 break;
8258 case SIGN_EXTEND:
8259 /* ??? In theory we're already extended. */
8260 if (this_val == trunc_int_for_mode (this_val, GET_MODE (src)))
8261 break;
8262 default:
8263 abort ();
8264 }
8265 this_rtx = GEN_INT (this_val);
8266 }
8267 #endif
8268 this_cost = rtx_cost (this_rtx, SET);
8269 }
8270 else if (GET_CODE (this_rtx) == REG)
8271 {
8272 #ifdef LOAD_EXTEND_OP
8273 if (extend_op != NIL)
8274 {
8275 this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
8276 this_cost = rtx_cost (this_rtx, SET);
8277 }
8278 else
8279 #endif
8280 this_cost = REGISTER_MOVE_COST (GET_MODE (this_rtx),
8281 REGNO_REG_CLASS (REGNO (this_rtx)),
8282 dclass);
8283 }
8284 else
8285 continue;
8286
8287 /* If equal costs, prefer registers over anything else. That
8288 tends to lead to smaller instructions on some machines. */
8289 if (this_cost < old_cost
8290 || (this_cost == old_cost
8291 && GET_CODE (this_rtx) == REG
8292 && GET_CODE (SET_SRC (set)) != REG))
8293 {
8294 #ifdef LOAD_EXTEND_OP
8295 if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
8296 && extend_op != NIL)
8297 {
8298 rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
8299 ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
8300 validate_change (insn, &SET_DEST (set), wide_dest, 1);
8301 }
8302 #endif
8303
8304 validate_change (insn, &SET_SRC (set), copy_rtx (this_rtx), 1);
8305 old_cost = this_cost, did_change = 1;
8306 }
8307 }
8308
8309 return did_change;
8310 }
8311
8312 /* Try to replace operands in INSN with equivalent values that are already
8313 in registers. This can be viewed as optional reloading.
8314
8315 For each non-register operand in the insn, see if any hard regs are
8316 known to be equivalent to that operand. Record the alternatives which
8317 can accept these hard registers. Among all alternatives, select the
8318 ones which are better or equal to the one currently matching, where
8319 "better" is in terms of '?' and '!' constraints. Among the remaining
8320 alternatives, select the one which replaces most operands with
8321 hard registers. */
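
/* Illustrative sketch (not from the original sources; numbers are
   hypothetical): if operand 1 of an insn is the constant 10 and cselib
   reports that hard register 3 also holds 10, then every alternative whose
   constraint for operand 1 accepts register 3's class becomes a candidate,
   with each '?' in an alternative adding 3 and each '!' adding 300 to its
   reject count, as computed in the loops below.  */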
8322
8323 static int
8324 reload_cse_simplify_operands (insn)
8325 rtx insn;
8326 {
8327 int i, j;
8328
8329 /* For each operand, all registers that are equivalent to it. */
8330 HARD_REG_SET equiv_regs[MAX_RECOG_OPERANDS];
8331
8332 const char *constraints[MAX_RECOG_OPERANDS];
8333
8334 /* Vector recording how bad an alternative is. */
8335 int *alternative_reject;
8336 /* Vector recording how many registers can be introduced by choosing
8337 this alternative. */
8338 int *alternative_nregs;
8339 /* Array of vectors recording, for each operand and each alternative,
8340 which hard register to substitute, or -1 if the operand should be
8341 left as it is. */
8342 int *op_alt_regno[MAX_RECOG_OPERANDS];
8343 /* Array of alternatives, sorted in order of decreasing desirability. */
8344 int *alternative_order;
8345 rtx reg = gen_rtx_REG (VOIDmode, -1);
8346
8347 extract_insn (insn);
8348
8349 if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
8350 return 0;
8351
8352 /* Figure out which alternative currently matches. */
8353 if (! constrain_operands (1))
8354 fatal_insn_not_found (insn);
8355
8356 alternative_reject = (int *) alloca (recog_data.n_alternatives * sizeof (int));
8357 alternative_nregs = (int *) alloca (recog_data.n_alternatives * sizeof (int));
8358 alternative_order = (int *) alloca (recog_data.n_alternatives * sizeof (int));
8359 memset ((char *)alternative_reject, 0, recog_data.n_alternatives * sizeof (int));
8360 memset ((char *)alternative_nregs, 0, recog_data.n_alternatives * sizeof (int));
8361
8362 /* For each operand, find out which regs are equivalent. */
8363 for (i = 0; i < recog_data.n_operands; i++)
8364 {
8365 cselib_val *v;
8366 struct elt_loc_list *l;
8367
8368 CLEAR_HARD_REG_SET (equiv_regs[i]);
8369
8370 /* cselib blows up on CODE_LABELs. Trying to fix that doesn't seem
8371 right, so avoid the problem here. Likewise if we have a constant
8372 and the insn pattern doesn't tell us the mode we need. */
8373 if (GET_CODE (recog_data.operand[i]) == CODE_LABEL
8374 || (CONSTANT_P (recog_data.operand[i])
8375 && recog_data.operand_mode[i] == VOIDmode))
8376 continue;
8377
8378 v = cselib_lookup (recog_data.operand[i], recog_data.operand_mode[i], 0);
8379 if (! v)
8380 continue;
8381
8382 for (l = v->locs; l; l = l->next)
8383 if (GET_CODE (l->loc) == REG)
8384 SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
8385 }
8386
8387 for (i = 0; i < recog_data.n_operands; i++)
8388 {
8389 enum machine_mode mode;
8390 int regno;
8391 const char *p;
8392
8393 op_alt_regno[i] = (int *) alloca (recog_data.n_alternatives * sizeof (int));
8394 for (j = 0; j < recog_data.n_alternatives; j++)
8395 op_alt_regno[i][j] = -1;
8396
8397 p = constraints[i] = recog_data.constraints[i];
8398 mode = recog_data.operand_mode[i];
8399
8400 /* Add the reject values for each alternative given by the constraints
8401 for this operand. */
8402 j = 0;
8403 while (*p != '\0')
8404 {
8405 char c = *p++;
8406 if (c == ',')
8407 j++;
8408 else if (c == '?')
8409 alternative_reject[j] += 3;
8410 else if (c == '!')
8411 alternative_reject[j] += 300;
8412 }
8413
8414 /* We won't change operands which are already registers. We
8415 also don't want to modify output operands. */
8416 regno = true_regnum (recog_data.operand[i]);
8417 if (regno >= 0
8418 || constraints[i][0] == '='
8419 || constraints[i][0] == '+')
8420 continue;
8421
8422 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8423 {
8424 int class = (int) NO_REGS;
8425
8426 if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
8427 continue;
8428
8429 REGNO (reg) = regno;
8430 PUT_MODE (reg, mode);
8431
8432 /* We found a register equal to this operand. Now look for all
8433 alternatives that can accept this register and have not been
8434 assigned a register they can use yet. */
8435 j = 0;
8436 p = constraints[i];
8437 for (;;)
8438 {
8439 char c = *p++;
8440
8441 switch (c)
8442 {
8443 case '=': case '+': case '?':
8444 case '#': case '&': case '!':
8445 case '*': case '%':
8446 case '0': case '1': case '2': case '3': case '4':
8447 case '5': case '6': case '7': case '8': case '9':
8448 case 'm': case '<': case '>': case 'V': case 'o':
8449 case 'E': case 'F': case 'G': case 'H':
8450 case 's': case 'i': case 'n':
8451 case 'I': case 'J': case 'K': case 'L':
8452 case 'M': case 'N': case 'O': case 'P':
8453 case 'p': case 'X':
8454 /* These don't say anything we care about. */
8455 break;
8456
8457 case 'g': case 'r':
8458 class = reg_class_subunion[(int) class][(int) GENERAL_REGS];
8459 break;
8460
8461 default:
8462 class
8463 = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
8464 break;
8465
8466 case ',': case '\0':
8467 /* See if REGNO fits this alternative, and set it up as the
8468 replacement register if we don't have one for this
8469 alternative yet and the operand being replaced is not
8470 a cheap CONST_INT. */
8471 if (op_alt_regno[i][j] == -1
8472 && reg_fits_class_p (reg, class, 0, mode)
8473 && (GET_CODE (recog_data.operand[i]) != CONST_INT
8474 || (rtx_cost (recog_data.operand[i], SET)
8475 > rtx_cost (reg, SET))))
8476 {
8477 alternative_nregs[j]++;
8478 op_alt_regno[i][j] = regno;
8479 }
8480 j++;
8481 break;
8482 }
8483
8484 if (c == '\0')
8485 break;
8486 }
8487 }
8488 }
8489
8490 /* Record all alternatives which are better or equal to the currently
8491 matching one in the alternative_order array. */
8492 for (i = j = 0; i < recog_data.n_alternatives; i++)
8493 if (alternative_reject[i] <= alternative_reject[which_alternative])
8494 alternative_order[j++] = i;
8495 recog_data.n_alternatives = j;
8496
8497 /* Sort it. Given a small number of alternatives, a dumb algorithm
8498 won't hurt too much. */
8499 for (i = 0; i < recog_data.n_alternatives - 1; i++)
8500 {
8501 int best = i;
8502 int best_reject = alternative_reject[alternative_order[i]];
8503 int best_nregs = alternative_nregs[alternative_order[i]];
8504 int tmp;
8505
8506 for (j = i + 1; j < recog_data.n_alternatives; j++)
8507 {
8508 int this_reject = alternative_reject[alternative_order[j]];
8509 int this_nregs = alternative_nregs[alternative_order[j]];
8510
8511 if (this_reject < best_reject
8512 || (this_reject == best_reject && this_nregs < best_nregs))
8513 {
8514 best = j;
8515 best_reject = this_reject;
8516 best_nregs = this_nregs;
8517 }
8518 }
8519
8520 tmp = alternative_order[best];
8521 alternative_order[best] = alternative_order[i];
8522 alternative_order[i] = tmp;
8523 }
8524
8525 /* Substitute the operands as determined by op_alt_regno for the best
8526 alternative. */
8527 j = alternative_order[0];
8528
8529 for (i = 0; i < recog_data.n_operands; i++)
8530 {
8531 enum machine_mode mode = recog_data.operand_mode[i];
8532 if (op_alt_regno[i][j] == -1)
8533 continue;
8534
8535 validate_change (insn, recog_data.operand_loc[i],
8536 gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
8537 }
8538
8539 for (i = recog_data.n_dups - 1; i >= 0; i--)
8540 {
8541 int op = recog_data.dup_num[i];
8542 enum machine_mode mode = recog_data.operand_mode[op];
8543
8544 if (op_alt_regno[op][j] == -1)
8545 continue;
8546
8547 validate_change (insn, recog_data.dup_loc[i],
8548 gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
8549 }
8550
8551 return apply_change_group ();
8552 }
8553 \f
8554 /* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
8555 addressing now.
8556 This code might also be useful when reload gave up on reg+reg addressing
8557 because of clashes between the return register and INDEX_REG_CLASS. */
8558
8559 /* The maximum number of uses of a register we can keep track of to
8560 replace them with reg+reg addressing. */
8561 #define RELOAD_COMBINE_MAX_USES 6
8562
8563 /* INSN is the insn where a register has been used, and USEP points to the
8564 location of the register within the rtl. */
8565 struct reg_use { rtx insn, *usep; };
8566
8567 /* If the register is used in some unknown fashion, USE_INDEX is negative.
8568 If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
8569 indicates where it becomes live again.
8570 Otherwise, USE_INDEX is the index of the last encountered use of the
8571 register (which is the first of these uses we have seen, since we scan backwards),
8572 OFFSET contains the constant offset that is added to the register in
8573 all encountered uses, and USE_RUID indicates the first encountered, i.e.
8574 last, of these uses.
8575 STORE_RUID is always meaningful if we only want to use a value in a
8576 register in a different place: it denotes the next insn in the insn
8577 stream (i.e. the last encountered) that sets or clobbers the register. */
8578 static struct
8579 {
8580 struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
8581 int use_index;
8582 rtx offset;
8583 int store_ruid;
8584 int use_ruid;
8585 } reg_state[FIRST_PSEUDO_REGISTER];
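
/* Illustrative example (hypothetical register numbers): scanning backwards,
   on seeing ... (mem (plus (reg 4) (const_int 8))) ... we decrement
   reg_state[4].use_index, record the insn and the location of the use in
   reg_use[], and remember the offset 8 and the current ruid in use_ruid.
   If a use with a different offset is found later (i.e. earlier in the
   insn stream), reg 4 is marked as used in an unknown fashion by setting
   use_index to -1.  */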
8586
8587 /* Reverse linear uid. This is increased in reload_combine while scanning
8588 the instructions from last to first. It is used to set last_label_ruid
8589 and the store_ruid / use_ruid fields in reg_state. */
8590 static int reload_combine_ruid;
8591
8592 #define LABEL_LIVE(LABEL) \
8593 (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
8594
8595 static void
8596 reload_combine ()
8597 {
8598 rtx insn, set;
8599 int first_index_reg = -1;
8600 int last_index_reg = 0;
8601 int i;
8602 unsigned int r;
8603 int last_label_ruid;
8604 int min_labelno, n_labels;
8605 HARD_REG_SET ever_live_at_start, *label_live;
8606
8607 /* If reg+reg can be used in offsetable memory addresses, the main chunk of
8608 reload has already used it where appropriate, so there is no use in
8609 trying to generate it now. */
8610 if (double_reg_address_ok && INDEX_REG_CLASS != NO_REGS)
8611 return;
8612
8613 /* To avoid wasting too much time later searching for an index register,
8614 determine the minimum and maximum index register numbers. */
8615 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
8616 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
8617 {
8618 if (first_index_reg == -1)
8619 first_index_reg = r;
8620
8621 last_index_reg = r;
8622 }
8623
8624 /* If no index register is available, we can quit now. */
8625 if (first_index_reg == -1)
8626 return;
8627
8628 /* Set up LABEL_LIVE and EVER_LIVE_AT_START. The register lifetime
8629 information is a bit fuzzy immediately after reload, but it's
8630 still good enough to determine which registers are live at a jump
8631 destination. */
8632 min_labelno = get_first_label_num ();
8633 n_labels = max_label_num () - min_labelno;
8634 label_live = (HARD_REG_SET *) xmalloc (n_labels * sizeof (HARD_REG_SET));
8635 CLEAR_HARD_REG_SET (ever_live_at_start);
8636
8637 for (i = n_basic_blocks - 1; i >= 0; i--)
8638 {
8639 insn = BLOCK_HEAD (i);
8640 if (GET_CODE (insn) == CODE_LABEL)
8641 {
8642 HARD_REG_SET live;
8643
8644 REG_SET_TO_HARD_REG_SET (live,
8645 BASIC_BLOCK (i)->global_live_at_start);
8646 compute_use_by_pseudos (&live,
8647 BASIC_BLOCK (i)->global_live_at_start);
8648 COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
8649 IOR_HARD_REG_SET (ever_live_at_start, live);
8650 }
8651 }
8652
8653 /* Initialize last_label_ruid, reload_combine_ruid and reg_state. */
8654 last_label_ruid = reload_combine_ruid = 0;
8655 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
8656 {
8657 reg_state[r].store_ruid = reload_combine_ruid;
8658 if (fixed_regs[r])
8659 reg_state[r].use_index = -1;
8660 else
8661 reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
8662 }
8663
8664 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
8665 {
8666 rtx note;
8667
8668 /* We cannot do our optimization across labels. Invalidating all the use
8669 information we have would be costly, so we just note where the label
8670 is and then later disable any optimization that would cross it. */
8671 if (GET_CODE (insn) == CODE_LABEL)
8672 last_label_ruid = reload_combine_ruid;
8673 else if (GET_CODE (insn) == BARRIER)
8674 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
8675 if (! fixed_regs[r])
8676 reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
8677
8678 if (! INSN_P (insn))
8679 continue;
8680
8681 reload_combine_ruid++;
8682
8683 /* Look for (set (REGX) (CONST_INT))
8684 (set (REGX) (PLUS (REGX) (REGY)))
8685 ...
8686 ... (MEM (REGX)) ...
8687 and convert it to
8688 (set (REGZ) (CONST_INT))
8689 ...
8690 ... (MEM (PLUS (REGZ) (REGY)))... .
8691
8692 First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
8693 and that we know all uses of REGX before it dies. */
8694 set = single_set (insn);
8695 if (set != NULL_RTX
8696 && GET_CODE (SET_DEST (set)) == REG
8697 && (HARD_REGNO_NREGS (REGNO (SET_DEST (set)),
8698 GET_MODE (SET_DEST (set)))
8699 == 1)
8700 && GET_CODE (SET_SRC (set)) == PLUS
8701 && GET_CODE (XEXP (SET_SRC (set), 1)) == REG
8702 && rtx_equal_p (XEXP (SET_SRC (set), 0), SET_DEST (set))
8703 && last_label_ruid < reg_state[REGNO (SET_DEST (set))].use_ruid)
8704 {
8705 rtx reg = SET_DEST (set);
8706 rtx plus = SET_SRC (set);
8707 rtx base = XEXP (plus, 1);
8708 rtx prev = prev_nonnote_insn (insn);
8709 rtx prev_set = prev ? single_set (prev) : NULL_RTX;
8710 unsigned int regno = REGNO (reg);
8711 rtx const_reg = NULL_RTX;
8712 rtx reg_sum = NULL_RTX;
8713
8714 /* Now, we need an index register.
8715 We'll set index_reg to this index register, const_reg to the
8716 register that is to be loaded with the constant
8717 (denoted as REGZ in the substitution illustration above),
8718 and reg_sum to the register-register sum that we want to substitute
8719 for uses of REG (typically in MEMs).
8720 First check REG and BASE for being index registers;
8721 we can use them even if they are not dead. */
8722 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
8723 || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
8724 REGNO (base)))
8725 {
8726 const_reg = reg;
8727 reg_sum = plus;
8728 }
8729 else
8730 {
8731 /* Otherwise, look for a free index register. Since we have
8732 checked above that neither REG nor BASE are index registers,
8733 if we find anything at all, it will be different from these
8734 two registers. */
8735 for (i = first_index_reg; i <= last_index_reg; i++)
8736 {
8737 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
8738 i)
8739 && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
8740 && reg_state[i].store_ruid <= reg_state[regno].use_ruid
8741 && HARD_REGNO_NREGS (i, GET_MODE (reg)) == 1)
8742 {
8743 rtx index_reg = gen_rtx_REG (GET_MODE (reg), i);
8744
8745 const_reg = index_reg;
8746 reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
8747 break;
8748 }
8749 }
8750 }
8751
8752 /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
8753 (REGY), i.e. BASE, is not clobbered before the last use we'll
8754 create. */
8755 if (prev_set != 0
8756 && GET_CODE (SET_SRC (prev_set)) == CONST_INT
8757 && rtx_equal_p (SET_DEST (prev_set), reg)
8758 && reg_state[regno].use_index >= 0
8759 && (reg_state[REGNO (base)].store_ruid
8760 <= reg_state[regno].use_ruid)
8761 && reg_sum != 0)
8762 {
8763 int i;
8764
8765 /* Change destination register and, if necessary, the
8766 constant value in PREV, the constant loading instruction. */
8767 validate_change (prev, &SET_DEST (prev_set), const_reg, 1);
8768 if (reg_state[regno].offset != const0_rtx)
8769 validate_change (prev,
8770 &SET_SRC (prev_set),
8771 GEN_INT (INTVAL (SET_SRC (prev_set))
8772 + INTVAL (reg_state[regno].offset)),
8773 1);
8774
8775 /* Now for every use of REG that we have recorded, replace REG
8776 with REG_SUM. */
8777 for (i = reg_state[regno].use_index;
8778 i < RELOAD_COMBINE_MAX_USES; i++)
8779 validate_change (reg_state[regno].reg_use[i].insn,
8780 reg_state[regno].reg_use[i].usep,
8781 reg_sum, 1);
8782
8783 if (apply_change_group ())
8784 {
8785 rtx *np;
8786
8787 /* Delete the reg-reg addition. */
8788 PUT_CODE (insn, NOTE);
8789 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8790 NOTE_SOURCE_FILE (insn) = 0;
8791
8792 if (reg_state[regno].offset != const0_rtx)
8793 /* Previous REG_EQUIV / REG_EQUAL notes for PREV
8794 are now invalid. */
8795 for (np = &REG_NOTES (prev); *np;)
8796 {
8797 if (REG_NOTE_KIND (*np) == REG_EQUAL
8798 || REG_NOTE_KIND (*np) == REG_EQUIV)
8799 *np = XEXP (*np, 1);
8800 else
8801 np = &XEXP (*np, 1);
8802 }
8803
8804 reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
8805 reg_state[REGNO (const_reg)].store_ruid
8806 = reload_combine_ruid;
8807 continue;
8808 }
8809 }
8810 }
8811
8812 note_stores (PATTERN (insn), reload_combine_note_store, NULL);
8813
8814 if (GET_CODE (insn) == CALL_INSN)
8815 {
8816 rtx link;
8817
8818 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
8819 if (call_used_regs[r])
8820 {
8821 reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
8822 reg_state[r].store_ruid = reload_combine_ruid;
8823 }
8824
8825 for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
8826 link = XEXP (link, 1))
8827 {
8828 rtx usage_rtx = XEXP (XEXP (link, 0), 0);
8829 if (GET_CODE (usage_rtx) == REG)
8830 {
8831 unsigned int i;
8832 unsigned int start_reg = REGNO (usage_rtx);
8833 unsigned int num_regs =
8834 HARD_REGNO_NREGS (start_reg, GET_MODE (usage_rtx));
8835 unsigned int end_reg = start_reg + num_regs - 1;
8836 for (i = start_reg; i <= end_reg; i++)
8837 if (GET_CODE (XEXP (link, 0)) == CLOBBER)
8838 {
8839 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
8840 reg_state[i].store_ruid = reload_combine_ruid;
8841 }
8842 else
8843 reg_state[i].use_index = -1;
8844 }
8845 }
8846
8847 }
8848 else if (GET_CODE (insn) == JUMP_INSN
8849 && GET_CODE (PATTERN (insn)) != RETURN)
8850 {
8851 /* Non-spill registers might be used at the jump destination in
8852 some unknown fashion, so we have to mark the unknown use. */
8853 HARD_REG_SET *live;
8854
8855 if ((condjump_p (insn) || condjump_in_parallel_p (insn))
8856 && JUMP_LABEL (insn))
8857 live = &LABEL_LIVE (JUMP_LABEL (insn));
8858 else
8859 live = &ever_live_at_start;
8860
8861 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
8862 if (TEST_HARD_REG_BIT (*live, i))
8863 reg_state[i].use_index = -1;
8864 }
8865
8866 reload_combine_note_use (&PATTERN (insn), insn);
8867 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
8868 {
8869 if (REG_NOTE_KIND (note) == REG_INC
8870 && GET_CODE (XEXP (note, 0)) == REG)
8871 {
8872 int regno = REGNO (XEXP (note, 0));
8873
8874 reg_state[regno].store_ruid = reload_combine_ruid;
8875 reg_state[regno].use_index = -1;
8876 }
8877 }
8878 }
8879
8880 free (label_live);
8881 }
8882
8883 /* Check if DST is a register or a subreg of a register; if it is,
8884 update reg_state[regno].store_ruid and reg_state[regno].use_index
8885 accordingly. Called via note_stores from reload_combine. */
8886
8887 static void
8888 reload_combine_note_store (dst, set, data)
8889 rtx dst, set;
8890 void *data ATTRIBUTE_UNUSED;
8891 {
8892 int regno = 0;
8893 int i;
8894 enum machine_mode mode = GET_MODE (dst);
8895
8896 if (GET_CODE (dst) == SUBREG)
8897 {
8898 regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
8899 GET_MODE (SUBREG_REG (dst)),
8900 SUBREG_BYTE (dst),
8901 GET_MODE (dst));
8902 dst = SUBREG_REG (dst);
8903 }
8904 if (GET_CODE (dst) != REG)
8905 return;
8906 regno += REGNO (dst);
8907
8908 /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
8909 careful with registers / register parts that are not full words.
8910
8911 Similarly for ZERO_EXTRACT and SIGN_EXTRACT. */
8912 if (GET_CODE (set) != SET
8913 || GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
8914 || GET_CODE (SET_DEST (set)) == SIGN_EXTRACT
8915 || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
8916 {
8917 for (i = HARD_REGNO_NREGS (regno, mode) - 1 + regno; i >= regno; i--)
8918 {
8919 reg_state[i].use_index = -1;
8920 reg_state[i].store_ruid = reload_combine_ruid;
8921 }
8922 }
8923 else
8924 {
8925 for (i = HARD_REGNO_NREGS (regno, mode) - 1 + regno; i >= regno; i--)
8926 {
8927 reg_state[i].store_ruid = reload_combine_ruid;
8928 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
8929 }
8930 }
8931 }
8932
8933 /* XP points to a piece of rtl that has to be checked for any uses of
8934 registers.
8935 *XP is the pattern of INSN, or a part of it.
8936 Called from reload_combine, and recursively by itself. */
8937 static void
8938 reload_combine_note_use (xp, insn)
8939 rtx *xp, insn;
8940 {
8941 rtx x = *xp;
8942 enum rtx_code code = GET_CODE (x);
8943 const char *fmt;
8944 int i, j;
8945 rtx offset = const0_rtx; /* For the REG case below. */
8946
8947 switch (code)
8948 {
8949 case SET:
8950 if (GET_CODE (SET_DEST (x)) == REG)
8951 {
8952 reload_combine_note_use (&SET_SRC (x), insn);
8953 return;
8954 }
8955 break;
8956
8957 case USE:
8958 /* If this is the USE of a return value, we can't change it. */
8959 if (GET_CODE (XEXP (x, 0)) == REG && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
8960 {
8961 /* Mark the return register as used in an unknown fashion. */
8962 rtx reg = XEXP (x, 0);
8963 int regno = REGNO (reg);
8964 int nregs = HARD_REGNO_NREGS (regno, GET_MODE (reg));
8965
8966 while (--nregs >= 0)
8967 reg_state[regno + nregs].use_index = -1;
8968 return;
8969 }
8970 break;
8971
8972 case CLOBBER:
8973 if (GET_CODE (SET_DEST (x)) == REG)
8974 return;
8975 break;
8976
8977 case PLUS:
8978 /* We are interested in (plus (reg) (const_int)) . */
8979 if (GET_CODE (XEXP (x, 0)) != REG
8980 || GET_CODE (XEXP (x, 1)) != CONST_INT)
8981 break;
8982 offset = XEXP (x, 1);
8983 x = XEXP (x, 0);
8984 /* Fall through. */
8985 case REG:
8986 {
8987 int regno = REGNO (x);
8988 int use_index;
8989 int nregs;
8990
8991 /* Some spurious USEs of pseudo registers might remain.
8992 Just ignore them. */
8993 if (regno >= FIRST_PSEUDO_REGISTER)
8994 return;
8995
8996 nregs = HARD_REGNO_NREGS (regno, GET_MODE (x));
8997
8998 /* We can't substitute into multi-hard-reg uses. */
8999 if (nregs > 1)
9000 {
9001 while (--nregs >= 0)
9002 reg_state[regno + nregs].use_index = -1;
9003 return;
9004 }
9005
9006 /* If this register is already used in some unknown fashion, we
9007 can't do anything.
9008 If we decrement the index from zero to -1, we can't store more
9009 uses, so this register becomes used in an unknown fashion. */
9010 use_index = --reg_state[regno].use_index;
9011 if (use_index < 0)
9012 return;
9013
9014 if (use_index != RELOAD_COMBINE_MAX_USES - 1)
9015 {
9016 /* We have found another use for a register that is already
9017 used later. Check if the offsets match; if not, mark the
9018 register as used in an unknown fashion. */
9019 if (! rtx_equal_p (offset, reg_state[regno].offset))
9020 {
9021 reg_state[regno].use_index = -1;
9022 return;
9023 }
9024 }
9025 else
9026 {
9027 /* This is the first use of this register we have seen since we
9028 marked it as dead. */
9029 reg_state[regno].offset = offset;
9030 reg_state[regno].use_ruid = reload_combine_ruid;
9031 }
9032 reg_state[regno].reg_use[use_index].insn = insn;
9033 reg_state[regno].reg_use[use_index].usep = xp;
9034 return;
9035 }
9036
9037 default:
9038 break;
9039 }
9040
9041 /* Recursively process the components of X. */
9042 fmt = GET_RTX_FORMAT (code);
9043 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9044 {
9045 if (fmt[i] == 'e')
9046 reload_combine_note_use (&XEXP (x, i), insn);
9047 else if (fmt[i] == 'E')
9048 {
9049 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9050 reload_combine_note_use (&XVECEXP (x, i, j), insn);
9051 }
9052 }
9053 }
9054 \f
9055 /* See if we can reduce the cost of a constant by replacing a move
9056 with an add. We track situations in which a register is set to a
9057 constant or to a register plus a constant. */
9058 /* We cannot do our optimization across labels. Invalidating all the
9059 information about register contents we have would be costly, so we
9060 use move2add_last_label_luid to note where the label is and then
9061 later disable any optimization that would cross it.
9062 reg_offset[n] / reg_base_reg[n] / reg_mode[n] are only valid if
9063 reg_set_luid[n] is greater than move2add_last_label_luid. */
9064 static int reg_set_luid[FIRST_PSEUDO_REGISTER];
9065
9066 /* If reg_base_reg[n] is negative, register n has been set to
9067 reg_offset[n] in mode reg_mode[n] .
9068 If reg_base_reg[n] is non-negative, register n has been set to the
9069 sum of reg_offset[n] and the value of register reg_base_reg[n]
9070 before reg_set_luid[n], calculated in mode reg_mode[n] . */
9071 static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
9072 static int reg_base_reg[FIRST_PSEUDO_REGISTER];
9073 static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
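
/* A sketch of the tracking (hypothetical register numbers): after
   (set (reg 1) (const_int 100)) we get reg_base_reg[1] == -1 and
   reg_offset[1] == 100; a later (set (reg 2) (plus (reg 1) (const_int 4)))
   then yields reg_base_reg[2] == -1 and reg_offset[2] == 104, i.e. reg 2
   is known to hold the constant 104 until something invalidates it.  */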
9074
9075 /* move2add_luid is linearly increased while scanning the instructions
9076 from first to last. It is used to set reg_set_luid in
9077 reload_cse_move2add and move2add_note_store. */
9078 static int move2add_luid;
9079
9080 /* move2add_last_label_luid is set whenever a label is found. Labels
9081 invalidate all previously collected reg_offset data. */
9082 static int move2add_last_label_luid;
9083
9084 /* Truncate VALUE to MODE and return the result sign extended to HOST_WIDE_INT. */
9085
9086 static HOST_WIDE_INT
9087 sext_for_mode (mode, value)
9088 enum machine_mode mode;
9089 HOST_WIDE_INT value;
9090 {
9091 HOST_WIDE_INT cval = value & GET_MODE_MASK (mode);
9092 int width = GET_MODE_BITSIZE (mode);
9093
9094 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative number,
9095 sign extend it. */
9096 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
9097 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
9098 cval |= (HOST_WIDE_INT) -1 << width;
9099
9100 return cval;
9101 }
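
/* Worked example (not part of the original file): with an 8-bit QImode,
   sext_for_mode (QImode, 0x123) == 0x23, i.e. 35, since bit 7 of the
   masked value is clear, while sext_for_mode (QImode, 0x87) == -121,
   since bit 7 is set and the upper bits are filled with ones.  */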
9102
9103 /* ??? We don't know how zero / sign extension is handled, hence we
9104 can't go from a narrower to a wider mode. */
9105 #define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
9106 (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
9107 || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
9108 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (OUTMODE), \
9109 GET_MODE_BITSIZE (INMODE))))
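
/* For example, on a target where TRULY_NOOP_TRUNCATION holds between the
   integer modes involved, information recorded in SImode may be reused for
   a QImode destination, since the low bits are unchanged; but HImode
   information cannot be used for an SImode destination, because we do not
   know how the upper half would have been extended.  */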
9110
9111 static void
9112 reload_cse_move2add (first)
9113 rtx first;
9114 {
9115 int i;
9116 rtx insn;
9117
9118 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
9119 reg_set_luid[i] = 0;
9120
9121 move2add_last_label_luid = 0;
9122 move2add_luid = 2;
9123 for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
9124 {
9125 rtx pat, note;
9126
9127 if (GET_CODE (insn) == CODE_LABEL)
9128 {
9129 move2add_last_label_luid = move2add_luid;
9130 /* We're going to increment move2add_luid twice after a
9131 label, so that we can use move2add_last_label_luid + 1 as
9132 the luid for constants. */
9133 move2add_luid++;
9134 continue;
9135 }
9136 if (! INSN_P (insn))
9137 continue;
9138 pat = PATTERN (insn);
9139 /* For simplicity, we only perform this optimization on
9140 straightforward SETs. */
9141 if (GET_CODE (pat) == SET
9142 && GET_CODE (SET_DEST (pat)) == REG)
9143 {
9144 rtx reg = SET_DEST (pat);
9145 int regno = REGNO (reg);
9146 rtx src = SET_SRC (pat);
9147
9148 /* Check if we have valid information on the contents of this
9149 register in the mode of REG. */
9150 if (reg_set_luid[regno] > move2add_last_label_luid
9151 && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg), reg_mode[regno]))
9152 {
9153 /* Try to transform (set (REGX) (CONST_INT A))
9154 ...
9155 (set (REGX) (CONST_INT B))
9156 to
9157 (set (REGX) (CONST_INT A))
9158 ...
9159 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
9160
9161 if (GET_CODE (src) == CONST_INT && reg_base_reg[regno] < 0)
9162 {
9163 int success = 0;
9164 rtx new_src = GEN_INT (sext_for_mode (GET_MODE (reg),
9165 INTVAL (src)
9166 - reg_offset[regno]));
9167 /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
9168 use (set (reg) (reg)) instead.
9169 We don't delete this insn, nor do we convert it into a
9170 note, to avoid losing register notes or the return
9171 value flag. jump2 already knows how to get rid of
9172 no-op moves. */
9173 if (new_src == const0_rtx)
9174 success = validate_change (insn, &SET_SRC (pat), reg, 0);
9175 else if (rtx_cost (new_src, PLUS) < rtx_cost (src, SET)
9176 && have_add2_insn (reg, new_src))
9177 success = validate_change (insn, &PATTERN (insn),
9178 gen_add2_insn (reg, new_src), 0);
9179 reg_set_luid[regno] = move2add_luid;
9180 reg_mode[regno] = GET_MODE (reg);
9181 reg_offset[regno] = INTVAL (src);
9182 continue;
9183 }
9184
9185 /* Try to transform (set (REGX) (REGY))
9186 (set (REGX) (PLUS (REGX) (CONST_INT A)))
9187 ...
9188 (set (REGX) (REGY))
9189 (set (REGX) (PLUS (REGX) (CONST_INT B)))
9190 to
9191 (set (REGX) (REGY))
9192 (set (REGX) (PLUS (REGX) (CONST_INT A)))
9193 ...
9194 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
9195 else if (GET_CODE (src) == REG
9196 && reg_set_luid[regno] == reg_set_luid[REGNO (src)]
9197 && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
9198 && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg),
9199 reg_mode[REGNO (src)]))
9200 {
9201 rtx next = next_nonnote_insn (insn);
9202 rtx set = NULL_RTX;
9203 if (next)
9204 set = single_set (next);
9205 if (set
9206 && SET_DEST (set) == reg
9207 && GET_CODE (SET_SRC (set)) == PLUS
9208 && XEXP (SET_SRC (set), 0) == reg
9209 && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
9210 {
9211 rtx src3 = XEXP (SET_SRC (set), 1);
9212 HOST_WIDE_INT added_offset = INTVAL (src3);
9213 HOST_WIDE_INT base_offset = reg_offset[REGNO (src)];
9214 HOST_WIDE_INT regno_offset = reg_offset[regno];
9215 rtx new_src = GEN_INT (sext_for_mode (GET_MODE (reg),
9216 added_offset
9217 + base_offset
9218 - regno_offset));
9219 int success = 0;
9220
9221 if (new_src == const0_rtx)
9222 /* See above why we create (set (reg) (reg)) here. */
9223 success
9224 = validate_change (next, &SET_SRC (set), reg, 0);
9225 else if ((rtx_cost (new_src, PLUS)
9226 < COSTS_N_INSNS (1) + rtx_cost (src3, SET))
9227 && have_add2_insn (reg, new_src))
9228 success
9229 = validate_change (next, &PATTERN (next),
9230 gen_add2_insn (reg, new_src), 0);
9231 if (success)
9232 {
9233 /* INSN might be the first insn in a basic block
9234 if the preceding insn is a conditional jump
9235 or a possibly-throwing call. */
9236 PUT_CODE (insn, NOTE);
9237 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
9238 NOTE_SOURCE_FILE (insn) = 0;
9239 }
9240 insn = next;
9241 reg_mode[regno] = GET_MODE (reg);
9242 reg_offset[regno] = sext_for_mode (GET_MODE (reg),
9243 added_offset
9244 + base_offset);
9245 continue;
9246 }
9247 }
9248 }
9249 }
9250
9251 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
9252 {
9253 if (REG_NOTE_KIND (note) == REG_INC
9254 && GET_CODE (XEXP (note, 0)) == REG)
9255 {
9256 /* Reset the information about this register. */
9257 int regno = REGNO (XEXP (note, 0));
9258 if (regno < FIRST_PSEUDO_REGISTER)
9259 reg_set_luid[regno] = 0;
9260 }
9261 }
9262 note_stores (PATTERN (insn), move2add_note_store, NULL);
9263 /* If this is a CALL_INSN, all call used registers are stored with
9264 unknown values. */
9265 if (GET_CODE (insn) == CALL_INSN)
9266 {
9267 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
9268 {
9269 if (call_used_regs[i])
9270 /* Reset the information about this register. */
9271 reg_set_luid[i] = 0;
9272 }
9273 }
9274 }
9275 }
9276
9277 /* SET is a SET or CLOBBER that sets DST.
9278 Update reg_set_luid, reg_offset and reg_base_reg accordingly.
9279 Called from reload_cse_move2add via note_stores. */
9280
9281 static void
9282 move2add_note_store (dst, set, data)
9283 rtx dst, set;
9284 void *data ATTRIBUTE_UNUSED;
9285 {
9286 unsigned int regno = 0;
9287 unsigned int i;
9288 enum machine_mode mode = GET_MODE (dst);
9289
9290 if (GET_CODE (dst) == SUBREG)
9291 {
9292 regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
9293 GET_MODE (SUBREG_REG (dst)),
9294 SUBREG_BYTE (dst),
9295 GET_MODE (dst));
9296 dst = SUBREG_REG (dst);
9297 }
9298
9299 /* Some targets do argument pushes without adding REG_INC notes. */
9300
9301 if (GET_CODE (dst) == MEM)
9302 {
9303 dst = XEXP (dst, 0);
9304 if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
9305 || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC)
9306 reg_set_luid[REGNO (XEXP (dst, 0))] = 0;
9307 return;
9308 }
9309 if (GET_CODE (dst) != REG)
9310 return;
9311
9312 regno += REGNO (dst);
9313
9314 if (HARD_REGNO_NREGS (regno, mode) == 1 && GET_CODE (set) == SET
9315 && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
9316 && GET_CODE (SET_DEST (set)) != SIGN_EXTRACT
9317 && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
9318 {
9319 rtx src = SET_SRC (set);
9320 rtx base_reg;
9321 HOST_WIDE_INT offset;
9322 int base_regno;
9323 /* This may be different from mode, if SET_DEST (set) is a
9324 SUBREG. */
9325 enum machine_mode dst_mode = GET_MODE (dst);
9326
9327 switch (GET_CODE (src))
9328 {
9329 case PLUS:
9330 if (GET_CODE (XEXP (src, 0)) == REG)
9331 {
9332 base_reg = XEXP (src, 0);
9333
9334 if (GET_CODE (XEXP (src, 1)) == CONST_INT)
9335 offset = INTVAL (XEXP (src, 1));
9336 else if (GET_CODE (XEXP (src, 1)) == REG
9337 && (reg_set_luid[REGNO (XEXP (src, 1))]
9338 > move2add_last_label_luid)
9339 && (MODES_OK_FOR_MOVE2ADD
9340 (dst_mode, reg_mode[REGNO (XEXP (src, 1))])))
9341 {
9342 if (reg_base_reg[REGNO (XEXP (src, 1))] < 0)
9343 offset = reg_offset[REGNO (XEXP (src, 1))];
9344 /* Maybe the first register is known to be a
9345 constant. */
9346 else if (reg_set_luid[REGNO (base_reg)]
9347 > move2add_last_label_luid
9348 && (MODES_OK_FOR_MOVE2ADD
9349 (dst_mode, reg_mode[REGNO (XEXP (src, 1))]))
9350 && reg_base_reg[REGNO (base_reg)] < 0)
9351 {
9352 offset = reg_offset[REGNO (base_reg)];
9353 base_reg = XEXP (src, 1);
9354 }
9355 else
9356 goto invalidate;
9357 }
9358 else
9359 goto invalidate;
9360
9361 break;
9362 }
9363
9364 goto invalidate;
9365
9366 case REG:
9367 base_reg = src;
9368 offset = 0;
9369 break;
9370
9371 case CONST_INT:
9372 /* Start tracking the register as a constant. */
9373 reg_base_reg[regno] = -1;
9374 reg_offset[regno] = INTVAL (SET_SRC (set));
9375 /* We assign the same luid to all registers set to constants. */
9376 reg_set_luid[regno] = move2add_last_label_luid + 1;
9377 reg_mode[regno] = mode;
9378 return;
9379
9380 default:
9381 invalidate:
9382 /* Invalidate the contents of the register. */
9383 reg_set_luid[regno] = 0;
9384 return;
9385 }
9386
9387 base_regno = REGNO (base_reg);
9388 /* If information about the base register is not valid, set it
9389 up as a new base register, pretending its value is known
9390 starting from the current insn. */
9391 if (reg_set_luid[base_regno] <= move2add_last_label_luid)
9392 {
9393 reg_base_reg[base_regno] = base_regno;
9394 reg_offset[base_regno] = 0;
9395 reg_set_luid[base_regno] = move2add_luid;
9396 reg_mode[base_regno] = mode;
9397 }
9398 else if (! MODES_OK_FOR_MOVE2ADD (dst_mode,
9399 reg_mode[base_regno]))
9400 goto invalidate;
9401
9402 reg_mode[regno] = mode;
9403
9404 /* Copy base information from our base register. */
9405 reg_set_luid[regno] = reg_set_luid[base_regno];
9406 reg_base_reg[regno] = reg_base_reg[base_regno];
9407
9408 /* Compute the sum of the offsets or constants. */
9409 reg_offset[regno] = sext_for_mode (dst_mode,
9410 offset
9411 + reg_offset[base_regno]);
9412 }
9413 else
9414 {
9415 unsigned int endregno = regno + HARD_REGNO_NREGS (regno, mode);
9416
9417 for (i = regno; i < endregno; i++)
9418 /* Reset the information about this register. */
9419 reg_set_luid[i] = 0;
9420 }
9421 }
9422
9423 #ifdef AUTO_INC_DEC
9424 static void
9425 add_auto_inc_notes (insn, x)
9426 rtx insn;
9427 rtx x;
9428 {
9429 enum rtx_code code = GET_CODE (x);
9430 const char *fmt;
9431 int i, j;
9432
9433 if (code == MEM && auto_inc_p (XEXP (x, 0)))
9434 {
9435 REG_NOTES (insn)
9436 = gen_rtx_EXPR_LIST (REG_INC, XEXP (XEXP (x, 0), 0), REG_NOTES (insn));
9437 return;
9438 }
9439
9440 /* Scan all the operand sub-expressions. */
9441 fmt = GET_RTX_FORMAT (code);
9442 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9443 {
9444 if (fmt[i] == 'e')
9445 add_auto_inc_notes (insn, XEXP (x, i));
9446 else if (fmt[i] == 'E')
9447 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9448 add_auto_inc_notes (insn, XVECEXP (x, i, j));
9449 }
9450 }
9451 #endif
9452
9453 /* Copy EH notes from an insn to its reloads. */
9454 static void
9455 copy_eh_notes (insn, x)
9456 rtx insn;
9457 rtx x;
9458 {
9459 rtx eh_note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
9460 if (eh_note)
9461 {
9462 for (; x != 0; x = NEXT_INSN (x))
9463 {
9464 if (may_trap_p (PATTERN (x)))
9465 REG_NOTES (x)
9466 = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
9467 REG_NOTES (x));
9468 }
9469 }
9470 }
9471
9472 /* This is used by the reload pass, which emits some instructions after
9473 abnormal calls and thereby moves the basic block end, when it really
9474 wants to emit them on the edge. Look for abnormal call edges, find the
9475 proper call going backward, and fix the damage.
9476 
9477 Instructions that throw exceptions internally are handled similarly. */
9478 static void
9479 fixup_abnormal_edges ()
9480 {
9481 int i;
9482 bool inserted = false;
9483
9484 for (i = 0; i < n_basic_blocks; i++)
9485 {
9486 basic_block bb = BASIC_BLOCK (i);
9487 edge e;
9488
9489 /* Look for cases we are interested in - calls or instructions causing
9490 exceptions. */
9491 for (e = bb->succ; e; e = e->succ_next)
9492 {
9493 if (e->flags & EDGE_ABNORMAL_CALL)
9494 break;
9495 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
9496 == (EDGE_ABNORMAL | EDGE_EH))
9497 break;
9498 }
9499 if (e && GET_CODE (bb->end) != CALL_INSN && !can_throw_internal (bb->end))
9500 {
9501 rtx insn = bb->end, stop = NEXT_INSN (bb->end);
9502 rtx next;
9503 for (e = bb->succ; e; e = e->succ_next)
9504 if (e->flags & EDGE_FALLTHRU)
9505 break;
9506 /* Get past the new insns generated. Allow notes, as the insns may
9507 already have been deleted. */
9508 while ((GET_CODE (insn) == INSN || GET_CODE (insn) == NOTE)
9509 && !can_throw_internal (insn)
9510 && insn != bb->head)
9511 insn = PREV_INSN (insn);
9512 if (GET_CODE (insn) != CALL_INSN && !can_throw_internal (insn))
9513 abort ();
9514 bb->end = insn;
9515 inserted = true;
9516 insn = NEXT_INSN (insn);
9517 while (insn && insn != stop)
9518 {
9519 next = NEXT_INSN (insn);
9520 if (INSN_P (insn))
9521 {
9522 insert_insn_on_edge (PATTERN (insn), e);
9523 flow_delete_insn (insn);
9524 }
9525 insn = next;
9526 }
9527 }
9528 }
9529 if (inserted)
9530 commit_edge_insertions ();
9531 }