/* gcc/reload1.c (from gcc.git); associated commit message:
   "Remove erroneously-installed change."  */
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include <stdio.h>
22 #include "config.h"
23 #include "rtl.h"
24 #include "obstack.h"
25 #include "insn-config.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
28 #include "flags.h"
29 #include "expr.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "reload.h"
33 #include "recog.h"
34 #include "basic-block.h"
35 #include "output.h"
36
37 /* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
69
70
71 #ifndef REGISTER_MOVE_COST
72 #define REGISTER_MOVE_COST(x, y) 2
73 #endif
74
75 #ifndef MEMORY_MOVE_COST
76 #define MEMORY_MOVE_COST(x) 4
77 #endif
78 \f
79 /* During reload_as_needed, element N contains a REG rtx for the hard reg
80 into which reg N has been reloaded (perhaps for a previous insn). */
81 static rtx *reg_last_reload_reg;
82
83 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
84 for an output reload that stores into reg N. */
85 static char *reg_has_output_reload;
86
87 /* Indicates which hard regs are reload-registers for an output reload
88 in the current insn. */
89 static HARD_REG_SET reg_is_output_reload;
90
91 /* Element N is the constant value to which pseudo reg N is equivalent,
92 or zero if pseudo reg N is not equivalent to a constant.
93 find_reloads looks at this in order to replace pseudo reg N
94 with the constant it stands for. */
95 rtx *reg_equiv_constant;
96
97 /* Element N is a memory location to which pseudo reg N is equivalent,
98 prior to any register elimination (such as frame pointer to stack
99 pointer). Depending on whether or not it is a valid address, this value
100 is transferred to either reg_equiv_address or reg_equiv_mem. */
101 rtx *reg_equiv_memory_loc;
102
103 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
104 This is used when the address is not valid as a memory address
105 (because its displacement is too big for the machine.) */
106 rtx *reg_equiv_address;
107
108 /* Element N is the memory slot to which pseudo reg N is equivalent,
109 or zero if pseudo reg N is not equivalent to a memory slot. */
110 rtx *reg_equiv_mem;
111
112 /* Widest width in which each pseudo reg is referred to (via subreg). */
113 static int *reg_max_ref_width;
114
115 /* Element N is the insn that initialized reg N from its equivalent
116 constant or memory slot. */
117 static rtx *reg_equiv_init;
118
119 /* During reload_as_needed, element N contains the last pseudo regno
120 reloaded into the Nth reload register. This vector is in parallel
121 with spill_regs. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
125
126 /* During reload_as_needed, element N contains the insn for which
127 the Nth reload register was last used. This vector is in parallel
128 with spill_regs, and its contents are significant only when
129 reg_reloaded_contents is significant. */
130 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
131
132 /* Number of spill-regs so far; number of valid elements of spill_regs. */
133 static int n_spills;
134
135 /* In parallel with spill_regs, contains REG rtx's for those regs.
136 Holds the last rtx used for any given reg, or 0 if it has never
137 been used for spilling yet. This rtx is reused, provided it has
138 the proper mode. */
139 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
140
141 /* In parallel with spill_regs, contains nonzero for a spill reg
142 that was stored after the last time it was used.
143 The precise value is the insn generated to do the store. */
144 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
145
146 /* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs. */
150 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
151
152 /* This reg set indicates registers that may not be used for retrying global
153 allocation. The registers that may not be used include all spill registers
154 and the frame pointer (if we are using one). */
155 HARD_REG_SET forbidden_regs;
156
157 /* This reg set indicates registers that are not good for spill registers.
158 They will not be used to complete groups of spill registers. This includes
159 all fixed registers, registers that may be eliminated, and, if
160 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
161
162 (spill_reg_order prevents these registers from being used to start a
163 group.) */
164 static HARD_REG_SET bad_spill_regs;
165
166 /* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `spills' is the number of
168 elements that are actually valid; new ones are added at the end. */
169 static short spill_regs[FIRST_PSEUDO_REGISTER];
170
171 /* Describes order of preference for putting regs into spill_regs.
172 Contains the numbers of all the hard regs, in order most preferred first.
173 This order is different for each function.
174 It is set up by order_regs_for_reload.
175 Empty elements at the end contain -1. */
176 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
177
178 /* 1 for a hard register that appears explicitly in the rtl
179 (for example, function value registers, special registers
180 used by insns, structure value pointer registers). */
181 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
182
183 /* Indicates if a register was counted against the need for
184 groups. 0 means it can count against max_nongroup instead. */
185 static HARD_REG_SET counted_for_groups;
186
187 /* Indicates if a register was counted against the need for
188 non-groups. 0 means it can become part of a new group.
189 During choose_reload_regs, 1 here means don't use this reg
190 as part of a group, even if it seems to be otherwise ok. */
191 static HARD_REG_SET counted_for_nongroups;
192
193 /* Indexed by pseudo reg number N,
194 says may not delete stores into the real (memory) home of pseudo N.
195 This is set if we already substituted a memory equivalent in some uses,
196 which happens when we have to eliminate the fp from it. */
197 static char *cannot_omit_stores;
198
199 /* Nonzero if indirect addressing is supported on the machine; this means
200 that spilling (REG n) does not require reloading it into a register in
201 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
202 value indicates the level of indirect addressing supported, e.g., two
203 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
204 a hard register. */
205
206 static char spill_indirect_levels;
207
208 /* Nonzero if indirect addressing is supported when the innermost MEM is
209 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
210 which these are valid is the same as spill_indirect_levels, above. */
211
212 char indirect_symref_ok;
213
214 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
215
216 char double_reg_address_ok;
217
218 /* Record the stack slot for each spilled hard register. */
219
220 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
221
222 /* Width allocated so far for that stack slot. */
223
224 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
225
226 /* Indexed by register class and basic block number, nonzero if there is
227 any need for a spill register of that class in that basic block.
228 The pointer is 0 if we did stupid allocation and don't know
229 the structure of basic blocks. */
230
231 char *basic_block_needs[N_REG_CLASSES];
232
233 /* First uid used by insns created by reload in this function.
234 Used in find_equiv_reg. */
235 int reload_first_uid;
236
237 /* Flag set by local-alloc or global-alloc if anything is live in
238 a call-clobbered reg across calls. */
239
240 int caller_save_needed;
241
242 /* Set to 1 while reload_as_needed is operating.
243 Required by some machines to handle any generated moves differently. */
244
245 int reload_in_progress = 0;
246
247 /* These arrays record the insn_code of insns that may be needed to
248 perform input and output reloads of special objects. They provide a
249 place to pass a scratch register. */
250
251 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
252 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
253
254 /* This obstack is used for allocation of rtl during register elimination.
255 The allocated storage can be freed once find_reloads has processed the
256 insn. */
257
258 struct obstack reload_obstack;
259 char *reload_firstobj;
260
261 #define obstack_chunk_alloc xmalloc
262 #define obstack_chunk_free free
263
264 /* List of labels that must never be deleted. */
265 extern rtx forced_labels;
266 \f
267 /* This structure is used to record information about register eliminations.
268 Each array entry describes one possible way of eliminating a register
269 in favor of another. If there is more than one way of eliminating a
270 particular register, the most preferred should be specified first. */
271
272 static struct elim_table
273 {
274 int from; /* Register number to be eliminated. */
275 int to; /* Register number used as replacement. */
276 int initial_offset; /* Initial difference between values. */
277 int can_eliminate; /* Non-zero if this elimination can be done. */
278 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
279 insns made by reload. */
280 int offset; /* Current offset between the two regs. */
281 int max_offset; /* Maximum offset between the two regs. */
282 int previous_offset; /* Offset at end of previous insn. */
283 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
284 rtx from_rtx; /* REG rtx for the register to be eliminated.
285 We cannot simply compare the number since
286 we might then spuriously replace a hard
287 register corresponding to a pseudo
288 assigned to the reg to be eliminated. */
289 rtx to_rtx; /* REG rtx for the replacement. */
290 } reg_eliminate[] =
291
292 /* If a set of eliminable registers was specified, define the table from it.
293 Otherwise, default to the normal case of the frame pointer being
294 replaced by the stack pointer. */
295
296 #ifdef ELIMINABLE_REGS
297 ELIMINABLE_REGS;
298 #else
299 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
300 #endif
301
302 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
303
304 /* Record the number of pending eliminations that have an offset not equal
305 to their initial offset. If non-zero, we use a new copy of each
306 replacement result in any insns encountered. */
307 static int num_not_at_initial_offset;
308
309 /* Count the number of registers that we may be able to eliminate. */
310 static int num_eliminable;
311
312 /* For each label, we record the offset of each elimination. If we reach
313 a label by more than one path and an offset differs, we cannot do the
314 elimination. This information is indexed by the number of the label.
315 The first table is an array of flags that records whether we have yet
316 encountered a label and the second table is an array of arrays, one
317 entry in the latter array for each elimination. */
318
319 static char *offsets_known_at;
320 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
321
322 /* Number of labels in the current function. */
323
324 static int num_labels;
325
326 struct hard_reg_n_uses { int regno; int uses; };
327 \f
328 static int possible_group_p PROTO((int, int *));
329 static void count_possible_groups PROTO((int *, enum machine_mode *,
330 int *));
331 static int modes_equiv_for_class_p PROTO((enum machine_mode,
332 enum machine_mode,
333 enum reg_class));
334 static void spill_failure PROTO((rtx));
335 static int new_spill_reg PROTO((int, int, int *, int *, int,
336 FILE *));
337 static void delete_dead_insn PROTO((rtx));
338 static void alter_reg PROTO((int, int));
339 static void mark_scratch_live PROTO((rtx));
340 static void set_label_offsets PROTO((rtx, rtx, int));
341 static int eliminate_regs_in_insn PROTO((rtx, int));
342 static void mark_not_eliminable PROTO((rtx, rtx));
343 static int spill_hard_reg PROTO((int, int, FILE *, int));
344 static void scan_paradoxical_subregs PROTO((rtx));
345 static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
346 struct hard_reg_n_uses *));
347 static void order_regs_for_reload PROTO((void));
348 static int compare_spill_regs PROTO((short *, short *));
349 static void reload_as_needed PROTO((rtx, int));
350 static void forget_old_reloads_1 PROTO((rtx, rtx));
351 static int reload_reg_class_lower PROTO((short *, short *));
352 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
353 enum machine_mode));
354 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
355 enum machine_mode));
356 static int reload_reg_free_p PROTO((int, int, enum reload_type));
357 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
358 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
359 static int reloads_conflict PROTO((int, int));
360 static int allocate_reload_reg PROTO((int, rtx, int, int));
361 static void choose_reload_regs PROTO((rtx, rtx));
362 static void merge_assigned_reloads PROTO((rtx));
363 static void emit_reload_insns PROTO((rtx));
364 static void delete_output_reload PROTO((rtx, int, rtx));
365 static void inc_for_reload PROTO((rtx, rtx, int));
366 static int constraint_accepts_reg_p PROTO((char *, rtx));
367 static int count_occurrences PROTO((rtx, rtx));
368 \f
369 /* Initialize the reload pass once per compilation. */
370
371 void
372 init_reload ()
373 {
374 register int i;
375
376 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
377 Set spill_indirect_levels to the number of levels such addressing is
378 permitted, zero if it is not permitted at all. */
379
380 register rtx tem
381 = gen_rtx (MEM, Pmode,
382 gen_rtx (PLUS, Pmode,
383 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
384 GEN_INT (4)));
385 spill_indirect_levels = 0;
386
387 while (memory_address_p (QImode, tem))
388 {
389 spill_indirect_levels++;
390 tem = gen_rtx (MEM, Pmode, tem);
391 }
392
393 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
394
395 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
396 indirect_symref_ok = memory_address_p (QImode, tem);
397
398 /* See if reg+reg is a valid (and offsettable) address. */
399
400 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
401 {
402 tem = gen_rtx (PLUS, Pmode,
403 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
404 gen_rtx (REG, Pmode, i));
405 /* This way, we make sure that reg+reg is an offsettable address. */
406 tem = plus_constant (tem, 4);
407
408 if (memory_address_p (QImode, tem))
409 {
410 double_reg_address_ok = 1;
411 break;
412 }
413 }
414
415 /* Initialize obstack for our rtl allocation. */
416 gcc_obstack_init (&reload_obstack);
417 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
418 }
419
420 /* Main entry point for the reload pass.
421
422 FIRST is the first insn of the function being compiled.
423
424 GLOBAL nonzero means we were called from global_alloc
425 and should attempt to reallocate any pseudoregs that we
426 displace from hard regs we will use for reloads.
427 If GLOBAL is zero, we do not have enough information to do that,
428 so any pseudo reg that is spilled must go to the stack.
429
430 DUMPFILE is the global-reg debugging dump file stream, or 0.
431 If it is nonzero, messages are written to it to describe
432 which registers are seized as reload regs, which pseudo regs
433 are spilled from them, and where the pseudo regs are reallocated to.
434
435 Return value is nonzero if reload failed
436 and we must not do any more for this function. */
437
438 int
439 reload (first, global, dumpfile)
440 rtx first;
441 int global;
442 FILE *dumpfile;
443 {
444 register int class;
445 register int i, j, k;
446 register rtx insn;
447 register struct elim_table *ep;
448
449 int something_changed;
450 int something_needs_reloads;
451 int something_needs_elimination;
452 int new_basic_block_needs;
453 enum reg_class caller_save_spill_class = NO_REGS;
454 int caller_save_group_size = 1;
455
456 /* Nonzero means we couldn't get enough spill regs. */
457 int failure = 0;
458
459 /* The basic block number currently being processed for INSN. */
460 int this_block;
461
462 /* Make sure even insns with volatile mem refs are recognizable. */
463 init_recog ();
464
465 /* Enable find_equiv_reg to distinguish insns made by reload. */
466 reload_first_uid = get_max_uid ();
467
468 for (i = 0; i < N_REG_CLASSES; i++)
469 basic_block_needs[i] = 0;
470
471 #ifdef SECONDARY_MEMORY_NEEDED
472 /* Initialize the secondary memory table. */
473 clear_secondary_mem ();
474 #endif
475
476 /* Remember which hard regs appear explicitly
477 before we merge into `regs_ever_live' the ones in which
478 pseudo regs have been allocated. */
479 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
480
481 /* We don't have a stack slot for any spill reg yet. */
482 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
483 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
484
485 /* Initialize the save area information for caller-save, in case some
486 are needed. */
487 init_save_areas ();
488
489 /* Compute which hard registers are now in use
490 as homes for pseudo registers.
491 This is done here rather than (eg) in global_alloc
492 because this point is reached even if not optimizing. */
493
494 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
495 mark_home_live (i);
496
497 for (i = 0; i < scratch_list_length; i++)
498 if (scratch_list[i])
499 mark_scratch_live (scratch_list[i]);
500
501 /* Make sure that the last insn in the chain
502 is not something that needs reloading. */
503 emit_note (NULL_PTR, NOTE_INSN_DELETED);
504
505 /* Find all the pseudo registers that didn't get hard regs
506 but do have known equivalent constants or memory slots.
507 These include parameters (known equivalent to parameter slots)
508 and cse'd or loop-moved constant memory addresses.
509
510 Record constant equivalents in reg_equiv_constant
511 so they will be substituted by find_reloads.
512 Record memory equivalents in reg_mem_equiv so they can
513 be substituted eventually by altering the REG-rtx's. */
514
515 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
516 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
517 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
518 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
519 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
520 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
521 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
522 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
523 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
524 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
525 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
526 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
527 cannot_omit_stores = (char *) alloca (max_regno);
528 bzero (cannot_omit_stores, max_regno);
529
530 #ifdef SMALL_REGISTER_CLASSES
531 CLEAR_HARD_REG_SET (forbidden_regs);
532 #endif
533
534 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
535 Also find all paradoxical subregs and find largest such for each pseudo.
536 On machines with small register classes, record hard registers that
537 are used for user variables. These can never be used for spills. */
538
539 for (insn = first; insn; insn = NEXT_INSN (insn))
540 {
541 rtx set = single_set (insn);
542
543 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
544 {
545 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
546 if (note
547 #ifdef LEGITIMATE_PIC_OPERAND_P
548 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
549 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
550 #endif
551 )
552 {
553 rtx x = XEXP (note, 0);
554 i = REGNO (SET_DEST (set));
555 if (i > LAST_VIRTUAL_REGISTER)
556 {
557 if (GET_CODE (x) == MEM)
558 reg_equiv_memory_loc[i] = x;
559 else if (CONSTANT_P (x))
560 {
561 if (LEGITIMATE_CONSTANT_P (x))
562 reg_equiv_constant[i] = x;
563 else
564 reg_equiv_memory_loc[i]
565 = force_const_mem (GET_MODE (SET_DEST (set)), x);
566 }
567 else
568 continue;
569
570 /* If this register is being made equivalent to a MEM
571 and the MEM is not SET_SRC, the equivalencing insn
572 is one with the MEM as a SET_DEST and it occurs later.
573 So don't mark this insn now. */
574 if (GET_CODE (x) != MEM
575 || rtx_equal_p (SET_SRC (set), x))
576 reg_equiv_init[i] = insn;
577 }
578 }
579 }
580
581 /* If this insn is setting a MEM from a register equivalent to it,
582 this is the equivalencing insn. */
583 else if (set && GET_CODE (SET_DEST (set)) == MEM
584 && GET_CODE (SET_SRC (set)) == REG
585 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
586 && rtx_equal_p (SET_DEST (set),
587 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
588 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
589
590 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
591 scan_paradoxical_subregs (PATTERN (insn));
592 }
593
594 /* Does this function require a frame pointer? */
595
596 frame_pointer_needed = (! flag_omit_frame_pointer
597 #ifdef EXIT_IGNORE_STACK
598 /* ?? If EXIT_IGNORE_STACK is set, we will not save
599 and restore sp for alloca. So we can't eliminate
600 the frame pointer in that case. At some point,
601 we should improve this by emitting the
602 sp-adjusting insns for this case. */
603 || (current_function_calls_alloca
604 && EXIT_IGNORE_STACK)
605 #endif
606 || FRAME_POINTER_REQUIRED);
607
608 num_eliminable = 0;
609
610 /* Initialize the table of registers to eliminate. The way we do this
611 depends on how the eliminable registers were defined. */
612 #ifdef ELIMINABLE_REGS
613 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
614 {
615 ep->can_eliminate = ep->can_eliminate_previous
616 = (CAN_ELIMINATE (ep->from, ep->to)
617 && (ep->from != HARD_FRAME_POINTER_REGNUM
618 || ! frame_pointer_needed));
619 }
620 #else
621 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
622 = ! frame_pointer_needed;
623 #endif
624
625 /* Count the number of eliminable registers and build the FROM and TO
626 REG rtx's. Note that code in gen_rtx will cause, e.g.,
627 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
628 We depend on this. */
629 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
630 {
631 num_eliminable += ep->can_eliminate;
632 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
633 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
634 }
635
636 num_labels = max_label_num () - get_first_label_num ();
637
638 /* Allocate the tables used to store offset information at labels. */
639 offsets_known_at = (char *) alloca (num_labels);
640 offsets_at
641 = (int (*)[NUM_ELIMINABLE_REGS])
642 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
643
644 offsets_known_at -= get_first_label_num ();
645 offsets_at -= get_first_label_num ();
646
647 /* Alter each pseudo-reg rtx to contain its hard reg number.
648 Assign stack slots to the pseudos that lack hard regs or equivalents.
649 Do not touch virtual registers. */
650
651 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
652 alter_reg (i, -1);
653
654 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
655 because the stack size may be a part of the offset computation for
656 register elimination. */
657 assign_stack_local (BLKmode, 0, 0);
658
659 /* If we have some registers we think can be eliminated, scan all insns to
660 see if there is an insn that sets one of these registers to something
661 other than itself plus a constant. If so, the register cannot be
662 eliminated. Doing this scan here eliminates an extra pass through the
663 main reload loop in the most common case where register elimination
664 cannot be done. */
665 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
666 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
667 || GET_CODE (insn) == CALL_INSN)
668 note_stores (PATTERN (insn), mark_not_eliminable);
669
670 #ifndef REGISTER_CONSTRAINTS
671 /* If all the pseudo regs have hard regs,
672 except for those that are never referenced,
673 we know that no reloads are needed. */
674 /* But that is not true if there are register constraints, since
675 in that case some pseudos might be in the wrong kind of hard reg. */
676
677 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
678 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
679 break;
680
681 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
682 return;
683 #endif
684
685 /* Compute the order of preference for hard registers to spill.
686 Store them by decreasing preference in potential_reload_regs. */
687
688 order_regs_for_reload ();
689
690 /* So far, no hard regs have been spilled. */
691 n_spills = 0;
692 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
693 spill_reg_order[i] = -1;
694
695 /* On most machines, we can't use any register explicitly used in the
696 rtl as a spill register. But on some, we have to. Those will have
697 taken care to keep the life of hard regs as short as possible. */
698
699 #ifndef SMALL_REGISTER_CLASSES
700 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
701 #endif
702
703 /* Spill any hard regs that we know we can't eliminate. */
704 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
705 if (! ep->can_eliminate)
706 {
707 spill_hard_reg (ep->from, global, dumpfile, 1);
708 regs_ever_live[ep->from] = 1;
709 }
710
711 if (global)
712 for (i = 0; i < N_REG_CLASSES; i++)
713 {
714 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
715 bzero (basic_block_needs[i], n_basic_blocks);
716 }
717
718 /* From now on, we need to emit any moves without making new pseudos. */
719 reload_in_progress = 1;
720
721 /* This loop scans the entire function each go-round
722 and repeats until one repetition spills no additional hard regs. */
723
724 /* This flag is set when a pseudo reg is spilled,
725 to require another pass. Note that getting an additional reload
726 reg does not necessarily imply any pseudo reg was spilled;
727 sometimes we find a reload reg that no pseudo reg was allocated in. */
728 something_changed = 1;
729 /* This flag is set if there are any insns that require reloading. */
730 something_needs_reloads = 0;
731 /* This flag is set if there are any insns that require register
732 eliminations. */
733 something_needs_elimination = 0;
734 while (something_changed)
735 {
736 rtx after_call = 0;
737
738 /* For each class, number of reload regs needed in that class.
739 This is the maximum over all insns of the needs in that class
740 of the individual insn. */
741 int max_needs[N_REG_CLASSES];
742 /* For each class, size of group of consecutive regs
743 that is needed for the reloads of this class. */
744 int group_size[N_REG_CLASSES];
745 /* For each class, max number of consecutive groups needed.
746 (Each group contains group_size[CLASS] consecutive registers.) */
747 int max_groups[N_REG_CLASSES];
748 /* For each class, max number needed of regs that don't belong
749 to any of the groups. */
750 int max_nongroups[N_REG_CLASSES];
751 /* For each class, the machine mode which requires consecutive
752 groups of regs of that class.
753 If two different modes ever require groups of one class,
754 they must be the same size and equally restrictive for that class,
755 otherwise we can't handle the complexity. */
756 enum machine_mode group_mode[N_REG_CLASSES];
757 /* Record the insn where each maximum need is first found. */
758 rtx max_needs_insn[N_REG_CLASSES];
759 rtx max_groups_insn[N_REG_CLASSES];
760 rtx max_nongroups_insn[N_REG_CLASSES];
761 rtx x;
762 int starting_frame_size = get_frame_size ();
763 static char *reg_class_names[] = REG_CLASS_NAMES;
764
765 something_changed = 0;
766 bzero ((char *) max_needs, sizeof max_needs);
767 bzero ((char *) max_groups, sizeof max_groups);
768 bzero ((char *) max_nongroups, sizeof max_nongroups);
769 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
770 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
771 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
772 bzero ((char *) group_size, sizeof group_size);
773 for (i = 0; i < N_REG_CLASSES; i++)
774 group_mode[i] = VOIDmode;
775
776 /* Keep track of which basic blocks are needing the reloads. */
777 this_block = 0;
778
779 /* Remember whether any element of basic_block_needs
780 changes from 0 to 1 in this pass. */
781 new_basic_block_needs = 0;
782
783 /* Reset all offsets on eliminable registers to their initial values. */
784 #ifdef ELIMINABLE_REGS
785 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
786 {
787 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
788 ep->previous_offset = ep->offset
789 = ep->max_offset = ep->initial_offset;
790 }
791 #else
792 #ifdef INITIAL_FRAME_POINTER_OFFSET
793 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
794 #else
795 if (!FRAME_POINTER_REQUIRED)
796 abort ();
797 reg_eliminate[0].initial_offset = 0;
798 #endif
799 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
800 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
801 #endif
802
803 num_not_at_initial_offset = 0;
804
805 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
806
807 /* Set a known offset for each forced label to be at the initial offset
808 of each elimination. We do this because we assume that all
809 computed jumps occur from a location where each elimination is
810 at its initial offset. */
811
812 for (x = forced_labels; x; x = XEXP (x, 1))
813 if (XEXP (x, 0))
814 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
815
816 /* For each pseudo register that has an equivalent location defined,
817 try to eliminate any eliminable registers (such as the frame pointer)
818 assuming initial offsets for the replacement register, which
819 is the normal case.
820
821 If the resulting location is directly addressable, substitute
822 the MEM we just got directly for the old REG.
823
824 If it is not addressable but is a constant or the sum of a hard reg
825 and constant, it is probably not addressable because the constant is
826 out of range, in that case record the address; we will generate
827 hairy code to compute the address in a register each time it is
828 needed. Similarly if it is a hard register, but one that is not
829 valid as an address register.
830
831 If the location is not addressable, but does not have one of the
832 above forms, assign a stack slot. We have to do this to avoid the
833 potential of producing lots of reloads if, e.g., a location involves
834 a pseudo that didn't get a hard register and has an equivalent memory
835 location that also involves a pseudo that didn't get a hard register.
836
837 Perhaps at some point we will improve reload_when_needed handling
838 so this problem goes away. But that's very hairy. */
839
840 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
841 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
842 {
843 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
844
845 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
846 XEXP (x, 0)))
847 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
848 else if (CONSTANT_P (XEXP (x, 0))
849 || (GET_CODE (XEXP (x, 0)) == REG
850 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
851 || (GET_CODE (XEXP (x, 0)) == PLUS
852 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
853 && (REGNO (XEXP (XEXP (x, 0), 0))
854 < FIRST_PSEUDO_REGISTER)
855 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
856 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
857 else
858 {
859 /* Make a new stack slot. Then indicate that something
860 changed so we go back and recompute offsets for
861 eliminable registers because the allocation of memory
862 below might change some offset. reg_equiv_{mem,address}
863 will be set up for this pseudo on the next pass around
864 the loop. */
865 reg_equiv_memory_loc[i] = 0;
866 reg_equiv_init[i] = 0;
867 alter_reg (i, -1);
868 something_changed = 1;
869 }
870 }
871
872 /* If we allocated another pseudo to the stack, redo elimination
873 bookkeeping. */
874 if (something_changed)
875 continue;
876
877 /* If caller-saves needs a group, initialize the group to include
878 the size and mode required for caller-saves. */
879
880 if (caller_save_group_size > 1)
881 {
882 group_mode[(int) caller_save_spill_class] = Pmode;
883 group_size[(int) caller_save_spill_class] = caller_save_group_size;
884 }
885
886 /* Compute the most additional registers needed by any instruction.
887 Collect information separately for each class of regs. */
888
889 for (insn = first; insn; insn = NEXT_INSN (insn))
890 {
891 if (global && this_block + 1 < n_basic_blocks
892 && insn == basic_block_head[this_block+1])
893 ++this_block;
894
895 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
896 might include REG_LABEL), we need to see what effects this
897 has on the known offsets at labels. */
898
899 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
900 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
901 && REG_NOTES (insn) != 0))
902 set_label_offsets (insn, insn, 0);
903
904 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
905 {
906 /* Nonzero means don't use a reload reg that overlaps
907 the place where a function value can be returned. */
908 rtx avoid_return_reg = 0;
909
910 rtx old_body = PATTERN (insn);
911 int old_code = INSN_CODE (insn);
912 rtx old_notes = REG_NOTES (insn);
913 int did_elimination = 0;
914
915 /* To compute the number of reload registers of each class
916 needed for an insn, we must simulate what choose_reload_regs
917 can do. We do this by splitting an insn into an "input" and
918 an "output" part. RELOAD_OTHER reloads are used in both.
919 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
920 which must be live over the entire input section of reloads,
921 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
922 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
923 inputs.
924
925 The registers needed for output are RELOAD_OTHER and
926 RELOAD_FOR_OUTPUT, which are live for the entire output
927 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
928 reloads for each operand.
929
930 The total number of registers needed is the maximum of the
931 inputs and outputs. */
932
933 struct needs
934 {
935 /* [0] is normal, [1] is nongroup. */
936 int regs[2][N_REG_CLASSES];
937 int groups[N_REG_CLASSES];
938 };
939
940 /* Each `struct needs' corresponds to one RELOAD_... type. */
941 struct {
942 struct needs other;
943 struct needs input;
944 struct needs output;
945 struct needs insn;
946 struct needs other_addr;
947 struct needs op_addr;
948 struct needs op_addr_reload;
949 struct needs in_addr[MAX_RECOG_OPERANDS];
950 struct needs out_addr[MAX_RECOG_OPERANDS];
951 } insn_needs;
952
953 /* If needed, eliminate any eliminable registers. */
954 if (num_eliminable)
955 did_elimination = eliminate_regs_in_insn (insn, 0);
956
957 #ifdef SMALL_REGISTER_CLASSES
958 /* Set avoid_return_reg if this is an insn
959 that might use the value of a function call. */
960 if (GET_CODE (insn) == CALL_INSN)
961 {
962 if (GET_CODE (PATTERN (insn)) == SET)
963 after_call = SET_DEST (PATTERN (insn));
964 else if (GET_CODE (PATTERN (insn)) == PARALLEL
965 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
966 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
967 else
968 after_call = 0;
969 }
970 else if (after_call != 0
971 && !(GET_CODE (PATTERN (insn)) == SET
972 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
973 {
974 if (reg_referenced_p (after_call, PATTERN (insn)))
975 avoid_return_reg = after_call;
976 after_call = 0;
977 }
978 #endif /* SMALL_REGISTER_CLASSES */
979
980 /* Analyze the instruction. */
981 find_reloads (insn, 0, spill_indirect_levels, global,
982 spill_reg_order);
983
984 /* Remember for later shortcuts which insns had any reloads or
985 register eliminations.
986
987 One might think that it would be worthwhile to mark insns
988 that need register replacements but not reloads, but this is
989 not safe because find_reloads may do some manipulation of
990 the insn (such as swapping commutative operands), which would
991 be lost when we restore the old pattern after register
992 replacement. So the actions of find_reloads must be redone in
993 subsequent passes or in reload_as_needed.
994
995 However, it is safe to mark insns that need reloads
996 but not register replacement. */
997
998 PUT_MODE (insn, (did_elimination ? QImode
999 : n_reloads ? HImode
1000 : GET_MODE (insn) == DImode ? DImode
1001 : VOIDmode));
1002
1003 /* Discard any register replacements done. */
1004 if (did_elimination)
1005 {
1006 obstack_free (&reload_obstack, reload_firstobj);
1007 PATTERN (insn) = old_body;
1008 INSN_CODE (insn) = old_code;
1009 REG_NOTES (insn) = old_notes;
1010 something_needs_elimination = 1;
1011 }
1012
1013 /* If this insn has no reloads, we need not do anything except
1014 in the case of a CALL_INSN when we have caller-saves and
1015 caller-save needs reloads. */
1016
1017 if (n_reloads == 0
1018 && ! (GET_CODE (insn) == CALL_INSN
1019 && caller_save_spill_class != NO_REGS))
1020 continue;
1021
1022 something_needs_reloads = 1;
1023 bzero ((char *) &insn_needs, sizeof insn_needs);
1024
1025 /* Count each reload once in every class
1026 containing the reload's own class. */
1027
1028 for (i = 0; i < n_reloads; i++)
1029 {
1030 register enum reg_class *p;
1031 enum reg_class class = reload_reg_class[i];
1032 int size;
1033 enum machine_mode mode;
1034 int nongroup_need;
1035 struct needs *this_needs;
1036
1037 /* Don't count the dummy reloads, for which one of the
1038 regs mentioned in the insn can be used for reloading.
1039 Don't count optional reloads.
1040 Don't count reloads that got combined with others. */
1041 if (reload_reg_rtx[i] != 0
1042 || reload_optional[i] != 0
1043 || (reload_out[i] == 0 && reload_in[i] == 0
1044 && ! reload_secondary_p[i]))
1045 continue;
1046
1047 /* Show that a reload register of this class is needed
1048 in this basic block. We do not use insn_needs and
1049 insn_groups because they are overly conservative for
1050 this purpose. */
1051 if (global && ! basic_block_needs[(int) class][this_block])
1052 {
1053 basic_block_needs[(int) class][this_block] = 1;
1054 new_basic_block_needs = 1;
1055 }
1056
1057
1058 mode = reload_inmode[i];
1059 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1060 mode = reload_outmode[i];
1061 size = CLASS_MAX_NREGS (class, mode);
1062
1063 /* If this class doesn't want a group, determine if we have
1064 a nongroup need or a regular need. We have a nongroup
1065 need if this reload conflicts with a group reload whose
1066 class intersects with this reload's class. */
1067
1068 nongroup_need = 0;
1069 if (size == 1)
1070 for (j = 0; j < n_reloads; j++)
1071 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1072 (GET_MODE_SIZE (reload_outmode[j])
1073 > GET_MODE_SIZE (reload_inmode[j]))
1074 ? reload_outmode[j]
1075 : reload_inmode[j])
1076 > 1)
1077 && (!reload_optional[j])
1078 && (reload_in[j] != 0 || reload_out[j] != 0
1079 || reload_secondary_p[j])
1080 && reloads_conflict (i, j)
1081 && reg_classes_intersect_p (class,
1082 reload_reg_class[j]))
1083 {
1084 nongroup_need = 1;
1085 break;
1086 }
1087
1088 /* Decide which time-of-use to count this reload for. */
1089 switch (reload_when_needed[i])
1090 {
1091 case RELOAD_OTHER:
1092 this_needs = &insn_needs.other;
1093 break;
1094 case RELOAD_FOR_INPUT:
1095 this_needs = &insn_needs.input;
1096 break;
1097 case RELOAD_FOR_OUTPUT:
1098 this_needs = &insn_needs.output;
1099 break;
1100 case RELOAD_FOR_INSN:
1101 this_needs = &insn_needs.insn;
1102 break;
1103 case RELOAD_FOR_OTHER_ADDRESS:
1104 this_needs = &insn_needs.other_addr;
1105 break;
1106 case RELOAD_FOR_INPUT_ADDRESS:
1107 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1108 break;
1109 case RELOAD_FOR_OUTPUT_ADDRESS:
1110 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1111 break;
1112 case RELOAD_FOR_OPERAND_ADDRESS:
1113 this_needs = &insn_needs.op_addr;
1114 break;
1115 case RELOAD_FOR_OPADDR_ADDR:
1116 this_needs = &insn_needs.op_addr_reload;
1117 break;
1118 }
1119
1120 if (size > 1)
1121 {
1122 enum machine_mode other_mode, allocate_mode;
1123
1124 /* Count number of groups needed separately from
1125 number of individual regs needed. */
1126 this_needs->groups[(int) class]++;
1127 p = reg_class_superclasses[(int) class];
1128 while (*p != LIM_REG_CLASSES)
1129 this_needs->groups[(int) *p++]++;
1130
1131 /* Record size and mode of a group of this class. */
1132 /* If more than one size group is needed,
1133 make all groups the largest needed size. */
1134 if (group_size[(int) class] < size)
1135 {
1136 other_mode = group_mode[(int) class];
1137 allocate_mode = mode;
1138
1139 group_size[(int) class] = size;
1140 group_mode[(int) class] = mode;
1141 }
1142 else
1143 {
1144 other_mode = mode;
1145 allocate_mode = group_mode[(int) class];
1146 }
1147
1148 /* Crash if two dissimilar machine modes both need
1149 groups of consecutive regs of the same class. */
1150
1151 if (other_mode != VOIDmode && other_mode != allocate_mode
1152 && ! modes_equiv_for_class_p (allocate_mode,
1153 other_mode, class))
1154 abort ();
1155 }
1156 else if (size == 1)
1157 {
1158 this_needs->regs[nongroup_need][(int) class] += 1;
1159 p = reg_class_superclasses[(int) class];
1160 while (*p != LIM_REG_CLASSES)
1161 this_needs->regs[nongroup_need][(int) *p++] += 1;
1162 }
1163 else
1164 abort ();
1165 }
1166
1167 /* All reloads have been counted for this insn;
1168 now merge the various times of use.
1169 This sets insn_needs, etc., to the maximum total number
1170 of registers needed at any point in this insn. */
1171
1172 for (i = 0; i < N_REG_CLASSES; i++)
1173 {
1174 int in_max, out_max;
1175
1176 /* Compute normal and nongroup needs. */
1177 for (j = 0; j <= 1; j++)
1178 {
1179 for (in_max = 0, out_max = 0, k = 0;
1180 k < reload_n_operands; k++)
1181 {
1182 in_max
1183 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1184 out_max
1185 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1186 }
1187
1188 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1189 and operand addresses but not things used to reload
1190 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1191 don't conflict with things needed to reload inputs or
1192 outputs. */
1193
1194 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1195 insn_needs.op_addr_reload.regs[j][i]),
1196 in_max);
1197
1198 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1199
1200 insn_needs.input.regs[j][i]
1201 = MAX (insn_needs.input.regs[j][i]
1202 + insn_needs.op_addr.regs[j][i]
1203 + insn_needs.insn.regs[j][i],
1204 in_max + insn_needs.input.regs[j][i]);
1205
1206 insn_needs.output.regs[j][i] += out_max;
1207 insn_needs.other.regs[j][i]
1208 += MAX (MAX (insn_needs.input.regs[j][i],
1209 insn_needs.output.regs[j][i]),
1210 insn_needs.other_addr.regs[j][i]);
1211
1212 }
1213
1214 /* Now compute group needs. */
1215 for (in_max = 0, out_max = 0, j = 0;
1216 j < reload_n_operands; j++)
1217 {
1218 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1219 out_max
1220 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1221 }
1222
1223 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1224 insn_needs.op_addr_reload.groups[i]),
1225 in_max);
1226 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1227
1228 insn_needs.input.groups[i]
1229 = MAX (insn_needs.input.groups[i]
1230 + insn_needs.op_addr.groups[i]
1231 + insn_needs.insn.groups[i],
1232 in_max + insn_needs.input.groups[i]);
1233
1234 insn_needs.output.groups[i] += out_max;
1235 insn_needs.other.groups[i]
1236 += MAX (MAX (insn_needs.input.groups[i],
1237 insn_needs.output.groups[i]),
1238 insn_needs.other_addr.groups[i]);
1239 }
1240
1241 /* If this is a CALL_INSN and caller-saves will need
1242 a spill register, act as if the spill register is
1243 needed for this insn. However, the spill register
1244 can be used by any reload of this insn, so we only
1245 need do something if no need for that class has
1246 been recorded.
1247
1248 The assumption that every CALL_INSN will trigger a
1249 caller-save is highly conservative, however, the number
1250 of cases where caller-saves will need a spill register but
1251 a block containing a CALL_INSN won't need a spill register
1252 of that class should be quite rare.
1253
1254 If a group is needed, the size and mode of the group will
1255 have been set up at the beginning of this loop. */
1256
1257 if (GET_CODE (insn) == CALL_INSN
1258 && caller_save_spill_class != NO_REGS)
1259 {
1260 /* See if this register would conflict with any reload
1261 that needs a group. */
1262 int nongroup_need = 0;
1263 int *caller_save_needs;
1264
1265 for (j = 0; j < n_reloads; j++)
1266 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1267 (GET_MODE_SIZE (reload_outmode[j])
1268 > GET_MODE_SIZE (reload_inmode[j]))
1269 ? reload_outmode[j]
1270 : reload_inmode[j])
1271 > 1)
1272 && reg_classes_intersect_p (caller_save_spill_class,
1273 reload_reg_class[j]))
1274 {
1275 nongroup_need = 1;
1276 break;
1277 }
1278
1279 caller_save_needs
1280 = (caller_save_group_size > 1
1281 ? insn_needs.other.groups
1282 : insn_needs.other.regs[nongroup_need]);
1283
1284 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1285 {
1286 register enum reg_class *p
1287 = reg_class_superclasses[(int) caller_save_spill_class];
1288
1289 caller_save_needs[(int) caller_save_spill_class]++;
1290
1291 while (*p != LIM_REG_CLASSES)
1292 caller_save_needs[(int) *p++] += 1;
1293 }
1294
1295 /* Show that this basic block will need a register of
1296 this class. */
1297
1298 if (global
1299 && ! (basic_block_needs[(int) caller_save_spill_class]
1300 [this_block]))
1301 {
1302 basic_block_needs[(int) caller_save_spill_class]
1303 [this_block] = 1;
1304 new_basic_block_needs = 1;
1305 }
1306 }
1307
1308 #ifdef SMALL_REGISTER_CLASSES
1309 /* If this insn stores the value of a function call,
1310 and that value is in a register that has been spilled,
1311 and if the insn needs a reload in a class
1312 that might use that register as the reload register,
1313 then add an extra need in that class.
1314 This makes sure we have a register available that does
1315 not overlap the return value. */
1316
1317 if (avoid_return_reg)
1318 {
1319 int regno = REGNO (avoid_return_reg);
1320 int nregs
1321 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1322 int r;
1323 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1324
1325 /* First compute the "basic needs", which counts a
1326 need only in the smallest class in which it
1327 is required. */
1328
1329 bcopy (insn_needs.other.regs[0], basic_needs,
1330 sizeof basic_needs);
1331 bcopy (insn_needs.other.groups, basic_groups,
1332 sizeof basic_groups);
1333
1334 for (i = 0; i < N_REG_CLASSES; i++)
1335 {
1336 enum reg_class *p;
1337
1338 if (basic_needs[i] >= 0)
1339 for (p = reg_class_superclasses[i];
1340 *p != LIM_REG_CLASSES; p++)
1341 basic_needs[(int) *p] -= basic_needs[i];
1342
1343 if (basic_groups[i] >= 0)
1344 for (p = reg_class_superclasses[i];
1345 *p != LIM_REG_CLASSES; p++)
1346 basic_groups[(int) *p] -= basic_groups[i];
1347 }
1348
1349 /* Now count extra regs if there might be a conflict with
1350 the return value register.
1351
1352 ??? This is not quite correct because we don't properly
1353 handle the case of groups, but if we end up doing
1354 something wrong, it either will end up not mattering or
1355 we will abort elsewhere. */
1356
1357 for (r = regno; r < regno + nregs; r++)
1358 if (spill_reg_order[r] >= 0)
1359 for (i = 0; i < N_REG_CLASSES; i++)
1360 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1361 {
1362 if (basic_needs[i] > 0 || basic_groups[i] > 0)
1363 {
1364 enum reg_class *p;
1365
1366 insn_needs.other.regs[0][i]++;
1367 p = reg_class_superclasses[i];
1368 while (*p != LIM_REG_CLASSES)
1369 insn_needs.other.regs[0][(int) *p++]++;
1370 }
1371 }
1372 }
1373 #endif /* SMALL_REGISTER_CLASSES */
1374
1375 /* For each class, collect maximum need of any insn. */
1376
1377 for (i = 0; i < N_REG_CLASSES; i++)
1378 {
1379 if (max_needs[i] < insn_needs.other.regs[0][i])
1380 {
1381 max_needs[i] = insn_needs.other.regs[0][i];
1382 max_needs_insn[i] = insn;
1383 }
1384 if (max_groups[i] < insn_needs.other.groups[i])
1385 {
1386 max_groups[i] = insn_needs.other.groups[i];
1387 max_groups_insn[i] = insn;
1388 }
1389 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1390 {
1391 max_nongroups[i] = insn_needs.other.regs[1][i];
1392 max_nongroups_insn[i] = insn;
1393 }
1394 }
1395 }
1396 /* Note that there is a continue statement above. */
1397 }
1398
1399 /* If we allocated any new memory locations, make another pass
1400 since it might have changed elimination offsets. */
1401 if (starting_frame_size != get_frame_size ())
1402 something_changed = 1;
1403
1404 if (dumpfile)
1405 for (i = 0; i < N_REG_CLASSES; i++)
1406 {
1407 if (max_needs[i] > 0)
1408 fprintf (dumpfile,
1409 ";; Need %d reg%s of class %s (for insn %d).\n",
1410 max_needs[i], max_needs[i] == 1 ? "" : "s",
1411 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1412 if (max_nongroups[i] > 0)
1413 fprintf (dumpfile,
1414 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1415 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1416 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1417 if (max_groups[i] > 0)
1418 fprintf (dumpfile,
1419 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1420 max_groups[i], max_groups[i] == 1 ? "" : "s",
1421 mode_name[(int) group_mode[i]],
1422 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1423 }
1424
1425 /* If we have caller-saves, set up the save areas and see if caller-save
1426 will need a spill register. */
1427
1428 if (caller_save_needed
1429 && ! setup_save_areas (&something_changed)
1430 && caller_save_spill_class == NO_REGS)
1431 {
1432 /* The class we will need depends on whether the machine
1433 supports the sum of two registers for an address; see
1434 find_address_reloads for details. */
1435
1436 caller_save_spill_class
1437 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1438 caller_save_group_size
1439 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1440 something_changed = 1;
1441 }
1442
1443 /* See if anything that happened changes which eliminations are valid.
1444 For example, on the Sparc, whether or not the frame pointer can
1445 be eliminated can depend on what registers have been used. We need
1446 not check some conditions again (such as flag_omit_frame_pointer)
1447 since they can't have changed. */
1448
1449 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1450 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1451 #ifdef ELIMINABLE_REGS
1452 || ! CAN_ELIMINATE (ep->from, ep->to)
1453 #endif
1454 )
1455 ep->can_eliminate = 0;
1456
1457 /* Look for the case where we have discovered that we can't replace
1458 register A with register B and that means that we will now be
1459 trying to replace register A with register C. This means we can
1460 no longer replace register C with register B and we need to disable
1461 such an elimination, if it exists. This occurs often with A == ap,
1462 B == sp, and C == fp. */
1463
1464 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1465 {
1466 struct elim_table *op;
1467 register int new_to = -1;
1468
1469 if (! ep->can_eliminate && ep->can_eliminate_previous)
1470 {
1471 /* Find the current elimination for ep->from, if there is a
1472 new one. */
1473 for (op = reg_eliminate;
1474 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1475 if (op->from == ep->from && op->can_eliminate)
1476 {
1477 new_to = op->to;
1478 break;
1479 }
1480
1481 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1482 disable it. */
1483 for (op = reg_eliminate;
1484 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1485 if (op->from == new_to && op->to == ep->to)
1486 op->can_eliminate = 0;
1487 }
1488 }
1489
1490 /* See if any registers that we thought we could eliminate the previous
1491 time are no longer eliminable. If so, something has changed and we
1492 must spill the register. Also, recompute the number of eliminable
1493 registers and see if the frame pointer is needed; it is if there is
1494 no elimination of the frame pointer that we can perform. */
1495
1496 frame_pointer_needed = 1;
1497 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1498 {
1499 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1500 && ep->to != HARD_FRAME_POINTER_REGNUM)
1501 frame_pointer_needed = 0;
1502
1503 if (! ep->can_eliminate && ep->can_eliminate_previous)
1504 {
1505 ep->can_eliminate_previous = 0;
1506 spill_hard_reg (ep->from, global, dumpfile, 1);
1507 regs_ever_live[ep->from] = 1;
1508 something_changed = 1;
1509 num_eliminable--;
1510 }
1511 }
1512
1513 /* If all needs are met, we win. */
1514
1515 for (i = 0; i < N_REG_CLASSES; i++)
1516 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1517 break;
1518 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1519 break;
1520
1521 /* Not all needs are met; must spill some hard regs. */
1522
1523 /* Put all registers spilled so far back in potential_reload_regs, but
1524 put them at the front, since we've already spilled most of the
1525 pseudos in them (we might have left some pseudos unspilled if they
1526 were in a block that didn't need any spill registers of a conflicting
1527 class). We used to try to mark off the need for those registers,
1528 but doing so properly is very complex and reallocating them is the
1529 simpler approach. First, "pack" potential_reload_regs by pushing
1530 any nonnegative entries towards the end. That will leave room
1531 for the registers we already spilled.
1532
1533 Also, undo the marking of the spill registers from the last time
1534 around in FORBIDDEN_REGS since we will probably be allocating
1535 them again below.
1536
1537 ??? It is theoretically possible that we might end up not using one
1538 of our previously-spilled registers in this allocation, even though
1539 they are at the head of the list. It's not clear what to do about
1540 this, but it was no better before, when we marked off the needs met
1541 by the previously-spilled registers. With the current code, globals
1542 can be allocated into these registers, but locals cannot. */
1543
1544 if (n_spills)
1545 {
1546 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1547 if (potential_reload_regs[i] != -1)
1548 potential_reload_regs[j--] = potential_reload_regs[i];
1549
1550 for (i = 0; i < n_spills; i++)
1551 {
1552 potential_reload_regs[i] = spill_regs[i];
1553 spill_reg_order[spill_regs[i]] = -1;
1554 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1555 }
1556
1557 n_spills = 0;
1558 }
1559
1560 /* Now find more reload regs to satisfy the remaining need
1561 Do it by ascending class number, since otherwise a reg
1562 might be spilled for a big class and might fail to count
1563 for a smaller class even though it belongs to that class.
1564
1565 Count spilled regs in `spills', and add entries to
1566 `spill_regs' and `spill_reg_order'.
1567
1568 ??? Note there is a problem here.
1569 When there is a need for a group in a high-numbered class,
1570 and also need for non-group regs that come from a lower class,
1571 the non-group regs are chosen first. If there aren't many regs,
1572 they might leave no room for a group.
1573
1574 This was happening on the 386. To fix it, we added the code
1575 that calls possible_group_p, so that the lower class won't
1576 break up the last possible group.
1577
1578 Really fixing the problem would require changes above
1579 in counting the regs already spilled, and in choose_reload_regs.
1580 It might be hard to avoid introducing bugs there. */
1581
1582 CLEAR_HARD_REG_SET (counted_for_groups);
1583 CLEAR_HARD_REG_SET (counted_for_nongroups);
1584
1585 for (class = 0; class < N_REG_CLASSES; class++)
1586 {
1587 /* First get the groups of registers.
1588 If we got single registers first, we might fragment
1589 possible groups. */
1590 while (max_groups[class] > 0)
1591 {
1592 /* If any single spilled regs happen to form groups,
1593 count them now. Maybe we don't really need
1594 to spill another group. */
1595 count_possible_groups (group_size, group_mode, max_groups);
1596
1597 if (max_groups[class] <= 0)
1598 break;
1599
1600 /* Groups of size 2 (the only groups used on most machines)
1601 are treated specially. */
1602 if (group_size[class] == 2)
1603 {
1604 /* First, look for a register that will complete a group. */
1605 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1606 {
1607 int other;
1608
1609 j = potential_reload_regs[i];
1610 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1611 &&
1612 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1613 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1614 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1615 && HARD_REGNO_MODE_OK (other, group_mode[class])
1616 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1617 other)
1618 /* We don't want one part of another group.
1619 We could get "two groups" that overlap! */
1620 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1621 ||
1622 (j < FIRST_PSEUDO_REGISTER - 1
1623 && (other = j + 1, spill_reg_order[other] >= 0)
1624 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1625 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1626 && HARD_REGNO_MODE_OK (j, group_mode[class])
1627 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1628 other)
1629 && ! TEST_HARD_REG_BIT (counted_for_groups,
1630 other))))
1631 {
1632 register enum reg_class *p;
1633
1634 /* We have found one that will complete a group,
1635 so count off one group as provided. */
1636 max_groups[class]--;
1637 p = reg_class_superclasses[class];
1638 while (*p != LIM_REG_CLASSES)
1639 max_groups[(int) *p++]--;
1640
1641 /* Indicate both these regs are part of a group. */
1642 SET_HARD_REG_BIT (counted_for_groups, j);
1643 SET_HARD_REG_BIT (counted_for_groups, other);
1644 break;
1645 }
1646 }
1647 /* We can't complete a group, so start one. */
1648 #ifdef SMALL_REGISTER_CLASSES
1649 /* Look for a pair neither of which is explicitly used. */
1650 if (i == FIRST_PSEUDO_REGISTER)
1651 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1652 {
1653 int k;
1654 j = potential_reload_regs[i];
1655 /* Verify that J+1 is a potential reload reg. */
1656 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1657 if (potential_reload_regs[k] == j + 1)
1658 break;
1659 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1660 && k < FIRST_PSEUDO_REGISTER
1661 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1662 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1663 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1664 && HARD_REGNO_MODE_OK (j, group_mode[class])
1665 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1666 j + 1)
1667 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1668 /* Reject J at this stage
1669 if J+1 was explicitly used. */
1670 && ! regs_explicitly_used[j + 1])
1671 break;
1672 }
1673 #endif
1674 /* Now try any group at all
1675 whose registers are not in bad_spill_regs. */
1676 if (i == FIRST_PSEUDO_REGISTER)
1677 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1678 {
1679 int k;
1680 j = potential_reload_regs[i];
1681 /* Verify that J+1 is a potential reload reg. */
1682 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1683 if (potential_reload_regs[k] == j + 1)
1684 break;
1685 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1686 && k < FIRST_PSEUDO_REGISTER
1687 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1688 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1689 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1690 && HARD_REGNO_MODE_OK (j, group_mode[class])
1691 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1692 j + 1)
1693 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1694 break;
1695 }
1696
1697 /* I should be the index in potential_reload_regs
1698 of the new reload reg we have found. */
1699
1700 if (i >= FIRST_PSEUDO_REGISTER)
1701 {
1702 /* There are no groups left to spill. */
1703 spill_failure (max_groups_insn[class]);
1704 failure = 1;
1705 goto failed;
1706 }
1707 else
1708 something_changed
1709 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1710 global, dumpfile);
1711 }
1712 else
1713 {
1714 /* For groups of more than 2 registers,
1715 look for a sufficient sequence of unspilled registers,
1716 and spill them all at once. */
1717 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1718 {
1719 int k;
1720
1721 j = potential_reload_regs[i];
1722 if (j >= 0
1723 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1724 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1725 {
1726 /* Check each reg in the sequence. */
1727 for (k = 0; k < group_size[class]; k++)
1728 if (! (spill_reg_order[j + k] < 0
1729 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1730 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1731 break;
1732 /* We got a full sequence, so spill them all. */
1733 if (k == group_size[class])
1734 {
1735 register enum reg_class *p;
1736 for (k = 0; k < group_size[class]; k++)
1737 {
1738 int idx;
1739 SET_HARD_REG_BIT (counted_for_groups, j + k);
1740 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1741 if (potential_reload_regs[idx] == j + k)
1742 break;
1743 something_changed
1744 |= new_spill_reg (idx, class,
1745 max_needs, NULL_PTR,
1746 global, dumpfile);
1747 }
1748
1749 /* We have found one that will complete a group,
1750 so count off one group as provided. */
1751 max_groups[class]--;
1752 p = reg_class_superclasses[class];
1753 while (*p != LIM_REG_CLASSES)
1754 max_groups[(int) *p++]--;
1755
1756 break;
1757 }
1758 }
1759 }
1760 /* We couldn't find any registers for this reload.
1761 Avoid going into an infinite loop. */
1762 if (i >= FIRST_PSEUDO_REGISTER)
1763 {
1764 /* There are no groups left. */
1765 spill_failure (max_groups_insn[class]);
1766 failure = 1;
1767 goto failed;
1768 }
1769 }
1770 }
1771
1772 /* Now similarly satisfy all need for single registers. */
1773
1774 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1775 {
1776 #ifdef SMALL_REGISTER_CLASSES
1777 /* This should be right for all machines, but only the 386
1778 is known to need it, so this conditional plays safe.
1779 ??? For 2.5, try making this unconditional. */
1780 /* If we spilled enough regs, but they weren't counted
1781 against the non-group need, see if we can count them now.
1782 If so, we can avoid some actual spilling. */
1783 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1784 for (i = 0; i < n_spills; i++)
1785 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1786 spill_regs[i])
1787 && !TEST_HARD_REG_BIT (counted_for_groups,
1788 spill_regs[i])
1789 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1790 spill_regs[i])
1791 && max_nongroups[class] > 0)
1792 {
1793 register enum reg_class *p;
1794
1795 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1796 max_nongroups[class]--;
1797 p = reg_class_superclasses[class];
1798 while (*p != LIM_REG_CLASSES)
1799 max_nongroups[(int) *p++]--;
1800 }
1801 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1802 break;
1803 #endif
1804
1805 /* Consider the potential reload regs that aren't
1806 yet in use as reload regs, in order of preference.
1807 Find the most preferred one that's in this class. */
1808
1809 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1810 if (potential_reload_regs[i] >= 0
1811 && TEST_HARD_REG_BIT (reg_class_contents[class],
1812 potential_reload_regs[i])
1813 /* If this reg will not be available for groups,
1814 pick one that does not foreclose possible groups.
1815 This is a kludge, and not very general,
1816 but it should be sufficient to make the 386 work,
1817 and the problem should not occur on machines with
1818 more registers. */
1819 && (max_nongroups[class] == 0
1820 || possible_group_p (potential_reload_regs[i], max_groups)))
1821 break;
1822
1823 /* If we couldn't get a register, try to get one even if we
1824 might foreclose possible groups. This may cause problems
1825 later, but that's better than aborting now, since it is
1826 possible that we will, in fact, be able to form the needed
1827 group even with this allocation. */
1828
1829 if (i >= FIRST_PSEUDO_REGISTER
1830 && (asm_noperands (max_needs[class] > 0
1831 ? max_needs_insn[class]
1832 : max_nongroups_insn[class])
1833 < 0))
1834 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1835 if (potential_reload_regs[i] >= 0
1836 && TEST_HARD_REG_BIT (reg_class_contents[class],
1837 potential_reload_regs[i]))
1838 break;
1839
1840 /* I should be the index in potential_reload_regs
1841 of the new reload reg we have found. */
1842
1843 if (i >= FIRST_PSEUDO_REGISTER)
1844 {
1845 /* There are no possible registers left to spill. */
1846 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1847 : max_nongroups_insn[class]);
1848 failure = 1;
1849 goto failed;
1850 }
1851 else
1852 something_changed
1853 |= new_spill_reg (i, class, max_needs, max_nongroups,
1854 global, dumpfile);
1855 }
1856 }
1857 }
1858
1859 /* If global-alloc was run, notify it of any register eliminations we have
1860 done. */
1861 if (global)
1862 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1863 if (ep->can_eliminate)
1864 mark_elimination (ep->from, ep->to);
1865
1866 /* Insert code to save and restore call-clobbered hard regs
1867 around calls. Tell if what mode to use so that we will process
1868 those insns in reload_as_needed if we have to. */
1869
1870 if (caller_save_needed)
1871 save_call_clobbered_regs (num_eliminable ? QImode
1872 : caller_save_spill_class != NO_REGS ? HImode
1873 : VOIDmode);
1874
1875 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1876 If that insn didn't set the register (i.e., it copied the register to
1877 memory), just delete that insn instead of the equivalencing insn plus
1878 anything now dead. If we call delete_dead_insn on that insn, we may
1879 delete the insn that actually sets the register if the register die
1880 there and that is incorrect. */
1881
1882 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1883 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1884 && GET_CODE (reg_equiv_init[i]) != NOTE)
1885 {
1886 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1887 delete_dead_insn (reg_equiv_init[i]);
1888 else
1889 {
1890 PUT_CODE (reg_equiv_init[i], NOTE);
1891 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1892 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1893 }
1894 }
1895
1896 /* Use the reload registers where necessary
1897 by generating move instructions to move the must-be-register
1898 values into or out of the reload registers. */
1899
1900 if (something_needs_reloads || something_needs_elimination
1901 || (caller_save_needed && num_eliminable)
1902 || caller_save_spill_class != NO_REGS)
1903 reload_as_needed (first, global);
1904
1905 /* If we were able to eliminate the frame pointer, show that it is no
1906 longer live at the start of any basic block. If it ls live by
1907 virtue of being in a pseudo, that pseudo will be marked live
1908 and hence the frame pointer will be known to be live via that
1909 pseudo. */
1910
1911 if (! frame_pointer_needed)
1912 for (i = 0; i < n_basic_blocks; i++)
1913 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1914 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1915 % REGSET_ELT_BITS));
1916
1917 /* Come here (with failure set nonzero) if we can't get enough spill regs
1918 and we decide not to abort about it. */
1919 failed:
1920
1921 reload_in_progress = 0;
1922
1923 /* Now eliminate all pseudo regs by modifying them into
1924 their equivalent memory references.
1925 The REG-rtx's for the pseudos are modified in place,
1926 so all insns that used to refer to them now refer to memory.
1927
1928 For a reg that has a reg_equiv_address, all those insns
1929 were changed by reloading so that no insns refer to it any longer;
1930 but the DECL_RTL of a variable decl may refer to it,
1931 and if so this causes the debugging info to mention the variable. */
1932
1933 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1934 {
1935 rtx addr = 0;
1936 int in_struct = 0;
1937 if (reg_equiv_mem[i])
1938 {
1939 addr = XEXP (reg_equiv_mem[i], 0);
1940 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1941 }
1942 if (reg_equiv_address[i])
1943 addr = reg_equiv_address[i];
1944 if (addr)
1945 {
1946 if (reg_renumber[i] < 0)
1947 {
1948 rtx reg = regno_reg_rtx[i];
1949 XEXP (reg, 0) = addr;
1950 REG_USERVAR_P (reg) = 0;
1951 MEM_IN_STRUCT_P (reg) = in_struct;
1952 PUT_CODE (reg, MEM);
1953 }
1954 else if (reg_equiv_mem[i])
1955 XEXP (reg_equiv_mem[i], 0) = addr;
1956 }
1957 }
1958
1959 #ifdef PRESERVE_DEATH_INFO_REGNO_P
1960 /* Make a pass over all the insns and remove death notes for things that
1961 are no longer registers or no longer die in the insn (e.g., an input
1962 and output pseudo being tied). */
1963
1964 for (insn = first; insn; insn = NEXT_INSN (insn))
1965 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1966 {
1967 rtx note, next;
1968
1969 for (note = REG_NOTES (insn); note; note = next)
1970 {
1971 next = XEXP (note, 1);
1972 if (REG_NOTE_KIND (note) == REG_DEAD
1973 && (GET_CODE (XEXP (note, 0)) != REG
1974 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1975 remove_note (insn, note);
1976 }
1977 }
1978 #endif
1979
1980 /* Indicate that we no longer have known memory locations or constants. */
1981 reg_equiv_constant = 0;
1982 reg_equiv_memory_loc = 0;
1983
1984 if (scratch_list)
1985 free (scratch_list);
1986 scratch_list = 0;
1987 if (scratch_block)
1988 free (scratch_block);
1989 scratch_block = 0;
1990
1991 return failure;
1992 }
1993 \f
1994 /* Nonzero if, after spilling reg REGNO for non-groups,
1995 it will still be possible to find a group if we still need one. */
1996
1997 static int
1998 possible_group_p (regno, max_groups)
1999 int regno;
2000 int *max_groups;
2001 {
2002 int i;
2003 int class = (int) NO_REGS;
2004
2005 for (i = 0; i < (int) N_REG_CLASSES; i++)
2006 if (max_groups[i] > 0)
2007 {
2008 class = i;
2009 break;
2010 }
2011
2012 if (class == (int) NO_REGS)
2013 return 1;
2014
2015 /* Consider each pair of consecutive registers. */
2016 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2017 {
2018 /* Ignore pairs that include reg REGNO. */
2019 if (i == regno || i + 1 == regno)
2020 continue;
2021
2022 /* Ignore pairs that are outside the class that needs the group.
2023 ??? Here we fail to handle the case where two different classes
2024 independently need groups. But this never happens with our
2025 current machine descriptions. */
2026 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2027 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2028 continue;
2029
2030 /* A pair of consecutive regs we can still spill does the trick. */
2031 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2032 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2033 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2034 return 1;
2035
2036 /* A pair of one already spilled and one we can spill does it
2037 provided the one already spilled is not otherwise reserved. */
2038 if (spill_reg_order[i] < 0
2039 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2040 && spill_reg_order[i + 1] >= 0
2041 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2042 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2043 return 1;
2044 if (spill_reg_order[i + 1] < 0
2045 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2046 && spill_reg_order[i] >= 0
2047 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2048 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2049 return 1;
2050 }
2051
2052 return 0;
2053 }
2054 \f
/* Count any groups that can be formed from the registers recently spilled.
   This is done class by class, in order of ascending class number.

   GROUP_SIZE[C] is the number of consecutive registers a group of class C
   requires; GROUP_MODE[C] is the widest mode needed, used to enforce any
   register-pair alignment constraint via HARD_REGNO_MODE_OK.  Each group
   found decrements MAX_GROUPS for the class and all its superclasses and
   marks its registers in counted_for_groups so they are not reused.  */

static void
count_possible_groups (group_size, group_mode, max_groups)
     int *group_size;
     enum machine_mode *group_mode;
     int *max_groups;
{
  int i;
  /* Now find all consecutive groups of spilled registers
     and mark each group off against the need for such groups.
     But don't count them against ordinary need, yet.  */

  for (i = 0; i < N_REG_CLASSES; i++)
    if (group_size[i] > 1)
      {
	HARD_REG_SET new;
	int j;

	CLEAR_HARD_REG_SET (new);

	/* Make a mask of all the regs that are spill regs in class I.
	   Regs already counted toward a group or a non-group need are
	   unavailable, so leave them out of the mask.  */
	for (j = 0; j < n_spills; j++)
	  if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
	      && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
	      && ! TEST_HARD_REG_BIT (counted_for_nongroups,
				      spill_regs[j]))
	    SET_HARD_REG_BIT (new, spill_regs[j]);

	/* Find each consecutive group of them.  */
	for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
	  if (TEST_HARD_REG_BIT (new, j)
	      && j + group_size[i] <= FIRST_PSEUDO_REGISTER
	      /* Next line in case group-mode for this class
		 demands an even-odd pair.  */
	      && HARD_REGNO_MODE_OK (j, group_mode[i]))
	    {
	      int k;
	      /* Verify that the entire run J .. J+SIZE-1 is in the mask;
		 K ends equal to group_size[i] exactly when it is.  */
	      for (k = 1; k < group_size[i]; k++)
		if (! TEST_HARD_REG_BIT (new, j + k))
		  break;
	      if (k == group_size[i])
		{
		  /* We found a group.  Mark it off against this class's
		     need for groups, and against each superclass too.  */
		  register enum reg_class *p;
		  max_groups[i]--;
		  p = reg_class_superclasses[i];
		  while (*p != LIM_REG_CLASSES)
		    max_groups[(int) *p++]--;
		  /* Don't count these registers again.  */
		  for (k = 0; k < group_size[i]; k++)
		    SET_HARD_REG_BIT (counted_for_groups, j + k);
		}
	      /* Skip to the last reg in this group.  When j is incremented
		 above, it will then point to the first reg of the next
		 possible group.  */
	      j += k - 1;
	    }
      }

}
2118 \f
2119 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2120 another mode that needs to be reloaded for the same register class CLASS.
2121 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2122 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2123
2124 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2125 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2126 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2127 causes unnecessary failures on machines requiring alignment of register
2128 groups when the two modes are different sizes, because the larger mode has
2129 more strict alignment rules than the smaller mode. */
2130
2131 static int
2132 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2133 enum machine_mode allocate_mode, other_mode;
2134 enum reg_class class;
2135 {
2136 register int regno;
2137 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2138 {
2139 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2140 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2141 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2142 return 0;
2143 }
2144 return 1;
2145 }
2146
2147 /* Handle the failure to find a register to spill.
2148 INSN should be one of the insns which needed this particular spill reg. */
2149
2150 static void
2151 spill_failure (insn)
2152 rtx insn;
2153 {
2154 if (asm_noperands (PATTERN (insn)) >= 0)
2155 error_for_asm (insn, "`asm' needs too many reloads");
2156 else
2157 abort ();
2158 }
2159
/* Add a new register to the tables of available spill-registers
   (as well as spilling all pseudos allocated to the register).
   I is the index of this register in potential_reload_regs.
   CLASS is the regclass whose need is being satisfied.
   MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
   so that this register can count off against them.
   MAX_NONGROUPS is 0 if this register is part of a group.
   GLOBAL and DUMPFILE are the same as the args that `reload' got.

   The return value is nonzero when another processing pass is required;
   callers accumulate it into `something_changed'.  */

static int
new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
     int i;
     int class;
     int *max_needs;
     int *max_nongroups;
     int global;
     FILE *dumpfile;
{
  register enum reg_class *p;
  int val;
  int regno = potential_reload_regs[i];

  if (i >= FIRST_PSEUDO_REGISTER)
    abort ();	/* Caller failed to find any register.  */

  /* Spilling a fixed or forbidden register would corrupt state the rest
     of the compiler depends on; this is unrecoverable.  */
  if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
    fatal ("fixed or forbidden register was spilled.\n\
This may be due to a compiler bug or to impossible asm\n\
statements or clauses.");

  /* Make reg REGNO an additional reload reg.  */

  potential_reload_regs[i] = -1;
  spill_regs[n_spills] = regno;
  spill_reg_order[regno] = n_spills;
  if (dumpfile)
    fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);

  /* Clear off the needs we just satisfied.  */

  max_needs[class]--;
  p = reg_class_superclasses[class];
  while (*p != LIM_REG_CLASSES)
    max_needs[(int) *p++]--;

  /* If this register also satisfies a non-group need, record that and
     decrement the non-group counts for the class and its superclasses.  */
  if (max_nongroups && max_nongroups[class] > 0)
    {
      SET_HARD_REG_BIT (counted_for_nongroups, regno);
      max_nongroups[class]--;
      p = reg_class_superclasses[class];
      while (*p != LIM_REG_CLASSES)
	max_nongroups[(int) *p++]--;
    }

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);

  /* If there are some registers still to eliminate and this register
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.
     (If new pseudos were spilled, thus requiring more space, VAL would have
     been set non-zero by the call to spill_hard_reg above since additional
     reloads may be needed in that case.

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */
  if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
    val = 1;

  /* Note: n_spills is bumped only after all uses above; spill_regs and
     spill_reg_order for this entry were already filled in.  */
  regs_ever_live[spill_regs[n_spills]] = 1;
  n_spills++;

  return val;
}
2240 \f
2241 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2242 data that is dead in INSN. */
2243
2244 static void
2245 delete_dead_insn (insn)
2246 rtx insn;
2247 {
2248 rtx prev = prev_real_insn (insn);
2249 rtx prev_dest;
2250
2251 /* If the previous insn sets a register that dies in our insn, delete it
2252 too. */
2253 if (prev && GET_CODE (PATTERN (prev)) == SET
2254 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2255 && reg_mentioned_p (prev_dest, PATTERN (insn))
2256 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2257 delete_dead_insn (prev);
2258
2259 PUT_CODE (insn, NOTE);
2260 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2261 NOTE_SOURCE_FILE (insn) = 0;
2262 }
2263
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.  */

static void
alter_reg (i, from_reg)
     register int i;
     int from_reg;
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (GET_CODE (regno_reg_rtx[i]) != REG)
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  REGNO (regno_reg_rtx[i])
    = reg_renumber[i] >= 0 ? reg_renumber[i] : i;

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && reg_n_refs[i] > 0
      && reg_equiv_constant[i] == 0
      && reg_equiv_memory_loc[i] == 0)
    {
      register rtx x;
      int inherent_size = PSEUDO_REGNO_BYTES (i);
      int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      /* Byte offset from the allocated slot to the address we will
	 actually record for the pseudo (big-endian correction).  */
      int adjust = 0;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, then the previous slot.  */
      if (from_reg == -1)
	{
	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
#if BYTES_BIG_ENDIAN
	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.
	     This is so we can do a big-endian correction unconditionally
	     below.  */
	  adjust = inherent_size - total_size;
#endif
	}
      /* Reuse a stack slot if possible: it must be at least as wide as any
	 previous use and its mode at least as big as this pseudo's mode.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size))
	x = spill_stack_slot[from_reg];
      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
	  if (spill_stack_slot[from_reg])
	    {
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	    }
	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size, -1);
#if BYTES_BIG_ENDIAN
	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.
	     This is so we can do a big-endian correction unconditionally
	     below.  */
	  adjust = GET_MODE_SIZE (mode) - total_size;
#endif
	  /* Remember this slot so later pseudos spilled from FROM_REG
	     can share it.  */
	  spill_stack_slot[from_reg] = x;
	  spill_stack_slot_width[from_reg] = total_size;
	}

#if BYTES_BIG_ENDIAN
      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (inherent_size < total_size)
	adjust += (total_size - inherent_size);
#endif /* BYTES_BIG_ENDIAN */

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
	{
	  x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
		       plus_constant (XEXP (x, 0), adjust));
	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
	}

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc[i] = x;
    }
}
2378
2379 /* Mark the slots in regs_ever_live for the hard regs
2380 used by pseudo-reg number REGNO. */
2381
2382 void
2383 mark_home_live (regno)
2384 int regno;
2385 {
2386 register int i, lim;
2387 i = reg_renumber[regno];
2388 if (i < 0)
2389 return;
2390 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2391 while (i < lim)
2392 regs_ever_live[i++] = 1;
2393 }
2394
2395 /* Mark the registers used in SCRATCH as being live. */
2396
2397 static void
2398 mark_scratch_live (scratch)
2399 rtx scratch;
2400 {
2401 register int i;
2402 int regno = REGNO (scratch);
2403 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2404
2405 for (i = regno; i < lim; i++)
2406 regs_ever_live[i] = 1;
2407 }
2408 \f
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is non-zero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.  */

static void
set_label_offsets (x, insn, initial_p)
     rtx x;
     rtx insn;
     int initial_p;
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      /* Nonlocal labels are not tracked here.  */
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = XEXP (x, 0);

      /* ... fall through ... */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x)][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && GET_CODE (tem) == BARRIER)
	{
	  /* Control can only reach this label via jumps to it, so the
	     recorded offsets for the label are authoritative here.  */
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (x)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x)][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_INSN:
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ... */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
	 and hence must have all eliminations at their initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the address vector must be at their initial
	 offsets.  We want the first field for ADDR_VEC and the second
	 field for ADDR_DIFF_VEC.  */

      for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Handle each arm; a break drops through to the
	     variable-address case below.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
    }
}
2563 \f
/* Used for communication between the next two functions to properly share
   the vector for an ASM_OPERANDS.  */

static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2568
2569 /* Scan X and replace any eliminable registers (such as fp) with a
2570 replacement (such as sp), plus an offset.
2571
2572 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2573 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2574 MEM, we are allowed to replace a sum of a register and the constant zero
2575 with the register, which we cannot do outside a MEM. In addition, we need
2576 to record the fact that a register is referenced outside a MEM.
2577
2578 If INSN is an insn, it is the insn containing X. If we replace a REG
2579 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2580 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2581 that the REG is being modified.
2582
2583 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2584 That's used when we eliminate in expressions stored in notes.
2585 This means, do not set ref_outside_mem even if the reference
2586 is outside of MEMs.
2587
2588 If we see a modification to a register we know about, take the
2589 appropriate action (see case SET, below).
2590
2591 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2592 replacements done assuming all offsets are at their initial values. If
2593 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2594 encounter, return the actual location so that find_reloads will do
2595 the proper thing. */
2596
rtx
eliminate_regs (x, mem_mode, insn)
     rtx x;
     enum machine_mode mem_mode;
     rtx insn;
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  rtx new;
  int i, j;
  char *fmt;
  /* Nonzero once X has been copied; after that we may store into X freely
     without clobbering shared rtl.  */
  int copied = 0;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      /* None of these can contain an eliminable register.  */
      return x;

    case REG:
      regno = REGNO (x);

      /* First handle the case where we encounter a bare register that
         is eliminable.  Replace it with a PLUS.  */
      if (regno < FIRST_PSEUDO_REGISTER)
        {
          for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
               ep++)
            if (ep->from_rtx == x && ep->can_eliminate)
              {
                if (! mem_mode
                    /* Refs inside notes don't count for this purpose.  */
                    && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
                                        || GET_CODE (insn) == INSN_LIST)))
                  ep->ref_outside_mem = 1;
                return plus_constant (ep->to_rtx, ep->previous_offset);
              }

        }
      else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
               && (reg_equiv_address[regno] || num_not_at_initial_offset))
        {
          /* In this case, find_reloads would attempt to either use an
             incorrect address (if something is not at its initial offset)
             or substitute a replaced address into an insn (which loses
             if the offset is changed by some later action).  So we simply
             return the replaced stack slot (assuming it is changed by
             elimination) and ignore the fact that this is actually a
             reference to the pseudo.  Ensure we make a copy of the
             address in case it is shared.  */
          new = eliminate_regs (reg_equiv_memory_loc[regno],
                                mem_mode, insn);
          if (new != reg_equiv_memory_loc[regno])
            {
              cannot_omit_stores[regno] = 1;
              return copy_rtx (new);
            }
        }
      return x;

    case PLUS:
      /* If this is the sum of an eliminable register and a constant, rework
         the sum.  */
      if (GET_CODE (XEXP (x, 0)) == REG
          && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
          && CONSTANT_P (XEXP (x, 1)))
        {
          for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
               ep++)
            if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
              {
                if (! mem_mode
                    /* Refs inside notes don't count for this purpose.  */
                    && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
                                        || GET_CODE (insn) == INSN_LIST)))
                  ep->ref_outside_mem = 1;

                /* The only time we want to replace a PLUS with a REG (this
                   occurs when the constant operand of the PLUS is the negative
                   of the offset) is when we are inside a MEM.  We won't want
                   to do so at other times because that would change the
                   structure of the insn in a way that reload can't handle.
                   We special-case the commonest situation in
                   eliminate_regs_in_insn, so just replace a PLUS with a
                   PLUS here, unless inside a MEM.  */
                if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
                    && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
                  return ep->to_rtx;
                else
                  return gen_rtx (PLUS, Pmode, ep->to_rtx,
                                  plus_constant (XEXP (x, 1),
                                                 ep->previous_offset));
              }

          /* If the register is not eliminable, we are done since the other
             operand is a constant.  */
          return x;
        }

      /* If this is part of an address, we want to bring any constant to the
         outermost PLUS.  We will do this by doing register replacement in
         our operands and seeing if a constant shows up in one of them.

         We assume here this is part of an address (or a "load address" insn)
         since an eliminable register is not likely to appear in any other
         context.

         If we have (plus (eliminable) (reg)), we want to produce
         (plus (plus (replacement) (reg) (const))).  If this was part of a
         normal add insn, (plus (replacement) (reg)) will be pushed as a
         reload.  This is the desired action.  */

      {
        rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
        rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);

        if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
          {
            /* If one side is a PLUS and the other side is a pseudo that
               didn't get a hard register but has a reg_equiv_constant,
               we must replace the constant here since it may no longer
               be in the position of any operand.  */
            if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
                && REGNO (new1) >= FIRST_PSEUDO_REGISTER
                && reg_renumber[REGNO (new1)] < 0
                && reg_equiv_constant != 0
                && reg_equiv_constant[REGNO (new1)] != 0)
              new1 = reg_equiv_constant[REGNO (new1)];
            /* NOTE(review): unlike the branch above, this branch does not
               test reg_equiv_constant != 0 before indexing it -- confirm
               reg_equiv_constant is always non-null on this path, or add
               the same guard.  */
            else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
                     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
                     && reg_renumber[REGNO (new0)] < 0
                     && reg_equiv_constant[REGNO (new0)] != 0)
              new0 = reg_equiv_constant[REGNO (new0)];

            new = form_sum (new0, new1);

            /* As above, if we are not inside a MEM we do not want to
               turn a PLUS into something else.  We might try to do so here
               for an addition of 0 if we aren't optimizing.  */
            if (! mem_mode && GET_CODE (new) != PLUS)
              return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
            else
              return new;
          }
      }
      return x;

    case MULT:
      /* If this is the product of an eliminable register and a
         constant, apply the distribute law and move the constant out
         so that we have (plus (mult ..) ..).  This is needed in order
         to keep load-address insns valid.  This case is pathological.
         We ignore the possibility of overflow here.  */
      if (GET_CODE (XEXP (x, 0)) == REG
          && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
          && GET_CODE (XEXP (x, 1)) == CONST_INT)
        for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
             ep++)
          if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
            {
              if (! mem_mode
                  /* Refs inside notes don't count for this purpose.  */
                  && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
                                      || GET_CODE (insn) == INSN_LIST)))
                ep->ref_outside_mem = 1;

              return
                plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
                               ep->previous_offset * INTVAL (XEXP (x, 1)));
            }

      /* ... fall through ... */

    case CALL:
    case COMPARE:
    case MINUS:
    case DIV:      case UDIV:
    case MOD:      case UMOD:
    case AND:      case IOR:      case XOR:
    case ROTATERT: case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
    case NE:       case EQ:
    case GE:       case GT:       case GEU:    case GTU:
    case LE:       case LT:       case LEU:    case LTU:
      /* Generic binary (or unary-with-null-second-operand) case: eliminate
         in both operands and rebuild only if something changed.  */
      {
        rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
        rtx new1
          = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;

        if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
          return gen_rtx (code, GET_MODE (x), new0, new1);
      }
      return x;

    case EXPR_LIST:
      /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
      if (XEXP (x, 0))
        {
          new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
          if (new != XEXP (x, 0))
            x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
        }

      /* ... fall through ... */

    case INSN_LIST:
      /* Now do eliminations in the rest of the chain.  If this was
         an EXPR_LIST, this might result in allocating more memory than is
         strictly needed, but it simplifies the code.  */
      if (XEXP (x, 1))
        {
          new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
          if (new != XEXP (x, 1))
            return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
        }
      return x;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
      /* An auto-increment of a replacement register changes the offset
         between the eliminated register and its replacement.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
        if (ep->to_rtx == XEXP (x, 0))
          {
            int size = GET_MODE_SIZE (mem_mode);

            /* If more bytes than MEM_MODE are pushed, account for them.  */
#ifdef PUSH_ROUNDING
            if (ep->to_rtx == stack_pointer_rtx)
              size = PUSH_ROUNDING (size);
#endif
            if (code == PRE_DEC || code == POST_DEC)
              ep->offset += size;
            else
              ep->offset -= size;
          }

      /* Fall through to generic unary operation case.  */
    case USE:
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
      new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
      if (new != XEXP (x, 0))
        return gen_rtx (code, GET_MODE (x), new);
      return x;

    case SUBREG:
      /* Similar to above processing, but preserve SUBREG_WORD.
         Convert (subreg (mem)) to (mem) if not paradoxical.
         Also, if we have a non-paradoxical (subreg (pseudo)) and the
         pseudo didn't get a hard reg, we must replace this with the
         eliminated version of the memory location because push_reloads
         may do the replacement in certain circumstances.  */
      if (GET_CODE (SUBREG_REG (x)) == REG
          && (GET_MODE_SIZE (GET_MODE (x))
              <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
          && reg_equiv_memory_loc != 0
          && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
        {
          new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
                                mem_mode, insn);

          /* If we didn't change anything, we must retain the pseudo.  */
          if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
            new = XEXP (x, 0);
          else
            /* Otherwise, ensure NEW isn't shared in case we have to reload
               it.  */
            new = copy_rtx (new);
        }
      else
        new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);

      if (new != XEXP (x, 0))
        {
          if (GET_CODE (new) == MEM
              && (GET_MODE_SIZE (GET_MODE (x))
                  <= GET_MODE_SIZE (GET_MODE (new)))
#ifdef LOAD_EXTEND_OP
              /* On these machines we will be reloading what is
                 inside the SUBREG if it originally was a pseudo and
                 the inner and outer modes are both a word or
                 smaller.  So leave the SUBREG then.  */
              && ! (GET_CODE (SUBREG_REG (x)) == REG
                    && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
                    && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD)
#endif
              )
            {
              /* Narrow the MEM in place: adjust its address by the byte
                 offset the SUBREG selects and give it the outer mode.  */
              int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
              enum machine_mode mode = GET_MODE (x);

#if BYTES_BIG_ENDIAN
              offset += (MIN (UNITS_PER_WORD,
                              GET_MODE_SIZE (GET_MODE (new)))
                         - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
#endif

              PUT_MODE (new, mode);
              XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
              return new;
            }
          else
            return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
        }

      return x;

    case CLOBBER:
      /* If clobbering a register that is the replacement register for an
         elimination we still think can be performed, note that it cannot
         be performed.  Otherwise, we need not be concerned about it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
        if (ep->to_rtx == XEXP (x, 0))
          ep->can_eliminate = 0;

      new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
      if (new != XEXP (x, 0))
        return gen_rtx (code, GET_MODE (x), new);
      return x;

    case ASM_OPERANDS:
      {
        rtx *temp_vec;
        /* Properly handle sharing input and constraint vectors.  */
        if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
          {
            /* When we come to a new vector not seen before,
               scan all its elements; keep the old vector if none
               of them changes; otherwise, make a copy.  */
            old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
            temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
            for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
              temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
                                            mem_mode, insn);

            for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
              if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
                break;

            if (i == ASM_OPERANDS_INPUT_LENGTH (x))
              new_asm_operands_vec = old_asm_operands_vec;
            else
              new_asm_operands_vec
                = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
          }

        /* If we had to copy the vector, copy the entire ASM_OPERANDS.  */
        if (new_asm_operands_vec == old_asm_operands_vec)
          return x;

        new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
                       ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
                       ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
                       ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
                       ASM_OPERANDS_SOURCE_FILE (x),
                       ASM_OPERANDS_SOURCE_LINE (x));
        new->volatil = x->volatil;
        return new;
      }

    case SET:
      /* Check for setting a register that we know about.  */
      if (GET_CODE (SET_DEST (x)) == REG)
        {
          /* See if this is setting the replacement register for an
             elimination.

             If DEST is the hard frame pointer, we do nothing because we
             assume that all assignments to the frame pointer are for
             non-local gotos and are being done at a time when they are valid
             and do not disturb anything else.  Some machines want to
             eliminate a fake argument pointer (or even a fake frame pointer)
             with either the real frame or the stack pointer.  Assignments to
             the hard frame pointer must not prevent this elimination.  */

          for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
               ep++)
            if (ep->to_rtx == SET_DEST (x)
                && SET_DEST (x) != hard_frame_pointer_rtx)
              {
                /* If it is being incremented, adjust the offset.  Otherwise,
                   this elimination can't be done.  */
                rtx src = SET_SRC (x);

                if (GET_CODE (src) == PLUS
                    && XEXP (src, 0) == SET_DEST (x)
                    && GET_CODE (XEXP (src, 1)) == CONST_INT)
                  ep->offset -= INTVAL (XEXP (src, 1));
                else
                  ep->can_eliminate = 0;
              }

          /* Now check to see we are assigning to a register that can be
             eliminated.  If so, it must be as part of a PARALLEL, since we
             will not have been called if this is a single SET.  So indicate
             that we can no longer eliminate this reg.  */
          for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
               ep++)
            if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
              ep->can_eliminate = 0;
        }

      /* Now avoid the loop below in this common case.  */
      {
        rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
        rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);

        /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
           write a CLOBBER insn.  */
        if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
            && insn != 0 && GET_CODE (insn) != EXPR_LIST
            && GET_CODE (insn) != INSN_LIST)
          emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);

        if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
          return gen_rtx (SET, VOIDmode, new0, new1);
      }

      return x;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
         recursive call and copy the flags.  While we are here, handle this
         case more efficiently.  */
      new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
      if (new != XEXP (x, 0))
        {
          new = gen_rtx (MEM, GET_MODE (x), new);
          new->volatil = x->volatil;
          new->unchanging = x->unchanging;
          new->in_struct = x->in_struct;
          return new;
        }
      else
        return x;
    }

  /* Process each of our operands recursively.  If any have changed, make a
     copy of the rtx.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
        {
          new = eliminate_regs (XEXP (x, i), mem_mode, insn);
          if (new != XEXP (x, i) && ! copied)
            {
              rtx new_x = rtx_alloc (code);
              bcopy ((char *) x, (char *) new_x,
                     (sizeof (*new_x) - sizeof (new_x->fld)
                      + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
              x = new_x;
              copied = 1;
            }
          XEXP (x, i) = new;
        }
      else if (*fmt == 'E')
        {
          /* Likewise for a vector operand, with a separate flag so the
             vector itself is copied at most once.  */
          int copied_vec = 0;
          for (j = 0; j < XVECLEN (x, i); j++)
            {
              new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
              if (new != XVECEXP (x, i, j) && ! copied_vec)
                {
                  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
                                             &XVECEXP (x, i, 0));
                  if (! copied)
                    {
                      rtx new_x = rtx_alloc (code);
                      bcopy ((char *) x, (char *) new_x,
                             (sizeof (*new_x) - sizeof (new_x->fld)
                              + (sizeof (new_x->fld[0])
                                 * GET_RTX_LENGTH (code))));
                      x = new_x;
                      copied = 1;
                    }
                  XVEC (x, i) = new_v;
                  copied_vec = 1;
                }
              XVECEXP (x, i, j) = new;
            }
        }
    }

  return x;
}
3101 \f
3102 /* Scan INSN and eliminate all eliminable registers in it.
3103
   If REPLACE is nonzero, do the replacement destructively.  Also
   delete the insn as dead if it is setting an eliminable register.
3106
3107 If REPLACE is zero, do all our allocations in reload_obstack.
3108
3109 If no eliminations were done and this insn doesn't require any elimination
3110 processing (these are not identical conditions: it might be updating sp,
3111 but not referencing fp; this needs to be seen during reload_as_needed so
3112 that the offset between fp and sp can be taken into consideration), zero
3113 is returned. Otherwise, 1 is returned. */
3114
static int
eliminate_regs_in_insn (insn, replace)
     rtx insn;
     int replace;
{
  rtx old_body = PATTERN (insn);
  rtx new_body;
  /* Nonzero if anything changed or an offset differs from its previous
     value; this is the return value.  */
  int val = 0;
  struct elim_table *ep;

  if (! replace)
    push_obstacks (&reload_obstack, &reload_obstack);

  if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
      && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
        if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
          {
            /* In this case this insn isn't serving a useful purpose.  We
               will delete it in reload_as_needed once we know that this
               elimination is, in fact, being done.

               If REPLACE isn't set, we can't delete this insn, but needn't
               process it since it won't be used unless something changes.  */
            if (replace)
              delete_dead_insn (insn);
            val = 1;
            goto done;
          }

      /* Check for (set (reg) (plus (reg from) (offset))) where the offset
         in the insn is the negative of the offset in FROM.  Substitute
         (set (reg) (reg to)) for the insn and change its code.

         We have to do this here, rather than in eliminate_regs, so that we
         can change the insn code.  */

      if (GET_CODE (SET_SRC (old_body)) == PLUS
          && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
          && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
        for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
             ep++)
          if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
              && ep->can_eliminate)
            {
              /* We must stop at the first elimination that will be used.
                 If this one would replace the PLUS with a REG, do it
                 now.  Otherwise, quit the loop and let eliminate_regs
                 do its normal replacement.  */
              if (ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
                {
                  PATTERN (insn) = gen_rtx (SET, VOIDmode,
                                            SET_DEST (old_body), ep->to_rtx);
                  INSN_CODE (insn) = -1;
                  val = 1;
                  goto done;
                }

              break;
            }
    }

  old_asm_operands_vec = 0;

  /* Replace the body of this insn with a substituted form.  If we changed
     something, return non-zero.

     If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.  */

  new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
  if (new_body != old_body)
    {
      /* If we aren't replacing things permanently and we changed something,
         make another copy to ensure that all the RTL is new.  Otherwise
         things can go wrong if find_reloads swaps commutative operands
         and one is inside RTL that has been copied while the other is not.  */

      /* Don't copy an asm_operands because (1) there's no need and (2)
         copy_rtx can't do it properly when there are multiple outputs.  */
      if (! replace && asm_noperands (old_body) < 0)
        new_body = copy_rtx (new_body);

      /* If we had a move insn but now we don't, rerecognize it.  */
      if ((GET_CODE (old_body) == SET && GET_CODE (SET_SRC (old_body)) == REG
           && (GET_CODE (new_body) != SET
               || GET_CODE (SET_SRC (new_body)) != REG))
          /* If this was a load from or store to memory, compare
             the MEM in recog_operand to the one in the insn.  If they
             are not equal, then rerecognize the insn.  */
          || (GET_CODE (old_body) == SET
              && ((GET_CODE (SET_SRC (old_body)) == MEM
                   && SET_SRC (old_body) != recog_operand[1])
                  || (GET_CODE (SET_DEST (old_body)) == MEM
                      && SET_DEST (old_body) != recog_operand[0])))
          /* If this was an add insn before, rerecognize.  */
          ||
          (GET_CODE (old_body) == SET
           && GET_CODE (SET_SRC (old_body)) == PLUS))
        {
          if (! validate_change (insn, &PATTERN (insn), new_body, 0))
            /* If recognition fails, store the new body anyway.
               It's normal to have recognition failures here
               due to bizarre memory addresses; reloading will fix them.  */
            PATTERN (insn) = new_body;
        }
      else
        PATTERN (insn) = new_body;

      val = 1;
    }

  /* Loop through all elimination pairs.  See if any have changed and
     recalculate the number not at initial offset.

     Compute the maximum offset (minimum offset if the stack does not
     grow downward) for each elimination pair.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */

  num_not_at_initial_offset = 0;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
        ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;

      if (ep->previous_offset != ep->offset)
        val = 1;

      ep->previous_offset = ep->offset;
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
        num_not_at_initial_offset++;

#ifdef STACK_GROWS_DOWNWARD
      ep->max_offset = MAX (ep->max_offset, ep->offset);
#else
      ep->max_offset = MIN (ep->max_offset, ep->offset);
#endif
    }

 done:
  /* If we changed something, perform elimination in REG_NOTES.  This is
     needed even when REPLACE is zero because a REG_DEAD note might refer
     to a register that we eliminate and could cause a different number
     of spill registers to be needed in the final reload pass than in
     the pre-passes.  */
  if (val && REG_NOTES (insn) != 0)
    REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));

  if (! replace)
    pop_obstacks ();

  return val;
}
3282
3283 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3284 replacement we currently believe is valid, mark it as not eliminable if X
3285 modifies DEST in any way other than by adding a constant integer to it.
3286
3287 If DEST is the frame pointer, we do nothing because we assume that
3288 all assignments to the hard frame pointer are nonlocal gotos and are being
3289 done at a time when they are valid and do not disturb anything else.
3290 Some machines want to eliminate a fake argument pointer with either the
3291 frame or stack pointer. Assignments to the hard frame pointer must not
3292 prevent this elimination.
3293
3294 Called via note_stores from reload before starting its passes to scan
3295 the insns of the function. */
3296
3297 static void
3298 mark_not_eliminable (dest, x)
3299 rtx dest;
3300 rtx x;
3301 {
3302 register int i;
3303
3304 /* A SUBREG of a hard register here is just changing its mode. We should
3305 not see a SUBREG of an eliminable hard register, but check just in
3306 case. */
3307 if (GET_CODE (dest) == SUBREG)
3308 dest = SUBREG_REG (dest);
3309
3310 if (dest == hard_frame_pointer_rtx)
3311 return;
3312
3313 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3314 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3315 && (GET_CODE (x) != SET
3316 || GET_CODE (SET_SRC (x)) != PLUS
3317 || XEXP (SET_SRC (x), 0) != dest
3318 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3319 {
3320 reg_eliminate[i].can_eliminate_previous
3321 = reg_eliminate[i].can_eliminate = 0;
3322 num_eliminable--;
3323 }
3324 }
3325 \f
3326 /* Kick all pseudos out of hard register REGNO.
3327 If GLOBAL is nonzero, try to find someplace else to put them.
3328 If DUMPFILE is nonzero, log actions taken on that file.
3329
3330 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In that case, no pseudos
3332 are allowed to be in the register, even if they are only in a block that
3333 doesn't require spill registers, unlike the case when we are spilling this
3334 hard reg to produce another spill register.
3335
3336 Return nonzero if any pseudos needed to be kicked out. */
3337
static int
spill_hard_reg (regno, global, dumpfile, cant_eliminate)
     register int regno;
     int global;
     FILE *dumpfile;
     int cant_eliminate;
{
  enum reg_class class = REGNO_REG_CLASS (regno);
  /* Nonzero if any pseudo or scratch was evicted; this is the return value.  */
  int something_changed = 0;
  register int i;

  SET_HARD_REG_BIT (forbidden_regs, regno);

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] >= 0
	&& reg_renumber[i] <= regno
	/* The pseudo may occupy several consecutive hard regs; it overlaps
	   REGNO if its first reg is at or below REGNO and its last reg is
	   at or above it.  */
	&& (reg_renumber[i]
	    + HARD_REGNO_NREGS (reg_renumber[i],
				PSEUDO_REGNO_MODE (i))
	    > regno))
      {
	/* If this register belongs solely to a basic block which needed no
	   spilling of any class that this register is contained in,
	   leave it be, unless we are spilling this register because
	   it was a hard register that can't be eliminated.  */

	if (! cant_eliminate
	    && basic_block_needs[0]
	    && reg_basic_block[i] >= 0
	    && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
	  {
	    enum reg_class *p;

	    /* Also check every superclass: if any superclass needed spills
	       in this block, we must still evict.  */
	    for (p = reg_class_superclasses[(int) class];
		 *p != LIM_REG_CLASSES; p++)
	      if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
		break;

	    if (*p == LIM_REG_CLASSES)
	      continue;
	  }

	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	/* We will need to scan everything again.  */
	something_changed = 1;
	if (global)
	  retry_global_alloc (i, forbidden_regs);

	alter_reg (i, regno);
	if (dumpfile)
	  {
	    if (reg_renumber[i] == -1)
	      fprintf (dumpfile, " Register %d now on stack.\n\n", i);
	    else
	      fprintf (dumpfile, " Register %d now in %d.\n\n",
		       i, reg_renumber[i]);
	  }
      }
  /* Likewise evict any scratch that was allocated to this hard reg,
     turning it back into a bare SCRATCH and dropping it from the list.  */
  for (i = 0; i < scratch_list_length; i++)
    {
      if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
	{
	  if (! cant_eliminate && basic_block_needs[0]
	      && ! basic_block_needs[(int) class][scratch_block[i]])
	    {
	      enum reg_class *p;

	      for (p = reg_class_superclasses[(int) class];
		   *p != LIM_REG_CLASSES; p++)
		if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
		  break;

	      if (*p == LIM_REG_CLASSES)
		continue;
	    }
	  PUT_CODE (scratch_list[i], SCRATCH);
	  scratch_list[i] = 0;
	  something_changed = 1;
	  continue;
	}
    }

  return something_changed;
}
3426 \f
3427 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3428 Also mark any hard registers used to store user variables as
3429 forbidden from being used for spill registers. */
3430
3431 static void
3432 scan_paradoxical_subregs (x)
3433 register rtx x;
3434 {
3435 register int i;
3436 register char *fmt;
3437 register enum rtx_code code = GET_CODE (x);
3438
3439 switch (code)
3440 {
3441 case REG:
3442 #ifdef SMALL_REGISTER_CLASSES
3443 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3444 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3445 #endif
3446 return;
3447
3448 case CONST_INT:
3449 case CONST:
3450 case SYMBOL_REF:
3451 case LABEL_REF:
3452 case CONST_DOUBLE:
3453 case CC0:
3454 case PC:
3455 case USE:
3456 case CLOBBER:
3457 return;
3458
3459 case SUBREG:
3460 if (GET_CODE (SUBREG_REG (x)) == REG
3461 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3462 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3463 = GET_MODE_SIZE (GET_MODE (x));
3464 return;
3465 }
3466
3467 fmt = GET_RTX_FORMAT (code);
3468 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3469 {
3470 if (fmt[i] == 'e')
3471 scan_paradoxical_subregs (XEXP (x, i));
3472 else if (fmt[i] == 'E')
3473 {
3474 register int j;
3475 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3476 scan_paradoxical_subregs (XVECEXP (x, i, j));
3477 }
3478 }
3479 }
3480 \f
3481 static int
3482 hard_reg_use_compare (p1, p2)
3483 struct hard_reg_n_uses *p1, *p2;
3484 {
3485 int tem = p1->uses - p2->uses;
3486 if (tem != 0) return tem;
3487 /* If regs are equally good, sort by regno,
3488 so that the results of qsort leave nothing to chance. */
3489 return p1->regno - p2->regno;
3490 }
3491
3492 /* Choose the order to consider regs for use as reload registers
3493 based on how much trouble would be caused by spilling one.
3494 Store them in order of decreasing preference in potential_reload_regs. */
3495
static void
order_regs_for_reload ()
{
  register int i;
  /* Next free index in potential_reload_regs.  */
  register int o = 0;
  /* Accumulates the total number of pseudo-reg references; used as a
     "large" penalty base for undesirable registers below.  */
  int large = 0;

  struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];

  CLEAR_HARD_REG_SET (bad_spill_regs);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    potential_reload_regs[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      hard_reg_n_uses[i].uses = 0;
      hard_reg_n_uses[i].regno = i;
    }

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (regno >= 0)
	{
	  /* Charge the pseudo's reference count to every hard reg it
	     occupies (it may span several).  */
	  int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
	  while (regno < lim)
	    hard_reg_n_uses[regno++].uses += reg_n_refs[i];
	}
      large += reg_n_refs[i];
    }

  /* Now fixed registers (which cannot safely be used for reloading)
     get a very high use count so they will be considered least desirable.
     Registers used explicitly in the rtl code are almost as bad.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (fixed_regs[i])
	{
	  hard_reg_n_uses[i].uses += 2 * large + 2;
	  SET_HARD_REG_BIT (bad_spill_regs, i);
	}
      else if (regs_explicitly_used[i])
	{
	  hard_reg_n_uses[i].uses += large + 1;
#ifndef SMALL_REGISTER_CLASSES
	  /* ??? We are doing this here because of the potential that
	     bad code may be generated if a register explicitly used in
	     an insn was used as a spill register for that insn.  But
	     not using these as spill registers may lose on some machine.
	     We'll have to see how this works out.  */
	  SET_HARD_REG_BIT (bad_spill_regs, i);
#endif
	}
    }
  hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
  SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);

#ifdef ELIMINABLE_REGS
  /* If registers other than the frame pointer are eliminable, mark them as
     poor choices.  */
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
      SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
    }
#endif

  /* Prefer registers not so far used, for use in temporary loading.
     Among them, if REG_ALLOC_ORDER is defined, use that order.
     Otherwise, prefer registers not preserved by calls.  */

#ifdef REG_ALLOC_ORDER
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int regno = reg_alloc_order[i];

      if (hard_reg_n_uses[regno].uses == 0)
	potential_reload_regs[o++] = regno;
    }
#else
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
#endif

  qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
	 sizeof hard_reg_n_uses[0], hard_reg_use_compare);

  /* Now add the regs that are already used,
     preferring those used less often.  The fixed and otherwise forbidden
     registers will be at the end of this list.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (hard_reg_n_uses[i].uses != 0)
      potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
}
3604 \f
/* Used in reload_as_needed to sort the spilled regs into increasing order.

   qsort comparison function for an array of shorts.  Returns negative,
   zero, or positive as *R1 is less than, equal to, or greater than *R2.
   Returning the difference (rather than an unconditional -1/1) keeps the
   comparator consistent for equal keys, as the C standard requires of a
   qsort comparison function; the short operands cannot overflow int.  */
static int
compare_spill_regs (r1, r2)
     short *r1, *r2;
{
  return *r1 - *r2;
}
3612
/* Reload pseudo-registers into hard regs around each insn as needed.
   Additional register load insns are output before the insn that needs it
   and perhaps store insns after insns that modify the reloaded pseudo reg.

   FIRST is the first insn of the chain to scan.  LIVE_KNOWN is nonzero
   when the basic-block structure (basic_block_head, basic_block_needs)
   can be trusted; it gates tracking of the current block and is passed
   on to find_reloads.

   reg_last_reload_reg and reg_reloaded_contents keep track of
   which registers are already available in reload registers.
   We update these for the reloads that we perform,
   as the insns are scanned.  */

static void
reload_as_needed (first, live_known)
     rtx first;
     int live_known;
{
  register rtx insn;
  register int i;
  int this_block = 0;	/* index of the basic block INSN is in */
  rtx x;
  rtx after_call = 0;	/* dest of the most recent call insn's value, if any */

  bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
  bzero ((char *) spill_reg_store, sizeof spill_reg_store);
  reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
  reg_has_output_reload = (char *) alloca (max_regno);
  /* Mark every spill reg's contents as unknown at the start.  */
  for (i = 0; i < n_spills; i++)
    {
      reg_reloaded_contents[i] = -1;
      reg_reloaded_insn[i] = 0;
    }

  /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
				  reg_eliminate[i].initial_offset);
      reg_eliminate[i].previous_offset
	= reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
    }
#else
  INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
  reg_eliminate[0].previous_offset
    = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

  num_not_at_initial_offset = 0;

  /* Order the spilled regs, so that allocate_reload_regs can guarantee to
     pack registers with group needs.  */
  if (n_spills > 1)
    {
      qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
      for (i = 0; i < n_spills; i++)
	spill_reg_order[spill_regs[i]] = i;
    }

  for (insn = first; insn;)
    {
      register rtx next = NEXT_INSN (insn);

      /* Notice when we move to a new basic block.  */
      if (live_known && this_block + 1 < n_basic_blocks
	  && insn == basic_block_head[this_block+1])
	++this_block;

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.  */
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (insn)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  rtx avoid_return_reg = 0;

#ifdef SMALL_REGISTER_CLASSES
	  /* Set avoid_return_reg if this is an insn
	     that might use the value of a function call.
	     AFTER_CALL remembers the SET_DEST of the latest call insn;
	     it is cleared once any insn other than a stack-pointer
	     adjustment intervenes.  */
	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      if (GET_CODE (PATTERN (insn)) == SET)
		after_call = SET_DEST (PATTERN (insn));
	      else if (GET_CODE (PATTERN (insn)) == PARALLEL
		       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
	      else
		after_call = 0;
	    }
	  else if (after_call != 0
		   && !(GET_CODE (PATTERN (insn)) == SET
			&& SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
	    {
	      if (reg_referenced_p (after_call, PATTERN (insn)))
		avoid_return_reg = after_call;
	      after_call = 0;
	    }
#endif /* SMALL_REGISTER_CLASSES */

	  /* If this is a USE or CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.
	     NOTE(review): the insn's mode appears to be used by this pass
	     as a flag — QImode marking insns that need elimination and
	     VOIDmode (below) insns known to need no reloads; confirm
	     against the code that sets these modes.  */
	  if (num_eliminable && GET_MODE (insn) == QImode)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      if (GET_CODE (insn) == NOTE)
		{
		  insn = next;
		  continue;
		}
	    }

	  if (GET_MODE (insn) == VOIDmode)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      bzero (reg_has_output_reload, max_regno);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (n_reloads > 0)
	    {
	      /* This NEXT shadows the outer one (same value); it bounds the
		 scan over the reload insns just emitted for INSN.  */
	      rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
	      rtx p;
	      int class;

	      /* If this block has not had spilling done for a
		 particular class and we have any non-optionals that need a
		 spill reg in that class, abort.  */

	      for (class = 0; class < N_REG_CLASSES; class++)
		if (basic_block_needs[class] != 0
		    && basic_block_needs[class][this_block] == 0)
		  for (i = 0; i < n_reloads; i++)
		    if (class == (int) reload_reg_class[i]
			&& reload_reg_rtx[i] == 0
			&& ! reload_optional[i]
			&& (reload_in[i] != 0 || reload_out[i] != 0
			    || reload_secondary_p[i] != 0))
		      abort ();

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (insn, avoid_return_reg);

#ifdef SMALL_REGISTER_CLASSES
	      /* Merge any reloads that we didn't combine for fear of
		 increasing the number of spill registers needed but now
		 discover can be safely merged.  */
	      merge_assigned_reloads (insn);
#endif

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (insn);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insn that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads ();

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */

	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
		  if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
		      && (recog_memoized (p) < 0
			  || (insn_extract (p),
			      ! constrain_operands (INSN_CODE (p), 1))))
		    {
		      error_for_asm (insn,
				     "`asm' operand requires impossible reload");
		      /* Delete the offending reload insn by turning it
			 into a deleted note.  */
		      PUT_CODE (p, NOTE);
		      NOTE_SOURCE_FILE (p) = 0;
		      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
		    }
	    }
	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.  */
	  note_stores (PATTERN (insn), forget_old_reloads_1);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
	    if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (PATTERN (x), forget_old_reloads_1);

#ifdef AUTO_INC_DEC
	  /* Likewise for regs altered by auto-increment in this insn.
	     But note that the reg-notes are not changed by reloading:
	     they still contain the pseudo-regs, not the spill regs.  */
	  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (reload_out[i] == XEXP (x, 0))
		    break;

		if (i == n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
	      }
#endif
	}
      /* A reload reg's contents are unknown after a label.  */
      if (GET_CODE (insn) == CODE_LABEL)
	for (i = 0; i < n_spills; i++)
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg.  */
      else if (GET_CODE (insn) == CALL_INSN)
	for (i = 0; i < n_spills; i++)
	  if (call_used_regs[spill_regs[i]])
	    {
	      reg_reloaded_contents[i] = -1;
	      reg_reloaded_insn[i] = 0;
	    }

      /* In case registers overlap, allow certain insns to invalidate
	 particular hard registers.  */

#ifdef INSN_CLOBBERS_REGNO_P
      for (i = 0 ; i < n_spills ; i++)
	if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }
#endif

      insn = next;

#ifdef USE_C_ALLOCA
      /* With the C alloca, free the chunks allocated for this insn.  */
      alloca (0);
#endif
    }
}
3891
3892 /* Discard all record of any value reloaded from X,
3893 or reloaded in X from someplace else;
3894 unless X is an output reload reg of the current insn.
3895
3896 X may be a hard reg (the reload reg)
3897 or it may be a pseudo reg that was reloaded from. */
3898
3899 static void
3900 forget_old_reloads_1 (x, ignored)
3901 rtx x;
3902 rtx ignored;
3903 {
3904 register int regno;
3905 int nr;
3906 int offset = 0;
3907
3908 /* note_stores does give us subregs of hard regs. */
3909 while (GET_CODE (x) == SUBREG)
3910 {
3911 offset += SUBREG_WORD (x);
3912 x = SUBREG_REG (x);
3913 }
3914
3915 if (GET_CODE (x) != REG)
3916 return;
3917
3918 regno = REGNO (x) + offset;
3919
3920 if (regno >= FIRST_PSEUDO_REGISTER)
3921 nr = 1;
3922 else
3923 {
3924 int i;
3925 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3926 /* Storing into a spilled-reg invalidates its contents.
3927 This can happen if a block-local pseudo is allocated to that reg
3928 and it wasn't spilled because this block's total need is 0.
3929 Then some insn might have an optional reload and use this reg. */
3930 for (i = 0; i < nr; i++)
3931 if (spill_reg_order[regno + i] >= 0
3932 /* But don't do this if the reg actually serves as an output
3933 reload reg in the current instruction. */
3934 && (n_reloads == 0
3935 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3936 {
3937 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3938 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3939 }
3940 }
3941
3942 /* Since value of X has changed,
3943 forget any value previously copied from it. */
3944
3945 while (nr-- > 0)
3946 /* But don't forget a copy if this is the output reload
3947 that establishes the copy's validity. */
3948 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3949 reg_last_reload_reg[regno + nr] = 0;
3950 }
3951 \f
3952 /* For each reload, the mode of the reload register. */
3953 static enum machine_mode reload_mode[MAX_RELOADS];
3954
3955 /* For each reload, the largest number of registers it will require. */
3956 static int reload_nregs[MAX_RELOADS];
3957
3958 /* Comparison function for qsort to decide which of two reloads
3959 should be handled first. *P1 and *P2 are the reload numbers. */
3960
3961 static int
3962 reload_reg_class_lower (p1, p2)
3963 short *p1, *p2;
3964 {
3965 register int r1 = *p1, r2 = *p2;
3966 register int t;
3967
3968 /* Consider required reloads before optional ones. */
3969 t = reload_optional[r1] - reload_optional[r2];
3970 if (t != 0)
3971 return t;
3972
3973 /* Count all solitary classes before non-solitary ones. */
3974 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3975 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3976 if (t != 0)
3977 return t;
3978
3979 /* Aside from solitaires, consider all multi-reg groups first. */
3980 t = reload_nregs[r2] - reload_nregs[r1];
3981 if (t != 0)
3982 return t;
3983
3984 /* Consider reloads in order of increasing reg-class number. */
3985 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3986 if (t != 0)
3987 return t;
3988
3989 /* If reloads are equally urgent, sort by reload number,
3990 so that the results of qsort leave nothing to chance. */
3991 return r1 - r2;
3992 }
3993 \f
3994 /* The following HARD_REG_SETs indicate when each hard register is
3995 used for a reload of various parts of the current insn. */
3996
3997 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
3998 static HARD_REG_SET reload_reg_used;
3999 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4000 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4001 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4002 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4003 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4004 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4005 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4006 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4007 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4008 static HARD_REG_SET reload_reg_used_in_op_addr;
4009 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4010 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4011 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4012 static HARD_REG_SET reload_reg_used_in_insn;
4013 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4014 static HARD_REG_SET reload_reg_used_in_other_addr;
4015
4016 /* If reg is in use as a reload reg for any sort of reload. */
4017 static HARD_REG_SET reload_reg_used_at_all;
4018
/* If reg is in use as an inherited reload.  We just mark the first
   register in the group.  */
4021 static HARD_REG_SET reload_reg_used_for_inherit;
4022
4023 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4024 TYPE. MODE is used to indicate how many consecutive regs are
4025 actually used. */
4026
4027 static void
4028 mark_reload_reg_in_use (regno, opnum, type, mode)
4029 int regno;
4030 int opnum;
4031 enum reload_type type;
4032 enum machine_mode mode;
4033 {
4034 int nregs = HARD_REGNO_NREGS (regno, mode);
4035 int i;
4036
4037 for (i = regno; i < nregs + regno; i++)
4038 {
4039 switch (type)
4040 {
4041 case RELOAD_OTHER:
4042 SET_HARD_REG_BIT (reload_reg_used, i);
4043 break;
4044
4045 case RELOAD_FOR_INPUT_ADDRESS:
4046 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4047 break;
4048
4049 case RELOAD_FOR_OUTPUT_ADDRESS:
4050 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4051 break;
4052
4053 case RELOAD_FOR_OPERAND_ADDRESS:
4054 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4055 break;
4056
4057 case RELOAD_FOR_OPADDR_ADDR:
4058 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4059 break;
4060
4061 case RELOAD_FOR_OTHER_ADDRESS:
4062 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4063 break;
4064
4065 case RELOAD_FOR_INPUT:
4066 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4067 break;
4068
4069 case RELOAD_FOR_OUTPUT:
4070 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4071 break;
4072
4073 case RELOAD_FOR_INSN:
4074 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4075 break;
4076 }
4077
4078 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4079 }
4080 }
4081
4082 /* Similarly, but show REGNO is no longer in use for a reload. */
4083
4084 static void
4085 clear_reload_reg_in_use (regno, opnum, type, mode)
4086 int regno;
4087 int opnum;
4088 enum reload_type type;
4089 enum machine_mode mode;
4090 {
4091 int nregs = HARD_REGNO_NREGS (regno, mode);
4092 int i;
4093
4094 for (i = regno; i < nregs + regno; i++)
4095 {
4096 switch (type)
4097 {
4098 case RELOAD_OTHER:
4099 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4100 break;
4101
4102 case RELOAD_FOR_INPUT_ADDRESS:
4103 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4104 break;
4105
4106 case RELOAD_FOR_OUTPUT_ADDRESS:
4107 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4108 break;
4109
4110 case RELOAD_FOR_OPERAND_ADDRESS:
4111 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4112 break;
4113
4114 case RELOAD_FOR_OPADDR_ADDR:
4115 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4116 break;
4117
4118 case RELOAD_FOR_OTHER_ADDRESS:
4119 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4120 break;
4121
4122 case RELOAD_FOR_INPUT:
4123 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4124 break;
4125
4126 case RELOAD_FOR_OUTPUT:
4127 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4128 break;
4129
4130 case RELOAD_FOR_INSN:
4131 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4132 break;
4133 }
4134 }
4135 }
4136
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.  Consults the reload_reg_used_* sets
   built up by mark_reload_reg_in_use for the current insn.  */

static int
reload_reg_free_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything except
     RELOAD_FOR_OTHER_ADDRESS.  Recall that RELOAD_FOR_OTHER_ADDRESS is known
     to be used only for inputs.  */

  if (type != RELOAD_FOR_OTHER_ADDRESS
      && TEST_HARD_REG_BIT (reload_reg_used, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means not available for a RELOAD_OTHER.  */
      return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);

      /* The other kinds of use can sometimes share a register.  */
    case RELOAD_FOR_INPUT:
      /* Inputs conflict with the insn itself and with operand addresses.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Operand-address reloads may not share with any input or with
	 the insn or another operand-address reload.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Similar, but only the operand-address-address set conflicts
	 besides the inputs.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      /* Conflicts with every input and output, with other
	 RELOAD_FOR_INSN reloads, and with operand-address reloads.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Only conflicts with other RELOAD_FOR_OTHER_ADDRESS reloads.  */
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }
  /* All reload_type values are handled above.  NOTREACHED.  */
  abort ();
}
4252
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is not in use for a reload in any prior part of the insn.

   "Prior" is defined by the order in which the kinds of reload fire:
   RELOAD_FOR_OTHER_ADDRESS first, then input addresses and inputs,
   then the insn/operand-address reloads, then outputs and their
   addresses.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_free_before_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* These always come first.  */
      return 1;

    case RELOAD_OTHER:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

      /* If this use is for part of the insn,
	 check the reg is not in use for any prior part.  It is tempting
	 to try to do this by falling through from objects that occur
	 later in the insn to ones that occur earlier, but that will not
	 correctly take into account the fact that here we MUST ignore
	 things that would prevent the register from being allocated in
	 the first place, since we know that it was allocated.  */

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Earlier reloads are for earlier outputs or their addresses,
	 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
	 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
	 RELOAD_OTHER).  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OUTPUT:
      /* This can't be used in the output address for this operand and
	 anything that can't be used for it, except that we've already
	 tested for RELOAD_FOR_INSN objects.  */

      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_OPERAND_ADDRESS:
    case RELOAD_FOR_OPADDR_ADDR:
    case RELOAD_FOR_INSN:
      /* These can't conflict with inputs, or each other, so all we have to
	 test is input addresses and the addresses of OTHER items.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT:
      /* The only things earlier are the address for this and
	 earlier inputs, other inputs (which we know we don't conflict
	 with), and addresses of RELOAD_OTHER objects.  */

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similarly, all we have to check is for use in earlier inputs'
	 addresses.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }
  /* All reload_type values are handled above.  NOTREACHED.  */
  abort ();
}
4363
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is still available in REGNO at the end of the insn.

   This is the forward-looking counterpart of reload_reg_free_before_p:
   the value reaches the end if no LATER part of the insn reuses the
   same register.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPADDR_ADDR:
      /* Same checks as above, plus the operand-address and insn sets.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      opnum = -1;	/* So the fall-through scans ALL output addresses.  */

      /* ... fall through ...  */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
	  return 0;

      return 1;
    }

  /* All reload_type values are handled above.  NOTREACHED.  */
  abort ();
}
4485 \f
/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
   Return 0 otherwise.

   This function uses the same algorithm as reload_reg_free_p above,
   but phrased pairwise on the two reloads' types and operand numbers;
   keep the two in sync when changing either.  */

static int
reloads_conflict (r1, r2)
     int r1, r2;
{
  enum reload_type r1_type = reload_when_needed[r1];
  enum reload_type r2_type = reload_when_needed[r2];
  int r1_opnum = reload_opnum[r1];
  int r2_opnum = reload_opnum[r2];

  /* RELOAD_OTHER conflicts with everything except
     RELOAD_FOR_OTHER_ADDRESS.  */

  if ((r1_type == RELOAD_OTHER && r2_type != RELOAD_FOR_OTHER_ADDRESS)
      || (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS))
    return 1;

  /* Otherwise, check conflicts differently for each type.  */

  switch (r1_type)
    {
    case RELOAD_FOR_INPUT:
      return (r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
	      || r2_type == RELOAD_FOR_OPADDR_ADDR
	      || r2_type == RELOAD_FOR_INPUT
	      || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));

    case RELOAD_FOR_INPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_OUTPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));

    case RELOAD_FOR_OPERAND_ADDRESS:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OPADDR_ADDR:
      return (r2_type == RELOAD_FOR_INPUT
	      || r2_type == RELOAD_FOR_OPADDR_ADDR);

    case RELOAD_FOR_OUTPUT:
      return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
	      || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
		  && r2_opnum >= r1_opnum));

    case RELOAD_FOR_INSN:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
	      || r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OTHER_ADDRESS:
      return r2_type == RELOAD_FOR_OTHER_ADDRESS;

    default:
      abort ();
    }
}
4551 \f
/* Vector of reload-numbers showing the order in which the reloads should
   be processed.  */
short reload_order[MAX_RELOADS];

/* Indexed by reload number, 1 if incoming value
   inherited from previous insns.  */
char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
rtx reload_inheritance_insn[MAX_RELOADS];

/* If non-zero, this is a place to get the value of the reload,
   rather than using reload_in.  */
rtx reload_override_in[MAX_RELOADS];

/* For each reload, the index in spill_regs of the spill register used,
   or -1 if we did not need one of the spill registers for this reload.  */
int reload_spill_index[MAX_RELOADS];

/* Index of last register assigned as a spill register.  We allocate in
   a round-robin fashion.  */

static int last_spill_reg = 0;
4576
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is non-zero if this is the last reload for the insn being
   processed.

   Set reload_reg_rtx[R] to the register allocated.

   If NOERROR is nonzero, we return 1 if successful,
   or 0 if we couldn't find a spill reg and we didn't change anything.  */

static int
allocate_reload_reg (r, insn, last_reload, noerror)
     int r;
     rtx insn;
     int last_reload;
     int noerror;
{
  int i;
  int pass;
  int count;
  rtx new;
  int regno;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = reload_nregs[r] > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use two passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers.
     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 2; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  Don't do this, however, when we have
	 group needs and failure would be fatal; if we only have a relatively
	 small number of spill registers, and more than one of them has
	 group needs, then by starting in the middle, we may end up 
	 allocating the first one in such a way that we are not left with
	 sufficient groups to handle the rest.  */

      if (noerror || ! force_group)
	i = last_spill_reg;
      else
	i = -1;

      for (count = 0; count < n_spills; count++)
	{
	  int class = (int) reload_reg_class[r];

	  i = (i + 1) % n_spills;

	  /* A candidate must be free at the times this reload needs it,
	     belong to the reload's register class, and be able to hold
	     the reload's mode.  */
	  if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
				 reload_when_needed[r])
	      && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
	      && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 spill_regs[i])
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      spill_regs[i]))))
	    {
	      int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.
		 Also, don't use for a group registers that are
		 needed for nongroups.  */
	      if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
		while (nr > 1)
		  {
		    regno = spill_regs[i] + nr - 1;
		    if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
			  && spill_reg_order[regno] >= 0
			  && reload_reg_free_p (regno, reload_opnum[r],
						reload_when_needed[r])
			  && ! TEST_HARD_REG_BIT (counted_for_nongroups,
						  regno)))
		      break;
		    nr--;
		  }
	      /* NR reaches 1 only if every register of the group checked
		 out; otherwise keep scanning the spill regs.  */
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on pass 1, omit pass 2.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count == n_spills)
    {
      if (noerror)
	return 0;
      goto failure;
    }

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  new = spill_reg_rtx[i];

  if (new == 0 || GET_MODE (new) != reload_mode[r])
    spill_reg_rtx[i] = new
      = gen_rtx (REG, reload_mode[r], spill_regs[i]);

  regno = true_regnum (new);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
    {
      enum machine_mode test_mode = VOIDmode;
      if (reload_in[r])
	test_mode = GET_MODE (reload_in[r]);
      /* If reload_in[r] has VOIDmode, it means we will load it
	 in whatever mode the reload reg has: to wit, reload_mode[r].
	 We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
	 to reload from or into have modes which are valid for this
	 reload register.  Otherwise the reload insns would be invalid.  */
      if (! (reload_in[r] != 0 && test_mode != VOIDmode
	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
	if (! (reload_out[r] != 0
	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
	  {
	    /* The reg is OK.  */
	    last_spill_reg = i;

	    /* Mark as in use for this insn the reload regs we use
	       for this.  */
	    mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
				    reload_when_needed[r], reload_mode[r]);

	    reload_reg_rtx[r] = new;
	    reload_spill_index[r] = i;
	    return 1;
	  }
    }

  /* The reg is not OK.  */
  if (noerror)
    return 0;

 failure:
  if (asm_noperands (PATTERN (insn)) < 0)
    /* It's the compiler's fault.  */
    abort ();

  /* It's the user's fault; the operand's mode and constraint
     don't match.  Disable this reload so we don't crash in final.  */
  error_for_asm (insn,
		 "`asm' operand constraint incompatible with operand size");
  reload_in[r] = 0;
  reload_out[r] = 0;
  reload_reg_rtx[r] = 0;
  reload_optional[r] = 1;
  reload_secondary_p[r] = 1;

  return 1;
}
4775 \f
/* Assign hard reg targets for the pseudo-registers we must reload
   into hard regs for this insn.
   Also output the instructions to copy them in and out of the hard regs.

   For machines with register classes, we are responsible for
   finding a reload reg in the proper class.  */

static void
choose_reload_regs (insn, avoid_return_reg)
     rtx insn;
     rtx avoid_return_reg;
{
  register int i, j;
  int max_group_size = 1;
  enum reg_class group_class = NO_REGS;
  int inheritance;

  /* Copies of the reload-register bookkeeping state, taken before the
     first allocation attempt.  If allocating with inheritance fails,
     we restore from these and retry with inheritance disabled.  */
  rtx save_reload_reg_rtx[MAX_RELOADS];
  char save_reload_inherited[MAX_RELOADS];
  rtx save_reload_inheritance_insn[MAX_RELOADS];
  rtx save_reload_override_in[MAX_RELOADS];
  int save_reload_spill_index[MAX_RELOADS];
  HARD_REG_SET save_reload_reg_used;
  HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_op_addr;
  HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
  HARD_REG_SET save_reload_reg_used_in_insn;
  HARD_REG_SET save_reload_reg_used_in_other_addr;
  HARD_REG_SET save_reload_reg_used_at_all;

  /* Start with no inheritance information and no reload regs in use.  */
  bzero (reload_inherited, MAX_RELOADS);
  bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
  bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));

  CLEAR_HARD_REG_SET (reload_reg_used);
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);

  for (i = 0; i < reload_n_operands; i++)
    {
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
    }

#ifdef SMALL_REGISTER_CLASSES
  /* Don't bother with avoiding the return reg
     if we have no mandatory reload that could use it.  */
  if (avoid_return_reg)
    {
      int do_avoid = 0;
      int regno = REGNO (avoid_return_reg);
      int nregs
	= HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
      int r;

      for (r = regno; r < regno + nregs; r++)
	if (spill_reg_order[r] >= 0)
	  for (j = 0; j < n_reloads; j++)
	    if (!reload_optional[j] && reload_reg_rtx[j] == 0
		&& (reload_in[j] != 0 || reload_out[j] != 0
		    || reload_secondary_p[j])
		&&
		TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
	      do_avoid = 1;
      if (!do_avoid)
	avoid_return_reg = 0;
    }
#endif /* SMALL_REGISTER_CLASSES */

#if 0  /* Not needed, now that we can always retry without inheritance.  */
  /* See if we have more mandatory reloads than spill regs.
     If so, then we cannot risk optimizations that could prevent
     reloads from sharing one spill register.

     Since we will try finding a better register than reload_reg_rtx
     unless it is equal to reload_in or reload_out, count such reloads.  */

  {
    int tem = 0;
#ifdef SMALL_REGISTER_CLASSES
    /* NOTE(review): this second declaration of `tem' would shadow the
       first if this #if 0 block were ever re-enabled.  */
    int tem = (avoid_return_reg != 0);
#endif
    for (j = 0; j < n_reloads; j++)
      if (! reload_optional[j]
	  && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
	  && (reload_reg_rtx[j] == 0
	      || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
		  && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
	tem++;
    if (tem > n_spills)
      must_reuse = 1;
  }
#endif

#ifdef SMALL_REGISTER_CLASSES
  /* Don't use the subroutine call return reg for a reload
     if we are supposed to avoid it.  */
  if (avoid_return_reg)
    {
      int regno = REGNO (avoid_return_reg);
      int nregs
	= HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
      int r;

      for (r = regno; r < regno + nregs; r++)
	if (spill_reg_order[r] >= 0)
	  SET_HARD_REG_BIT (reload_reg_used, r);
    }
#endif /* SMALL_REGISTER_CLASSES */

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.

     Also note whether any of the reloads wants a consecutive group of regs.
     If so, record the maximum size of the group desired and what
     register class contains all the groups needed by this insn.  */

  for (j = 0; j < n_reloads; j++)
    {
      reload_order[j] = j;
      reload_spill_index[j] = -1;

      /* The reload's mode is the wider of its input and output modes.  */
      reload_mode[j]
	= (reload_inmode[j] == VOIDmode
	   || (GET_MODE_SIZE (reload_outmode[j])
	       > GET_MODE_SIZE (reload_inmode[j])))
	  ? reload_outmode[j] : reload_inmode[j];

      reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);

      if (reload_nregs[j] > 1)
	{
	  max_group_size = MAX (reload_nregs[j], max_group_size);
	  group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
	}

      /* If we have already decided to use a certain register,
	 don't use it in another way.  */
      if (reload_reg_rtx[j])
	mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
				reload_when_needed[j], reload_mode[j]);
    }

  if (n_reloads > 1)
    qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  /* Snapshot all the allocation state so we can roll back and retry
     without inheritance if the first attempt fails.  */
  bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
	 sizeof reload_reg_rtx);
  bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
  bcopy ((char *) reload_inheritance_insn,
	 (char *) save_reload_inheritance_insn,
	 sizeof reload_inheritance_insn);
  bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
	 sizeof reload_override_in);
  bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
	 sizeof reload_spill_index);
  COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
  COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
  COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
		     reload_reg_used_in_op_addr);

  COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
		     reload_reg_used_in_op_addr_reload);

  COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
		     reload_reg_used_in_insn);
  COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
		     reload_reg_used_in_other_addr);

  for (i = 0; i < reload_n_operands; i++)
    {
      COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
			 reload_reg_used_in_output[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
			 reload_reg_used_in_input[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
			 reload_reg_used_in_input_addr[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
			 reload_reg_used_in_output_addr[i]);
    }

  /* If -O, try first with inheritance, then turning it off.
     If not -O, don't do inheritance.
     Using inheritance when not optimizing leads to paradoxes
     with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
     because one side of the comparison might be inherited.  */

  for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
    {
      /* Process the reloads in order of preference just found.
	 Beyond this point, subregs can be found in reload_reg_rtx.

	 This used to look for an existing reloaded home for all
	 of the reloads, and only then perform any new reloads.
	 But that could lose if the reloads were done out of reg-class order
	 because a later reload with a looser constraint might have an old
	 home in a register needed by an earlier reload with a tighter constraint.

	 To solve this, we make two passes over the reloads, in the order
	 described above.  In the first pass we try to inherit a reload
	 from a previous insn.  If there is a later reload that needs a
	 class that is a proper subset of the class being processed, we must
	 also allocate a spill register during the first pass.

	 Then make a second pass over the reloads to allocate any reloads
	 that haven't been given registers yet.  */

      CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);

      for (j = 0; j < n_reloads; j++)
	{
	  register int r = reload_order[j];

	  /* Ignore reloads that got marked inoperative.  */
	  if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
	    continue;

	  /* If find_reloads chose to use reload_in or reload_out as a reload
	     register, we don't need to choose one.  Otherwise, try even if it found
	     one since we might save an insn if we find the value lying around.  */
	  if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
	      && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
		  || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
	    continue;

#if 0 /* No longer needed for correct operation.
	 It might give better code, or might not; worth an experiment?  */
	  /* If this is an optional reload, we can't inherit from earlier insns
	     until we are sure that any non-optional reloads have been allocated.
	     The following code takes advantage of the fact that optional reloads
	     are at the end of reload_order.  */
	  if (reload_optional[r] != 0)
	    for (i = 0; i < j; i++)
	      if ((reload_out[reload_order[i]] != 0
		   || reload_in[reload_order[i]] != 0
		   || reload_secondary_p[reload_order[i]])
		  && ! reload_optional[reload_order[i]]
		  && reload_reg_rtx[reload_order[i]] == 0)
		allocate_reload_reg (reload_order[i], insn, 0, inheritance);
#endif

	  /* First see if this pseudo is already available as reloaded
	     for a previous insn.  We cannot try to inherit for reloads
	     that are smaller than the maximum number of registers needed
	     for groups unless the register we would allocate cannot be used
	     for the groups.

	     We could check here to see if this is a secondary reload for
	     an object that is already in a register of the desired class.
	     This would avoid the need for the secondary reload register.
	     But this is complex because we can't easily determine what
	     objects might want to be loaded via this reload.  So let a register
	     be allocated here.  In `emit_reload_insns' we suppress one of the
	     loads in the case described above.  */

	  if (inheritance)
	    {
	      register int regno = -1;
	      enum machine_mode mode;

	      if (reload_in[r] == 0)
		;
	      else if (GET_CODE (reload_in[r]) == REG)
		{
		  regno = REGNO (reload_in[r]);
		  mode = GET_MODE (reload_in[r]);
		}
	      else if (GET_CODE (reload_in_reg[r]) == REG)
		{
		  regno = REGNO (reload_in_reg[r]);
		  mode = GET_MODE (reload_in_reg[r]);
		}
#if 0
	      /* This won't work, since REGNO can be a pseudo reg number.
		 Also, it takes much more hair to keep track of all the things
		 that can invalidate an inherited reload of part of a pseudoreg.  */
	      else if (GET_CODE (reload_in[r]) == SUBREG
		       && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
		regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
#endif

	      if (regno >= 0 && reg_last_reload_reg[regno] != 0)
		{
		  i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];

		  /* The spill reg must still hold REGNO's value, be wide
		     enough, belong to the right class, not break a group,
		     and be free at the times this reload needs it.  */
		  if (reg_reloaded_contents[i] == regno
		      && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
			  >= GET_MODE_SIZE (mode))
		      && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
		      && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
					    spill_regs[i])
		      && (reload_nregs[r] == max_group_size
			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
						  spill_regs[i]))
		      && reload_reg_free_p (spill_regs[i], reload_opnum[r],
					    reload_when_needed[r])
		      && reload_reg_free_before_p (spill_regs[i],
						   reload_opnum[r],
						   reload_when_needed[r]))
		    {
		      /* If a group is needed, verify that all the subsequent
			 registers still have their values intact.  */
		      int nr
			= HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
		      int k;

		      for (k = 1; k < nr; k++)
			if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
			    != regno)
			  break;

		      if (k == nr)
			{
			  int i1;

			  /* We found a register that contains the
			     value we need.  If this register is the
			     same as an `earlyclobber' operand of the
			     current insn, just mark it as a place to
			     reload from since we can't use it as the
			     reload register itself.  */

			  for (i1 = 0; i1 < n_earlyclobbers; i1++)
			    if (reg_overlap_mentioned_for_reload_p
				(reg_last_reload_reg[regno],
				 reload_earlyclobbers[i1]))
			      break;

			  if (i1 != n_earlyclobbers
			      /* Don't really use the inherited spill reg
				 if we need it wider than we've got it.  */
			      || (GET_MODE_SIZE (reload_mode[r])
				  > GET_MODE_SIZE (mode)))
			    reload_override_in[r] = reg_last_reload_reg[regno];
			  else
			    {
			      int k;
			      /* We can use this as a reload reg.  */
			      /* Mark the register as in use for this part of
				 the insn.  */
			      mark_reload_reg_in_use (spill_regs[i],
						      reload_opnum[r],
						      reload_when_needed[r],
						      reload_mode[r]);
			      reload_reg_rtx[r] = reg_last_reload_reg[regno];
			      reload_inherited[r] = 1;
			      reload_inheritance_insn[r]
				= reg_reloaded_insn[i];
			      reload_spill_index[r] = i;
			      for (k = 0; k < nr; k++)
				SET_HARD_REG_BIT (reload_reg_used_for_inherit,
						  spill_regs[i + k]);
			    }
			}
		    }
		}
	    }

	  /* Here's another way to see if the value is already lying around.  */
	  if (inheritance
	      && reload_in[r] != 0
	      && ! reload_inherited[r]
	      && reload_out[r] == 0
	      && (CONSTANT_P (reload_in[r])
		  || GET_CODE (reload_in[r]) == PLUS
		  || GET_CODE (reload_in[r]) == REG
		  || GET_CODE (reload_in[r]) == MEM)
	      && (reload_nregs[r] == max_group_size
		  || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
	    {
	      register rtx equiv
		= find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
				  -1, NULL_PTR, 0, reload_mode[r]);
	      int regno;

	      if (equiv != 0)
		{
		  if (GET_CODE (equiv) == REG)
		    regno = REGNO (equiv);
		  else if (GET_CODE (equiv) == SUBREG)
		    {
		      /* This must be a SUBREG of a hard register.
			 Make a new REG since this might be used in an
			 address and not all machines support SUBREGs
			 there.  */
		      regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
		      equiv = gen_rtx (REG, reload_mode[r], regno);
		    }
		  else
		    abort ();
		}

	      /* If we found a spill reg, reject it unless it is free
		 and of the desired class.  */
	      if (equiv != 0
		  && ((spill_reg_order[regno] >= 0
		       && ! reload_reg_free_before_p (regno, reload_opnum[r],
						      reload_when_needed[r]))
		      || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
					      regno)))
		equiv = 0;

	      if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
		equiv = 0;

	      if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
		equiv = 0;

	      /* We found a register that contains the value we need.
		 If this register is the same as an `earlyclobber' operand
		 of the current insn, just mark it as a place to reload from
		 since we can't use it as the reload register itself.  */

	      if (equiv != 0)
		for (i = 0; i < n_earlyclobbers; i++)
		  if (reg_overlap_mentioned_for_reload_p (equiv,
							  reload_earlyclobbers[i]))
		    {
		      reload_override_in[r] = equiv;
		      equiv = 0;
		      break;
		    }

	      /* JRV: If the equiv register we have found is explicitly
		 clobbered in the current insn, mark but don't use, as above.  */

	      if (equiv != 0 && regno_clobbered_p (regno, insn))
		{
		  reload_override_in[r] = equiv;
		  equiv = 0;
		}

	      /* If we found an equivalent reg, say no code need be generated
		 to load it, and use it as our reload reg.  */
	      if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
		{
		  reload_reg_rtx[r] = equiv;
		  reload_inherited[r] = 1;
		  /* If it is a spill reg,
		     mark the spill reg as in use for this insn.  */
		  i = spill_reg_order[regno];
		  if (i >= 0)
		    {
		      int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
		      int k;
		      mark_reload_reg_in_use (regno, reload_opnum[r],
					      reload_when_needed[r],
					      reload_mode[r]);
		      for (k = 0; k < nr; k++)
			SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
		    }
		}
	    }

	  /* If we found a register to use already, or if this is an optional
	     reload, we are done.  */
	  if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
	    continue;

#if 0 /* No longer needed for correct operation.  Might or might not
	 give better code on the average.  Want to experiment?  */

	  /* See if there is a later reload that has a class different from our
	     class that intersects our class or that requires less register
	     than our reload.  If so, we must allocate a register to this
	     reload now, since that reload might inherit a previous reload
	     and take the only available register in our class.  Don't do this
	     for optional reloads since they will force all previous reloads
	     to be allocated.  Also don't do this for reloads that have been
	     turned off.  */

	  for (i = j + 1; i < n_reloads; i++)
	    {
	      int s = reload_order[i];

	      if ((reload_in[s] == 0 && reload_out[s] == 0
		   && ! reload_secondary_p[s])
		  || reload_optional[s])
		continue;

	      if ((reload_reg_class[s] != reload_reg_class[r]
		   && reg_classes_intersect_p (reload_reg_class[r],
					       reload_reg_class[s]))
		  || reload_nregs[s] < reload_nregs[r])
		break;
	    }

	  if (i == n_reloads)
	    continue;

	  allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
#endif
	}

      /* Now allocate reload registers for anything non-optional that
	 didn't get one yet.  */
      for (j = 0; j < n_reloads; j++)
	{
	  register int r = reload_order[j];

	  /* Ignore reloads that got marked inoperative.  */
	  if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
	    continue;

	  /* Skip reloads that already have a register allocated or are
	     optional.  */
	  if (reload_reg_rtx[r] != 0 || reload_optional[r])
	    continue;

	  if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
	    break;
	}

      /* If that loop got all the way, we have won.  */
      if (j == n_reloads)
	break;

    fail:
      /* Reached by falling through when allocate_reload_reg failed above.
	 Loop around and try without any inheritance.  */
      /* First undo everything done by the failed attempt
	 to allocate with inheritance.  */
      bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
	     sizeof reload_reg_rtx);
      bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
	     sizeof reload_inherited);
      bcopy ((char *) save_reload_inheritance_insn,
	     (char *) reload_inheritance_insn,
	     sizeof reload_inheritance_insn);
      bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
	     sizeof reload_override_in);
      bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
	     sizeof reload_spill_index);
      COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
      COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
      COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
			 save_reload_reg_used_in_op_addr);
      COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
			 save_reload_reg_used_in_op_addr_reload);
      COPY_HARD_REG_SET (reload_reg_used_in_insn,
			 save_reload_reg_used_in_insn);
      COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
			 save_reload_reg_used_in_other_addr);

      for (i = 0; i < reload_n_operands; i++)
	{
	  COPY_HARD_REG_SET (reload_reg_used_in_input[i],
			     save_reload_reg_used_in_input[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_output[i],
			     save_reload_reg_used_in_output[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
			     save_reload_reg_used_in_input_addr[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
			     save_reload_reg_used_in_output_addr[i]);
	}
    }

  /* If we thought we could inherit a reload, because it seemed that
     nothing else wanted the same reload register earlier in the insn,
     verify that assumption, now that all reloads have been assigned.  */

  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];

      if (reload_inherited[r] && reload_reg_rtx[r] != 0
	  && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
					 reload_opnum[r],
					 reload_when_needed[r]))
	reload_inherited[r] = 0;

      /* If we found a better place to reload from,
	 validate it in the same fashion, if it is a reload reg.  */
      if (reload_override_in[r]
	  && (GET_CODE (reload_override_in[r]) == REG
	      || GET_CODE (reload_override_in[r]) == SUBREG))
	{
	  int regno = true_regnum (reload_override_in[r]);
	  if (spill_reg_order[regno] >= 0
	      && ! reload_reg_free_before_p (regno, reload_opnum[r],
					     reload_when_needed[r]))
	    reload_override_in[r] = 0;
	}
    }

  /* Now that reload_override_in is known valid,
     actually override reload_in.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_override_in[j])
      reload_in[j] = reload_override_in[j];

  /* If this reload won't be done because it has been cancelled or is
     optional and not inherited, clear reload_reg_rtx so other
     routines (such as subst_reloads) don't get confused.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_reg_rtx[j] != 0
	&& ((reload_optional[j] && ! reload_inherited[j])
	    || (reload_in[j] == 0 && reload_out[j] == 0
		&& ! reload_secondary_p[j])))
      {
	int regno = true_regnum (reload_reg_rtx[j]);

	if (spill_reg_order[regno] >= 0)
	  clear_reload_reg_in_use (regno, reload_opnum[j],
				   reload_when_needed[j], reload_mode[j]);
	reload_reg_rtx[j] = 0;
      }

  /* Record which pseudos and which spill regs have output reloads.  */
  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];

      i = reload_spill_index[r];

      /* I is nonneg if this reload used one of the spill regs.
	 If reload_reg_rtx[r] is 0, this is an optional reload
	 that we opted to ignore.  */
      if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
	  && reload_reg_rtx[r] != 0)
	{
	  register int nregno = REGNO (reload_out[r]);
	  int nr = 1;

	  if (nregno < FIRST_PSEUDO_REGISTER)
	    nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);

	  while (--nr >= 0)
	    reg_has_output_reload[nregno + nr] = 1;

	  if (i >= 0)
	    {
	      nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
	      while (--nr >= 0)
		SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
	    }

	  /* An output reload must be one of these types; anything else
	     indicates an internal inconsistency.  */
	  if (reload_when_needed[r] != RELOAD_OTHER
	      && reload_when_needed[r] != RELOAD_FOR_OUTPUT
	      && reload_when_needed[r] != RELOAD_FOR_INSN)
	    abort ();
	}
    }
}
5429 \f
5430 /* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5431 reloads of the same item for fear that we might not have enough reload
5432 registers. However, normally they will get the same reload register
5433 and hence actually need not be loaded twice.
5434
5435 Here we check for the most common case of this phenomenon: when we have
5436 a number of reloads for the same object, each of which were allocated
5437 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5438 reload, and is not modified in the insn itself. If we find such,
5439 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5440 This will not increase the number of spill registers needed and will
5441 prevent redundant code. */
5442
#ifdef SMALL_REGISTER_CLASSES

/* Merge reloads of INSN that were assigned the same reload register and
   load the same value, promoting the surviving reload to RELOAD_OTHER.
   See the block comment above for the rationale.  */

static void
merge_assigned_reloads (insn)
     rtx insn;
{
  int i, j;

  /* Scan all the reloads looking for ones that only load values and
     are not already RELOAD_OTHER and ones whose reload_reg_rtx are
     assigned and not modified by INSN.  */

  for (i = 0; i < n_reloads; i++)
    {
      if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
	  || reload_out[i] != 0 || reload_reg_rtx[i] == 0
	  || reg_set_p (reload_reg_rtx[i], insn))
	continue;

      /* Look at all other reloads.  Ensure that the only use of this
	 reload_reg_rtx is in a reload that just loads the same value
	 as we do.  Note that any secondary reloads must be of the identical
	 class since the values, modes, and result registers are the
	 same, so we need not do anything with any secondary reloads.  */

      for (j = 0; j < n_reloads; j++)
	{
	  if (i == j || reload_reg_rtx[j] == 0
	      || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
					    reload_reg_rtx[i]))
	    continue;

	  /* If the reload regs aren't exactly the same (e.g., different
	     modes) or if the values are different, we can't merge anything
	     with this reload register.  */

	  if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
	      || reload_out[j] != 0 || reload_in[j] == 0
	      || ! rtx_equal_p (reload_in[i], reload_in[j]))
	    break;
	}

      /* If all is OK, merge the reloads.  Only set this to RELOAD_OTHER if
	 we, in fact, found any matching reloads.  */

      if (j == n_reloads)
	{
	  for (j = 0; j < n_reloads; j++)
	    if (i != j && reload_reg_rtx[j] != 0
		&& rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
	      {
		reload_when_needed[i] = RELOAD_OTHER;
		reload_in[j] = 0;
		transfer_replacements (i, j);
	      }

	  /* If this is now RELOAD_OTHER, look for any reloads that load
	     parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
	     if they were for inputs, RELOAD_OTHER for outputs.  Note that
	     this test is equivalent to looking for reloads for this operand
	     number.

	     The inner tests must use index J (the reload being
	     reclassified), not I: reload I is known to be RELOAD_OTHER
	     here, so testing I made the loop body unreachable and the
	     ternary always yield RELOAD_OTHER.  */

	  if (reload_when_needed[i] == RELOAD_OTHER)
	    for (j = 0; j < n_reloads; j++)
	      if (reload_in[j] != 0
		  && reload_when_needed[j] != RELOAD_OTHER
		  && reg_overlap_mentioned_for_reload_p (reload_in[j],
							 reload_in[i]))
		reload_when_needed[j]
		  = reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
		    ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
	}
    }
}
#endif /* SMALL_REGISTER_CLASSES */
5518 \f
5519 /* Output insns to reload values in and out of the chosen reload regs. */
5520
5521 static void
5522 emit_reload_insns (insn)
5523 rtx insn;
5524 {
5525 register int j;
5526 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5527 rtx other_input_address_reload_insns = 0;
5528 rtx other_input_reload_insns = 0;
5529 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5530 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5531 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5532 rtx operand_reload_insns = 0;
5533 rtx other_operand_reload_insns = 0;
5534 rtx following_insn = NEXT_INSN (insn);
5535 rtx before_insn = insn;
5536 int special;
5537 /* Values to be put in spill_reg_store are put here first. */
5538 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5539
5540 for (j = 0; j < reload_n_operands; j++)
5541 input_reload_insns[j] = input_address_reload_insns[j]
5542 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5543
5544 /* Now output the instructions to copy the data into and out of the
5545 reload registers. Do these in the order that the reloads were reported,
5546 since reloads of base and index registers precede reloads of operands
5547 and the operands may need the base and index registers reloaded. */
5548
5549 for (j = 0; j < n_reloads; j++)
5550 {
5551 register rtx old;
5552 rtx oldequiv_reg = 0;
5553 rtx store_insn = 0;
5554
5555 old = reload_in[j];
5556 if (old != 0 && ! reload_inherited[j]
5557 && ! rtx_equal_p (reload_reg_rtx[j], old)
5558 && reload_reg_rtx[j] != 0)
5559 {
5560 register rtx reloadreg = reload_reg_rtx[j];
5561 rtx oldequiv = 0;
5562 enum machine_mode mode;
5563 rtx *where;
5564
5565 /* Determine the mode to reload in.
5566 This is very tricky because we have three to choose from.
5567 There is the mode the insn operand wants (reload_inmode[J]).
5568 There is the mode of the reload register RELOADREG.
5569 There is the intrinsic mode of the operand, which we could find
5570 by stripping some SUBREGs.
5571 It turns out that RELOADREG's mode is irrelevant:
5572 we can change that arbitrarily.
5573
5574 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5575 then the reload reg may not support QImode moves, so use SImode.
5576 If foo is in memory due to spilling a pseudo reg, this is safe,
5577 because the QImode value is in the least significant part of a
5578 slot big enough for a SImode. If foo is some other sort of
5579 memory reference, then it is impossible to reload this case,
5580 so previous passes had better make sure this never happens.
5581
5582 Then consider a one-word union which has SImode and one of its
5583 members is a float, being fetched as (SUBREG:SF union:SI).
5584 We must fetch that as SFmode because we could be loading into
5585 a float-only register. In this case OLD's mode is correct.
5586
5587 Consider an immediate integer: it has VOIDmode. Here we need
5588 to get a mode from something else.
5589
5590 In some cases, there is a fourth mode, the operand's
5591 containing mode. If the insn specifies a containing mode for
5592 this operand, it overrides all others.
5593
5594 I am not sure whether the algorithm here is always right,
5595 but it does the right things in those cases. */
5596
5597 mode = GET_MODE (old);
5598 if (mode == VOIDmode)
5599 mode = reload_inmode[j];
5600
5601 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5602 /* If we need a secondary register for this operation, see if
5603 the value is already in a register in that class. Don't
5604 do this if the secondary register will be used as a scratch
5605 register. */
5606
5607 if (reload_secondary_in_reload[j] >= 0
5608 && reload_secondary_in_icode[j] == CODE_FOR_nothing
5609 && optimize)
5610 oldequiv
5611 = find_equiv_reg (old, insn,
5612 reload_reg_class[reload_secondary_in_reload[j]],
5613 -1, NULL_PTR, 0, mode);
5614 #endif
5615
5616 /* If reloading from memory, see if there is a register
5617 that already holds the same value. If so, reload from there.
5618 We can pass 0 as the reload_reg_p argument because
5619 any other reload has either already been emitted,
5620 in which case find_equiv_reg will see the reload-insn,
5621 or has yet to be emitted, in which case it doesn't matter
5622 because we will use this equiv reg right away. */
5623
5624 if (oldequiv == 0 && optimize
5625 && (GET_CODE (old) == MEM
5626 || (GET_CODE (old) == REG
5627 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5628 && reg_renumber[REGNO (old)] < 0)))
5629 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5630 -1, NULL_PTR, 0, mode);
5631
5632 if (oldequiv)
5633 {
5634 int regno = true_regnum (oldequiv);
5635
5636 /* If OLDEQUIV is a spill register, don't use it for this
5637 if any other reload needs it at an earlier stage of this insn
5638 or at this stage. */
5639 if (spill_reg_order[regno] >= 0
5640 && (! reload_reg_free_p (regno, reload_opnum[j],
5641 reload_when_needed[j])
5642 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5643 reload_when_needed[j])))
5644 oldequiv = 0;
5645
5646 /* If OLDEQUIV is not a spill register,
5647 don't use it if any other reload wants it. */
5648 if (spill_reg_order[regno] < 0)
5649 {
5650 int k;
5651 for (k = 0; k < n_reloads; k++)
5652 if (reload_reg_rtx[k] != 0 && k != j
5653 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5654 oldequiv))
5655 {
5656 oldequiv = 0;
5657 break;
5658 }
5659 }
5660
5661 /* If it is no cheaper to copy from OLDEQUIV into the
5662 reload register than it would be to move from memory,
5663 don't use it. Likewise, if we need a secondary register
5664 or memory. */
5665
5666 if (oldequiv != 0
5667 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5668 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5669 reload_reg_class[j])
5670 >= MEMORY_MOVE_COST (mode)))
5671 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5672 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5673 mode, oldequiv)
5674 != NO_REGS)
5675 #endif
5676 #ifdef SECONDARY_MEMORY_NEEDED
5677 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5678 REGNO_REG_CLASS (regno),
5679 mode)
5680 #endif
5681 ))
5682 oldequiv = 0;
5683 }
5684
5685 if (oldequiv == 0)
5686 oldequiv = old;
5687 else if (GET_CODE (oldequiv) == REG)
5688 oldequiv_reg = oldequiv;
5689 else if (GET_CODE (oldequiv) == SUBREG)
5690 oldequiv_reg = SUBREG_REG (oldequiv);
5691
5692 /* If we are reloading from a register that was recently stored in
5693 with an output-reload, see if we can prove there was
5694 actually no need to store the old value in it. */
5695
5696 if (optimize && GET_CODE (oldequiv) == REG
5697 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5698 && spill_reg_order[REGNO (oldequiv)] >= 0
5699 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
5700 && find_reg_note (insn, REG_DEAD, reload_in[j])
5701 /* This is unsafe if operand occurs more than once in current
5702 insn. Perhaps some occurrences weren't reloaded. */
5703 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5704 delete_output_reload
5705 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5706
5707 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5708 then load RELOADREG from OLDEQUIV. Note that we cannot use
5709 gen_lowpart_common since it can do the wrong thing when
5710 RELOADREG has a multi-word mode. Note that RELOADREG
5711 must always be a REG here. */
5712
5713 if (GET_MODE (reloadreg) != mode)
5714 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5715 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5716 oldequiv = SUBREG_REG (oldequiv);
5717 if (GET_MODE (oldequiv) != VOIDmode
5718 && mode != GET_MODE (oldequiv))
5719 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5720
5721 /* Switch to the right place to emit the reload insns. */
5722 switch (reload_when_needed[j])
5723 {
5724 case RELOAD_OTHER:
5725 where = &other_input_reload_insns;
5726 break;
5727 case RELOAD_FOR_INPUT:
5728 where = &input_reload_insns[reload_opnum[j]];
5729 break;
5730 case RELOAD_FOR_INPUT_ADDRESS:
5731 where = &input_address_reload_insns[reload_opnum[j]];
5732 break;
5733 case RELOAD_FOR_OUTPUT_ADDRESS:
5734 where = &output_address_reload_insns[reload_opnum[j]];
5735 break;
5736 case RELOAD_FOR_OPERAND_ADDRESS:
5737 where = &operand_reload_insns;
5738 break;
5739 case RELOAD_FOR_OPADDR_ADDR:
5740 where = &other_operand_reload_insns;
5741 break;
5742 case RELOAD_FOR_OTHER_ADDRESS:
5743 where = &other_input_address_reload_insns;
5744 break;
5745 default:
5746 abort ();
5747 }
5748
5749 push_to_sequence (*where);
5750 special = 0;
5751
5752 /* Auto-increment addresses must be reloaded in a special way. */
5753 if (GET_CODE (oldequiv) == POST_INC
5754 || GET_CODE (oldequiv) == POST_DEC
5755 || GET_CODE (oldequiv) == PRE_INC
5756 || GET_CODE (oldequiv) == PRE_DEC)
5757 {
5758 /* We are not going to bother supporting the case where a
5759 incremented register can't be copied directly from
5760 OLDEQUIV since this seems highly unlikely. */
5761 if (reload_secondary_in_reload[j] >= 0)
5762 abort ();
5763 /* Prevent normal processing of this reload. */
5764 special = 1;
5765 /* Output a special code sequence for this case. */
5766 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5767 }
5768
5769 /* If we are reloading a pseudo-register that was set by the previous
5770 insn, see if we can get rid of that pseudo-register entirely
5771 by redirecting the previous insn into our reload register. */
5772
5773 else if (optimize && GET_CODE (old) == REG
5774 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5775 && dead_or_set_p (insn, old)
5776 /* This is unsafe if some other reload
5777 uses the same reg first. */
5778 && reload_reg_free_before_p (REGNO (reloadreg),
5779 reload_opnum[j],
5780 reload_when_needed[j]))
5781 {
5782 rtx temp = PREV_INSN (insn);
5783 while (temp && GET_CODE (temp) == NOTE)
5784 temp = PREV_INSN (temp);
5785 if (temp
5786 && GET_CODE (temp) == INSN
5787 && GET_CODE (PATTERN (temp)) == SET
5788 && SET_DEST (PATTERN (temp)) == old
5789 /* Make sure we can access insn_operand_constraint. */
5790 && asm_noperands (PATTERN (temp)) < 0
5791 /* This is unsafe if prev insn rejects our reload reg. */
5792 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5793 reloadreg)
5794 /* This is unsafe if operand occurs more than once in current
5795 insn. Perhaps some occurrences aren't reloaded. */
5796 && count_occurrences (PATTERN (insn), old) == 1
5797 /* Don't risk splitting a matching pair of operands. */
5798 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5799 {
5800 /* Store into the reload register instead of the pseudo. */
5801 SET_DEST (PATTERN (temp)) = reloadreg;
5802 /* If these are the only uses of the pseudo reg,
5803 pretend for GDB it lives in the reload reg we used. */
5804 if (reg_n_deaths[REGNO (old)] == 1
5805 && reg_n_sets[REGNO (old)] == 1)
5806 {
5807 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5808 alter_reg (REGNO (old), -1);
5809 }
5810 special = 1;
5811 }
5812 }
5813
5814 /* We can't do that, so output an insn to load RELOADREG. */
5815
5816 if (! special)
5817 {
5818 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5819 rtx second_reload_reg = 0;
5820 enum insn_code icode;
5821
5822 /* If we have a secondary reload, pick up the secondary register
5823 and icode, if any. If OLDEQUIV and OLD are different or
5824 if this is an in-out reload, recompute whether or not we
5825 still need a secondary register and what the icode should
5826 be. If we still need a secondary register and the class or
5827 icode is different, go back to reloading from OLD if using
5828 OLDEQUIV means that we got the wrong type of register. We
5829 cannot have different class or icode due to an in-out reload
5830 because we don't make such reloads when both the input and
5831 output need secondary reload registers. */
5832
5833 if (reload_secondary_in_reload[j] >= 0)
5834 {
5835 int secondary_reload = reload_secondary_in_reload[j];
5836 rtx real_oldequiv = oldequiv;
5837 rtx real_old = old;
5838
5839 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5840 and similarly for OLD.
5841 See comments in get_secondary_reload in reload.c. */
5842 if (GET_CODE (oldequiv) == REG
5843 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5844 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5845 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5846
5847 if (GET_CODE (old) == REG
5848 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5849 && reg_equiv_mem[REGNO (old)] != 0)
5850 real_old = reg_equiv_mem[REGNO (old)];
5851
5852 second_reload_reg = reload_reg_rtx[secondary_reload];
5853 icode = reload_secondary_in_icode[j];
5854
5855 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5856 || (reload_in[j] != 0 && reload_out[j] != 0))
5857 {
5858 enum reg_class new_class
5859 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5860 mode, real_oldequiv);
5861
5862 if (new_class == NO_REGS)
5863 second_reload_reg = 0;
5864 else
5865 {
5866 enum insn_code new_icode;
5867 enum machine_mode new_mode;
5868
5869 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5870 REGNO (second_reload_reg)))
5871 oldequiv = old, real_oldequiv = real_old;
5872 else
5873 {
5874 new_icode = reload_in_optab[(int) mode];
5875 if (new_icode != CODE_FOR_nothing
5876 && ((insn_operand_predicate[(int) new_icode][0]
5877 && ! ((*insn_operand_predicate[(int) new_icode][0])
5878 (reloadreg, mode)))
5879 || (insn_operand_predicate[(int) new_icode][1]
5880 && ! ((*insn_operand_predicate[(int) new_icode][1])
5881 (real_oldequiv, mode)))))
5882 new_icode = CODE_FOR_nothing;
5883
5884 if (new_icode == CODE_FOR_nothing)
5885 new_mode = mode;
5886 else
5887 new_mode = insn_operand_mode[(int) new_icode][2];
5888
5889 if (GET_MODE (second_reload_reg) != new_mode)
5890 {
5891 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5892 new_mode))
5893 oldequiv = old, real_oldequiv = real_old;
5894 else
5895 second_reload_reg
5896 = gen_rtx (REG, new_mode,
5897 REGNO (second_reload_reg));
5898 }
5899 }
5900 }
5901 }
5902
5903 /* If we still need a secondary reload register, check
5904 to see if it is being used as a scratch or intermediate
5905 register and generate code appropriately. If we need
5906 a scratch register, use REAL_OLDEQUIV since the form of
5907 the insn may depend on the actual address if it is
5908 a MEM. */
5909
5910 if (second_reload_reg)
5911 {
5912 if (icode != CODE_FOR_nothing)
5913 {
5914 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
5915 second_reload_reg));
5916 special = 1;
5917 }
5918 else
5919 {
5920 /* See if we need a scratch register to load the
5921 intermediate register (a tertiary reload). */
5922 enum insn_code tertiary_icode
5923 = reload_secondary_in_icode[secondary_reload];
5924
5925 if (tertiary_icode != CODE_FOR_nothing)
5926 {
5927 rtx third_reload_reg
5928 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
5929
5930 emit_insn ((GEN_FCN (tertiary_icode)
5931 (second_reload_reg, real_oldequiv,
5932 third_reload_reg)));
5933 }
5934 else
5935 gen_input_reload (second_reload_reg, oldequiv,
5936 reload_opnum[j],
5937 reload_when_needed[j]);
5938
5939 oldequiv = second_reload_reg;
5940 }
5941 }
5942 }
5943 #endif
5944
5945 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
5946 gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
5947 reload_when_needed[j]);
5948
5949 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5950 /* We may have to make a REG_DEAD note for the secondary reload
5951 register in the insns we just made. Find the last insn that
5952 mentioned the register. */
5953 if (! special && second_reload_reg
5954 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5955 {
5956 rtx prev;
5957
5958 for (prev = get_last_insn (); prev;
5959 prev = PREV_INSN (prev))
5960 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
5961 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
5962 PATTERN (prev)))
5963 {
5964 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5965 second_reload_reg,
5966 REG_NOTES (prev));
5967 break;
5968 }
5969 }
5970 #endif
5971 }
5972
5973 /* End this sequence. */
5974 *where = get_insns ();
5975 end_sequence ();
5976 }
5977
5978 /* Add a note saying the input reload reg
5979 dies in this insn, if anyone cares. */
5980 #ifdef PRESERVE_DEATH_INFO_REGNO_P
5981 if (old != 0
5982 && reload_reg_rtx[j] != old
5983 && reload_reg_rtx[j] != 0
5984 && reload_out[j] == 0
5985 && ! reload_inherited[j]
5986 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
5987 {
5988 register rtx reloadreg = reload_reg_rtx[j];
5989
5990 #if 0
5991 /* We can't abort here because we need to support this for sched.c.
5992 It's not terrible to miss a REG_DEAD note, but we should try
5993 to figure out how to do this correctly. */
5994 /* The code below is incorrect for address-only reloads. */
5995 if (reload_when_needed[j] != RELOAD_OTHER
5996 && reload_when_needed[j] != RELOAD_FOR_INPUT)
5997 abort ();
5998 #endif
5999
6000 /* Add a death note to this insn, for an input reload. */
6001
6002 if ((reload_when_needed[j] == RELOAD_OTHER
6003 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6004 && ! dead_or_set_p (insn, reloadreg))
6005 REG_NOTES (insn)
6006 = gen_rtx (EXPR_LIST, REG_DEAD,
6007 reloadreg, REG_NOTES (insn));
6008 }
6009
6010 /* When we inherit a reload, the last marked death of the reload reg
6011 may no longer really be a death. */
6012 if (reload_reg_rtx[j] != 0
6013 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6014 && reload_inherited[j])
6015 {
6016 /* Handle inheriting an output reload.
6017 Remove the death note from the output reload insn. */
6018 if (reload_spill_index[j] >= 0
6019 && GET_CODE (reload_in[j]) == REG
6020 && spill_reg_store[reload_spill_index[j]] != 0
6021 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6022 REG_DEAD, REGNO (reload_reg_rtx[j])))
6023 remove_death (REGNO (reload_reg_rtx[j]),
6024 spill_reg_store[reload_spill_index[j]]);
6025 /* Likewise for input reloads that were inherited. */
6026 else if (reload_spill_index[j] >= 0
6027 && GET_CODE (reload_in[j]) == REG
6028 && spill_reg_store[reload_spill_index[j]] == 0
6029 && reload_inheritance_insn[j] != 0
6030 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6031 REGNO (reload_reg_rtx[j])))
6032 remove_death (REGNO (reload_reg_rtx[j]),
6033 reload_inheritance_insn[j]);
6034 else
6035 {
6036 rtx prev;
6037
6038 /* We got this register from find_equiv_reg.
6039 Search back for its last death note and get rid of it.
6040 But don't search back too far.
6041 Don't go past a place where this reg is set,
6042 since a death note before that remains valid. */
6043 for (prev = PREV_INSN (insn);
6044 prev && GET_CODE (prev) != CODE_LABEL;
6045 prev = PREV_INSN (prev))
6046 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6047 && dead_or_set_p (prev, reload_reg_rtx[j]))
6048 {
6049 if (find_regno_note (prev, REG_DEAD,
6050 REGNO (reload_reg_rtx[j])))
6051 remove_death (REGNO (reload_reg_rtx[j]), prev);
6052 break;
6053 }
6054 }
6055 }
6056
6057 /* We might have used find_equiv_reg above to choose an alternate
6058 place from which to reload. If so, and it died, we need to remove
6059 that death and move it to one of the insns we just made. */
6060
6061 if (oldequiv_reg != 0
6062 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6063 {
6064 rtx prev, prev1;
6065
6066 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6067 prev = PREV_INSN (prev))
6068 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6069 && dead_or_set_p (prev, oldequiv_reg))
6070 {
6071 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6072 {
6073 for (prev1 = this_reload_insn;
6074 prev1; prev1 = PREV_INSN (prev1))
6075 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
6076 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6077 PATTERN (prev1)))
6078 {
6079 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6080 oldequiv_reg,
6081 REG_NOTES (prev1));
6082 break;
6083 }
6084 remove_death (REGNO (oldequiv_reg), prev);
6085 }
6086 break;
6087 }
6088 }
6089 #endif
6090
6091 /* If we are reloading a register that was recently stored in with an
6092 output-reload, see if we can prove there was
6093 actually no need to store the old value in it. */
6094
6095 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6096 && reload_in[j] != 0
6097 && GET_CODE (reload_in[j]) == REG
6098 #if 0
6099 /* There doesn't seem to be any reason to restrict this to pseudos
6100 and doing so loses in the case where we are copying from a
6101 register of the wrong class. */
6102 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6103 #endif
6104 && spill_reg_store[reload_spill_index[j]] != 0
6105 /* This is unsafe if some other reload uses the same reg first. */
6106 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6107 reload_opnum[j], reload_when_needed[j])
6108 && dead_or_set_p (insn, reload_in[j])
6109 /* This is unsafe if operand occurs more than once in current
6110 insn. Perhaps some occurrences weren't reloaded. */
6111 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6112 delete_output_reload (insn, j,
6113 spill_reg_store[reload_spill_index[j]]);
6114
6115 /* Input-reloading is done. Now do output-reloading,
6116 storing the value from the reload-register after the main insn
6117 if reload_out[j] is nonzero.
6118
6119 ??? At some point we need to support handling output reloads of
6120 JUMP_INSNs or insns that set cc0. */
6121 old = reload_out[j];
6122 if (old != 0
6123 && reload_reg_rtx[j] != old
6124 && reload_reg_rtx[j] != 0)
6125 {
6126 register rtx reloadreg = reload_reg_rtx[j];
6127 register rtx second_reloadreg = 0;
6128 rtx note, p;
6129 enum machine_mode mode;
6130 int special = 0;
6131
6132 /* An output operand that dies right away does need a reload,
6133 but need not be copied from it. Show the new location in the
6134 REG_UNUSED note. */
6135 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6136 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6137 {
6138 XEXP (note, 0) = reload_reg_rtx[j];
6139 continue;
6140 }
6141 else if (GET_CODE (old) == SCRATCH)
6142 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6143 but we don't want to make an output reload. */
6144 continue;
6145
6146 #if 0
6147 /* Strip off of OLD any size-increasing SUBREGs such as
6148 (SUBREG:SI foo:QI 0). */
6149
6150 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6151 && (GET_MODE_SIZE (GET_MODE (old))
6152 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6153 old = SUBREG_REG (old);
6154 #endif
6155
6156 /* If is a JUMP_INSN, we can't support output reloads yet. */
6157 if (GET_CODE (insn) == JUMP_INSN)
6158 abort ();
6159
6160 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6161
6162 /* Determine the mode to reload in.
6163 See comments above (for input reloading). */
6164
6165 mode = GET_MODE (old);
6166 if (mode == VOIDmode)
6167 {
6168 /* VOIDmode should never happen for an output. */
6169 if (asm_noperands (PATTERN (insn)) < 0)
6170 /* It's the compiler's fault. */
6171 abort ();
6172 error_for_asm (insn, "output operand is constant in `asm'");
6173 /* Prevent crash--use something we know is valid. */
6174 mode = word_mode;
6175 old = gen_rtx (REG, mode, REGNO (reloadreg));
6176 }
6177
6178 if (GET_MODE (reloadreg) != mode)
6179 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6180
6181 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6182
6183 /* If we need two reload regs, set RELOADREG to the intermediate
6184 one, since it will be stored into OUT. We might need a secondary
6185 register only for an input reload, so check again here. */
6186
6187 if (reload_secondary_out_reload[j] >= 0)
6188 {
6189 rtx real_old = old;
6190
6191 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6192 && reg_equiv_mem[REGNO (old)] != 0)
6193 real_old = reg_equiv_mem[REGNO (old)];
6194
6195 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6196 mode, real_old)
6197 != NO_REGS))
6198 {
6199 second_reloadreg = reloadreg;
6200 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6201
6202 /* See if RELOADREG is to be used as a scratch register
6203 or as an intermediate register. */
6204 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6205 {
6206 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6207 (real_old, second_reloadreg, reloadreg)));
6208 special = 1;
6209 }
6210 else
6211 {
6212 /* See if we need both a scratch and intermediate reload
6213 register. */
6214 int secondary_reload = reload_secondary_out_reload[j];
6215 enum insn_code tertiary_icode
6216 = reload_secondary_out_icode[secondary_reload];
6217 rtx pat;
6218
6219 if (GET_MODE (reloadreg) != mode)
6220 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6221
6222 if (tertiary_icode != CODE_FOR_nothing)
6223 {
6224 rtx third_reloadreg
6225 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6226 pat = (GEN_FCN (tertiary_icode)
6227 (reloadreg, second_reloadreg, third_reloadreg));
6228 }
6229 #ifdef SECONDARY_MEMORY_NEEDED
6230 /* If we need a memory location to do the move, do it that way. */
6231 else if (GET_CODE (reloadreg) == REG
6232 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
6233 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
6234 REGNO_REG_CLASS (REGNO (second_reloadreg)),
6235 GET_MODE (second_reloadreg)))
6236 {
6237 /* Get the memory to use and rewrite both registers
6238 to its mode. */
6239 rtx loc
6240 = get_secondary_mem (reloadreg,
6241 GET_MODE (second_reloadreg),
6242 reload_opnum[j],
6243 reload_when_needed[j]);
6244 rtx tmp_reloadreg;
6245
6246 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
6247 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
6248 REGNO (second_reloadreg));
6249
6250 if (GET_MODE (loc) != GET_MODE (reloadreg))
6251 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6252 REGNO (reloadreg));
6253 else
6254 tmp_reloadreg = reloadreg;
6255
6256 emit_move_insn (loc, second_reloadreg);
6257 pat = gen_move_insn (tmp_reloadreg, loc);
6258 }
6259 #endif
6260 else
6261 pat = gen_move_insn (reloadreg, second_reloadreg);
6262
6263 emit_insn (pat);
6264 }
6265 }
6266 }
6267 #endif
6268
6269 /* Output the last reload insn. */
6270 if (! special)
6271 {
6272 #ifdef SECONDARY_MEMORY_NEEDED
6273 /* If we need a memory location to do the move, do it that way. */
6274 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
6275 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
6276 REGNO_REG_CLASS (REGNO (reloadreg)),
6277 GET_MODE (reloadreg)))
6278 {
6279 /* Get the memory to use and rewrite both registers to
6280 its mode. */
6281 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
6282 reload_opnum[j],
6283 reload_when_needed[j]);
6284
6285 if (GET_MODE (loc) != GET_MODE (reloadreg))
6286 reloadreg = gen_rtx (REG, GET_MODE (loc),
6287 REGNO (reloadreg));
6288
6289 if (GET_MODE (loc) != GET_MODE (old))
6290 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
6291
6292 emit_insn (gen_move_insn (loc, reloadreg));
6293 emit_insn (gen_move_insn (old, loc));
6294 }
6295 else
6296 #endif
6297 emit_insn (gen_move_insn (old, reloadreg));
6298 }
6299
6300 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6301 /* If final will look at death notes for this reg,
6302 put one on the last output-reload insn to use it. Similarly
6303 for any secondary register. */
6304 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6305 for (p = get_last_insn (); p; p = PREV_INSN (p))
6306 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6307 && reg_overlap_mentioned_for_reload_p (reloadreg,
6308 PATTERN (p)))
6309 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6310 reloadreg, REG_NOTES (p));
6311
6312 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6313 if (! special
6314 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6315 for (p = get_last_insn (); p; p = PREV_INSN (p))
6316 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6317 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6318 PATTERN (p)))
6319 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6320 second_reloadreg, REG_NOTES (p));
6321 #endif
6322 #endif
6323 /* Look at all insns we emitted, just to be safe. */
6324 for (p = get_insns (); p; p = NEXT_INSN (p))
6325 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6326 {
6327 /* If this output reload doesn't come from a spill reg,
6328 clear any memory of reloaded copies of the pseudo reg.
6329 If this output reload comes from a spill reg,
6330 reg_has_output_reload will make this do nothing. */
6331 note_stores (PATTERN (p), forget_old_reloads_1);
6332
6333 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
6334 store_insn = p;
6335 }
6336
6337 output_reload_insns[reload_opnum[j]] = get_insns ();
6338 end_sequence ();
6339
6340 }
6341
6342 if (reload_spill_index[j] >= 0)
6343 new_spill_reg_store[reload_spill_index[j]] = store_insn;
6344 }
6345
6346 /* Now write all the insns we made for reloads in the order expected by
6347 the allocation functions. Prior to the insn being reloaded, we write
6348 the following reloads:
6349
6350 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6351
6352 RELOAD_OTHER reloads.
6353
6354 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6355 the RELOAD_FOR_INPUT reload for the operand.
6356
6357 RELOAD_FOR_OPADDR_ADDRS reloads.
6358
6359 RELOAD_FOR_OPERAND_ADDRESS reloads.
6360
6361 After the insn being reloaded, we write the following:
6362
6363 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6364 the RELOAD_FOR_OUTPUT reload for that operand. */
6365
6366 emit_insns_before (other_input_address_reload_insns, before_insn);
6367 emit_insns_before (other_input_reload_insns, before_insn);
6368
6369 for (j = 0; j < reload_n_operands; j++)
6370 {
6371 emit_insns_before (input_address_reload_insns[j], before_insn);
6372 emit_insns_before (input_reload_insns[j], before_insn);
6373 }
6374
6375 emit_insns_before (other_operand_reload_insns, before_insn);
6376 emit_insns_before (operand_reload_insns, before_insn);
6377
6378 for (j = 0; j < reload_n_operands; j++)
6379 {
6380 emit_insns_before (output_address_reload_insns[j], following_insn);
6381 emit_insns_before (output_reload_insns[j], following_insn);
6382 }
6383
6384 /* Move death notes from INSN
6385 to output-operand-address and output reload insns. */
6386 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6387 {
6388 rtx insn1;
6389 /* Loop over those insns, last ones first. */
6390 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6391 insn1 = PREV_INSN (insn1))
6392 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6393 {
6394 rtx source = SET_SRC (PATTERN (insn1));
6395 rtx dest = SET_DEST (PATTERN (insn1));
6396
6397 /* The note we will examine next. */
6398 rtx reg_notes = REG_NOTES (insn);
6399 /* The place that pointed to this note. */
6400 rtx *prev_reg_note = &REG_NOTES (insn);
6401
6402 /* If the note is for something used in the source of this
6403 reload insn, or in the output address, move the note. */
6404 while (reg_notes)
6405 {
6406 rtx next_reg_notes = XEXP (reg_notes, 1);
6407 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6408 && GET_CODE (XEXP (reg_notes, 0)) == REG
6409 && ((GET_CODE (dest) != REG
6410 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6411 dest))
6412 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6413 source)))
6414 {
6415 *prev_reg_note = next_reg_notes;
6416 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6417 REG_NOTES (insn1) = reg_notes;
6418 }
6419 else
6420 prev_reg_note = &XEXP (reg_notes, 1);
6421
6422 reg_notes = next_reg_notes;
6423 }
6424 }
6425 }
6426 #endif
6427
6428 /* For all the spill regs newly reloaded in this instruction,
6429 record what they were reloaded from, so subsequent instructions
6430 can inherit the reloads.
6431
6432 Update spill_reg_store for the reloads of this insn.
6433 Copy the elements that were updated in the loop above. */
6434
6435 for (j = 0; j < n_reloads; j++)
6436 {
6437 register int r = reload_order[j];
6438 register int i = reload_spill_index[r];
6439
6440 /* I is nonneg if this reload used one of the spill regs.
6441 If reload_reg_rtx[r] is 0, this is an optional reload
6442 that we opted to ignore.
6443
6444 Also ignore reloads that don't reach the end of the insn,
6445 since we will eventually see the one that does. */
6446
6447 if (i >= 0 && reload_reg_rtx[r] != 0
6448 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6449 reload_when_needed[r]))
6450 {
6451 /* First, clear out memory of what used to be in this spill reg.
6452 If consecutive registers are used, clear them all. */
6453 int nr
6454 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6455 int k;
6456
6457 for (k = 0; k < nr; k++)
6458 {
6459 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6460 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6461 }
6462
6463 /* Maybe the spill reg contains a copy of reload_out. */
6464 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6465 {
6466 register int nregno = REGNO (reload_out[r]);
6467 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6468 : HARD_REGNO_NREGS (nregno,
6469 GET_MODE (reload_reg_rtx[r])));
6470
6471 spill_reg_store[i] = new_spill_reg_store[i];
6472 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6473
6474 /* If NREGNO is a hard register, it may occupy more than
6475 one register. If it does, say what is in the
6476 rest of the registers assuming that both registers
6477 agree on how many words the object takes. If not,
6478 invalidate the subsequent registers. */
6479
6480 if (nregno < FIRST_PSEUDO_REGISTER)
6481 for (k = 1; k < nnr; k++)
6482 reg_last_reload_reg[nregno + k]
6483 = (nr == nnr ? gen_rtx (REG,
6484 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6485 REGNO (reload_reg_rtx[r]) + k)
6486 : 0);
6487
6488 /* Now do the inverse operation. */
6489 for (k = 0; k < nr; k++)
6490 {
6491 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6492 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6493 : nregno + k);
6494 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6495 }
6496 }
6497
6498 /* Maybe the spill reg contains a copy of reload_in. Only do
6499 something if there will not be an output reload for
6500 the register being reloaded. */
6501 else if (reload_out[r] == 0
6502 && reload_in[r] != 0
6503 && ((GET_CODE (reload_in[r]) == REG
6504 && ! reg_has_output_reload[REGNO (reload_in[r])]
6505 || (GET_CODE (reload_in_reg[r]) == REG
6506 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
6507 {
6508 register int nregno;
6509 int nnr;
6510
6511 if (GET_CODE (reload_in[r]) == REG)
6512 nregno = REGNO (reload_in[r]);
6513 else
6514 nregno = REGNO (reload_in_reg[r]);
6515
6516 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6517 : HARD_REGNO_NREGS (nregno,
6518 GET_MODE (reload_reg_rtx[r])));
6519
6520 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6521
6522 if (nregno < FIRST_PSEUDO_REGISTER)
6523 for (k = 1; k < nnr; k++)
6524 reg_last_reload_reg[nregno + k]
6525 = (nr == nnr ? gen_rtx (REG,
6526 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6527 REGNO (reload_reg_rtx[r]) + k)
6528 : 0);
6529
6530 /* Unless we inherited this reload, show we haven't
6531 recently done a store. */
6532 if (! reload_inherited[r])
6533 spill_reg_store[i] = 0;
6534
6535 for (k = 0; k < nr; k++)
6536 {
6537 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6538 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6539 : nregno + k);
6540 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6541 = insn;
6542 }
6543 }
6544 }
6545
6546 /* The following if-statement was #if 0'd in 1.34 (or before...).
6547 It's reenabled in 1.35 because supposedly nothing else
6548 deals with this problem. */
6549
6550 /* If a register gets output-reloaded from a non-spill register,
6551 that invalidates any previous reloaded copy of it.
6552 But forget_old_reloads_1 won't get to see it, because
6553 it thinks only about the original insn. So invalidate it here. */
6554 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6555 {
6556 register int nregno = REGNO (reload_out[r]);
6557 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
6558
6559 while (num_regs-- > 0)
6560 reg_last_reload_reg[nregno + num_regs] = 0;
6561 }
6562 }
6563 }
6564 \f
/* Emit code to perform an input reload of IN to RELOADREG.  IN is from
   operand OPNUM with reload type TYPE.

   IN may be a simple operand (REG, MEM, constant), a PLUS of two such
   operands (as arises from frame-pointer elimination and address
   reloading), or an address being loaded for an 'o' constraint.

   Returns first insn emitted.  */

rtx
gen_input_reload (reloadreg, in, opnum, type)
     rtx reloadreg;
     rtx in;
     int opnum;
     enum reload_type type;
{
  /* Remember where we were before emitting anything, both so we can
     delete back to here when a tentative insn fails to match, and so
     we can return the first insn actually emitted.  */
  rtx last = get_last_insn ();

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (GET_CODE (XEXP (in, 0)) == REG
	  || GET_CODE (XEXP (in, 0)) == MEM)
      && (GET_CODE (XEXP (in, 1)) == REG
	  || CONSTANT_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == MEM))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `insn_extract' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      int code;

      /* Substitute any reloads already made for the pieces of IN.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (GET_CODE (XEXP (in, 1)) == REG
	  && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx (PLUS, GET_MODE (in), op0, op1);

      /* Emit the add tentatively, then check whether it was recognized
	 and satisfies its constraints.  */
      insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      /* The single-insn add didn't work; remove it.  */
      delete_insns_since (last);

      /* If that failed, we must use a conservative two-insn sequence.
	 use move to copy constant, MEM, or pseudo register to the reload
	 register since "move" will be able to handle an arbitrary operand,
	 unlike add which can't, in general.  Then add the registers.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
	  || (GET_CODE (op1) == REG
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
	tem = op0, op0 = op1, op1 = tem;

      emit_insn (gen_move_insn (reloadreg, op0));

      /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = reloadreg;

      insn = emit_insn (gen_add2_insn (reloadreg, op1));

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      /* The move/add pair didn't work either; remove it and try the
	 operands in the other order as a last resort.  */
      delete_insns_since (last);

      emit_insn (gen_move_insn (reloadreg, op1));
      emit_insn (gen_add2_insn (reloadreg, op0));
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
				       REGNO_REG_CLASS (REGNO (reloadreg)),
				       GET_MODE (reloadreg)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);

      if (GET_MODE (loc) != GET_MODE (reloadreg))
	reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx (REG, GET_MODE (loc), REGNO (in));

      /* Copy IN into RELOADREG by going through the memory location.  */
      emit_insn (gen_move_insn (loc, in));
      emit_insn (gen_move_insn (reloadreg, loc));
    }
#endif

  /* If IN is a simple operand, use gen_move_insn.  */
  else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
    emit_insn (gen_move_insn (reloadreg, in));

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (reloadreg, in));
#endif

  /* Otherwise, just write (set RELOADREG IN) and hope for the best.  */
  else
    emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
6748 \f
/* Delete a previously made output-reload
   whose result we now believe is not needed.
   First we double-check.

   INSN is the insn now being processed.
   OUTPUT_RELOAD_INSN is the insn of the output reload.
   J is the reload-number for this insn.

   The store is deleted only if the reloaded pseudo is provably not
   referenced between the store and INSN; otherwise we return without
   changing anything.  */

static void
delete_output_reload (insn, j, output_reload_insn)
     rtx insn;
     int j;
     rtx output_reload_insn;
{
  register rtx i1;

  /* Get the raw pseudo-register referred to.  */

  rtx reg = reload_in[j];
  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and no jumps or labels intervene, then the value can get
     here through the reload reg alone.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
	return;
      if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
	  && reg_mentioned_p (reg, PATTERN (i1)))
	return;
    }

  /* Some other pass has marked this pseudo's store as required
     (e.g. its value may be live past what we can see); keep it.  */
  if (cannot_omit_stores[REGNO (reg)])
    return;

  /* If this insn will store in the pseudo again,
     the previous store can be removed.  */
  if (reload_out[j] == reload_in[j])
    delete_insn (output_reload_insn);

  /* See if the pseudo reg has been completely replaced
     with reload regs.  If so, delete the store insn
     and forget we had a stack slot for the pseudo.  */
  else if (reg_n_deaths[REGNO (reg)] == 1
	   && reg_basic_block[REGNO (reg)] >= 0
	   && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here
	 and the beginning of the current basic block.
	 (We also know that the last use before INSN was
	 the output reload we are thinking of deleting, but never mind that.)
	 Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	  if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    /* Some other ref remains;
	       we can't do anything.  */
	    return;
	}

      /* Delete the now-dead stores into this pseudo.  Scan the same
	 backward range again, stopping at the same label/jump boundary.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    delete_insn (i2);
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	}

      /* For the debugging info,
	 say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
      alter_reg (REGNO (reg), -1);
    }
}
6844 \f
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.

   We first try to increment the location in place; if that insn is not
   recognized, we fall back to doing the arithmetic in RELOADREG.  */

static void
inc_for_reload (reloadreg, value, inc_amount)
     rtx reloadreg;
     rtx value;
     int inc_amount;
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were non-zero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (GET_CODE (incloc) == REG)
    reg_last_reload_reg[REGNO (incloc)] = 0;

  /* A decrement is just an increment by the negated amount.  */
  if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
    inc_amount = - inc_amount;

  inc = GEN_INT (inc_amount);

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post)
    emit_insn (gen_move_insn (reloadreg, incloc));

  /* See if we can directly increment INCLOC.  Use a method similar to that
     in gen_input_reload.  */

  last = get_last_insn ();
  add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
				 gen_rtx (PLUS, GET_MODE (incloc),
					  incloc, inc)));

  code = recog_memoized (add_insn);
  if (code >= 0)
    {
      insn_extract (add_insn);
      if (constrain_operands (code, 1))
	{
	  /* If this is a pre-increment and we have incremented the value
	     where it lives, copy the incremented value to RELOADREG to
	     be used as an address.  */

	  if (! post)
	    emit_insn (gen_move_insn (reloadreg, incloc));

	  return;
	}
    }

  /* The direct increment wasn't recognized; remove the tentative insn.  */
  delete_insns_since (last);

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      emit_insn (gen_move_insn (reloadreg, incloc));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied INCLOC to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
    }

  return;
}
6940 \f
6941 /* Return 1 if we are certain that the constraint-string STRING allows
6942 the hard register REG. Return 0 if we can't be sure of this. */
6943
6944 static int
6945 constraint_accepts_reg_p (string, reg)
6946 char *string;
6947 rtx reg;
6948 {
6949 int value = 0;
6950 int regno = true_regnum (reg);
6951 int c;
6952
6953 /* Initialize for first alternative. */
6954 value = 0;
6955 /* Check that each alternative contains `g' or `r'. */
6956 while (1)
6957 switch (c = *string++)
6958 {
6959 case 0:
6960 /* If an alternative lacks `g' or `r', we lose. */
6961 return value;
6962 case ',':
6963 /* If an alternative lacks `g' or `r', we lose. */
6964 if (value == 0)
6965 return 0;
6966 /* Initialize for next alternative. */
6967 value = 0;
6968 break;
6969 case 'g':
6970 case 'r':
6971 /* Any general reg wins for this alternative. */
6972 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
6973 value = 1;
6974 break;
6975 default:
6976 /* Any reg in specified class wins for this alternative. */
6977 {
6978 enum reg_class class = REG_CLASS_FROM_LETTER (c);
6979
6980 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
6981 value = 1;
6982 }
6983 }
6984 }
6985 \f
6986 /* Return the number of places FIND appears within X, but don't count
6987 an occurrence if some SET_DEST is FIND. */
6988
6989 static int
6990 count_occurrences (x, find)
6991 register rtx x, find;
6992 {
6993 register int i, j;
6994 register enum rtx_code code;
6995 register char *format_ptr;
6996 int count;
6997
6998 if (x == find)
6999 return 1;
7000 if (x == 0)
7001 return 0;
7002
7003 code = GET_CODE (x);
7004
7005 switch (code)
7006 {
7007 case REG:
7008 case QUEUED:
7009 case CONST_INT:
7010 case CONST_DOUBLE:
7011 case SYMBOL_REF:
7012 case CODE_LABEL:
7013 case PC:
7014 case CC0:
7015 return 0;
7016
7017 case SET:
7018 if (SET_DEST (x) == find)
7019 return count_occurrences (SET_SRC (x), find);
7020 break;
7021 }
7022
7023 format_ptr = GET_RTX_FORMAT (code);
7024 count = 0;
7025
7026 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7027 {
7028 switch (*format_ptr++)
7029 {
7030 case 'e':
7031 count += count_occurrences (XEXP (x, i), find);
7032 break;
7033
7034 case 'E':
7035 if (XVEC (x, i) != NULL)
7036 {
7037 for (j = 0; j < XVECLEN (x, i); j++)
7038 count += count_occurrences (XVECEXP (x, i, j), find);
7039 }
7040 break;
7041 }
7042 }
7043 return count;
7044 }