/* File: gcc/reload1.c (from gcc.git).  A truncated ChangeLog fragment
   referring to eliminate_regs_in_insn preceded this file in the extract. */
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include <stdio.h>
22 #include "config.h"
23 #include "rtl.h"
24 #include "obstack.h"
25 #include "insn-config.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
28 #include "flags.h"
29 #include "expr.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "reload.h"
33 #include "recog.h"
34 #include "basic-block.h"
35 #include "output.h"
36
37 /* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
69
70
/* Default cost of moving a value between two registers, used when the
   target machine description does not define REGISTER_MOVE_COST.  */
#ifndef REGISTER_MOVE_COST
#define REGISTER_MOVE_COST(x, y) 2
#endif

/* Default cost of moving a value between a register and memory, used
   when the target machine description does not define MEMORY_MOVE_COST.  */
#ifndef MEMORY_MOVE_COST
#define MEMORY_MOVE_COST(x) 4
#endif
78 \f
/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which reg N has been reloaded (perhaps for a previous insn).  */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N.  */
static char *reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn.  */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for.  */
rtx *reg_equiv_constant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer).  Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem.  */
rtx *reg_equiv_memory_loc;

/* Element N is the address of stack slot to which pseudo reg N is equivalent.
   This is used when the address is not valid as a memory address
   (because its displacement is too big for the machine.)  */
rtx *reg_equiv_address;

/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot.  */
rtx *reg_equiv_mem;

/* Widest width in which each pseudo reg is referred to (via subreg).  */
static int *reg_max_ref_width;

/* Element N is the insn that initialized reg N from its equivalent
   constant or memory slot.  */
static rtx *reg_equiv_init;

/* During reload_as_needed, element N contains the last pseudo regno
   reloaded into the Nth reload register.  This vector is in parallel
   with spill_regs.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur.  */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   the Nth reload register was last used.  This vector is in parallel
   with spill_regs, and its contents are significant only when
   reg_reloaded_contents is significant.  */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Number of spill-regs so far; number of valid elements of spill_regs.  */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store.  */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];

/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs.  */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];

/* This reg set indicates registers that may not be used for retrying global
   allocation.  The registers that may not be used include all spill registers
   and the frame pointer (if we are using one).  */
HARD_REG_SET forbidden_regs;

/* This reg set indicates registers that are not good for spill registers.
   They will not be used to complete groups of spill registers.  This includes
   all fixed registers, registers that may be eliminated, and, if
   SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.

   (spill_reg_order prevents these registers from being used to start a
   group.)  */
static HARD_REG_SET bad_spill_regs;

/* Describes order of use of registers for reloading
   of spilled pseudo-registers.  `spills' is the number of
   elements that are actually valid; new ones are added at the end.  */
static short spill_regs[FIRST_PSEUDO_REGISTER];

/* Describes order of preference for putting regs into spill_regs.
   Contains the numbers of all the hard regs, in order most preferred first.
   This order is different for each function.
   It is set up by order_regs_for_reload.
   Empty elements at the end contain -1.  */
static short potential_reload_regs[FIRST_PSEUDO_REGISTER];

/* 1 for a hard register that appears explicitly in the rtl
   (for example, function value registers, special registers
   used by insns, structure value pointer registers).  */
static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];

/* Indicates if a register was counted against the need for
   groups.  0 means it can count against max_nongroup instead.  */
static HARD_REG_SET counted_for_groups;

/* Indicates if a register was counted against the need for
   non-groups.  0 means it can become part of a new group.
   During choose_reload_regs, 1 here means don't use this reg
   as part of a group, even if it seems to be otherwise ok.  */
static HARD_REG_SET counted_for_nongroups;

/* Indexed by pseudo reg number N,
   says may not delete stores into the real (memory) home of pseudo N.
   This is set if we already substituted a memory equivalent in some uses,
   which happens when we have to eliminate the fp from it.  */
static char *cannot_omit_stores;

/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register.  */

static char spill_indirect_levels;

/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above.  */

char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */

char double_reg_address_ok;

/* Record the stack slot for each spilled hard register.  */

static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot.  */

static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Indexed by register class and basic block number, nonzero if there is
   any need for a spill register of that class in that basic block.
   The pointer is 0 if we did stupid allocation and don't know
   the structure of basic blocks.  */

char *basic_block_needs[N_REG_CLASSES];

/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */

int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */

int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects.  They provide a
   place to pass a scratch register.  */

enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];

/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */

struct obstack reload_obstack;
char *reload_firstobj;

/* Obstack chunk hooks: allocate with xmalloc, release with free.  */
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* List of labels that must never be deleted.  */
extern rtx forced_labels;
266 \f
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.  If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

static struct elim_table
{
  int from;			/* Register number to be eliminated.  */
  int to;			/* Register number used as replacement.  */
  int initial_offset;		/* Initial difference between values.  */
  int can_eliminate;		/* Non-zero if this elimination can be done.  */
  int can_eliminate_previous;	/* Value of CAN_ELIMINATE in previous scan over
				   insns made by reload.  */
  int offset;			/* Current offset between the two regs.  */
  int max_offset;		/* Maximum offset between the two regs.  */
  int previous_offset;		/* Offset at end of previous insn.  */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated.  */
  rtx to_rtx;			/* REG rtx for the replacement.  */
} reg_eliminate[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

/* Number of entries in the elimination table above.  */
#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])

/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If non-zero, we use a new copy of each
   replacement result in any insns encountered.  */
static int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate.  */
static int num_eliminable;

/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the number of the label.
   The first table is an array of flags that records whether we have yet
   encountered a label and the second table is an array of arrays, one
   entry in the latter array for each elimination.  */

static char *offsets_known_at;
static int (*offsets_at)[NUM_ELIMINABLE_REGS];

/* Number of labels in the current function.  */

static int num_labels;

/* Pairs a hard register number with a use count; compared by
   hard_reg_use_compare when ordering spill candidates.  */
struct hard_reg_n_uses { int regno; int uses; };
327 \f
/* Forward declarations for the static functions defined in this file.
   PROTO expands to a real prototype only on compilers that support them.  */
static int possible_group_p		PROTO((int, int *));
static void count_possible_groups	PROTO((int *, enum machine_mode *,
					       int *));
static int modes_equiv_for_class_p	PROTO((enum machine_mode,
					       enum machine_mode,
					       enum reg_class));
static void spill_failure		PROTO((rtx));
static int new_spill_reg		PROTO((int, int, int *, int *, int,
					       FILE *));
static void delete_dead_insn		PROTO((rtx));
static void alter_reg			PROTO((int, int));
static void mark_scratch_live		PROTO((rtx));
static void set_label_offsets		PROTO((rtx, rtx, int));
static int eliminate_regs_in_insn	PROTO((rtx, int));
static void mark_not_eliminable		PROTO((rtx, rtx));
static int spill_hard_reg		PROTO((int, int, FILE *, int));
static void scan_paradoxical_subregs	PROTO((rtx));
static int hard_reg_use_compare		PROTO((struct hard_reg_n_uses *,
					       struct hard_reg_n_uses *));
static void order_regs_for_reload	PROTO((void));
static int compare_spill_regs		PROTO((short *, short *));
static void reload_as_needed		PROTO((rtx, int));
static void forget_old_reloads_1	PROTO((rtx, rtx));
static int reload_reg_class_lower	PROTO((short *, short *));
static void mark_reload_reg_in_use	PROTO((int, int, enum reload_type,
					       enum machine_mode));
static void clear_reload_reg_in_use	PROTO((int, int, enum reload_type,
					       enum machine_mode));
static int reload_reg_free_p		PROTO((int, int, enum reload_type));
static int reload_reg_free_before_p	PROTO((int, int, enum reload_type));
static int reload_reg_reaches_end_p	PROTO((int, int, enum reload_type));
static int reloads_conflict		PROTO((int, int));
static int allocate_reload_reg		PROTO((int, rtx, int, int));
static void choose_reload_regs		PROTO((rtx, rtx));
static void merge_assigned_reloads	PROTO((rtx));
static void emit_reload_insns		PROTO((rtx));
static void delete_output_reload	PROTO((rtx, int, rtx));
static void inc_for_reload		PROTO((rtx, rtx, int));
static int constraint_accepts_reg_p	PROTO((char *, rtx));
static int count_occurrences		PROTO((rtx, rtx));
368 \f
369 /* Initialize the reload pass once per compilation. */
370
371 void
372 init_reload ()
373 {
374 register int i;
375
376 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
377 Set spill_indirect_levels to the number of levels such addressing is
378 permitted, zero if it is not permitted at all. */
379
380 register rtx tem
381 = gen_rtx (MEM, Pmode,
382 gen_rtx (PLUS, Pmode,
383 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
384 GEN_INT (4)));
385 spill_indirect_levels = 0;
386
387 while (memory_address_p (QImode, tem))
388 {
389 spill_indirect_levels++;
390 tem = gen_rtx (MEM, Pmode, tem);
391 }
392
393 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
394
395 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
396 indirect_symref_ok = memory_address_p (QImode, tem);
397
398 /* See if reg+reg is a valid (and offsettable) address. */
399
400 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
401 {
402 tem = gen_rtx (PLUS, Pmode,
403 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
404 gen_rtx (REG, Pmode, i));
405 /* This way, we make sure that reg+reg is an offsettable address. */
406 tem = plus_constant (tem, 4);
407
408 if (memory_address_p (QImode, tem))
409 {
410 double_reg_address_ok = 1;
411 break;
412 }
413 }
414
415 /* Initialize obstack for our rtl allocation. */
416 gcc_obstack_init (&reload_obstack);
417 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
418 }
419
420 /* Main entry point for the reload pass.
421
422 FIRST is the first insn of the function being compiled.
423
424 GLOBAL nonzero means we were called from global_alloc
425 and should attempt to reallocate any pseudoregs that we
426 displace from hard regs we will use for reloads.
427 If GLOBAL is zero, we do not have enough information to do that,
428 so any pseudo reg that is spilled must go to the stack.
429
430 DUMPFILE is the global-reg debugging dump file stream, or 0.
431 If it is nonzero, messages are written to it to describe
432 which registers are seized as reload regs, which pseudo regs
433 are spilled from them, and where the pseudo regs are reallocated to.
434
435 Return value is nonzero if reload failed
436 and we must not do any more for this function. */
437
438 int
439 reload (first, global, dumpfile)
440 rtx first;
441 int global;
442 FILE *dumpfile;
443 {
444 register int class;
445 register int i, j, k;
446 register rtx insn;
447 register struct elim_table *ep;
448
449 int something_changed;
450 int something_needs_reloads;
451 int something_needs_elimination;
452 int new_basic_block_needs;
453 enum reg_class caller_save_spill_class = NO_REGS;
454 int caller_save_group_size = 1;
455
456 /* Nonzero means we couldn't get enough spill regs. */
457 int failure = 0;
458
459 /* The basic block number currently being processed for INSN. */
460 int this_block;
461
462 /* Make sure even insns with volatile mem refs are recognizable. */
463 init_recog ();
464
465 /* Enable find_equiv_reg to distinguish insns made by reload. */
466 reload_first_uid = get_max_uid ();
467
468 for (i = 0; i < N_REG_CLASSES; i++)
469 basic_block_needs[i] = 0;
470
471 #ifdef SECONDARY_MEMORY_NEEDED
472 /* Initialize the secondary memory table. */
473 clear_secondary_mem ();
474 #endif
475
476 /* Remember which hard regs appear explicitly
477 before we merge into `regs_ever_live' the ones in which
478 pseudo regs have been allocated. */
479 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
480
481 /* We don't have a stack slot for any spill reg yet. */
482 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
483 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
484
485 /* Initialize the save area information for caller-save, in case some
486 are needed. */
487 init_save_areas ();
488
489 /* Compute which hard registers are now in use
490 as homes for pseudo registers.
491 This is done here rather than (eg) in global_alloc
492 because this point is reached even if not optimizing. */
493
494 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
495 mark_home_live (i);
496
497 for (i = 0; i < scratch_list_length; i++)
498 if (scratch_list[i])
499 mark_scratch_live (scratch_list[i]);
500
501 /* Make sure that the last insn in the chain
502 is not something that needs reloading. */
503 emit_note (NULL_PTR, NOTE_INSN_DELETED);
504
505 /* Find all the pseudo registers that didn't get hard regs
506 but do have known equivalent constants or memory slots.
507 These include parameters (known equivalent to parameter slots)
508 and cse'd or loop-moved constant memory addresses.
509
510 Record constant equivalents in reg_equiv_constant
511 so they will be substituted by find_reloads.
512 Record memory equivalents in reg_mem_equiv so they can
513 be substituted eventually by altering the REG-rtx's. */
514
515 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
516 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
517 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
518 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
519 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
520 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
521 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
522 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
523 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
524 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
525 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
526 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
527 cannot_omit_stores = (char *) alloca (max_regno);
528 bzero (cannot_omit_stores, max_regno);
529
530 #ifdef SMALL_REGISTER_CLASSES
531 CLEAR_HARD_REG_SET (forbidden_regs);
532 #endif
533
534 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
535 Also find all paradoxical subregs and find largest such for each pseudo.
536 On machines with small register classes, record hard registers that
537 are used for user variables. These can never be used for spills. */
538
539 for (insn = first; insn; insn = NEXT_INSN (insn))
540 {
541 rtx set = single_set (insn);
542
543 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
544 {
545 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
546 if (note
547 #ifdef LEGITIMATE_PIC_OPERAND_P
548 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
549 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
550 #endif
551 )
552 {
553 rtx x = XEXP (note, 0);
554 i = REGNO (SET_DEST (set));
555 if (i > LAST_VIRTUAL_REGISTER)
556 {
557 if (GET_CODE (x) == MEM)
558 reg_equiv_memory_loc[i] = x;
559 else if (CONSTANT_P (x))
560 {
561 if (LEGITIMATE_CONSTANT_P (x))
562 reg_equiv_constant[i] = x;
563 else
564 reg_equiv_memory_loc[i]
565 = force_const_mem (GET_MODE (SET_DEST (set)), x);
566 }
567 else
568 continue;
569
570 /* If this register is being made equivalent to a MEM
571 and the MEM is not SET_SRC, the equivalencing insn
572 is one with the MEM as a SET_DEST and it occurs later.
573 So don't mark this insn now. */
574 if (GET_CODE (x) != MEM
575 || rtx_equal_p (SET_SRC (set), x))
576 reg_equiv_init[i] = insn;
577 }
578 }
579 }
580
581 /* If this insn is setting a MEM from a register equivalent to it,
582 this is the equivalencing insn. */
583 else if (set && GET_CODE (SET_DEST (set)) == MEM
584 && GET_CODE (SET_SRC (set)) == REG
585 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
586 && rtx_equal_p (SET_DEST (set),
587 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
588 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
589
590 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
591 scan_paradoxical_subregs (PATTERN (insn));
592 }
593
594 /* Does this function require a frame pointer? */
595
596 frame_pointer_needed = (! flag_omit_frame_pointer
597 #ifdef EXIT_IGNORE_STACK
598 /* ?? If EXIT_IGNORE_STACK is set, we will not save
599 and restore sp for alloca. So we can't eliminate
600 the frame pointer in that case. At some point,
601 we should improve this by emitting the
602 sp-adjusting insns for this case. */
603 || (current_function_calls_alloca
604 && EXIT_IGNORE_STACK)
605 #endif
606 || FRAME_POINTER_REQUIRED);
607
608 num_eliminable = 0;
609
610 /* Initialize the table of registers to eliminate. The way we do this
611 depends on how the eliminable registers were defined. */
612 #ifdef ELIMINABLE_REGS
613 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
614 {
615 ep->can_eliminate = ep->can_eliminate_previous
616 = (CAN_ELIMINATE (ep->from, ep->to)
617 && (ep->from != HARD_FRAME_POINTER_REGNUM
618 || ! frame_pointer_needed));
619 }
620 #else
621 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
622 = ! frame_pointer_needed;
623 #endif
624
625 /* Count the number of eliminable registers and build the FROM and TO
626 REG rtx's. Note that code in gen_rtx will cause, e.g.,
627 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
628 We depend on this. */
629 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
630 {
631 num_eliminable += ep->can_eliminate;
632 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
633 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
634 }
635
636 num_labels = max_label_num () - get_first_label_num ();
637
638 /* Allocate the tables used to store offset information at labels. */
639 offsets_known_at = (char *) alloca (num_labels);
640 offsets_at
641 = (int (*)[NUM_ELIMINABLE_REGS])
642 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
643
644 offsets_known_at -= get_first_label_num ();
645 offsets_at -= get_first_label_num ();
646
647 /* Alter each pseudo-reg rtx to contain its hard reg number.
648 Assign stack slots to the pseudos that lack hard regs or equivalents.
649 Do not touch virtual registers. */
650
651 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
652 alter_reg (i, -1);
653
654 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
655 because the stack size may be a part of the offset computation for
656 register elimination. */
657 assign_stack_local (BLKmode, 0, 0);
658
659 /* If we have some registers we think can be eliminated, scan all insns to
660 see if there is an insn that sets one of these registers to something
661 other than itself plus a constant. If so, the register cannot be
662 eliminated. Doing this scan here eliminates an extra pass through the
663 main reload loop in the most common case where register elimination
664 cannot be done. */
665 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
666 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
667 || GET_CODE (insn) == CALL_INSN)
668 note_stores (PATTERN (insn), mark_not_eliminable);
669
670 #ifndef REGISTER_CONSTRAINTS
671 /* If all the pseudo regs have hard regs,
672 except for those that are never referenced,
673 we know that no reloads are needed. */
674 /* But that is not true if there are register constraints, since
675 in that case some pseudos might be in the wrong kind of hard reg. */
676
677 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
678 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
679 break;
680
681 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
682 return;
683 #endif
684
685 /* Compute the order of preference for hard registers to spill.
686 Store them by decreasing preference in potential_reload_regs. */
687
688 order_regs_for_reload ();
689
690 /* So far, no hard regs have been spilled. */
691 n_spills = 0;
692 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
693 spill_reg_order[i] = -1;
694
695 /* On most machines, we can't use any register explicitly used in the
696 rtl as a spill register. But on some, we have to. Those will have
697 taken care to keep the life of hard regs as short as possible. */
698
699 #ifndef SMALL_REGISTER_CLASSES
700 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
701 #endif
702
703 /* Spill any hard regs that we know we can't eliminate. */
704 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
705 if (! ep->can_eliminate)
706 {
707 spill_hard_reg (ep->from, global, dumpfile, 1);
708 regs_ever_live[ep->from] = 1;
709 }
710
711 if (global)
712 for (i = 0; i < N_REG_CLASSES; i++)
713 {
714 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
715 bzero (basic_block_needs[i], n_basic_blocks);
716 }
717
718 /* From now on, we need to emit any moves without making new pseudos. */
719 reload_in_progress = 1;
720
721 /* This loop scans the entire function each go-round
722 and repeats until one repetition spills no additional hard regs. */
723
724 /* This flag is set when a pseudo reg is spilled,
725 to require another pass. Note that getting an additional reload
726 reg does not necessarily imply any pseudo reg was spilled;
727 sometimes we find a reload reg that no pseudo reg was allocated in. */
728 something_changed = 1;
729 /* This flag is set if there are any insns that require reloading. */
730 something_needs_reloads = 0;
731 /* This flag is set if there are any insns that require register
732 eliminations. */
733 something_needs_elimination = 0;
734 while (something_changed)
735 {
736 rtx after_call = 0;
737
738 /* For each class, number of reload regs needed in that class.
739 This is the maximum over all insns of the needs in that class
740 of the individual insn. */
741 int max_needs[N_REG_CLASSES];
742 /* For each class, size of group of consecutive regs
743 that is needed for the reloads of this class. */
744 int group_size[N_REG_CLASSES];
745 /* For each class, max number of consecutive groups needed.
746 (Each group contains group_size[CLASS] consecutive registers.) */
747 int max_groups[N_REG_CLASSES];
748 /* For each class, max number needed of regs that don't belong
749 to any of the groups. */
750 int max_nongroups[N_REG_CLASSES];
751 /* For each class, the machine mode which requires consecutive
752 groups of regs of that class.
753 If two different modes ever require groups of one class,
754 they must be the same size and equally restrictive for that class,
755 otherwise we can't handle the complexity. */
756 enum machine_mode group_mode[N_REG_CLASSES];
757 /* Record the insn where each maximum need is first found. */
758 rtx max_needs_insn[N_REG_CLASSES];
759 rtx max_groups_insn[N_REG_CLASSES];
760 rtx max_nongroups_insn[N_REG_CLASSES];
761 rtx x;
762 int starting_frame_size = get_frame_size ();
763 static char *reg_class_names[] = REG_CLASS_NAMES;
764
765 something_changed = 0;
766 bzero ((char *) max_needs, sizeof max_needs);
767 bzero ((char *) max_groups, sizeof max_groups);
768 bzero ((char *) max_nongroups, sizeof max_nongroups);
769 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
770 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
771 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
772 bzero ((char *) group_size, sizeof group_size);
773 for (i = 0; i < N_REG_CLASSES; i++)
774 group_mode[i] = VOIDmode;
775
776 /* Keep track of which basic blocks are needing the reloads. */
777 this_block = 0;
778
779 /* Remember whether any element of basic_block_needs
780 changes from 0 to 1 in this pass. */
781 new_basic_block_needs = 0;
782
783 /* Reset all offsets on eliminable registers to their initial values. */
784 #ifdef ELIMINABLE_REGS
785 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
786 {
787 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
788 ep->previous_offset = ep->offset
789 = ep->max_offset = ep->initial_offset;
790 }
791 #else
792 #ifdef INITIAL_FRAME_POINTER_OFFSET
793 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
794 #else
795 if (!FRAME_POINTER_REQUIRED)
796 abort ();
797 reg_eliminate[0].initial_offset = 0;
798 #endif
799 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
800 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
801 #endif
802
803 num_not_at_initial_offset = 0;
804
805 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
806
807 /* Set a known offset for each forced label to be at the initial offset
808 of each elimination. We do this because we assume that all
809 computed jumps occur from a location where each elimination is
810 at its initial offset. */
811
812 for (x = forced_labels; x; x = XEXP (x, 1))
813 if (XEXP (x, 0))
814 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
815
816 /* For each pseudo register that has an equivalent location defined,
817 try to eliminate any eliminable registers (such as the frame pointer)
818 assuming initial offsets for the replacement register, which
819 is the normal case.
820
821 If the resulting location is directly addressable, substitute
822 the MEM we just got directly for the old REG.
823
824 If it is not addressable but is a constant or the sum of a hard reg
825 and constant, it is probably not addressable because the constant is
826 out of range, in that case record the address; we will generate
827 hairy code to compute the address in a register each time it is
828 needed. Similarly if it is a hard register, but one that is not
829 valid as an address register.
830
831 If the location is not addressable, but does not have one of the
832 above forms, assign a stack slot. We have to do this to avoid the
833 potential of producing lots of reloads if, e.g., a location involves
834 a pseudo that didn't get a hard register and has an equivalent memory
835 location that also involves a pseudo that didn't get a hard register.
836
837 Perhaps at some point we will improve reload_when_needed handling
838 so this problem goes away. But that's very hairy. */
839
840 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
841 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
842 {
843 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
844
845 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
846 XEXP (x, 0)))
847 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
848 else if (CONSTANT_P (XEXP (x, 0))
849 || (GET_CODE (XEXP (x, 0)) == REG
850 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
851 || (GET_CODE (XEXP (x, 0)) == PLUS
852 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
853 && (REGNO (XEXP (XEXP (x, 0), 0))
854 < FIRST_PSEUDO_REGISTER)
855 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
856 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
857 else
858 {
859 /* Make a new stack slot. Then indicate that something
860 changed so we go back and recompute offsets for
861 eliminable registers because the allocation of memory
862 below might change some offset. reg_equiv_{mem,address}
863 will be set up for this pseudo on the next pass around
864 the loop. */
865 reg_equiv_memory_loc[i] = 0;
866 reg_equiv_init[i] = 0;
867 alter_reg (i, -1);
868 something_changed = 1;
869 }
870 }
871
872 /* If we allocated another pseudo to the stack, redo elimination
873 bookkeeping. */
874 if (something_changed)
875 continue;
876
877 /* If caller-saves needs a group, initialize the group to include
878 the size and mode required for caller-saves. */
879
880 if (caller_save_group_size > 1)
881 {
882 group_mode[(int) caller_save_spill_class] = Pmode;
883 group_size[(int) caller_save_spill_class] = caller_save_group_size;
884 }
885
886 /* Compute the most additional registers needed by any instruction.
887 Collect information separately for each class of regs. */
888
889 for (insn = first; insn; insn = NEXT_INSN (insn))
890 {
891 if (global && this_block + 1 < n_basic_blocks
892 && insn == basic_block_head[this_block+1])
893 ++this_block;
894
895 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
896 might include REG_LABEL), we need to see what effects this
897 has on the known offsets at labels. */
898
899 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
900 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
901 && REG_NOTES (insn) != 0))
902 set_label_offsets (insn, insn, 0);
903
904 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
905 {
906 /* Nonzero means don't use a reload reg that overlaps
907 the place where a function value can be returned. */
908 rtx avoid_return_reg = 0;
909
910 rtx old_body = PATTERN (insn);
911 int old_code = INSN_CODE (insn);
912 rtx old_notes = REG_NOTES (insn);
913 int did_elimination = 0;
914
915 /* To compute the number of reload registers of each class
 916 	       needed for an insn, we must simulate what choose_reload_regs
917 can do. We do this by splitting an insn into an "input" and
918 an "output" part. RELOAD_OTHER reloads are used in both.
919 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
920 which must be live over the entire input section of reloads,
921 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
922 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
923 inputs.
924
925 The registers needed for output are RELOAD_OTHER and
926 RELOAD_FOR_OUTPUT, which are live for the entire output
927 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
928 reloads for each operand.
929
930 The total number of registers needed is the maximum of the
931 inputs and outputs. */
932
933 struct needs
934 {
935 /* [0] is normal, [1] is nongroup. */
936 int regs[2][N_REG_CLASSES];
937 int groups[N_REG_CLASSES];
938 };
939
940 /* Each `struct needs' corresponds to one RELOAD_... type. */
941 struct {
942 struct needs other;
943 struct needs input;
944 struct needs output;
945 struct needs insn;
946 struct needs other_addr;
947 struct needs op_addr;
948 struct needs op_addr_reload;
949 struct needs in_addr[MAX_RECOG_OPERANDS];
950 struct needs out_addr[MAX_RECOG_OPERANDS];
951 } insn_needs;
952
953 /* If needed, eliminate any eliminable registers. */
954 if (num_eliminable)
955 did_elimination = eliminate_regs_in_insn (insn, 0);
956
957 #ifdef SMALL_REGISTER_CLASSES
958 /* Set avoid_return_reg if this is an insn
959 that might use the value of a function call. */
960 if (GET_CODE (insn) == CALL_INSN)
961 {
962 if (GET_CODE (PATTERN (insn)) == SET)
963 after_call = SET_DEST (PATTERN (insn));
964 else if (GET_CODE (PATTERN (insn)) == PARALLEL
965 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
966 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
967 else
968 after_call = 0;
969 }
970 else if (after_call != 0
971 && !(GET_CODE (PATTERN (insn)) == SET
972 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
973 {
974 if (reg_referenced_p (after_call, PATTERN (insn)))
975 avoid_return_reg = after_call;
976 after_call = 0;
977 }
978 #endif /* SMALL_REGISTER_CLASSES */
979
980 /* Analyze the instruction. */
981 find_reloads (insn, 0, spill_indirect_levels, global,
982 spill_reg_order);
983
984 /* Remember for later shortcuts which insns had any reloads or
985 register eliminations.
986
987 One might think that it would be worthwhile to mark insns
988 that need register replacements but not reloads, but this is
989 not safe because find_reloads may do some manipulation of
990 the insn (such as swapping commutative operands), which would
991 be lost when we restore the old pattern after register
992 replacement. So the actions of find_reloads must be redone in
993 subsequent passes or in reload_as_needed.
994
995 However, it is safe to mark insns that need reloads
996 but not register replacement. */
997
998 PUT_MODE (insn, (did_elimination ? QImode
999 : n_reloads ? HImode
1000 : GET_MODE (insn) == DImode ? DImode
1001 : VOIDmode));
1002
1003 /* Discard any register replacements done. */
1004 if (did_elimination)
1005 {
1006 obstack_free (&reload_obstack, reload_firstobj);
1007 PATTERN (insn) = old_body;
1008 INSN_CODE (insn) = old_code;
1009 REG_NOTES (insn) = old_notes;
1010 something_needs_elimination = 1;
1011 }
1012
1013 /* If this insn has no reloads, we need not do anything except
1014 in the case of a CALL_INSN when we have caller-saves and
1015 caller-save needs reloads. */
1016
1017 if (n_reloads == 0
1018 && ! (GET_CODE (insn) == CALL_INSN
1019 && caller_save_spill_class != NO_REGS))
1020 continue;
1021
1022 something_needs_reloads = 1;
1023 bzero ((char *) &insn_needs, sizeof insn_needs);
1024
1025 /* Count each reload once in every class
1026 containing the reload's own class. */
1027
1028 for (i = 0; i < n_reloads; i++)
1029 {
1030 register enum reg_class *p;
1031 enum reg_class class = reload_reg_class[i];
1032 int size;
1033 enum machine_mode mode;
1034 int nongroup_need;
1035 struct needs *this_needs;
1036
1037 /* Don't count the dummy reloads, for which one of the
1038 regs mentioned in the insn can be used for reloading.
1039 Don't count optional reloads.
1040 Don't count reloads that got combined with others. */
1041 if (reload_reg_rtx[i] != 0
1042 || reload_optional[i] != 0
1043 || (reload_out[i] == 0 && reload_in[i] == 0
1044 && ! reload_secondary_p[i]))
1045 continue;
1046
1047 /* Show that a reload register of this class is needed
1048 in this basic block. We do not use insn_needs and
1049 insn_groups because they are overly conservative for
1050 this purpose. */
1051 if (global && ! basic_block_needs[(int) class][this_block])
1052 {
1053 basic_block_needs[(int) class][this_block] = 1;
1054 new_basic_block_needs = 1;
1055 }
1056
1057
1058 mode = reload_inmode[i];
1059 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1060 mode = reload_outmode[i];
1061 size = CLASS_MAX_NREGS (class, mode);
1062
1063 /* If this class doesn't want a group, determine if we have
1064 a nongroup need or a regular need. We have a nongroup
1065 need if this reload conflicts with a group reload whose
1066 class intersects with this reload's class. */
1067
1068 nongroup_need = 0;
1069 if (size == 1)
1070 for (j = 0; j < n_reloads; j++)
1071 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1072 (GET_MODE_SIZE (reload_outmode[j])
1073 > GET_MODE_SIZE (reload_inmode[j]))
1074 ? reload_outmode[j]
1075 : reload_inmode[j])
1076 > 1)
1077 && (!reload_optional[j])
1078 && (reload_in[j] != 0 || reload_out[j] != 0
1079 || reload_secondary_p[j])
1080 && reloads_conflict (i, j)
1081 && reg_classes_intersect_p (class,
1082 reload_reg_class[j]))
1083 {
1084 nongroup_need = 1;
1085 break;
1086 }
1087
1088 /* Decide which time-of-use to count this reload for. */
1089 switch (reload_when_needed[i])
1090 {
1091 case RELOAD_OTHER:
1092 this_needs = &insn_needs.other;
1093 break;
1094 case RELOAD_FOR_INPUT:
1095 this_needs = &insn_needs.input;
1096 break;
1097 case RELOAD_FOR_OUTPUT:
1098 this_needs = &insn_needs.output;
1099 break;
1100 case RELOAD_FOR_INSN:
1101 this_needs = &insn_needs.insn;
1102 break;
1103 case RELOAD_FOR_OTHER_ADDRESS:
1104 this_needs = &insn_needs.other_addr;
1105 break;
1106 case RELOAD_FOR_INPUT_ADDRESS:
1107 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1108 break;
1109 case RELOAD_FOR_OUTPUT_ADDRESS:
1110 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1111 break;
1112 case RELOAD_FOR_OPERAND_ADDRESS:
1113 this_needs = &insn_needs.op_addr;
1114 break;
1115 case RELOAD_FOR_OPADDR_ADDR:
1116 this_needs = &insn_needs.op_addr_reload;
1117 break;
1118 }
1119
1120 if (size > 1)
1121 {
1122 enum machine_mode other_mode, allocate_mode;
1123
1124 /* Count number of groups needed separately from
1125 number of individual regs needed. */
1126 this_needs->groups[(int) class]++;
1127 p = reg_class_superclasses[(int) class];
1128 while (*p != LIM_REG_CLASSES)
1129 this_needs->groups[(int) *p++]++;
1130
1131 /* Record size and mode of a group of this class. */
1132 /* If more than one size group is needed,
1133 make all groups the largest needed size. */
1134 if (group_size[(int) class] < size)
1135 {
1136 other_mode = group_mode[(int) class];
1137 allocate_mode = mode;
1138
1139 group_size[(int) class] = size;
1140 group_mode[(int) class] = mode;
1141 }
1142 else
1143 {
1144 other_mode = mode;
1145 allocate_mode = group_mode[(int) class];
1146 }
1147
1148 /* Crash if two dissimilar machine modes both need
1149 groups of consecutive regs of the same class. */
1150
1151 if (other_mode != VOIDmode && other_mode != allocate_mode
1152 && ! modes_equiv_for_class_p (allocate_mode,
1153 other_mode, class))
1154 abort ();
1155 }
1156 else if (size == 1)
1157 {
1158 this_needs->regs[nongroup_need][(int) class] += 1;
1159 p = reg_class_superclasses[(int) class];
1160 while (*p != LIM_REG_CLASSES)
1161 this_needs->regs[nongroup_need][(int) *p++] += 1;
1162 }
1163 else
1164 abort ();
1165 }
1166
1167 /* All reloads have been counted for this insn;
1168 now merge the various times of use.
1169 This sets insn_needs, etc., to the maximum total number
1170 of registers needed at any point in this insn. */
1171
1172 for (i = 0; i < N_REG_CLASSES; i++)
1173 {
1174 int in_max, out_max;
1175
1176 /* Compute normal and nongroup needs. */
1177 for (j = 0; j <= 1; j++)
1178 {
1179 for (in_max = 0, out_max = 0, k = 0;
1180 k < reload_n_operands; k++)
1181 {
1182 in_max
1183 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1184 out_max
1185 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1186 }
1187
1188 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1189 and operand addresses but not things used to reload
1190 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1191 don't conflict with things needed to reload inputs or
1192 outputs. */
1193
1194 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1195 insn_needs.op_addr_reload.regs[j][i]),
1196 in_max);
1197
1198 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1199
1200 insn_needs.input.regs[j][i]
1201 = MAX (insn_needs.input.regs[j][i]
1202 + insn_needs.op_addr.regs[j][i]
1203 + insn_needs.insn.regs[j][i],
1204 in_max + insn_needs.input.regs[j][i]);
1205
1206 insn_needs.output.regs[j][i] += out_max;
1207 insn_needs.other.regs[j][i]
1208 += MAX (MAX (insn_needs.input.regs[j][i],
1209 insn_needs.output.regs[j][i]),
1210 insn_needs.other_addr.regs[j][i]);
1211
1212 }
1213
1214 /* Now compute group needs. */
1215 for (in_max = 0, out_max = 0, j = 0;
1216 j < reload_n_operands; j++)
1217 {
1218 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1219 out_max
1220 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1221 }
1222
1223 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1224 insn_needs.op_addr_reload.groups[i]),
1225 in_max);
1226 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1227
1228 insn_needs.input.groups[i]
1229 = MAX (insn_needs.input.groups[i]
1230 + insn_needs.op_addr.groups[i]
1231 + insn_needs.insn.groups[i],
1232 in_max + insn_needs.input.groups[i]);
1233
1234 insn_needs.output.groups[i] += out_max;
1235 insn_needs.other.groups[i]
1236 += MAX (MAX (insn_needs.input.groups[i],
1237 insn_needs.output.groups[i]),
1238 insn_needs.other_addr.groups[i]);
1239 }
1240
1241 /* If this is a CALL_INSN and caller-saves will need
1242 a spill register, act as if the spill register is
1243 needed for this insn. However, the spill register
1244 can be used by any reload of this insn, so we only
1245 need do something if no need for that class has
1246 been recorded.
1247
1248 The assumption that every CALL_INSN will trigger a
1249 caller-save is highly conservative, however, the number
1250 of cases where caller-saves will need a spill register but
1251 a block containing a CALL_INSN won't need a spill register
1252 of that class should be quite rare.
1253
1254 If a group is needed, the size and mode of the group will
1255 have been set up at the beginning of this loop. */
1256
1257 if (GET_CODE (insn) == CALL_INSN
1258 && caller_save_spill_class != NO_REGS)
1259 {
1260 /* See if this register would conflict with any reload
1261 that needs a group. */
1262 int nongroup_need = 0;
1263 int *caller_save_needs;
1264
1265 for (j = 0; j < n_reloads; j++)
1266 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1267 (GET_MODE_SIZE (reload_outmode[j])
1268 > GET_MODE_SIZE (reload_inmode[j]))
1269 ? reload_outmode[j]
1270 : reload_inmode[j])
1271 > 1)
1272 && reg_classes_intersect_p (caller_save_spill_class,
1273 reload_reg_class[j]))
1274 {
1275 nongroup_need = 1;
1276 break;
1277 }
1278
1279 caller_save_needs
1280 = (caller_save_group_size > 1
1281 ? insn_needs.other.groups
1282 : insn_needs.other.regs[nongroup_need]);
1283
1284 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1285 {
1286 register enum reg_class *p
1287 = reg_class_superclasses[(int) caller_save_spill_class];
1288
1289 caller_save_needs[(int) caller_save_spill_class]++;
1290
1291 while (*p != LIM_REG_CLASSES)
1292 caller_save_needs[(int) *p++] += 1;
1293 }
1294
1295 /* Show that this basic block will need a register of
1296 this class. */
1297
1298 if (global
1299 && ! (basic_block_needs[(int) caller_save_spill_class]
1300 [this_block]))
1301 {
1302 basic_block_needs[(int) caller_save_spill_class]
1303 [this_block] = 1;
1304 new_basic_block_needs = 1;
1305 }
1306 }
1307
1308 #ifdef SMALL_REGISTER_CLASSES
1309 /* If this insn stores the value of a function call,
1310 and that value is in a register that has been spilled,
1311 and if the insn needs a reload in a class
1312 that might use that register as the reload register,
 1313 	     then add an extra need in that class.
1314 This makes sure we have a register available that does
1315 not overlap the return value. */
1316
1317 if (avoid_return_reg)
1318 {
1319 int regno = REGNO (avoid_return_reg);
1320 int nregs
1321 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1322 int r;
1323 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1324
1325 /* First compute the "basic needs", which counts a
1326 need only in the smallest class in which it
1327 is required. */
1328
1329 bcopy (insn_needs.other.regs[0], basic_needs,
1330 sizeof basic_needs);
1331 bcopy (insn_needs.other.groups, basic_groups,
1332 sizeof basic_groups);
1333
1334 for (i = 0; i < N_REG_CLASSES; i++)
1335 {
1336 enum reg_class *p;
1337
1338 if (basic_needs[i] >= 0)
1339 for (p = reg_class_superclasses[i];
1340 *p != LIM_REG_CLASSES; p++)
1341 basic_needs[(int) *p] -= basic_needs[i];
1342
1343 if (basic_groups[i] >= 0)
1344 for (p = reg_class_superclasses[i];
1345 *p != LIM_REG_CLASSES; p++)
1346 basic_groups[(int) *p] -= basic_groups[i];
1347 }
1348
1349 /* Now count extra regs if there might be a conflict with
1350 the return value register.
1351
1352 ??? This is not quite correct because we don't properly
1353 handle the case of groups, but if we end up doing
1354 something wrong, it either will end up not mattering or
1355 we will abort elsewhere. */
1356
1357 for (r = regno; r < regno + nregs; r++)
1358 if (spill_reg_order[r] >= 0)
1359 for (i = 0; i < N_REG_CLASSES; i++)
1360 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1361 {
1362 if (basic_needs[i] > 0 || basic_groups[i] > 0)
1363 {
1364 enum reg_class *p;
1365
1366 insn_needs.other.regs[0][i]++;
1367 p = reg_class_superclasses[i];
1368 while (*p != LIM_REG_CLASSES)
1369 insn_needs.other.regs[0][(int) *p++]++;
1370 }
1371 }
1372 }
1373 #endif /* SMALL_REGISTER_CLASSES */
1374
1375 /* For each class, collect maximum need of any insn. */
1376
1377 for (i = 0; i < N_REG_CLASSES; i++)
1378 {
1379 if (max_needs[i] < insn_needs.other.regs[0][i])
1380 {
1381 max_needs[i] = insn_needs.other.regs[0][i];
1382 max_needs_insn[i] = insn;
1383 }
1384 if (max_groups[i] < insn_needs.other.groups[i])
1385 {
1386 max_groups[i] = insn_needs.other.groups[i];
1387 max_groups_insn[i] = insn;
1388 }
1389 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1390 {
1391 max_nongroups[i] = insn_needs.other.regs[1][i];
1392 max_nongroups_insn[i] = insn;
1393 }
1394 }
1395 }
1396 /* Note that there is a continue statement above. */
1397 }
1398
1399 /* If we allocated any new memory locations, make another pass
1400 since it might have changed elimination offsets. */
1401 if (starting_frame_size != get_frame_size ())
1402 something_changed = 1;
1403
1404 if (dumpfile)
1405 for (i = 0; i < N_REG_CLASSES; i++)
1406 {
1407 if (max_needs[i] > 0)
1408 fprintf (dumpfile,
1409 ";; Need %d reg%s of class %s (for insn %d).\n",
1410 max_needs[i], max_needs[i] == 1 ? "" : "s",
1411 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1412 if (max_nongroups[i] > 0)
1413 fprintf (dumpfile,
1414 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1415 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1416 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1417 if (max_groups[i] > 0)
1418 fprintf (dumpfile,
1419 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1420 max_groups[i], max_groups[i] == 1 ? "" : "s",
1421 mode_name[(int) group_mode[i]],
1422 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1423 }
1424
1425 /* If we have caller-saves, set up the save areas and see if caller-save
1426 will need a spill register. */
1427
1428 if (caller_save_needed
1429 && ! setup_save_areas (&something_changed)
1430 && caller_save_spill_class == NO_REGS)
1431 {
1432 /* The class we will need depends on whether the machine
1433 supports the sum of two registers for an address; see
1434 find_address_reloads for details. */
1435
1436 caller_save_spill_class
1437 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1438 caller_save_group_size
1439 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1440 something_changed = 1;
1441 }
1442
1443 /* See if anything that happened changes which eliminations are valid.
1444 For example, on the Sparc, whether or not the frame pointer can
1445 be eliminated can depend on what registers have been used. We need
1446 not check some conditions again (such as flag_omit_frame_pointer)
1447 since they can't have changed. */
1448
1449 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1450 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1451 #ifdef ELIMINABLE_REGS
1452 || ! CAN_ELIMINATE (ep->from, ep->to)
1453 #endif
1454 )
1455 ep->can_eliminate = 0;
1456
1457 /* Look for the case where we have discovered that we can't replace
1458 register A with register B and that means that we will now be
1459 trying to replace register A with register C. This means we can
1460 no longer replace register C with register B and we need to disable
1461 such an elimination, if it exists. This occurs often with A == ap,
1462 B == sp, and C == fp. */
1463
1464 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1465 {
1466 struct elim_table *op;
1467 register int new_to = -1;
1468
1469 if (! ep->can_eliminate && ep->can_eliminate_previous)
1470 {
1471 /* Find the current elimination for ep->from, if there is a
1472 new one. */
1473 for (op = reg_eliminate;
1474 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1475 if (op->from == ep->from && op->can_eliminate)
1476 {
1477 new_to = op->to;
1478 break;
1479 }
1480
1481 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1482 disable it. */
1483 for (op = reg_eliminate;
1484 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1485 if (op->from == new_to && op->to == ep->to)
1486 op->can_eliminate = 0;
1487 }
1488 }
1489
1490 /* See if any registers that we thought we could eliminate the previous
1491 time are no longer eliminable. If so, something has changed and we
1492 must spill the register. Also, recompute the number of eliminable
1493 registers and see if the frame pointer is needed; it is if there is
1494 no elimination of the frame pointer that we can perform. */
1495
1496 frame_pointer_needed = 1;
1497 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1498 {
1499 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1500 && ep->to != HARD_FRAME_POINTER_REGNUM)
1501 frame_pointer_needed = 0;
1502
1503 if (! ep->can_eliminate && ep->can_eliminate_previous)
1504 {
1505 ep->can_eliminate_previous = 0;
1506 spill_hard_reg (ep->from, global, dumpfile, 1);
1507 regs_ever_live[ep->from] = 1;
1508 something_changed = 1;
1509 num_eliminable--;
1510 }
1511 }
1512
1513 /* If all needs are met, we win. */
1514
1515 for (i = 0; i < N_REG_CLASSES; i++)
1516 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1517 break;
1518 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1519 break;
1520
1521 /* Not all needs are met; must spill some hard regs. */
1522
1523 /* Put all registers spilled so far back in potential_reload_regs, but
1524 put them at the front, since we've already spilled most of the
 1525 	 psuedos in them (we might have left some pseudos unspilled if they
1526 were in a block that didn't need any spill registers of a conflicting
 1527 	 class).  We used to try to mark off the need for those registers,
1528 but doing so properly is very complex and reallocating them is the
1529 simpler approach. First, "pack" potential_reload_regs by pushing
1530 any nonnegative entries towards the end. That will leave room
1531 for the registers we already spilled.
1532
1533 Also, undo the marking of the spill registers from the last time
 1534 	 around in FORBIDDEN_REGS since we will probably be allocating
1535 them again below.
1536
1537 ??? It is theoretically possible that we might end up not using one
1538 of our previously-spilled registers in this allocation, even though
1539 they are at the head of the list. It's not clear what to do about
1540 this, but it was no better before, when we marked off the needs met
1541 by the previously-spilled registers. With the current code, globals
1542 can be allocated into these registers, but locals cannot. */
1543
1544 if (n_spills)
1545 {
1546 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1547 if (potential_reload_regs[i] != -1)
1548 potential_reload_regs[j--] = potential_reload_regs[i];
1549
1550 for (i = 0; i < n_spills; i++)
1551 {
1552 potential_reload_regs[i] = spill_regs[i];
1553 spill_reg_order[spill_regs[i]] = -1;
1554 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1555 }
1556
1557 n_spills = 0;
1558 }
1559
1560 /* Now find more reload regs to satisfy the remaining need
1561 Do it by ascending class number, since otherwise a reg
1562 might be spilled for a big class and might fail to count
1563 for a smaller class even though it belongs to that class.
1564
1565 Count spilled regs in `spills', and add entries to
1566 `spill_regs' and `spill_reg_order'.
1567
1568 ??? Note there is a problem here.
1569 When there is a need for a group in a high-numbered class,
1570 and also need for non-group regs that come from a lower class,
1571 the non-group regs are chosen first. If there aren't many regs,
1572 they might leave no room for a group.
1573
1574 This was happening on the 386. To fix it, we added the code
1575 that calls possible_group_p, so that the lower class won't
1576 break up the last possible group.
1577
1578 Really fixing the problem would require changes above
1579 in counting the regs already spilled, and in choose_reload_regs.
1580 It might be hard to avoid introducing bugs there. */
1581
1582 CLEAR_HARD_REG_SET (counted_for_groups);
1583 CLEAR_HARD_REG_SET (counted_for_nongroups);
1584
1585 for (class = 0; class < N_REG_CLASSES; class++)
1586 {
1587 /* First get the groups of registers.
1588 If we got single registers first, we might fragment
1589 possible groups. */
1590 while (max_groups[class] > 0)
1591 {
1592 /* If any single spilled regs happen to form groups,
1593 count them now. Maybe we don't really need
1594 to spill another group. */
1595 count_possible_groups (group_size, group_mode, max_groups);
1596
1597 if (max_groups[class] <= 0)
1598 break;
1599
1600 /* Groups of size 2 (the only groups used on most machines)
1601 are treated specially. */
1602 if (group_size[class] == 2)
1603 {
1604 /* First, look for a register that will complete a group. */
1605 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1606 {
1607 int other;
1608
1609 j = potential_reload_regs[i];
1610 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1611 &&
1612 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1613 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1614 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1615 && HARD_REGNO_MODE_OK (other, group_mode[class])
1616 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1617 other)
1618 /* We don't want one part of another group.
1619 We could get "two groups" that overlap! */
1620 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1621 ||
1622 (j < FIRST_PSEUDO_REGISTER - 1
1623 && (other = j + 1, spill_reg_order[other] >= 0)
1624 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1625 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1626 && HARD_REGNO_MODE_OK (j, group_mode[class])
1627 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1628 other)
1629 && ! TEST_HARD_REG_BIT (counted_for_groups,
1630 other))))
1631 {
1632 register enum reg_class *p;
1633
1634 /* We have found one that will complete a group,
1635 so count off one group as provided. */
1636 max_groups[class]--;
1637 p = reg_class_superclasses[class];
1638 while (*p != LIM_REG_CLASSES)
1639 max_groups[(int) *p++]--;
1640
1641 /* Indicate both these regs are part of a group. */
1642 SET_HARD_REG_BIT (counted_for_groups, j);
1643 SET_HARD_REG_BIT (counted_for_groups, other);
1644 break;
1645 }
1646 }
1647 /* We can't complete a group, so start one. */
1648 #ifdef SMALL_REGISTER_CLASSES
1649 /* Look for a pair neither of which is explicitly used. */
1650 if (i == FIRST_PSEUDO_REGISTER)
1651 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1652 {
1653 int k;
1654 j = potential_reload_regs[i];
1655 /* Verify that J+1 is a potential reload reg. */
1656 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1657 if (potential_reload_regs[k] == j + 1)
1658 break;
1659 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1660 && k < FIRST_PSEUDO_REGISTER
1661 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1662 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1663 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1664 && HARD_REGNO_MODE_OK (j, group_mode[class])
1665 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1666 j + 1)
1667 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1668 /* Reject J at this stage
1669 if J+1 was explicitly used. */
1670 && ! regs_explicitly_used[j + 1])
1671 break;
1672 }
1673 #endif
1674 /* Now try any group at all
1675 whose registers are not in bad_spill_regs. */
1676 if (i == FIRST_PSEUDO_REGISTER)
1677 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1678 {
1679 int k;
1680 j = potential_reload_regs[i];
1681 /* Verify that J+1 is a potential reload reg. */
1682 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1683 if (potential_reload_regs[k] == j + 1)
1684 break;
1685 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1686 && k < FIRST_PSEUDO_REGISTER
1687 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1688 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1689 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1690 && HARD_REGNO_MODE_OK (j, group_mode[class])
1691 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1692 j + 1)
1693 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1694 break;
1695 }
1696
1697 /* I should be the index in potential_reload_regs
1698 of the new reload reg we have found. */
1699
1700 if (i >= FIRST_PSEUDO_REGISTER)
1701 {
1702 /* There are no groups left to spill. */
1703 spill_failure (max_groups_insn[class]);
1704 failure = 1;
1705 goto failed;
1706 }
1707 else
1708 something_changed
1709 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1710 global, dumpfile);
1711 }
1712 else
1713 {
1714 /* For groups of more than 2 registers,
1715 look for a sufficient sequence of unspilled registers,
1716 and spill them all at once. */
1717 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1718 {
1719 int k;
1720
1721 j = potential_reload_regs[i];
1722 if (j >= 0
1723 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1724 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1725 {
1726 /* Check each reg in the sequence. */
1727 for (k = 0; k < group_size[class]; k++)
1728 if (! (spill_reg_order[j + k] < 0
1729 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1730 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1731 break;
1732 /* We got a full sequence, so spill them all. */
1733 if (k == group_size[class])
1734 {
1735 register enum reg_class *p;
1736 for (k = 0; k < group_size[class]; k++)
1737 {
1738 int idx;
1739 SET_HARD_REG_BIT (counted_for_groups, j + k);
1740 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1741 if (potential_reload_regs[idx] == j + k)
1742 break;
1743 something_changed
1744 |= new_spill_reg (idx, class,
1745 max_needs, NULL_PTR,
1746 global, dumpfile);
1747 }
1748
1749 /* We have found one that will complete a group,
1750 so count off one group as provided. */
1751 max_groups[class]--;
1752 p = reg_class_superclasses[class];
1753 while (*p != LIM_REG_CLASSES)
1754 max_groups[(int) *p++]--;
1755
1756 break;
1757 }
1758 }
1759 }
1760 /* We couldn't find any registers for this reload.
1761 Avoid going into an infinite loop. */
1762 if (i >= FIRST_PSEUDO_REGISTER)
1763 {
1764 /* There are no groups left. */
1765 spill_failure (max_groups_insn[class]);
1766 failure = 1;
1767 goto failed;
1768 }
1769 }
1770 }
1771
1772 /* Now similarly satisfy all need for single registers. */
1773
1774 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1775 {
1776 #ifdef SMALL_REGISTER_CLASSES
1777 /* This should be right for all machines, but only the 386
1778 is known to need it, so this conditional plays safe.
1779 ??? For 2.5, try making this unconditional. */
1780 /* If we spilled enough regs, but they weren't counted
1781 against the non-group need, see if we can count them now.
1782 If so, we can avoid some actual spilling. */
1783 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1784 for (i = 0; i < n_spills; i++)
1785 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1786 spill_regs[i])
1787 && !TEST_HARD_REG_BIT (counted_for_groups,
1788 spill_regs[i])
1789 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1790 spill_regs[i])
1791 && max_nongroups[class] > 0)
1792 {
1793 register enum reg_class *p;
1794
1795 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1796 max_nongroups[class]--;
1797 p = reg_class_superclasses[class];
1798 while (*p != LIM_REG_CLASSES)
1799 max_nongroups[(int) *p++]--;
1800 }
1801 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1802 break;
1803 #endif
1804
1805 /* Consider the potential reload regs that aren't
1806 yet in use as reload regs, in order of preference.
1807 Find the most preferred one that's in this class. */
1808
1809 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1810 if (potential_reload_regs[i] >= 0
1811 && TEST_HARD_REG_BIT (reg_class_contents[class],
1812 potential_reload_regs[i])
1813 /* If this reg will not be available for groups,
1814 pick one that does not foreclose possible groups.
1815 This is a kludge, and not very general,
1816 but it should be sufficient to make the 386 work,
1817 and the problem should not occur on machines with
1818 more registers. */
1819 && (max_nongroups[class] == 0
1820 || possible_group_p (potential_reload_regs[i], max_groups)))
1821 break;
1822
1823 /* If we couldn't get a register, try to get one even if we
1824 might foreclose possible groups. This may cause problems
1825 later, but that's better than aborting now, since it is
1826 possible that we will, in fact, be able to form the needed
1827 group even with this allocation. */
1828
1829 if (i >= FIRST_PSEUDO_REGISTER
1830 && (asm_noperands (max_needs[class] > 0
1831 ? max_needs_insn[class]
1832 : max_nongroups_insn[class])
1833 < 0))
1834 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1835 if (potential_reload_regs[i] >= 0
1836 && TEST_HARD_REG_BIT (reg_class_contents[class],
1837 potential_reload_regs[i]))
1838 break;
1839
1840 /* I should be the index in potential_reload_regs
1841 of the new reload reg we have found. */
1842
1843 if (i >= FIRST_PSEUDO_REGISTER)
1844 {
1845 /* There are no possible registers left to spill. */
1846 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1847 : max_nongroups_insn[class]);
1848 failure = 1;
1849 goto failed;
1850 }
1851 else
1852 something_changed
1853 |= new_spill_reg (i, class, max_needs, max_nongroups,
1854 global, dumpfile);
1855 }
1856 }
1857 }
1858
1859 /* If global-alloc was run, notify it of any register eliminations we have
1860 done. */
1861 if (global)
1862 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1863 if (ep->can_eliminate)
1864 mark_elimination (ep->from, ep->to);
1865
1866 /* Insert code to save and restore call-clobbered hard regs
1867 around calls. Tell if what mode to use so that we will process
1868 those insns in reload_as_needed if we have to. */
1869
1870 if (caller_save_needed)
1871 save_call_clobbered_regs (num_eliminable ? QImode
1872 : caller_save_spill_class != NO_REGS ? HImode
1873 : VOIDmode);
1874
1875 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1876 If that insn didn't set the register (i.e., it copied the register to
1877 memory), just delete that insn instead of the equivalencing insn plus
1878 anything now dead. If we call delete_dead_insn on that insn, we may
1879 delete the insn that actually sets the register if the register die
1880 there and that is incorrect. */
1881
1882 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1883 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1884 && GET_CODE (reg_equiv_init[i]) != NOTE)
1885 {
1886 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1887 delete_dead_insn (reg_equiv_init[i]);
1888 else
1889 {
1890 PUT_CODE (reg_equiv_init[i], NOTE);
1891 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1892 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1893 }
1894 }
1895
1896 /* Use the reload registers where necessary
1897 by generating move instructions to move the must-be-register
1898 values into or out of the reload registers. */
1899
1900 if (something_needs_reloads || something_needs_elimination
1901 || (caller_save_needed && num_eliminable)
1902 || caller_save_spill_class != NO_REGS)
1903 reload_as_needed (first, global);
1904
1905 /* If we were able to eliminate the frame pointer, show that it is no
1906    longer live at the start of any basic block.  If it is live by
1907 virtue of being in a pseudo, that pseudo will be marked live
1908 and hence the frame pointer will be known to be live via that
1909 pseudo. */
1910
1911 if (! frame_pointer_needed)
1912 for (i = 0; i < n_basic_blocks; i++)
1913 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1914 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1915 % REGSET_ELT_BITS));
1916
1917 /* Come here (with failure set nonzero) if we can't get enough spill regs
1918 and we decide not to abort about it. */
1919 failed:
1920
1921 reload_in_progress = 0;
1922
1923 /* Now eliminate all pseudo regs by modifying them into
1924 their equivalent memory references.
1925 The REG-rtx's for the pseudos are modified in place,
1926 so all insns that used to refer to them now refer to memory.
1927
1928 For a reg that has a reg_equiv_address, all those insns
1929 were changed by reloading so that no insns refer to it any longer;
1930 but the DECL_RTL of a variable decl may refer to it,
1931 and if so this causes the debugging info to mention the variable. */
1932
1933 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1934 {
1935 rtx addr = 0;
1936 int in_struct = 0;
1937 if (reg_equiv_mem[i])
1938 {
1939 addr = XEXP (reg_equiv_mem[i], 0);
1940 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1941 }
1942 if (reg_equiv_address[i])
1943 addr = reg_equiv_address[i];
1944 if (addr)
1945 {
1946 if (reg_renumber[i] < 0)
1947 {
1948 rtx reg = regno_reg_rtx[i];
1949 XEXP (reg, 0) = addr;
1950 REG_USERVAR_P (reg) = 0;
1951 MEM_IN_STRUCT_P (reg) = in_struct;
1952 PUT_CODE (reg, MEM);
1953 }
1954 else if (reg_equiv_mem[i])
1955 XEXP (reg_equiv_mem[i], 0) = addr;
1956 }
1957 }
1958
1959 #ifdef PRESERVE_DEATH_INFO_REGNO_P
1960 /* Make a pass over all the insns and remove death notes for things that
1961 are no longer registers or no longer die in the insn (e.g., an input
1962 and output pseudo being tied). */
1963
1964 for (insn = first; insn; insn = NEXT_INSN (insn))
1965 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1966 {
1967 rtx note, next;
1968
1969 for (note = REG_NOTES (insn); note; note = next)
1970 {
1971 next = XEXP (note, 1);
1972 if (REG_NOTE_KIND (note) == REG_DEAD
1973 && (GET_CODE (XEXP (note, 0)) != REG
1974 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1975 remove_note (insn, note);
1976 }
1977 }
1978 #endif
1979
1980 /* Indicate that we no longer have known memory locations or constants. */
1981 reg_equiv_constant = 0;
1982 reg_equiv_memory_loc = 0;
1983
1984 if (scratch_list)
1985 free (scratch_list);
1986 scratch_list = 0;
1987 if (scratch_block)
1988 free (scratch_block);
1989 scratch_block = 0;
1990
1991 return failure;
1992 }
1993 \f
1994 /* Nonzero if, after spilling reg REGNO for non-groups,
1995 it will still be possible to find a group if we still need one. */
1996
1997 static int
1998 possible_group_p (regno, max_groups)
1999 int regno;
2000 int *max_groups;
2001 {
2002 int i;
2003 int class = (int) NO_REGS;
2004
2005 for (i = 0; i < (int) N_REG_CLASSES; i++)
2006 if (max_groups[i] > 0)
2007 {
2008 class = i;
2009 break;
2010 }
2011
2012 if (class == (int) NO_REGS)
2013 return 1;
2014
2015 /* Consider each pair of consecutive registers. */
2016 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2017 {
2018 /* Ignore pairs that include reg REGNO. */
2019 if (i == regno || i + 1 == regno)
2020 continue;
2021
2022 /* Ignore pairs that are outside the class that needs the group.
2023 ??? Here we fail to handle the case where two different classes
2024 independently need groups. But this never happens with our
2025 current machine descriptions. */
2026 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2027 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2028 continue;
2029
2030 /* A pair of consecutive regs we can still spill does the trick. */
2031 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2032 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2033 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2034 return 1;
2035
2036 /* A pair of one already spilled and one we can spill does it
2037 provided the one already spilled is not otherwise reserved. */
2038 if (spill_reg_order[i] < 0
2039 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2040 && spill_reg_order[i + 1] >= 0
2041 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2042 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2043 return 1;
2044 if (spill_reg_order[i + 1] < 0
2045 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2046 && spill_reg_order[i] >= 0
2047 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2048 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2049 return 1;
2050 }
2051
2052 return 0;
2053 }
2054 \f
2055 /* Count any groups that can be formed from the registers recently spilled.
2056 This is done class by class, in order of ascending class number. */
2057
static void
count_possible_groups (group_size, group_mode, max_groups)
     int *group_size;
     enum machine_mode *group_mode;
     int *max_groups;
{
  int i;
  /* Now find all consecutive groups of spilled registers
     and mark each group off against the need for such groups.
     But don't count them against ordinary need, yet.  */

  /* Scan classes in ascending order; only classes whose needed group
     size exceeds one register are of interest here.  */
  for (i = 0; i < N_REG_CLASSES; i++)
    if (group_size[i] > 1)
      {
	/* NEW is the set of spill regs usable for a group in class I.  */
	HARD_REG_SET new;
	int j;

	CLEAR_HARD_REG_SET (new);

	/* Make a mask of all the regs that are spill regs in class I.
	   Regs already counted toward a group or a non-group need are
	   excluded, so they are not counted twice.  */
	for (j = 0; j < n_spills; j++)
	  if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
	      && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
	      && ! TEST_HARD_REG_BIT (counted_for_nongroups,
				      spill_regs[j]))
	    SET_HARD_REG_BIT (new, spill_regs[j]);

	/* Find each consecutive group of them.  */
	for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
	  if (TEST_HARD_REG_BIT (new, j)
	      && j + group_size[i] <= FIRST_PSEUDO_REGISTER
	      /* Next line in case group-mode for this class
		 demands an even-odd pair.  */
	      && HARD_REGNO_MODE_OK (j, group_mode[i]))
	    {
	      int k;
	      /* K counts how many consecutive regs starting at J are in
		 the mask; the scan stops at the first one that isn't.  */
	      for (k = 1; k < group_size[i]; k++)
		if (! TEST_HARD_REG_BIT (new, j + k))
		  break;
	      if (k == group_size[i])
		{
		  /* We found a group.  Mark it off against this class's
		     need for groups, and against each superclass too.  */
		  register enum reg_class *p;
		  max_groups[i]--;
		  p = reg_class_superclasses[i];
		  while (*p != LIM_REG_CLASSES)
		    max_groups[(int) *p++]--;
		  /* Don't count these registers again.  */
		  for (k = 0; k < group_size[i]; k++)
		    SET_HARD_REG_BIT (counted_for_groups, j + k);
		}
	      /* Skip to the last reg in this group.  When j is incremented
		 above, it will then point to the first reg of the next
		 possible group.  */
	      j += k - 1;
	    }
      }

}
2118 \f
2119 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2120 another mode that needs to be reloaded for the same register class CLASS.
2121 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2122 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2123
2124 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2125 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2126 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2127 causes unnecessary failures on machines requiring alignment of register
2128 groups when the two modes are different sizes, because the larger mode has
2129 more strict alignment rules than the smaller mode. */
2130
2131 static int
2132 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2133 enum machine_mode allocate_mode, other_mode;
2134 enum reg_class class;
2135 {
2136 register int regno;
2137 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2138 {
2139 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2140 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2141 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2142 return 0;
2143 }
2144 return 1;
2145 }
2146
2147 /* Handle the failure to find a register to spill.
2148 INSN should be one of the insns which needed this particular spill reg. */
2149
2150 static void
2151 spill_failure (insn)
2152 rtx insn;
2153 {
2154 if (asm_noperands (PATTERN (insn)) >= 0)
2155 error_for_asm (insn, "`asm' needs too many reloads");
2156 else
2157 abort ();
2158 }
2159
2160 /* Add a new register to the tables of available spill-registers
2161 (as well as spilling all pseudos allocated to the register).
2162 I is the index of this register in potential_reload_regs.
2163 CLASS is the regclass whose need is being satisfied.
2164 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2165 so that this register can count off against them.
2166 MAX_NONGROUPS is 0 if this register is part of a group.
2167 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2168
static int
new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
     int i;
     int class;
     int *max_needs;
     int *max_nongroups;
     int global;
     FILE *dumpfile;
{
  register enum reg_class *p;
  int val;
  /* The hard reg number being made into a spill reg.  */
  int regno = potential_reload_regs[i];

  if (i >= FIRST_PSEUDO_REGISTER)
    abort ();	/* Caller failed to find any register.  */

  /* Spilling a fixed or forbidden reg would corrupt required state;
     this is unrecoverable, so die with an explanation.  */
  if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
    fatal ("fixed or forbidden register was spilled.\n\
This may be due to a compiler bug or to impossible asm\n\
statements or clauses.");

  /* Make reg REGNO an additional reload reg.  */

  /* Remove it from further consideration and record its spill order.  */
  potential_reload_regs[i] = -1;
  spill_regs[n_spills] = regno;
  spill_reg_order[regno] = n_spills;
  if (dumpfile)
    fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);

  /* Clear off the needs we just satisfied.  */

  max_needs[class]--;
  p = reg_class_superclasses[class];
  while (*p != LIM_REG_CLASSES)
    max_needs[(int) *p++]--;

  /* If a non-group need remains for this class (MAX_NONGROUPS is null
     when this reg is part of a group), count this reg against it.  */
  if (max_nongroups && max_nongroups[class] > 0)
    {
      SET_HARD_REG_BIT (counted_for_nongroups, regno);
      max_nongroups[class]--;
      p = reg_class_superclasses[class];
      while (*p != LIM_REG_CLASSES)
	max_nongroups[(int) *p++]--;
    }

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);

  /* If there are some registers still to eliminate and this register
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.
     (If new pseudos were spilled, thus requiring more space, VAL would have
     been set non-zero by the call to spill_hard_reg above since additional
     reloads may be needed in that case.

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */
  if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
    val = 1;

  regs_ever_live[spill_regs[n_spills]] = 1;
  n_spills++;

  return val;
}
2240 \f
2241 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2242 data that is dead in INSN. */
2243
2244 static void
2245 delete_dead_insn (insn)
2246 rtx insn;
2247 {
2248 rtx prev = prev_real_insn (insn);
2249 rtx prev_dest;
2250
2251 /* If the previous insn sets a register that dies in our insn, delete it
2252 too. */
2253 if (prev && GET_CODE (PATTERN (prev)) == SET
2254 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2255 && reg_mentioned_p (prev_dest, PATTERN (insn))
2256 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2257 delete_dead_insn (prev);
2258
2259 PUT_CODE (insn, NOTE);
2260 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2261 NOTE_SOURCE_FILE (insn) = 0;
2262 }
2263
2264 /* Modify the home of pseudo-reg I.
2265 The new home is present in reg_renumber[I].
2266
2267 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2268 or it may be -1, meaning there is none or it is not relevant.
2269 This is used so that all pseudos spilled from a given hard reg
2270 can share one stack slot. */
2271
static void
alter_reg (i, from_reg)
     register int i;
     int from_reg;
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (GET_CODE (regno_reg_rtx[i]) != REG)
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  REGNO (regno_reg_rtx[i])
    = reg_renumber[i] >= 0 ? reg_renumber[i] : i;

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && reg_n_refs[i] > 0
      && reg_equiv_constant[i] == 0
      && reg_equiv_memory_loc[i] == 0)
    {
      register rtx x;
      int inherent_size = PSEUDO_REGNO_BYTES (i);
      int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      /* ADJUST is a byte offset applied at the end to point the MEM at
	 the part of the slot that matches the pseudo's own mode.  */
      int adjust = 0;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, then the previous slot.  */
      if (from_reg == -1)
	{
	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
#if BYTES_BIG_ENDIAN
	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.
	     This is so we can do a big-endian correction unconditionally
	     below.  */
	  adjust = inherent_size - total_size;
#endif
	}
      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size))
	x = spill_stack_slot[from_reg];
      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
	  if (spill_stack_slot[from_reg])
	    {
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	    }
	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size, -1);
#if BYTES_BIG_ENDIAN
	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.
	     This is so we can do a big-endian correction unconditionally
	     below.  */
	  adjust = GET_MODE_SIZE (mode) - total_size;
#endif
	  /* Record the new, larger slot so later spills from FROM_REG
	     can share it.  */
	  spill_stack_slot[from_reg] = x;
	  spill_stack_slot_width[from_reg] = total_size;
	}

#if BYTES_BIG_ENDIAN
      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (inherent_size < total_size)
	adjust += (total_size - inherent_size);
#endif /* BYTES_BIG_ENDIAN */

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
	{
	  x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
		       plus_constant (XEXP (x, 0), adjust));
	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
	}

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc[i] = x;
    }
}
2378
2379 /* Mark the slots in regs_ever_live for the hard regs
2380 used by pseudo-reg number REGNO. */
2381
2382 void
2383 mark_home_live (regno)
2384 int regno;
2385 {
2386 register int i, lim;
2387 i = reg_renumber[regno];
2388 if (i < 0)
2389 return;
2390 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2391 while (i < lim)
2392 regs_ever_live[i++] = 1;
2393 }
2394
2395 /* Mark the registers used in SCRATCH as being live. */
2396
2397 static void
2398 mark_scratch_live (scratch)
2399 rtx scratch;
2400 {
2401 register int i;
2402 int regno = REGNO (scratch);
2403 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2404
2405 for (i = regno; i < lim; i++)
2406 regs_ever_live[i] = 1;
2407 }
2408 \f
2409 /* This function handles the tracking of elimination offsets around branches.
2410
2411 X is a piece of RTL being scanned.
2412
2413 INSN is the insn that it came from, if any.
2414
2415 INITIAL_P is non-zero if we are to set the offset to be the initial
2416 offset and zero if we are setting the offset of the label to be the
2417 current offset. */
2418
static void
set_label_offsets (x, insn, initial_p)
     rtx x;
     rtx insn;
     int initial_p;
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      /* Non-local labels are handled elsewhere; ignore them here.  */
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = XEXP (x, 0);

      /* ... fall through ... */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x)][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && GET_CODE (tem) == BARRIER)
	{
	  /* Adopt the recorded offsets and recount how many eliminations
	     are away from their initial offsets.  */
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (x)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x)][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_INSN:
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ... */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
	 and hence must have all eliminations at their initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the address vector must be at their initial
	 offsets.  We want the first field for ADDR_VEC and the second
	 field for ADDR_DIFF_VEC.  */

      for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Check both arms; a break out of this switch means some arm
	     is a non-constant jump and falls into the code below.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
    }
}
2563 \f
2564 /* Used for communication between the next two function to properly share
2565 the vector for an ASM_OPERANDS. */
2566
2567 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2568
2569 /* Scan X and replace any eliminable registers (such as fp) with a
2570 replacement (such as sp), plus an offset.
2571
2572 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2573 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2574 MEM, we are allowed to replace a sum of a register and the constant zero
2575 with the register, which we cannot do outside a MEM. In addition, we need
2576 to record the fact that a register is referenced outside a MEM.
2577
2578 If INSN is an insn, it is the insn containing X. If we replace a REG
2579 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2580 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2581 that the REG is being modified.
2582
2583 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2584 That's used when we eliminate in expressions stored in notes.
2585 This means, do not set ref_outside_mem even if the reference
2586 is outside of MEMs.
2587
2588 If we see a modification to a register we know about, take the
2589 appropriate action (see case SET, below).
2590
2591 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2592 replacements done assuming all offsets are at their initial values. If
2593 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2594 encounter, return the actual location so that find_reloads will do
2595 the proper thing. */
2596
2597 rtx
2598 eliminate_regs (x, mem_mode, insn)
2599 rtx x;
2600 enum machine_mode mem_mode;
2601 rtx insn;
2602 {
2603 enum rtx_code code = GET_CODE (x);
2604 struct elim_table *ep;
2605 int regno;
2606 rtx new;
2607 int i, j;
2608 char *fmt;
2609 int copied = 0;
2610
2611 switch (code)
2612 {
2613 case CONST_INT:
2614 case CONST_DOUBLE:
2615 case CONST:
2616 case SYMBOL_REF:
2617 case CODE_LABEL:
2618 case PC:
2619 case CC0:
2620 case ASM_INPUT:
2621 case ADDR_VEC:
2622 case ADDR_DIFF_VEC:
2623 case RETURN:
2624 return x;
2625
2626 case REG:
2627 regno = REGNO (x);
2628
2629 /* First handle the case where we encounter a bare register that
2630 is eliminable. Replace it with a PLUS. */
2631 if (regno < FIRST_PSEUDO_REGISTER)
2632 {
2633 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2634 ep++)
2635 if (ep->from_rtx == x && ep->can_eliminate)
2636 {
2637 if (! mem_mode
2638 /* Refs inside notes don't count for this purpose. */
2639 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2640 || GET_CODE (insn) == INSN_LIST)))
2641 ep->ref_outside_mem = 1;
2642 return plus_constant (ep->to_rtx, ep->previous_offset);
2643 }
2644
2645 }
2646 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2647 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2648 {
2649 /* In this case, find_reloads would attempt to either use an
2650 incorrect address (if something is not at its initial offset)
2651 or substitute an replaced address into an insn (which loses
2652 if the offset is changed by some later action). So we simply
2653 return the replaced stack slot (assuming it is changed by
2654 elimination) and ignore the fact that this is actually a
2655 reference to the pseudo. Ensure we make a copy of the
2656 address in case it is shared. */
2657 new = eliminate_regs (reg_equiv_memory_loc[regno],
2658 mem_mode, insn);
2659 if (new != reg_equiv_memory_loc[regno])
2660 {
2661 cannot_omit_stores[regno] = 1;
2662 return copy_rtx (new);
2663 }
2664 }
2665 return x;
2666
2667 case PLUS:
2668 /* If this is the sum of an eliminable register and a constant, rework
2669 the sum. */
2670 if (GET_CODE (XEXP (x, 0)) == REG
2671 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2672 && CONSTANT_P (XEXP (x, 1)))
2673 {
2674 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2675 ep++)
2676 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2677 {
2678 if (! mem_mode
2679 /* Refs inside notes don't count for this purpose. */
2680 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2681 || GET_CODE (insn) == INSN_LIST)))
2682 ep->ref_outside_mem = 1;
2683
2684 /* The only time we want to replace a PLUS with a REG (this
2685 occurs when the constant operand of the PLUS is the negative
2686 of the offset) is when we are inside a MEM. We won't want
2687 to do so at other times because that would change the
2688 structure of the insn in a way that reload can't handle.
2689 We special-case the commonest situation in
2690 eliminate_regs_in_insn, so just replace a PLUS with a
2691 PLUS here, unless inside a MEM. */
2692 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2693 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2694 return ep->to_rtx;
2695 else
2696 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2697 plus_constant (XEXP (x, 1),
2698 ep->previous_offset));
2699 }
2700
2701 /* If the register is not eliminable, we are done since the other
2702 operand is a constant. */
2703 return x;
2704 }
2705
2706 /* If this is part of an address, we want to bring any constant to the
2707 outermost PLUS. We will do this by doing register replacement in
2708 our operands and seeing if a constant shows up in one of them.
2709
2710 We assume here this is part of an address (or a "load address" insn)
2711 since an eliminable register is not likely to appear in any other
2712 context.
2713
2714 If we have (plus (eliminable) (reg)), we want to produce
2715 (plus (plus (replacement) (reg) (const))). If this was part of a
2716 normal add insn, (plus (replacement) (reg)) will be pushed as a
2717 reload. This is the desired action. */
2718
2719 {
2720 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2721 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2722
2723 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2724 {
2725 /* If one side is a PLUS and the other side is a pseudo that
2726 didn't get a hard register but has a reg_equiv_constant,
2727 we must replace the constant here since it may no longer
2728 be in the position of any operand. */
2729 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2730 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2731 && reg_renumber[REGNO (new1)] < 0
2732 && reg_equiv_constant != 0
2733 && reg_equiv_constant[REGNO (new1)] != 0)
2734 new1 = reg_equiv_constant[REGNO (new1)];
2735 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2736 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2737 && reg_renumber[REGNO (new0)] < 0
2738 && reg_equiv_constant[REGNO (new0)] != 0)
2739 new0 = reg_equiv_constant[REGNO (new0)];
2740
2741 new = form_sum (new0, new1);
2742
2743 /* As above, if we are not inside a MEM we do not want to
2744 turn a PLUS into something else. We might try to do so here
2745 for an addition of 0 if we aren't optimizing. */
2746 if (! mem_mode && GET_CODE (new) != PLUS)
2747 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2748 else
2749 return new;
2750 }
2751 }
2752 return x;
2753
2754 case MULT:
2755 /* If this is the product of an eliminable register and a
2756 constant, apply the distribute law and move the constant out
2757 so that we have (plus (mult ..) ..). This is needed in order
2758 to keep load-address insns valid. This case is pathalogical.
2759 We ignore the possibility of overflow here. */
2760 if (GET_CODE (XEXP (x, 0)) == REG
2761 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2762 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2763 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2764 ep++)
2765 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2766 {
2767 if (! mem_mode
2768 /* Refs inside notes don't count for this purpose. */
2769 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2770 || GET_CODE (insn) == INSN_LIST)))
2771 ep->ref_outside_mem = 1;
2772
2773 return
2774 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2775 ep->previous_offset * INTVAL (XEXP (x, 1)));
2776 }
2777
2778 /* ... fall through ... */
2779
2780 case CALL:
2781 case COMPARE:
2782 case MINUS:
2783 case DIV: case UDIV:
2784 case MOD: case UMOD:
2785 case AND: case IOR: case XOR:
2786 case ROTATERT: case ROTATE:
2787 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2788 case NE: case EQ:
2789 case GE: case GT: case GEU: case GTU:
2790 case LE: case LT: case LEU: case LTU:
2791 {
2792 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2793 rtx new1
2794 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2795
2796 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2797 return gen_rtx (code, GET_MODE (x), new0, new1);
2798 }
2799 return x;
2800
2801 case EXPR_LIST:
2802 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2803 if (XEXP (x, 0))
2804 {
2805 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2806 if (new != XEXP (x, 0))
2807 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2808 }
2809
2810 /* ... fall through ... */
2811
2812 case INSN_LIST:
2813 /* Now do eliminations in the rest of the chain. If this was
2814 an EXPR_LIST, this might result in allocating more memory than is
2815 strictly needed, but it simplifies the code. */
2816 if (XEXP (x, 1))
2817 {
2818 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2819 if (new != XEXP (x, 1))
2820 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2821 }
2822 return x;
2823
2824 case PRE_INC:
2825 case POST_INC:
2826 case PRE_DEC:
2827 case POST_DEC:
2828 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2829 if (ep->to_rtx == XEXP (x, 0))
2830 {
2831 int size = GET_MODE_SIZE (mem_mode);
2832
2833 /* If more bytes than MEM_MODE are pushed, account for them. */
2834 #ifdef PUSH_ROUNDING
2835 if (ep->to_rtx == stack_pointer_rtx)
2836 size = PUSH_ROUNDING (size);
2837 #endif
2838 if (code == PRE_DEC || code == POST_DEC)
2839 ep->offset += size;
2840 else
2841 ep->offset -= size;
2842 }
2843
2844 /* Fall through to generic unary operation case. */
2845 case USE:
2846 case STRICT_LOW_PART:
2847 case NEG: case NOT:
2848 case SIGN_EXTEND: case ZERO_EXTEND:
2849 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2850 case FLOAT: case FIX:
2851 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2852 case ABS:
2853 case SQRT:
2854 case FFS:
2855 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2856 if (new != XEXP (x, 0))
2857 return gen_rtx (code, GET_MODE (x), new);
2858 return x;
2859
2860 case SUBREG:
2861 /* Similar to above processing, but preserve SUBREG_WORD.
2862 Convert (subreg (mem)) to (mem) if not paradoxical.
2863 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2864 pseudo didn't get a hard reg, we must replace this with the
2865 eliminated version of the memory location because push_reloads
2866 may do the replacement in certain circumstances. */
2867 if (GET_CODE (SUBREG_REG (x)) == REG
2868 && (GET_MODE_SIZE (GET_MODE (x))
2869 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2870 && reg_equiv_memory_loc != 0
2871 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2872 {
2873 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2874 mem_mode, insn);
2875
2876 /* If we didn't change anything, we must retain the pseudo. */
2877 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2878 new = XEXP (x, 0);
2879 else
2880 /* Otherwise, ensure NEW isn't shared in case we have to reload
2881 it. */
2882 new = copy_rtx (new);
2883 }
2884 else
2885 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2886
2887 if (new != XEXP (x, 0))
2888 {
2889 if (GET_CODE (new) == MEM
2890 && (GET_MODE_SIZE (GET_MODE (x))
2891 <= GET_MODE_SIZE (GET_MODE (new)))
2892 #ifdef LOAD_EXTEND_OP
2893 /* On these machines we will be reloading what is
2894 inside the SUBREG if it originally was a pseudo and
2895 the inner and outer modes are both a word or
2896 smaller. So leave the SUBREG then. */
2897 && ! (GET_CODE (SUBREG_REG (x)) == REG
2898 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2899 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD)
2900 #endif
2901 )
2902 {
2903 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2904 enum machine_mode mode = GET_MODE (x);
2905
2906 #if BYTES_BIG_ENDIAN
2907 offset += (MIN (UNITS_PER_WORD,
2908 GET_MODE_SIZE (GET_MODE (new)))
2909 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2910 #endif
2911
2912 PUT_MODE (new, mode);
2913 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2914 return new;
2915 }
2916 else
2917 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2918 }
2919
2920 return x;
2921
2922 case CLOBBER:
2923 /* If clobbering a register that is the replacement register for an
2924 elimination we still think can be performed, note that it cannot
2925 be performed. Otherwise, we need not be concerned about it. */
2926 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2927 if (ep->to_rtx == XEXP (x, 0))
2928 ep->can_eliminate = 0;
2929
2930 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2931 if (new != XEXP (x, 0))
2932 return gen_rtx (code, GET_MODE (x), new);
2933 return x;
2934
2935 case ASM_OPERANDS:
2936 {
2937 rtx *temp_vec;
2938 /* Properly handle sharing input and constraint vectors. */
2939 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2940 {
2941 /* When we come to a new vector not seen before,
2942 scan all its elements; keep the old vector if none
2943 of them changes; otherwise, make a copy. */
2944 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2945 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2946 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2947 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
2948 mem_mode, insn);
2949
2950 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2951 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2952 break;
2953
2954 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2955 new_asm_operands_vec = old_asm_operands_vec;
2956 else
2957 new_asm_operands_vec
2958 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2959 }
2960
2961 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2962 if (new_asm_operands_vec == old_asm_operands_vec)
2963 return x;
2964
2965 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2966 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2967 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2968 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2969 ASM_OPERANDS_SOURCE_FILE (x),
2970 ASM_OPERANDS_SOURCE_LINE (x));
2971 new->volatil = x->volatil;
2972 return new;
2973 }
2974
2975 case SET:
2976 /* Check for setting a register that we know about. */
2977 if (GET_CODE (SET_DEST (x)) == REG)
2978 {
2979 /* See if this is setting the replacement register for an
2980 elimination.
2981
2982 If DEST is the hard frame pointer, we do nothing because we
2983 assume that all assignments to the frame pointer are for
2984 non-local gotos and are being done at a time when they are valid
2985 and do not disturb anything else. Some machines want to
2986 eliminate a fake argument pointer (or even a fake frame pointer)
2987 with either the real frame or the stack pointer. Assignments to
2988 the hard frame pointer must not prevent this elimination. */
2989
2990 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2991 ep++)
2992 if (ep->to_rtx == SET_DEST (x)
2993 && SET_DEST (x) != hard_frame_pointer_rtx)
2994 {
2995 /* If it is being incremented, adjust the offset. Otherwise,
2996 this elimination can't be done. */
2997 rtx src = SET_SRC (x);
2998
2999 if (GET_CODE (src) == PLUS
3000 && XEXP (src, 0) == SET_DEST (x)
3001 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3002 ep->offset -= INTVAL (XEXP (src, 1));
3003 else
3004 ep->can_eliminate = 0;
3005 }
3006
3007 /* Now check to see we are assigning to a register that can be
3008 eliminated. If so, it must be as part of a PARALLEL, since we
3009 will not have been called if this is a single SET. So indicate
3010 that we can no longer eliminate this reg. */
3011 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3012 ep++)
3013 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3014 ep->can_eliminate = 0;
3015 }
3016
3017 /* Now avoid the loop below in this common case. */
3018 {
3019 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3020 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3021
3022 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3023 write a CLOBBER insn. */
3024 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3025 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3026 && GET_CODE (insn) != INSN_LIST)
3027 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3028
3029 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3030 return gen_rtx (SET, VOIDmode, new0, new1);
3031 }
3032
3033 return x;
3034
3035 case MEM:
3036 /* Our only special processing is to pass the mode of the MEM to our
3037 recursive call and copy the flags. While we are here, handle this
3038 case more efficiently. */
3039 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3040 if (new != XEXP (x, 0))
3041 {
3042 new = gen_rtx (MEM, GET_MODE (x), new);
3043 new->volatil = x->volatil;
3044 new->unchanging = x->unchanging;
3045 new->in_struct = x->in_struct;
3046 return new;
3047 }
3048 else
3049 return x;
3050 }
3051
3052 /* Process each of our operands recursively. If any have changed, make a
3053 copy of the rtx. */
3054 fmt = GET_RTX_FORMAT (code);
3055 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3056 {
3057 if (*fmt == 'e')
3058 {
3059 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3060 if (new != XEXP (x, i) && ! copied)
3061 {
3062 rtx new_x = rtx_alloc (code);
3063 bcopy ((char *) x, (char *) new_x,
3064 (sizeof (*new_x) - sizeof (new_x->fld)
3065 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3066 x = new_x;
3067 copied = 1;
3068 }
3069 XEXP (x, i) = new;
3070 }
3071 else if (*fmt == 'E')
3072 {
3073 int copied_vec = 0;
3074 for (j = 0; j < XVECLEN (x, i); j++)
3075 {
3076 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3077 if (new != XVECEXP (x, i, j) && ! copied_vec)
3078 {
3079 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3080 &XVECEXP (x, i, 0));
3081 if (! copied)
3082 {
3083 rtx new_x = rtx_alloc (code);
3084 bcopy ((char *) x, (char *) new_x,
3085 (sizeof (*new_x) - sizeof (new_x->fld)
3086 + (sizeof (new_x->fld[0])
3087 * GET_RTX_LENGTH (code))));
3088 x = new_x;
3089 copied = 1;
3090 }
3091 XVEC (x, i) = new_v;
3092 copied_vec = 1;
3093 }
3094 XVECEXP (x, i, j) = new;
3095 }
3096 }
3097 }
3098
3099 return x;
3100 }
3101 \f
/* Scan INSN and eliminate all eliminable registers in it.

   If REPLACE is nonzero, do the replacement destructively.  Also
   delete the insn as dead if it is setting an eliminable register.

   If REPLACE is zero, do all our allocations in reload_obstack.

   If no eliminations were done and this insn doesn't require any elimination
   processing (these are not identical conditions: it might be updating sp,
   but not referencing fp; this needs to be seen during reload_as_needed so
   that the offset between fp and sp can be taken into consideration), zero
   is returned.  Otherwise, 1 is returned.  */

static int
eliminate_regs_in_insn (insn, replace)
     rtx insn;
     int replace;
{
  rtx old_body = PATTERN (insn);
  rtx old_set = single_set (insn);
  rtx new_body;
  int val = 0;			/* Return value: nonzero if anything changed.  */
  struct elim_table *ep;

  if (! replace)
    push_obstacks (&reload_obstack, &reload_obstack);

  if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  {
	    /* In this case this insn isn't serving a useful purpose.  We
	       will delete it in reload_as_needed once we know that this
	       elimination is, in fact, being done.

	       If REPLACE isn't set, we can't delete this insn, but needn't
	       process it since it won't be used unless something changes.  */
	    if (replace)
	      delete_dead_insn (insn);
	    val = 1;
	    goto done;
	  }

      /* Check for (set (reg) (plus (reg from) (offset))) where the offset
	 in the insn is the negative of the offset in FROM.  Substitute
	 (set (reg) (reg to)) for the insn and change its code.

	 We have to do this here, rather than in eliminate_regs, so that we
	 can change the insn code.  */

      if (GET_CODE (SET_SRC (old_set)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
	  && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
	      && ep->can_eliminate)
	    {
	      /* We must stop at the first elimination that will be used.
		 If this one would replace the PLUS with a REG, do it
		 now.  Otherwise, quit the loop and let eliminate_regs
		 do its normal replacement.  */
	      if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
		{
		  /* We assume here that we don't need a PARALLEL of
		     any CLOBBERs for this assignment.  There's not
		     much we can do if we do need it.  */
		  PATTERN (insn) = gen_rtx (SET, VOIDmode,
					    SET_DEST (old_set), ep->to_rtx);
		  INSN_CODE (insn) = -1;
		  val = 1;
		  goto done;
		}

	      break;
	    }
    }

  /* Reset so the ASM_OPERANDS sharing logic in eliminate_regs starts fresh
     for this insn.  */
  old_asm_operands_vec = 0;

  /* Replace the body of this insn with a substituted form.  If we changed
     something, return non-zero.

     If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.  */

  new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
  if (new_body != old_body)
    {
      /* If we aren't replacing things permanently and we changed something,
	 make another copy to ensure that all the RTL is new.  Otherwise
	 things can go wrong if find_reloads swaps commutative operands
	 and one is inside RTL that has been copied while the other is not.  */

      /* Don't copy an asm_operands because (1) there's no need and (2)
	 copy_rtx can't do it properly when there are multiple outputs.  */
      if (! replace && asm_noperands (old_body) < 0)
	new_body = copy_rtx (new_body);

      /* If we had a move insn but now we don't, rerecognize it.  This will
	 cause spurious re-recognition if the old move had a PARALLEL since
	 the new one still will, but we can't call single_set without
	 having put NEW_BODY into the insn and the re-recognition won't
	 hurt in this rare case.  */
      if (old_set != 0
	  && ((GET_CODE (SET_SRC (old_set)) == REG
	       && (GET_CODE (new_body) != SET
		   || GET_CODE (SET_SRC (new_body)) != REG))
	      /* If this was a load from or store to memory, compare
		 the MEM in recog_operand to the one in the insn.  If they
		 are not equal, then rerecognize the insn.  */
	      || (old_set != 0
		  && ((GET_CODE (SET_SRC (old_set)) == MEM
		       && SET_SRC (old_set) != recog_operand[1])
		      || (GET_CODE (SET_DEST (old_set)) == MEM
			  && SET_DEST (old_set) != recog_operand[0])))
	      /* If this was an add insn before, rerecognize.  */
	      || GET_CODE (SET_SRC (old_set)) == PLUS))
	{
	  if (! validate_change (insn, &PATTERN (insn), new_body, 0))
	    /* If recognition fails, store the new body anyway.
	       It's normal to have recognition failures here
	       due to bizarre memory addresses; reloading will fix them.  */
	    PATTERN (insn) = new_body;
	}
      else
	PATTERN (insn) = new_body;

      val = 1;
    }

  /* Loop through all elimination pairs.  See if any have changed and
     recalculate the number not at initial offset.

     Compute the maximum offset (minimum offset if the stack does not
     grow downward) for each elimination pair.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */

  num_not_at_initial_offset = 0;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;

      if (ep->previous_offset != ep->offset)
	val = 1;

      ep->previous_offset = ep->offset;
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
	num_not_at_initial_offset++;

#ifdef STACK_GROWS_DOWNWARD
      ep->max_offset = MAX (ep->max_offset, ep->offset);
#else
      ep->max_offset = MIN (ep->max_offset, ep->offset);
#endif
    }

 done:
  /* If we changed something, perform elimination in REG_NOTES.  This is
     needed even when REPLACE is zero because a REG_DEAD note might refer
     to a register that we eliminate and could cause a different number
     of spill registers to be needed in the final reload pass than in
     the pre-passes.  */
  if (val && REG_NOTES (insn) != 0)
    REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));

  if (! replace)
    pop_obstacks ();

  return val;
}
3289
3290 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3291 replacement we currently believe is valid, mark it as not eliminable if X
3292 modifies DEST in any way other than by adding a constant integer to it.
3293
3294 If DEST is the frame pointer, we do nothing because we assume that
3295 all assignments to the hard frame pointer are nonlocal gotos and are being
3296 done at a time when they are valid and do not disturb anything else.
3297 Some machines want to eliminate a fake argument pointer with either the
3298 frame or stack pointer. Assignments to the hard frame pointer must not
3299 prevent this elimination.
3300
3301 Called via note_stores from reload before starting its passes to scan
3302 the insns of the function. */
3303
3304 static void
3305 mark_not_eliminable (dest, x)
3306 rtx dest;
3307 rtx x;
3308 {
3309 register int i;
3310
3311 /* A SUBREG of a hard register here is just changing its mode. We should
3312 not see a SUBREG of an eliminable hard register, but check just in
3313 case. */
3314 if (GET_CODE (dest) == SUBREG)
3315 dest = SUBREG_REG (dest);
3316
3317 if (dest == hard_frame_pointer_rtx)
3318 return;
3319
3320 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3321 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3322 && (GET_CODE (x) != SET
3323 || GET_CODE (SET_SRC (x)) != PLUS
3324 || XEXP (SET_SRC (x), 0) != dest
3325 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3326 {
3327 reg_eliminate[i].can_eliminate_previous
3328 = reg_eliminate[i].can_eliminate = 0;
3329 num_eliminable--;
3330 }
3331 }
3332 \f
/* Kick all pseudos out of hard register REGNO.
   If GLOBAL is nonzero, try to find someplace else to put them.
   If DUMPFILE is nonzero, log actions taken on that file.

   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In that case, no
   pseudos are allowed to be in the register, even if they are only in a
   block that doesn't require spill registers, unlike the case when we are
   spilling this hard reg to produce another spill register.

   Return nonzero if any pseudos needed to be kicked out.  */

static int
spill_hard_reg (regno, global, dumpfile, cant_eliminate)
     register int regno;
     int global;
     FILE *dumpfile;
     int cant_eliminate;
{
  enum reg_class class = REGNO_REG_CLASS (regno);
  int something_changed = 0;
  register int i;

  SET_HARD_REG_BIT (forbidden_regs, regno);

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] >= 0
	&& reg_renumber[i] <= regno
	/* A multi-word pseudo starting at or below REGNO overlaps it if
	   its last hard reg extends up to or past REGNO.  */
	&& (reg_renumber[i]
	    + HARD_REGNO_NREGS (reg_renumber[i],
				PSEUDO_REGNO_MODE (i))
	    > regno))
      {
	/* If this register belongs solely to a basic block which needed no
	   spilling of any class that this register is contained in,
	   leave it be, unless we are spilling this register because
	   it was a hard register that can't be eliminated.  */

	if (! cant_eliminate
	    && basic_block_needs[0]
	    && reg_basic_block[i] >= 0
	    && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
	  {
	    enum reg_class *p;

	    /* Also require that no superclass of CLASS has a need in
	       this block before we can leave the pseudo alone.  */
	    for (p = reg_class_superclasses[(int) class];
		 *p != LIM_REG_CLASSES; p++)
	      if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
		break;

	    if (*p == LIM_REG_CLASSES)
	      continue;
	  }

	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	/* We will need to scan everything again.  */
	something_changed = 1;
	if (global)
	  retry_global_alloc (i, forbidden_regs);

	alter_reg (i, regno);
	if (dumpfile)
	  {
	    if (reg_renumber[i] == -1)
	      fprintf (dumpfile, " Register %d now on stack.\n\n", i);
	    else
	      fprintf (dumpfile, " Register %d now in %d.\n\n",
		       i, reg_renumber[i]);
	  }
      }
  /* Likewise free up any SCRATCH rtx that was assigned this hard reg.  */
  for (i = 0; i < scratch_list_length; i++)
    {
      if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
	{
	  /* Same basic-block exemption as for pseudos above.  */
	  if (! cant_eliminate && basic_block_needs[0]
	      && ! basic_block_needs[(int) class][scratch_block[i]])
	    {
	      enum reg_class *p;

	      for (p = reg_class_superclasses[(int) class];
		   *p != LIM_REG_CLASSES; p++)
		if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
		  break;

	      if (*p == LIM_REG_CLASSES)
		continue;
	    }
	  PUT_CODE (scratch_list[i], SCRATCH);
	  scratch_list[i] = 0;
	  something_changed = 1;
	  continue;
	}
    }

  return something_changed;
}
3433 \f
3434 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3435 Also mark any hard registers used to store user variables as
3436 forbidden from being used for spill registers. */
3437
3438 static void
3439 scan_paradoxical_subregs (x)
3440 register rtx x;
3441 {
3442 register int i;
3443 register char *fmt;
3444 register enum rtx_code code = GET_CODE (x);
3445
3446 switch (code)
3447 {
3448 case REG:
3449 #ifdef SMALL_REGISTER_CLASSES
3450 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3451 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3452 #endif
3453 return;
3454
3455 case CONST_INT:
3456 case CONST:
3457 case SYMBOL_REF:
3458 case LABEL_REF:
3459 case CONST_DOUBLE:
3460 case CC0:
3461 case PC:
3462 case USE:
3463 case CLOBBER:
3464 return;
3465
3466 case SUBREG:
3467 if (GET_CODE (SUBREG_REG (x)) == REG
3468 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3469 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3470 = GET_MODE_SIZE (GET_MODE (x));
3471 return;
3472 }
3473
3474 fmt = GET_RTX_FORMAT (code);
3475 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3476 {
3477 if (fmt[i] == 'e')
3478 scan_paradoxical_subregs (XEXP (x, i));
3479 else if (fmt[i] == 'E')
3480 {
3481 register int j;
3482 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3483 scan_paradoxical_subregs (XVECEXP (x, i, j));
3484 }
3485 }
3486 }
3487 \f
3488 static int
3489 hard_reg_use_compare (p1, p2)
3490 struct hard_reg_n_uses *p1, *p2;
3491 {
3492 int tem = p1->uses - p2->uses;
3493 if (tem != 0) return tem;
3494 /* If regs are equally good, sort by regno,
3495 so that the results of qsort leave nothing to chance. */
3496 return p1->regno - p2->regno;
3497 }
3498
3499 /* Choose the order to consider regs for use as reload registers
3500 based on how much trouble would be caused by spilling one.
3501 Store them in order of decreasing preference in potential_reload_regs. */
3502
static void
order_regs_for_reload ()
{
  register int i;
  register int o = 0;		/* Next free slot in potential_reload_regs.  */
  int large = 0;		/* Total refs; used to build "huge" penalties.  */

  struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];

  CLEAR_HARD_REG_SET (bad_spill_regs);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    potential_reload_regs[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      hard_reg_n_uses[i].uses = 0;
      hard_reg_n_uses[i].regno = i;
    }

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (regno >= 0)
	{
	  /* Charge the refs of the pseudo to every hard reg it occupies.  */
	  int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
	  while (regno < lim)
	    hard_reg_n_uses[regno++].uses += reg_n_refs[i];
	}
      large += reg_n_refs[i];
    }

  /* Now fixed registers (which cannot safely be used for reloading)
     get a very high use count so they will be considered least desirable.
     Registers used explicitly in the rtl code are almost as bad.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (fixed_regs[i])
	{
	  hard_reg_n_uses[i].uses += 2 * large + 2;
	  SET_HARD_REG_BIT (bad_spill_regs, i);
	}
      else if (regs_explicitly_used[i])
	{
	  hard_reg_n_uses[i].uses += large + 1;
#ifndef SMALL_REGISTER_CLASSES
	  /* ??? We are doing this here because of the potential that
	     bad code may be generated if a register explicitly used in
	     an insn was used as a spill register for that insn.  But
	     not using these as spill registers may lose on some machine.
	     We'll have to see how this works out.  */
	  SET_HARD_REG_BIT (bad_spill_regs, i);
#endif
	}
    }
  /* The hard frame pointer is never a good spill reg.  */
  hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
  SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);

#ifdef ELIMINABLE_REGS
  /* If registers other than the frame pointer are eliminable, mark them as
     poor choices.  */
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
      SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
    }
#endif

  /* Prefer registers not so far used, for use in temporary loading.
     Among them, if REG_ALLOC_ORDER is defined, use that order.
     Otherwise, prefer registers not preserved by calls.  */

#ifdef REG_ALLOC_ORDER
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int regno = reg_alloc_order[i];

      if (hard_reg_n_uses[regno].uses == 0)
	potential_reload_regs[o++] = regno;
    }
#else
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
#endif

  qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
	 sizeof hard_reg_n_uses[0], hard_reg_use_compare);

  /* Now add the regs that are already used,
     preferring those used less often.  The fixed and otherwise forbidden
     registers will be at the end of this list.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (hard_reg_n_uses[i].uses != 0)
      potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
}
3611 \f
/* Used in reload_as_needed to sort the spilled regs ascending.
   qsort comparison function: returns negative, zero, or positive as
   *R1 is less than, equal to, or greater than *R2.

   The previous version returned 1 whenever *R1 >= *R2, so for equal
   keys it reported "greater" for both argument orders.  That violates
   the qsort contract, which requires a consistent ordering (and 0 for
   equal elements); subtracting gives a conforming comparator.  The
   operands are hard register numbers, so the subtraction cannot
   overflow int.  */
static int
compare_spill_regs (r1, r2)
     short *r1, *r2;
{
  return *r1 - *r2;
}
3619
3620 /* Reload pseudo-registers into hard regs around each insn as needed.
3621 Additional register load insns are output before the insn that needs it
3622 and perhaps store insns after insns that modify the reloaded pseudo reg.
3623
3624 reg_last_reload_reg and reg_reloaded_contents keep track of
3625 which registers are already available in reload registers.
3626 We update these for the reloads that we perform,
3627 as the insns are scanned. */
3628
/* FIRST is the first insn of the current function.  LIVE_KNOWN is
   nonzero when basic-block information (basic_block_head and the
   per-block basic_block_needs tables) is valid and may be consulted.  */

static void
reload_as_needed (first, live_known)
     rtx first;
     int live_known;
{
  register rtx insn;
  register int i;
  int this_block = 0;
  rtx x;
  rtx after_call = 0;

  /* Clear per-spill-register bookkeeping left over from any earlier use.  */
  bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
  bzero ((char *) spill_reg_store, sizeof spill_reg_store);
  reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
  reg_has_output_reload = (char *) alloca (max_regno);
  for (i = 0; i < n_spills; i++)
    {
      reg_reloaded_contents[i] = -1;
      reg_reloaded_insn[i] = 0;
    }

  /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
				  reg_eliminate[i].initial_offset);
      reg_eliminate[i].previous_offset
	= reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
    }
#else
  INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
  reg_eliminate[0].previous_offset
    = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

  num_not_at_initial_offset = 0;

  /* Order the spilled regs, so that allocate_reload_regs can guarantee to
     pack registers with group needs.  */
  if (n_spills > 1)
    {
      qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
      for (i = 0; i < n_spills; i++)
	spill_reg_order[spill_regs[i]] = i;
    }

  /* Walk every insn; INSN is advanced to NEXT at the bottom of the loop.  */
  for (insn = first; insn;)
    {
      register rtx next = NEXT_INSN (insn);

      /* Notice when we move to a new basic block.  */
      if (live_known && this_block + 1 < n_basic_blocks
	  && insn == basic_block_head[this_block+1])
	++this_block;

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.  */
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  /* NOTE(review): NUM_ELIMINABLE_REGS is used here without an
	     ELIMINABLE_REGS guard -- presumably it is defined (as 1?) in
	     the non-ELIMINABLE_REGS configuration earlier in this file.  */
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (insn)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  rtx avoid_return_reg = 0;

#ifdef SMALL_REGISTER_CLASSES
	  /* Set avoid_return_reg if this is an insn
	     that might use the value of a function call.
	     AFTER_CALL remembers the destination of the most recent call's
	     value until an insn consumes it (or pushes args via SP).  */
	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      if (GET_CODE (PATTERN (insn)) == SET)
		after_call = SET_DEST (PATTERN (insn));
	      else if (GET_CODE (PATTERN (insn)) == PARALLEL
		       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
	      else
		after_call = 0;
	    }
	  else if (after_call != 0
		   && !(GET_CODE (PATTERN (insn)) == SET
			&& SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
	    {
	      if (reg_referenced_p (after_call, PATTERN (insn)))
		avoid_return_reg = after_call;
	      after_call = 0;
	    }
#endif /* SMALL_REGISTER_CLASSES */

	  /* If this is a USE and CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.
	     During reload an insn's mode field is reused as a flag:
	     QImode here marks an insn to which elimination may apply.  */
	  if (num_eliminable && GET_MODE (insn) == QImode)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      if (GET_CODE (insn) == NOTE)
		{
		  insn = next;
		  continue;
		}
	    }

	  /* Likewise, VOIDmode on an insn flags that it needs no reloads.  */
	  if (GET_MODE (insn) == VOIDmode)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      bzero (reg_has_output_reload, max_regno);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (n_reloads > 0)
	    {
	      /* Note: this NEXT shadows the outer loop's NEXT; both name
		 NEXT_INSN (insn) as of their point of computation.  */
	      rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
	      rtx p;
	      int class;

	      /* If this block has not had spilling done for a
		 particular class and we have any non-optionals that need a
		 spill reg in that class, abort.  */

	      for (class = 0; class < N_REG_CLASSES; class++)
		if (basic_block_needs[class] != 0
		    && basic_block_needs[class][this_block] == 0)
		  for (i = 0; i < n_reloads; i++)
		    if (class == (int) reload_reg_class[i]
			&& reload_reg_rtx[i] == 0
			&& ! reload_optional[i]
			&& (reload_in[i] != 0 || reload_out[i] != 0
			    || reload_secondary_p[i] != 0))
		      abort ();

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (insn, avoid_return_reg);

#ifdef SMALL_REGISTER_CLASSES
	      /* Merge any reloads that we didn't combine for fear of
		 increasing the number of spill registers needed but now
		 discover can be safely merged.  */
	      merge_assigned_reloads (insn);
#endif

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (insn);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insn that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads ();

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */

	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
		  if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
		      && (recog_memoized (p) < 0
			  || (insn_extract (p),
			      ! constrain_operands (INSN_CODE (p), 1))))
		    {
		      error_for_asm (insn,
				     "`asm' operand requires impossible reload");
		      /* Turn the offending reload insn into a deleted note.  */
		      PUT_CODE (p, NOTE);
		      NOTE_SOURCE_FILE (p) = 0;
		      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
		    }
	    }
	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.  */
	  note_stores (PATTERN (insn), forget_old_reloads_1);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
	    if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (PATTERN (x), forget_old_reloads_1);

#ifdef AUTO_INC_DEC
	  /* Likewise for regs altered by auto-increment in this insn.
	     But note that the reg-notes are not changed by reloading:
	     they still contain the pseudo-regs, not the spill regs.  */
	  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (reload_out[i] == XEXP (x, 0))
		    break;

		if (i == n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
	      }
#endif
	}
      /* A reload reg's contents are unknown after a label.  */
      if (GET_CODE (insn) == CODE_LABEL)
	for (i = 0; i < n_spills; i++)
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg.  */
      else if (GET_CODE (insn) == CALL_INSN)
	for (i = 0; i < n_spills; i++)
	  if (call_used_regs[spill_regs[i]])
	    {
	      reg_reloaded_contents[i] = -1;
	      reg_reloaded_insn[i] = 0;
	    }

      /* In case registers overlap, allow certain insns to invalidate
	 particular hard registers.  */

#ifdef INSN_CLOBBERS_REGNO_P
      for (i = 0 ; i < n_spills ; i++)
	if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }
#endif

      insn = next;

#ifdef USE_C_ALLOCA
      /* With the C emulation of alloca, alloca (0) presumably reclaims
	 storage from frames that have been exited -- TODO confirm.  */
      alloca (0);
#endif
    }
}
3898
3899 /* Discard all record of any value reloaded from X,
3900 or reloaded in X from someplace else;
3901 unless X is an output reload reg of the current insn.
3902
3903 X may be a hard reg (the reload reg)
3904 or it may be a pseudo reg that was reloaded from. */
3905
static void
forget_old_reloads_1 (x, ignored)
     rtx x;
     rtx ignored;  /* Unused; present to match the note_stores callback
		      signature (the SET source, which we don't need).  */
{
  register int regno;
  int nr;
  int offset = 0;  /* Word offset accumulated from enclosing SUBREGs.  */

  /* note_stores does give us subregs of hard regs.  */
  while (GET_CODE (x) == SUBREG)
    {
      offset += SUBREG_WORD (x);
      x = SUBREG_REG (x);
    }

  /* Only register stores invalidate reload tracking.  */
  if (GET_CODE (x) != REG)
    return;

  regno = REGNO (x) + offset;

  if (regno >= FIRST_PSEUDO_REGISTER)
    /* A pseudo occupies a single slot in the tracking tables.  */
    nr = 1;
  else
    {
      int i;
      nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
      /* Storing into a spilled-reg invalidates its contents.
	 This can happen if a block-local pseudo is allocated to that reg
	 and it wasn't spilled because this block's total need is 0.
	 Then some insn might have an optional reload and use this reg.  */
      for (i = 0; i < nr; i++)
	if (spill_reg_order[regno + i] >= 0
	    /* But don't do this if the reg actually serves as an output
	       reload reg in the current instruction.  */
	    && (n_reloads == 0
		|| ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
	  {
	    reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
	    reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
	  }
    }

  /* Since value of X has changed,
     forget any value previously copied from it.  */

  while (nr-- > 0)
    /* But don't forget a copy if this is the output reload
       that establishes the copy's validity.  */
    if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
      reg_last_reload_reg[regno + nr] = 0;
}
3958 \f
3959 /* For each reload, the mode of the reload register. */
3960 static enum machine_mode reload_mode[MAX_RELOADS];
3961
3962 /* For each reload, the largest number of registers it will require. */
3963 static int reload_nregs[MAX_RELOADS];
3964
3965 /* Comparison function for qsort to decide which of two reloads
3966 should be handled first. *P1 and *P2 are the reload numbers. */
3967
3968 static int
3969 reload_reg_class_lower (p1, p2)
3970 short *p1, *p2;
3971 {
3972 register int r1 = *p1, r2 = *p2;
3973 register int t;
3974
3975 /* Consider required reloads before optional ones. */
3976 t = reload_optional[r1] - reload_optional[r2];
3977 if (t != 0)
3978 return t;
3979
3980 /* Count all solitary classes before non-solitary ones. */
3981 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3982 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3983 if (t != 0)
3984 return t;
3985
3986 /* Aside from solitaires, consider all multi-reg groups first. */
3987 t = reload_nregs[r2] - reload_nregs[r1];
3988 if (t != 0)
3989 return t;
3990
3991 /* Consider reloads in order of increasing reg-class number. */
3992 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3993 if (t != 0)
3994 return t;
3995
3996 /* If reloads are equally urgent, sort by reload number,
3997 so that the results of qsort leave nothing to chance. */
3998 return r1 - r2;
3999 }
4000 \f
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  */

/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is in use as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;
4029
4030 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4031 TYPE. MODE is used to indicate how many consecutive regs are
4032 actually used. */
4033
4034 static void
4035 mark_reload_reg_in_use (regno, opnum, type, mode)
4036 int regno;
4037 int opnum;
4038 enum reload_type type;
4039 enum machine_mode mode;
4040 {
4041 int nregs = HARD_REGNO_NREGS (regno, mode);
4042 int i;
4043
4044 for (i = regno; i < nregs + regno; i++)
4045 {
4046 switch (type)
4047 {
4048 case RELOAD_OTHER:
4049 SET_HARD_REG_BIT (reload_reg_used, i);
4050 break;
4051
4052 case RELOAD_FOR_INPUT_ADDRESS:
4053 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4054 break;
4055
4056 case RELOAD_FOR_OUTPUT_ADDRESS:
4057 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4058 break;
4059
4060 case RELOAD_FOR_OPERAND_ADDRESS:
4061 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4062 break;
4063
4064 case RELOAD_FOR_OPADDR_ADDR:
4065 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4066 break;
4067
4068 case RELOAD_FOR_OTHER_ADDRESS:
4069 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4070 break;
4071
4072 case RELOAD_FOR_INPUT:
4073 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4074 break;
4075
4076 case RELOAD_FOR_OUTPUT:
4077 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4078 break;
4079
4080 case RELOAD_FOR_INSN:
4081 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4082 break;
4083 }
4084
4085 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4086 }
4087 }
4088
4089 /* Similarly, but show REGNO is no longer in use for a reload. */
4090
4091 static void
4092 clear_reload_reg_in_use (regno, opnum, type, mode)
4093 int regno;
4094 int opnum;
4095 enum reload_type type;
4096 enum machine_mode mode;
4097 {
4098 int nregs = HARD_REGNO_NREGS (regno, mode);
4099 int i;
4100
4101 for (i = regno; i < nregs + regno; i++)
4102 {
4103 switch (type)
4104 {
4105 case RELOAD_OTHER:
4106 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4107 break;
4108
4109 case RELOAD_FOR_INPUT_ADDRESS:
4110 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4111 break;
4112
4113 case RELOAD_FOR_OUTPUT_ADDRESS:
4114 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4115 break;
4116
4117 case RELOAD_FOR_OPERAND_ADDRESS:
4118 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4119 break;
4120
4121 case RELOAD_FOR_OPADDR_ADDR:
4122 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4123 break;
4124
4125 case RELOAD_FOR_OTHER_ADDRESS:
4126 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4127 break;
4128
4129 case RELOAD_FOR_INPUT:
4130 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4131 break;
4132
4133 case RELOAD_FOR_OUTPUT:
4134 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4135 break;
4136
4137 case RELOAD_FOR_INSN:
4138 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4139 break;
4140 }
4141 }
4142 }
4143
4144 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4145 specified by OPNUM and TYPE. */
4146
static int
reload_reg_free_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything except
     RELOAD_FOR_OTHER_ADDRESS.  Recall that RELOAD_FOR_OTHER_ADDRESS is known
     to be used only for inputs.  */

  if (type != RELOAD_FOR_OTHER_ADDRESS
      && TEST_HARD_REG_BIT (reload_reg_used, regno))
    return 0;

  /* Each case below checks only the reload types and operand positions
     that can conflict with TYPE, given reload's fixed ordering of
     reload insns around the reloaded insn.  */
  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything except RELOAD_FOR_OTHER_ADDRESS means
	 we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Conflicts with any input, any RELOAD_FOR_INSN use, and any
	 other operand-address reload.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Conflicts with any input and any other opaddr-address reload.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      /* Conflicts with every input and output, other RELOAD_FOR_INSN
	 uses, and operand-address reloads.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Conflicts only with other RELOAD_FOR_OTHER_ADDRESS reloads.  */
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }
  /* Unknown reload type.  */
  abort ();
}
4271
4272 /* Return 1 if the value in reload reg REGNO, as used by a reload
4273 needed for the part of the insn specified by OPNUM and TYPE,
4274 is not in use for a reload in any prior part of the insn.
4275
4276 We can assume that the reload reg was already tested for availability
4277 at the time it is needed, and we should not check this again,
4278 in case the reg has already been marked in use. */
4279
static int
reload_reg_free_before_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* These always come first.  */
      return 1;

    case RELOAD_OTHER:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

      /* If this use is for part of the insn,
	 check the reg is not in use for any prior part.  It is tempting
	 to try to do this by falling through from objects that occur
	 later in the insn to ones that occur earlier, but that will not
	 correctly take into account the fact that here we MUST ignore
	 things that would prevent the register from being allocated in
	 the first place, since we know that it was allocated.  */

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Earlier reloads are for earlier outputs or their addresses,
	 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
	 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
	 RELOAD_OTHER).  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* NOTE(review): reload_reg_used_in_insn was already tested above,
	 so its repetition here is redundant but harmless.  */
      return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OUTPUT:
      /* This can't be used in the output address for this operand and
	 anything that can't be used for it, except that we've already
	 tested for RELOAD_FOR_INSN objects.  */

      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_OPERAND_ADDRESS:
    case RELOAD_FOR_OPADDR_ADDR:
    case RELOAD_FOR_INSN:
      /* These can't conflict with inputs, or each other, so all we have to
	 test is input addresses and the addresses of OTHER items.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT:
      /* The only things earlier are the address for this and
	 earlier inputs, other inputs (which we know we don't conflict
	 with), and addresses of RELOAD_OTHER objects.  */

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similarly, all we have to check is for use in earlier inputs'
	 addresses.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }
  /* Unknown reload type.  */
  abort ();
}
4382
4383 /* Return 1 if the value in reload reg REGNO, as used by a reload
4384 needed for the part of the insn specified by OPNUM and TYPE,
4385 is still available in REGNO at the end of the insn.
4386
4387 We can assume that the reload reg was already tested for availability
4388 at the time it is needed, and we should not check this again,
4389 in case the reg has already been marked in use. */
4390
static int
reload_reg_reaches_end_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPADDR_ADDR:
      /* Like RELOAD_FOR_OPERAND_ADDRESS, but the reg must also be free of
	 operand-address and RELOAD_FOR_INSN uses to reach the end.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      /* Start the fall-through loop below at operand 0.  */
      opnum = -1;

      /* ... fall through ...  */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
	  return 0;

      return 1;
    }

  /* Unknown reload type.  */
  abort ();
}
4504 \f
4505 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4506 Return 0 otherwise.
4507
4508 This function uses the same algorithm as reload_reg_free_p above. */
4509
4510 static int
4511 reloads_conflict (r1, r2)
4512 int r1, r2;
4513 {
4514 enum reload_type r1_type = reload_when_needed[r1];
4515 enum reload_type r2_type = reload_when_needed[r2];
4516 int r1_opnum = reload_opnum[r1];
4517 int r2_opnum = reload_opnum[r2];
4518
4519 /* RELOAD_OTHER conflicts with everything except RELOAD_FOR_OTHER_ADDRESS. */
4520
4521 if (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS)
4522 return 1;
4523
4524 /* Otherwise, check conflicts differently for each type. */
4525
4526 switch (r1_type)
4527 {
4528 case RELOAD_FOR_INPUT:
4529 return (r2_type == RELOAD_FOR_INSN
4530 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4531 || r2_type == RELOAD_FOR_OPADDR_ADDR
4532 || r2_type == RELOAD_FOR_INPUT
4533 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4534
4535 case RELOAD_FOR_INPUT_ADDRESS:
4536 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4537 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4538
4539 case RELOAD_FOR_OUTPUT_ADDRESS:
4540 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4541 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4542
4543 case RELOAD_FOR_OPERAND_ADDRESS:
4544 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4545 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4546
4547 case RELOAD_FOR_OPADDR_ADDR:
4548 return (r2_type == RELOAD_FOR_INPUT
4549 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4550
4551 case RELOAD_FOR_OUTPUT:
4552 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4553 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4554 && r2_opnum >= r1_opnum));
4555
4556 case RELOAD_FOR_INSN:
4557 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4558 || r2_type == RELOAD_FOR_INSN
4559 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4560
4561 case RELOAD_FOR_OTHER_ADDRESS:
4562 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4563
4564 case RELOAD_OTHER:
4565 return r2_type != RELOAD_FOR_OTHER_ADDRESS;
4566
4567 default:
4568 abort ();
4569 }
4570 }
4571 \f
/* Vector of reload-numbers showing the order in which the reloads should
   be processed.  Filled in and sorted by choose_reload_regs below.  */
short reload_order[MAX_RELOADS];

/* Indexed by reload number, 1 if incoming value
   inherited from previous insns (so no load insn need be emitted).  */
char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
rtx reload_inheritance_insn[MAX_RELOADS];

/* If non-zero, this is a place to get the value of the reload,
   rather than using reload_in.  */
rtx reload_override_in[MAX_RELOADS];

/* For each reload, the index in spill_regs of the spill register used,
   or -1 if we did not need one of the spill registers for this reload.  */
int reload_spill_index[MAX_RELOADS];

/* Index of last register assigned as a spill register.  We allocate in
   a round-robin fashion.  */

static int last_spill_reg = 0;
4596
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is non-zero if this is the last reload for the insn being
   processed.

   Set reload_reg_rtx[R] to the register allocated.

   If NOERROR is nonzero, we return 1 if successful,
   or 0 if we couldn't find a spill reg and we didn't change anything.

   Side effects on success: updates last_spill_reg, spill_reg_rtx[],
   reload_reg_rtx[R], reload_spill_index[R], and marks the chosen regs
   in use via mark_reload_reg_in_use.  On failure with NOERROR == 0,
   either aborts (compiler bug) or disables the reload for a bad asm.  */

static int
allocate_reload_reg (r, insn, last_reload, noerror)
     int r;
     rtx insn;
     int last_reload;
     int noerror;
{
  int i;
  int pass;
  int count;
  rtx new;
  int regno;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = reload_nregs[r] > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use two passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers.
     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 2; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  Don't do this, however, when we have
	 group needs and failure would be fatal; if we only have a relatively
	 small number of spill registers, and more than one of them has
	 group needs, then by starting in the middle, we may end up
	 allocating the first one in such a way that we are not left with
	 sufficient groups to handle the rest.  */

      if (noerror || ! force_group)
	i = last_spill_reg;
      else
	i = -1;

      for (count = 0; count < n_spills; count++)
	{
	  int class = (int) reload_reg_class[r];

	  /* Round-robin step; wraps around the spill_regs vector.  */
	  i = (i + 1) % n_spills;

	  if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
				 reload_when_needed[r])
	      && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
	      && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 spill_regs[i])
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      spill_regs[i]))))
	    {
	      int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.
		 Also, don't use for a group registers that are
		 needed for nongroups.  */
	      if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
		while (nr > 1)
		  {
		    /* Walk the candidate group from its top register down;
		       NR == 1 afterward means every member was acceptable.  */
		    regno = spill_regs[i] + nr - 1;
		    if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
			  && spill_reg_order[regno] >= 0
			  && reload_reg_free_p (regno, reload_opnum[r],
						reload_when_needed[r])
			  && ! TEST_HARD_REG_BIT (counted_for_nongroups,
						  regno)))
		      break;
		    nr--;
		  }
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on pass 1, omit pass 2.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count == n_spills)
    {
      if (noerror)
	return 0;
      goto failure;
    }

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.
     The cached rtx is reused unless its mode differs from reload_mode[r].  */

  new = spill_reg_rtx[i];

  if (new == 0 || GET_MODE (new) != reload_mode[r])
    spill_reg_rtx[i] = new
      = gen_rtx (REG, reload_mode[r], spill_regs[i]);

  regno = true_regnum (new);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
    {
      enum machine_mode test_mode = VOIDmode;
      if (reload_in[r])
	test_mode = GET_MODE (reload_in[r]);
      /* If reload_in[r] has VOIDmode, it means we will load it
	 in whatever mode the reload reg has: to wit, reload_mode[r].
	 We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
	 to reload from or into have modes which are valid for this
	 reload register.  Otherwise the reload insns would be invalid.  */
      if (! (reload_in[r] != 0 && test_mode != VOIDmode
	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
	if (! (reload_out[r] != 0
	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
	  {
	    /* The reg is OK.  Remember where we stopped for the next
	       round-robin start point.  */
	    last_spill_reg = i;

	    /* Mark as in use for this insn the reload regs we use
	       for this.  */
	    mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
				    reload_when_needed[r], reload_mode[r]);

	    reload_reg_rtx[r] = new;
	    reload_spill_index[r] = i;
	    return 1;
	  }
    }

  /* The reg is not OK.  */
  if (noerror)
    return 0;

 failure:
  if (asm_noperands (PATTERN (insn)) < 0)
    /* It's the compiler's fault.  */
    abort ();

  /* It's the user's fault; the operand's mode and constraint
     don't match.  Disable this reload so we don't crash in final.
     Note we still return 1 here: the reload is "handled" by being
     turned off, so the caller need not retry.  */
  error_for_asm (insn,
		 "`asm' operand constraint incompatible with operand size");
  reload_in[r] = 0;
  reload_out[r] = 0;
  reload_reg_rtx[r] = 0;
  reload_optional[r] = 1;
  reload_secondary_p[r] = 1;

  return 1;
}
4795 \f
/* Assign hard reg targets for the pseudo-registers we must reload
   into hard regs for this insn.
   Also output the instructions to copy them in and out of the hard regs.

   For machines with register classes, we are responsible for
   finding a reload reg in the proper class.

   Strategy: all reload-reg bookkeeping state is saved up front; we then
   try allocation with inheritance (reusing values left in spill regs by
   earlier insns).  If that fails, the saved state is restored and we
   retry once with inheritance disabled.  */

static void
choose_reload_regs (insn, avoid_return_reg)
     rtx insn;
     rtx avoid_return_reg;
{
  register int i, j;
  int max_group_size = 1;
  enum reg_class group_class = NO_REGS;
  int inheritance;

  /* Saved copies of all the reload bookkeeping, so a failed attempt
     with inheritance can be undone (see the `fail:' path below).  */
  rtx save_reload_reg_rtx[MAX_RELOADS];
  char save_reload_inherited[MAX_RELOADS];
  rtx save_reload_inheritance_insn[MAX_RELOADS];
  rtx save_reload_override_in[MAX_RELOADS];
  int save_reload_spill_index[MAX_RELOADS];
  HARD_REG_SET save_reload_reg_used;
  HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_op_addr;
  HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
  HARD_REG_SET save_reload_reg_used_in_insn;
  HARD_REG_SET save_reload_reg_used_in_other_addr;
  HARD_REG_SET save_reload_reg_used_at_all;

  /* Start with a clean slate for this insn.  */
  bzero (reload_inherited, MAX_RELOADS);
  bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
  bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));

  CLEAR_HARD_REG_SET (reload_reg_used);
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);

  for (i = 0; i < reload_n_operands; i++)
    {
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
    }

#ifdef SMALL_REGISTER_CLASSES
  /* Don't bother with avoiding the return reg
     if we have no mandatory reload that could use it.  */
  if (avoid_return_reg)
    {
      int do_avoid = 0;
      int regno = REGNO (avoid_return_reg);
      int nregs
	= HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
      int r;

      for (r = regno; r < regno + nregs; r++)
	if (spill_reg_order[r] >= 0)
	  for (j = 0; j < n_reloads; j++)
	    if (!reload_optional[j] && reload_reg_rtx[j] == 0
		&& (reload_in[j] != 0 || reload_out[j] != 0
		    || reload_secondary_p[j])
		&&
		TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
	      do_avoid = 1;
      if (!do_avoid)
	avoid_return_reg = 0;
    }
#endif /* SMALL_REGISTER_CLASSES */

#if 0  /* Not needed, now that we can always retry without inheritance.  */
  /* See if we have more mandatory reloads than spill regs.
     If so, then we cannot risk optimizations that could prevent
     reloads from sharing one spill register.

     Since we will try finding a better register than reload_reg_rtx
     unless it is equal to reload_in or reload_out, count such reloads.

     NOTE(review): this dead block declares `tem' twice under
     SMALL_REGISTER_CLASSES; it would not compile if re-enabled as is.  */

  {
    int tem = 0;
#ifdef SMALL_REGISTER_CLASSES
    int tem = (avoid_return_reg != 0);
#endif
    for (j = 0; j < n_reloads; j++)
      if (! reload_optional[j]
	  && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
	  && (reload_reg_rtx[j] == 0
	      || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
		  && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
	tem++;
    if (tem > n_spills)
      must_reuse = 1;
  }
#endif

#ifdef SMALL_REGISTER_CLASSES
  /* Don't use the subroutine call return reg for a reload
     if we are supposed to avoid it.  */
  if (avoid_return_reg)
    {
      int regno = REGNO (avoid_return_reg);
      int nregs
	= HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
      int r;

      for (r = regno; r < regno + nregs; r++)
	if (spill_reg_order[r] >= 0)
	  SET_HARD_REG_BIT (reload_reg_used, r);
    }
#endif /* SMALL_REGISTER_CLASSES */

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.

     Also note whether any of the reloads wants a consecutive group of regs.
     If so, record the maximum size of the group desired and what
     register class contains all the groups needed by this insn.  */

  for (j = 0; j < n_reloads; j++)
    {
      reload_order[j] = j;
      reload_spill_index[j] = -1;

      /* The reload reg's mode is the wider of the in and out modes.  */
      reload_mode[j]
	= (reload_inmode[j] == VOIDmode
	   || (GET_MODE_SIZE (reload_outmode[j])
	       > GET_MODE_SIZE (reload_inmode[j])))
	  ? reload_outmode[j] : reload_inmode[j];

      reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);

      if (reload_nregs[j] > 1)
	{
	  max_group_size = MAX (reload_nregs[j], max_group_size);
	  group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
	}

      /* If we have already decided to use a certain register,
	 don't use it in another way.  */
      if (reload_reg_rtx[j])
	mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
				reload_when_needed[j], reload_mode[j]);
    }

  if (n_reloads > 1)
    qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  /* Snapshot all bookkeeping so a failed inheritance attempt can be
     rolled back at the `fail:' label below.  */
  bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
	 sizeof reload_reg_rtx);
  bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
  bcopy ((char *) reload_inheritance_insn,
	 (char *) save_reload_inheritance_insn,
	 sizeof reload_inheritance_insn);
  bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
	 sizeof reload_override_in);
  bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
	 sizeof reload_spill_index);
  COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
  COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
  COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
		     reload_reg_used_in_op_addr);

  COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
		     reload_reg_used_in_op_addr_reload);

  COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
		     reload_reg_used_in_insn);
  COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
		     reload_reg_used_in_other_addr);

  for (i = 0; i < reload_n_operands; i++)
    {
      COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
			 reload_reg_used_in_output[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
			 reload_reg_used_in_input[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
			 reload_reg_used_in_input_addr[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
			 reload_reg_used_in_output_addr[i]);
    }

  /* If -O, try first with inheritance, then turning it off.
     If not -O, don't do inheritance.
     Using inheritance when not optimizing leads to paradoxes
     with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
     because one side of the comparison might be inherited.  */

  for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
    {
      /* Process the reloads in order of preference just found.
	 Beyond this point, subregs can be found in reload_reg_rtx.

	 This used to look for an existing reloaded home for all
	 of the reloads, and only then perform any new reloads.
	 But that could lose if the reloads were done out of reg-class order
	 because a later reload with a looser constraint might have an old
	 home in a register needed by an earlier reload with a tighter constraint.

	 To solve this, we make two passes over the reloads, in the order
	 described above.  In the first pass we try to inherit a reload
	 from a previous insn.  If there is a later reload that needs a
	 class that is a proper subset of the class being processed, we must
	 also allocate a spill register during the first pass.

	 Then make a second pass over the reloads to allocate any reloads
	 that haven't been given registers yet.  */

      CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);

      for (j = 0; j < n_reloads; j++)
	{
	  register int r = reload_order[j];

	  /* Ignore reloads that got marked inoperative.  */
	  if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
	    continue;

	  /* If find_reloads chose to use reload_in or reload_out as a reload
	     register, we don't need to choose one.  Otherwise, try even if it found
	     one since we might save an insn if we find the value lying around.  */
	  if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
	      && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
		  || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
	    continue;

#if 0 /* No longer needed for correct operation.
	 It might give better code, or might not; worth an experiment?  */
	  /* If this is an optional reload, we can't inherit from earlier insns
	     until we are sure that any non-optional reloads have been allocated.
	     The following code takes advantage of the fact that optional reloads
	     are at the end of reload_order.  */
	  if (reload_optional[r] != 0)
	    for (i = 0; i < j; i++)
	      if ((reload_out[reload_order[i]] != 0
		   || reload_in[reload_order[i]] != 0
		   || reload_secondary_p[reload_order[i]])
		  && ! reload_optional[reload_order[i]]
		  && reload_reg_rtx[reload_order[i]] == 0)
		allocate_reload_reg (reload_order[i], insn, 0, inheritance);
#endif

	  /* First see if this pseudo is already available as reloaded
	     for a previous insn.  We cannot try to inherit for reloads
	     that are smaller than the maximum number of registers needed
	     for groups unless the register we would allocate cannot be used
	     for the groups.

	     We could check here to see if this is a secondary reload for
	     an object that is already in a register of the desired class.
	     This would avoid the need for the secondary reload register.
	     But this is complex because we can't easily determine what
	     objects might want to be loaded via this reload.  So let a register
	     be allocated here.  In `emit_reload_insns' we suppress one of the
	     loads in the case described above.  */

	  if (inheritance)
	    {
	      register int regno = -1;
	      enum machine_mode mode;

	      /* MODE is only read below when REGNO >= 0, and both branches
		 that set REGNO also set MODE.  */
	      if (reload_in[r] == 0)
		;
	      else if (GET_CODE (reload_in[r]) == REG)
		{
		  regno = REGNO (reload_in[r]);
		  mode = GET_MODE (reload_in[r]);
		}
	      else if (GET_CODE (reload_in_reg[r]) == REG)
		{
		  regno = REGNO (reload_in_reg[r]);
		  mode = GET_MODE (reload_in_reg[r]);
		}
#if 0
	      /* This won't work, since REGNO can be a pseudo reg number.
		 Also, it takes much more hair to keep track of all the things
		 that can invalidate an inherited reload of part of a pseudoreg.  */
	      else if (GET_CODE (reload_in[r]) == SUBREG
		       && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
		regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
#endif

	      if (regno >= 0 && reg_last_reload_reg[regno] != 0)
		{
		  i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];

		  /* The spill reg must still hold REGNO's value, be wide
		     enough, be in the right class, not be wanted by a
		     group, and be free at the point this reload needs it.  */
		  if (reg_reloaded_contents[i] == regno
		      && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
			  >= GET_MODE_SIZE (mode))
		      && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
		      && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
					    spill_regs[i])
		      && (reload_nregs[r] == max_group_size
			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
						  spill_regs[i]))
		      && reload_reg_free_p (spill_regs[i], reload_opnum[r],
					    reload_when_needed[r])
		      && reload_reg_free_before_p (spill_regs[i],
						   reload_opnum[r],
						   reload_when_needed[r]))
		    {
		      /* If a group is needed, verify that all the subsequent
			 registers still have their values intact.  */
		      int nr
			= HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
		      int k;

		      for (k = 1; k < nr; k++)
			if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
			    != regno)
			  break;

		      if (k == nr)
			{
			  int i1;

			  /* We found a register that contains the
			     value we need.  If this register is the
			     same as an `earlyclobber' operand of the
			     current insn, just mark it as a place to
			     reload from since we can't use it as the
			     reload register itself.  */

			  for (i1 = 0; i1 < n_earlyclobbers; i1++)
			    if (reg_overlap_mentioned_for_reload_p
				(reg_last_reload_reg[regno],
				 reload_earlyclobbers[i1]))
			      break;

			  if (i1 != n_earlyclobbers
			      /* Don't really use the inherited spill reg
				 if we need it wider than we've got it.  */
			      || (GET_MODE_SIZE (reload_mode[r])
				  > GET_MODE_SIZE (mode)))
			    reload_override_in[r] = reg_last_reload_reg[regno];
			  else
			    {
			      int k;
			      /* We can use this as a reload reg.  */
			      /* Mark the register as in use for this part of
				 the insn.  */
			      mark_reload_reg_in_use (spill_regs[i],
						      reload_opnum[r],
						      reload_when_needed[r],
						      reload_mode[r]);
			      reload_reg_rtx[r] = reg_last_reload_reg[regno];
			      reload_inherited[r] = 1;
			      reload_inheritance_insn[r]
				= reg_reloaded_insn[i];
			      reload_spill_index[r] = i;
			      for (k = 0; k < nr; k++)
				SET_HARD_REG_BIT (reload_reg_used_for_inherit,
						  spill_regs[i + k]);
			    }
			}
		    }
		}
	    }

	  /* Here's another way to see if the value is already lying around.  */
	  if (inheritance
	      && reload_in[r] != 0
	      && ! reload_inherited[r]
	      && reload_out[r] == 0
	      && (CONSTANT_P (reload_in[r])
		  || GET_CODE (reload_in[r]) == PLUS
		  || GET_CODE (reload_in[r]) == REG
		  || GET_CODE (reload_in[r]) == MEM)
	      && (reload_nregs[r] == max_group_size
		  || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
	    {
	      register rtx equiv
		= find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
				  -1, NULL_PTR, 0, reload_mode[r]);
	      int regno;

	      if (equiv != 0)
		{
		  if (GET_CODE (equiv) == REG)
		    regno = REGNO (equiv);
		  else if (GET_CODE (equiv) == SUBREG)
		    {
		      /* This must be a SUBREG of a hard register.
			 Make a new REG since this might be used in an
			 address and not all machines support SUBREGs
			 there.  */
		      regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
		      equiv = gen_rtx (REG, reload_mode[r], regno);
		    }
		  else
		    abort ();
		}

	      /* If we found a spill reg, reject it unless it is free
		 and of the desired class.  */
	      if (equiv != 0
		  && ((spill_reg_order[regno] >= 0
		       && ! reload_reg_free_before_p (regno, reload_opnum[r],
						      reload_when_needed[r]))
		      || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
					      regno)))
		equiv = 0;

	      if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
		equiv = 0;

	      if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
		equiv = 0;

	      /* We found a register that contains the value we need.
		 If this register is the same as an `earlyclobber' operand
		 of the current insn, just mark it as a place to reload from
		 since we can't use it as the reload register itself.  */

	      if (equiv != 0)
		for (i = 0; i < n_earlyclobbers; i++)
		  if (reg_overlap_mentioned_for_reload_p (equiv,
							  reload_earlyclobbers[i]))
		    {
		      reload_override_in[r] = equiv;
		      equiv = 0;
		      break;
		    }

	      /* JRV: If the equiv register we have found is explicitly
		 clobbered in the current insn, mark but don't use, as above.  */

	      if (equiv != 0 && regno_clobbered_p (regno, insn))
		{
		  reload_override_in[r] = equiv;
		  equiv = 0;
		}

	      /* If we found an equivalent reg, say no code need be generated
		 to load it, and use it as our reload reg.  */
	      if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
		{
		  reload_reg_rtx[r] = equiv;
		  reload_inherited[r] = 1;
		  /* If it is a spill reg,
		     mark the spill reg as in use for this insn.  */
		  i = spill_reg_order[regno];
		  if (i >= 0)
		    {
		      int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
		      int k;
		      mark_reload_reg_in_use (regno, reload_opnum[r],
					      reload_when_needed[r],
					      reload_mode[r]);
		      for (k = 0; k < nr; k++)
			SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
		    }
		}
	    }

	  /* If we found a register to use already, or if this is an optional
	     reload, we are done.  */
	  if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
	    continue;

#if 0 /* No longer needed for correct operation.  Might or might not
	 give better code on the average.  Want to experiment?  */

	  /* See if there is a later reload that has a class different from our
	     class that intersects our class or that requires less register
	     than our reload.  If so, we must allocate a register to this
	     reload now, since that reload might inherit a previous reload
	     and take the only available register in our class.  Don't do this
	     for optional reloads since they will force all previous reloads
	     to be allocated.  Also don't do this for reloads that have been
	     turned off.  */

	  for (i = j + 1; i < n_reloads; i++)
	    {
	      int s = reload_order[i];

	      if ((reload_in[s] == 0 && reload_out[s] == 0
		   && ! reload_secondary_p[s])
		  || reload_optional[s])
		continue;

	      if ((reload_reg_class[s] != reload_reg_class[r]
		   && reg_classes_intersect_p (reload_reg_class[r],
					       reload_reg_class[s]))
		  || reload_nregs[s] < reload_nregs[r])
		break;
	    }

	  if (i == n_reloads)
	    continue;

	  allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
#endif
	}

      /* Now allocate reload registers for anything non-optional that
	 didn't get one yet.  */
      for (j = 0; j < n_reloads; j++)
	{
	  register int r = reload_order[j];

	  /* Ignore reloads that got marked inoperative.  */
	  if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
	    continue;

	  /* Skip reloads that already have a register allocated or are
	     optional.  */
	  if (reload_reg_rtx[r] != 0 || reload_optional[r])
	    continue;

	  if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
	    break;
	}

      /* If that loop got all the way, we have won.  */
      if (j == n_reloads)
	break;

      /* Allocation failed (we fall through to here when the loop above
	 broke out early); undo and retry without inheritance.  */
    fail:
      /* Loop around and try without any inheritance.  */
      /* First undo everything done by the failed attempt
	 to allocate with inheritance.  */
      bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
	     sizeof reload_reg_rtx);
      bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
	     sizeof reload_inherited);
      bcopy ((char *) save_reload_inheritance_insn,
	     (char *) reload_inheritance_insn,
	     sizeof reload_inheritance_insn);
      bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
	     sizeof reload_override_in);
      bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
	     sizeof reload_spill_index);
      COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
      COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
      COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
			 save_reload_reg_used_in_op_addr);
      COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
			 save_reload_reg_used_in_op_addr_reload);
      COPY_HARD_REG_SET (reload_reg_used_in_insn,
			 save_reload_reg_used_in_insn);
      COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
			 save_reload_reg_used_in_other_addr);

      for (i = 0; i < reload_n_operands; i++)
	{
	  COPY_HARD_REG_SET (reload_reg_used_in_input[i],
			     save_reload_reg_used_in_input[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_output[i],
			     save_reload_reg_used_in_output[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
			     save_reload_reg_used_in_input_addr[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
			     save_reload_reg_used_in_output_addr[i]);
	}
    }

  /* If we thought we could inherit a reload, because it seemed that
     nothing else wanted the same reload register earlier in the insn,
     verify that assumption, now that all reloads have been assigned.  */

  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];

      if (reload_inherited[r] && reload_reg_rtx[r] != 0
	  && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
					 reload_opnum[r],
					 reload_when_needed[r]))
	reload_inherited[r] = 0;

      /* If we found a better place to reload from,
	 validate it in the same fashion, if it is a reload reg.  */
      if (reload_override_in[r]
	  && (GET_CODE (reload_override_in[r]) == REG
	      || GET_CODE (reload_override_in[r]) == SUBREG))
	{
	  int regno = true_regnum (reload_override_in[r]);
	  if (spill_reg_order[regno] >= 0
	      && ! reload_reg_free_before_p (regno, reload_opnum[r],
					     reload_when_needed[r]))
	    reload_override_in[r] = 0;
	}
    }

  /* Now that reload_override_in is known valid,
     actually override reload_in.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_override_in[j])
      reload_in[j] = reload_override_in[j];

  /* If this reload won't be done because it has been cancelled or is
     optional and not inherited, clear reload_reg_rtx so other
     routines (such as subst_reloads) don't get confused.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_reg_rtx[j] != 0
	&& ((reload_optional[j] && ! reload_inherited[j])
	    || (reload_in[j] == 0 && reload_out[j] == 0
		&& ! reload_secondary_p[j])))
      {
	int regno = true_regnum (reload_reg_rtx[j]);

	if (spill_reg_order[regno] >= 0)
	  clear_reload_reg_in_use (regno, reload_opnum[j],
				   reload_when_needed[j], reload_mode[j]);
	reload_reg_rtx[j] = 0;
      }

  /* Record which pseudos and which spill regs have output reloads.  */
  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];

      i = reload_spill_index[r];

      /* I is nonneg if this reload used one of the spill regs.
	 If reload_reg_rtx[r] is 0, this is an optional reload
	 that we opted to ignore.  */
      if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
	  && reload_reg_rtx[r] != 0)
	{
	  register int nregno = REGNO (reload_out[r]);
	  int nr = 1;

	  if (nregno < FIRST_PSEUDO_REGISTER)
	    nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);

	  while (--nr >= 0)
	    reg_has_output_reload[nregno + nr] = 1;

	  if (i >= 0)
	    {
	      nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
	      while (--nr >= 0)
		SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
	    }

	  /* Sanity check: an output reload must be one of these types.  */
	  if (reload_when_needed[r] != RELOAD_OTHER
	      && reload_when_needed[r] != RELOAD_FOR_OUTPUT
	      && reload_when_needed[r] != RELOAD_FOR_INSN)
	    abort ();
	}
    }
}
5449 \f
5450 /* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5451 reloads of the same item for fear that we might not have enough reload
5452 registers. However, normally they will get the same reload register
5453 and hence actually need not be loaded twice.
5454
5455 Here we check for the most common case of this phenomenon: when we have
5456 a number of reloads for the same object, each of which were allocated
5457 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5458 reload, and is not modified in the insn itself. If we find such,
5459 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5460 This will not increase the number of spill registers needed and will
5461 prevent redundant code. */
5462
5463 #ifdef SMALL_REGISTER_CLASSES
5464
5465 static void
5466 merge_assigned_reloads (insn)
5467 rtx insn;
5468 {
5469 int i, j;
5470
5471 /* Scan all the reloads looking for ones that only load values and
5472 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5473 assigned and not modified by INSN. */
5474
5475 for (i = 0; i < n_reloads; i++)
5476 {
5477 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5478 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5479 || reg_set_p (reload_reg_rtx[i], insn))
5480 continue;
5481
5482 /* Look at all other reloads. Ensure that the only use of this
5483 reload_reg_rtx is in a reload that just loads the same value
5484 as we do. Note that any secondary reloads must be of the identical
5485 class since the values, modes, and result registers are the
5486 same, so we need not do anything with any secondary reloads. */
5487
5488 for (j = 0; j < n_reloads; j++)
5489 {
5490 if (i == j || reload_reg_rtx[j] == 0
5491 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5492 reload_reg_rtx[i]))
5493 continue;
5494
5495 /* If the reload regs aren't exactly the same (e.g, different modes)
5496 or if the values are different, we can't merge anything with this
5497 reload register. */
5498
5499 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5500 || reload_out[j] != 0 || reload_in[j] == 0
5501 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5502 break;
5503 }
5504
5505 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5506 we, in fact, found any matching reloads. */
5507
5508 if (j == n_reloads)
5509 {
5510 for (j = 0; j < n_reloads; j++)
5511 if (i != j && reload_reg_rtx[j] != 0
5512 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5513 {
5514 reload_when_needed[i] = RELOAD_OTHER;
5515 reload_in[j] = 0;
5516 transfer_replacements (i, j);
5517 }
5518
5519 /* If this is now RELOAD_OTHER, look for any reloads that load
5520 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5521 if they were for inputs, RELOAD_OTHER for outputs. Note that
5522 this test is equivalent to looking for reloads for this operand
5523 number. */
5524
5525 if (reload_when_needed[i] == RELOAD_OTHER)
5526 for (j = 0; j < n_reloads; j++)
5527 if (reload_in[j] != 0
5528 && reload_when_needed[i] != RELOAD_OTHER
5529 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5530 reload_in[i]))
5531 reload_when_needed[j]
5532 = reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5533 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5534 }
5535 }
5536 }
#endif /* SMALL_REGISTER_CLASSES */
5538 \f
5539 /* Output insns to reload values in and out of the chosen reload regs. */
5540
5541 static void
5542 emit_reload_insns (insn)
5543 rtx insn;
5544 {
5545 register int j;
5546 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5547 rtx other_input_address_reload_insns = 0;
5548 rtx other_input_reload_insns = 0;
5549 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5550 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5551 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5552 rtx operand_reload_insns = 0;
5553 rtx other_operand_reload_insns = 0;
5554 rtx following_insn = NEXT_INSN (insn);
5555 rtx before_insn = insn;
5556 int special;
5557 /* Values to be put in spill_reg_store are put here first. */
5558 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5559
5560 for (j = 0; j < reload_n_operands; j++)
5561 input_reload_insns[j] = input_address_reload_insns[j]
5562 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5563
5564 /* Now output the instructions to copy the data into and out of the
5565 reload registers. Do these in the order that the reloads were reported,
5566 since reloads of base and index registers precede reloads of operands
5567 and the operands may need the base and index registers reloaded. */
5568
5569 for (j = 0; j < n_reloads; j++)
5570 {
5571 register rtx old;
5572 rtx oldequiv_reg = 0;
5573 rtx store_insn = 0;
5574
5575 old = reload_in[j];
5576 if (old != 0 && ! reload_inherited[j]
5577 && ! rtx_equal_p (reload_reg_rtx[j], old)
5578 && reload_reg_rtx[j] != 0)
5579 {
5580 register rtx reloadreg = reload_reg_rtx[j];
5581 rtx oldequiv = 0;
5582 enum machine_mode mode;
5583 rtx *where;
5584
5585 /* Determine the mode to reload in.
5586 This is very tricky because we have three to choose from.
5587 There is the mode the insn operand wants (reload_inmode[J]).
5588 There is the mode of the reload register RELOADREG.
5589 There is the intrinsic mode of the operand, which we could find
5590 by stripping some SUBREGs.
5591 It turns out that RELOADREG's mode is irrelevant:
5592 we can change that arbitrarily.
5593
5594 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5595 then the reload reg may not support QImode moves, so use SImode.
5596 If foo is in memory due to spilling a pseudo reg, this is safe,
5597 because the QImode value is in the least significant part of a
5598 slot big enough for a SImode. If foo is some other sort of
5599 memory reference, then it is impossible to reload this case,
5600 so previous passes had better make sure this never happens.
5601
5602 Then consider a one-word union which has SImode and one of its
5603 members is a float, being fetched as (SUBREG:SF union:SI).
5604 We must fetch that as SFmode because we could be loading into
5605 a float-only register. In this case OLD's mode is correct.
5606
5607 Consider an immediate integer: it has VOIDmode. Here we need
5608 to get a mode from something else.
5609
5610 In some cases, there is a fourth mode, the operand's
5611 containing mode. If the insn specifies a containing mode for
5612 this operand, it overrides all others.
5613
5614 I am not sure whether the algorithm here is always right,
5615 but it does the right things in those cases. */
5616
5617 mode = GET_MODE (old);
5618 if (mode == VOIDmode)
5619 mode = reload_inmode[j];
5620
5621 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5622 /* If we need a secondary register for this operation, see if
5623 the value is already in a register in that class. Don't
5624 do this if the secondary register will be used as a scratch
5625 register. */
5626
5627 if (reload_secondary_in_reload[j] >= 0
5628 && reload_secondary_in_icode[j] == CODE_FOR_nothing
5629 && optimize)
5630 oldequiv
5631 = find_equiv_reg (old, insn,
5632 reload_reg_class[reload_secondary_in_reload[j]],
5633 -1, NULL_PTR, 0, mode);
5634 #endif
5635
5636 /* If reloading from memory, see if there is a register
5637 that already holds the same value. If so, reload from there.
5638 We can pass 0 as the reload_reg_p argument because
5639 any other reload has either already been emitted,
5640 in which case find_equiv_reg will see the reload-insn,
5641 or has yet to be emitted, in which case it doesn't matter
5642 because we will use this equiv reg right away. */
5643
5644 if (oldequiv == 0 && optimize
5645 && (GET_CODE (old) == MEM
5646 || (GET_CODE (old) == REG
5647 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5648 && reg_renumber[REGNO (old)] < 0)))
5649 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5650 -1, NULL_PTR, 0, mode);
5651
5652 if (oldequiv)
5653 {
5654 int regno = true_regnum (oldequiv);
5655
5656 /* If OLDEQUIV is a spill register, don't use it for this
5657 if any other reload needs it at an earlier stage of this insn
5658 or at this stage. */
5659 if (spill_reg_order[regno] >= 0
5660 && (! reload_reg_free_p (regno, reload_opnum[j],
5661 reload_when_needed[j])
5662 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5663 reload_when_needed[j])))
5664 oldequiv = 0;
5665
5666 /* If OLDEQUIV is not a spill register,
5667 don't use it if any other reload wants it. */
5668 if (spill_reg_order[regno] < 0)
5669 {
5670 int k;
5671 for (k = 0; k < n_reloads; k++)
5672 if (reload_reg_rtx[k] != 0 && k != j
5673 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5674 oldequiv))
5675 {
5676 oldequiv = 0;
5677 break;
5678 }
5679 }
5680
5681 /* If it is no cheaper to copy from OLDEQUIV into the
5682 reload register than it would be to move from memory,
5683 don't use it. Likewise, if we need a secondary register
5684 or memory. */
5685
5686 if (oldequiv != 0
5687 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5688 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5689 reload_reg_class[j])
5690 >= MEMORY_MOVE_COST (mode)))
5691 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5692 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5693 mode, oldequiv)
5694 != NO_REGS)
5695 #endif
5696 #ifdef SECONDARY_MEMORY_NEEDED
5697 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5698 REGNO_REG_CLASS (regno),
5699 mode)
5700 #endif
5701 ))
5702 oldequiv = 0;
5703 }
5704
5705 if (oldequiv == 0)
5706 oldequiv = old;
5707 else if (GET_CODE (oldequiv) == REG)
5708 oldequiv_reg = oldequiv;
5709 else if (GET_CODE (oldequiv) == SUBREG)
5710 oldequiv_reg = SUBREG_REG (oldequiv);
5711
5712 /* If we are reloading from a register that was recently stored in
5713 with an output-reload, see if we can prove there was
5714 actually no need to store the old value in it. */
5715
5716 if (optimize && GET_CODE (oldequiv) == REG
5717 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5718 && spill_reg_order[REGNO (oldequiv)] >= 0
5719 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
5720 && find_reg_note (insn, REG_DEAD, reload_in[j])
5721 /* This is unsafe if operand occurs more than once in current
5722 insn. Perhaps some occurrences weren't reloaded. */
5723 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5724 delete_output_reload
5725 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5726
5727 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5728 then load RELOADREG from OLDEQUIV. Note that we cannot use
5729 gen_lowpart_common since it can do the wrong thing when
5730 RELOADREG has a multi-word mode. Note that RELOADREG
5731 must always be a REG here. */
5732
5733 if (GET_MODE (reloadreg) != mode)
5734 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5735 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5736 oldequiv = SUBREG_REG (oldequiv);
5737 if (GET_MODE (oldequiv) != VOIDmode
5738 && mode != GET_MODE (oldequiv))
5739 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5740
5741 /* Switch to the right place to emit the reload insns. */
5742 switch (reload_when_needed[j])
5743 {
5744 case RELOAD_OTHER:
5745 where = &other_input_reload_insns;
5746 break;
5747 case RELOAD_FOR_INPUT:
5748 where = &input_reload_insns[reload_opnum[j]];
5749 break;
5750 case RELOAD_FOR_INPUT_ADDRESS:
5751 where = &input_address_reload_insns[reload_opnum[j]];
5752 break;
5753 case RELOAD_FOR_OUTPUT_ADDRESS:
5754 where = &output_address_reload_insns[reload_opnum[j]];
5755 break;
5756 case RELOAD_FOR_OPERAND_ADDRESS:
5757 where = &operand_reload_insns;
5758 break;
5759 case RELOAD_FOR_OPADDR_ADDR:
5760 where = &other_operand_reload_insns;
5761 break;
5762 case RELOAD_FOR_OTHER_ADDRESS:
5763 where = &other_input_address_reload_insns;
5764 break;
5765 default:
5766 abort ();
5767 }
5768
5769 push_to_sequence (*where);
5770 special = 0;
5771
5772 /* Auto-increment addresses must be reloaded in a special way. */
5773 if (GET_CODE (oldequiv) == POST_INC
5774 || GET_CODE (oldequiv) == POST_DEC
5775 || GET_CODE (oldequiv) == PRE_INC
5776 || GET_CODE (oldequiv) == PRE_DEC)
5777 {
5778 /* We are not going to bother supporting the case where a
5779 incremented register can't be copied directly from
5780 OLDEQUIV since this seems highly unlikely. */
5781 if (reload_secondary_in_reload[j] >= 0)
5782 abort ();
5783 /* Prevent normal processing of this reload. */
5784 special = 1;
5785 /* Output a special code sequence for this case. */
5786 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5787 }
5788
5789 /* If we are reloading a pseudo-register that was set by the previous
5790 insn, see if we can get rid of that pseudo-register entirely
5791 by redirecting the previous insn into our reload register. */
5792
5793 else if (optimize && GET_CODE (old) == REG
5794 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5795 && dead_or_set_p (insn, old)
5796 /* This is unsafe if some other reload
5797 uses the same reg first. */
5798 && reload_reg_free_before_p (REGNO (reloadreg),
5799 reload_opnum[j],
5800 reload_when_needed[j]))
5801 {
5802 rtx temp = PREV_INSN (insn);
5803 while (temp && GET_CODE (temp) == NOTE)
5804 temp = PREV_INSN (temp);
5805 if (temp
5806 && GET_CODE (temp) == INSN
5807 && GET_CODE (PATTERN (temp)) == SET
5808 && SET_DEST (PATTERN (temp)) == old
5809 /* Make sure we can access insn_operand_constraint. */
5810 && asm_noperands (PATTERN (temp)) < 0
5811 /* This is unsafe if prev insn rejects our reload reg. */
5812 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5813 reloadreg)
5814 /* This is unsafe if operand occurs more than once in current
5815 insn. Perhaps some occurrences aren't reloaded. */
5816 && count_occurrences (PATTERN (insn), old) == 1
5817 /* Don't risk splitting a matching pair of operands. */
5818 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5819 {
5820 /* Store into the reload register instead of the pseudo. */
5821 SET_DEST (PATTERN (temp)) = reloadreg;
5822 /* If these are the only uses of the pseudo reg,
5823 pretend for GDB it lives in the reload reg we used. */
5824 if (reg_n_deaths[REGNO (old)] == 1
5825 && reg_n_sets[REGNO (old)] == 1)
5826 {
5827 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5828 alter_reg (REGNO (old), -1);
5829 }
5830 special = 1;
5831 }
5832 }
5833
5834 /* We can't do that, so output an insn to load RELOADREG. */
5835
5836 if (! special)
5837 {
5838 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5839 rtx second_reload_reg = 0;
5840 enum insn_code icode;
5841
5842 /* If we have a secondary reload, pick up the secondary register
5843 and icode, if any. If OLDEQUIV and OLD are different or
5844 if this is an in-out reload, recompute whether or not we
5845 still need a secondary register and what the icode should
5846 be. If we still need a secondary register and the class or
5847 icode is different, go back to reloading from OLD if using
5848 OLDEQUIV means that we got the wrong type of register. We
5849 cannot have different class or icode due to an in-out reload
5850 because we don't make such reloads when both the input and
5851 output need secondary reload registers. */
5852
5853 if (reload_secondary_in_reload[j] >= 0)
5854 {
5855 int secondary_reload = reload_secondary_in_reload[j];
5856 rtx real_oldequiv = oldequiv;
5857 rtx real_old = old;
5858
5859 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5860 and similarly for OLD.
5861 See comments in get_secondary_reload in reload.c. */
5862 if (GET_CODE (oldequiv) == REG
5863 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5864 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5865 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5866
5867 if (GET_CODE (old) == REG
5868 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5869 && reg_equiv_mem[REGNO (old)] != 0)
5870 real_old = reg_equiv_mem[REGNO (old)];
5871
5872 second_reload_reg = reload_reg_rtx[secondary_reload];
5873 icode = reload_secondary_in_icode[j];
5874
5875 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5876 || (reload_in[j] != 0 && reload_out[j] != 0))
5877 {
5878 enum reg_class new_class
5879 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5880 mode, real_oldequiv);
5881
5882 if (new_class == NO_REGS)
5883 second_reload_reg = 0;
5884 else
5885 {
5886 enum insn_code new_icode;
5887 enum machine_mode new_mode;
5888
5889 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5890 REGNO (second_reload_reg)))
5891 oldequiv = old, real_oldequiv = real_old;
5892 else
5893 {
5894 new_icode = reload_in_optab[(int) mode];
5895 if (new_icode != CODE_FOR_nothing
5896 && ((insn_operand_predicate[(int) new_icode][0]
5897 && ! ((*insn_operand_predicate[(int) new_icode][0])
5898 (reloadreg, mode)))
5899 || (insn_operand_predicate[(int) new_icode][1]
5900 && ! ((*insn_operand_predicate[(int) new_icode][1])
5901 (real_oldequiv, mode)))))
5902 new_icode = CODE_FOR_nothing;
5903
5904 if (new_icode == CODE_FOR_nothing)
5905 new_mode = mode;
5906 else
5907 new_mode = insn_operand_mode[(int) new_icode][2];
5908
5909 if (GET_MODE (second_reload_reg) != new_mode)
5910 {
5911 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5912 new_mode))
5913 oldequiv = old, real_oldequiv = real_old;
5914 else
5915 second_reload_reg
5916 = gen_rtx (REG, new_mode,
5917 REGNO (second_reload_reg));
5918 }
5919 }
5920 }
5921 }
5922
5923 /* If we still need a secondary reload register, check
5924 to see if it is being used as a scratch or intermediate
5925 register and generate code appropriately. If we need
5926 a scratch register, use REAL_OLDEQUIV since the form of
5927 the insn may depend on the actual address if it is
5928 a MEM. */
5929
5930 if (second_reload_reg)
5931 {
5932 if (icode != CODE_FOR_nothing)
5933 {
5934 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
5935 second_reload_reg));
5936 special = 1;
5937 }
5938 else
5939 {
5940 /* See if we need a scratch register to load the
5941 intermediate register (a tertiary reload). */
5942 enum insn_code tertiary_icode
5943 = reload_secondary_in_icode[secondary_reload];
5944
5945 if (tertiary_icode != CODE_FOR_nothing)
5946 {
5947 rtx third_reload_reg
5948 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
5949
5950 emit_insn ((GEN_FCN (tertiary_icode)
5951 (second_reload_reg, real_oldequiv,
5952 third_reload_reg)));
5953 }
5954 else
5955 gen_input_reload (second_reload_reg, oldequiv,
5956 reload_opnum[j],
5957 reload_when_needed[j]);
5958
5959 oldequiv = second_reload_reg;
5960 }
5961 }
5962 }
5963 #endif
5964
5965 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
5966 gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
5967 reload_when_needed[j]);
5968
5969 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5970 /* We may have to make a REG_DEAD note for the secondary reload
5971 register in the insns we just made. Find the last insn that
5972 mentioned the register. */
5973 if (! special && second_reload_reg
5974 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5975 {
5976 rtx prev;
5977
5978 for (prev = get_last_insn (); prev;
5979 prev = PREV_INSN (prev))
5980 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
5981 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
5982 PATTERN (prev)))
5983 {
5984 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5985 second_reload_reg,
5986 REG_NOTES (prev));
5987 break;
5988 }
5989 }
5990 #endif
5991 }
5992
5993 /* End this sequence. */
5994 *where = get_insns ();
5995 end_sequence ();
5996 }
5997
5998 /* Add a note saying the input reload reg
5999 dies in this insn, if anyone cares. */
6000 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6001 if (old != 0
6002 && reload_reg_rtx[j] != old
6003 && reload_reg_rtx[j] != 0
6004 && reload_out[j] == 0
6005 && ! reload_inherited[j]
6006 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6007 {
6008 register rtx reloadreg = reload_reg_rtx[j];
6009
6010 #if 0
6011 /* We can't abort here because we need to support this for sched.c.
6012 It's not terrible to miss a REG_DEAD note, but we should try
6013 to figure out how to do this correctly. */
6014 /* The code below is incorrect for address-only reloads. */
6015 if (reload_when_needed[j] != RELOAD_OTHER
6016 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6017 abort ();
6018 #endif
6019
6020 /* Add a death note to this insn, for an input reload. */
6021
6022 if ((reload_when_needed[j] == RELOAD_OTHER
6023 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6024 && ! dead_or_set_p (insn, reloadreg))
6025 REG_NOTES (insn)
6026 = gen_rtx (EXPR_LIST, REG_DEAD,
6027 reloadreg, REG_NOTES (insn));
6028 }
6029
6030 /* When we inherit a reload, the last marked death of the reload reg
6031 may no longer really be a death. */
6032 if (reload_reg_rtx[j] != 0
6033 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6034 && reload_inherited[j])
6035 {
6036 /* Handle inheriting an output reload.
6037 Remove the death note from the output reload insn. */
6038 if (reload_spill_index[j] >= 0
6039 && GET_CODE (reload_in[j]) == REG
6040 && spill_reg_store[reload_spill_index[j]] != 0
6041 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6042 REG_DEAD, REGNO (reload_reg_rtx[j])))
6043 remove_death (REGNO (reload_reg_rtx[j]),
6044 spill_reg_store[reload_spill_index[j]]);
6045 /* Likewise for input reloads that were inherited. */
6046 else if (reload_spill_index[j] >= 0
6047 && GET_CODE (reload_in[j]) == REG
6048 && spill_reg_store[reload_spill_index[j]] == 0
6049 && reload_inheritance_insn[j] != 0
6050 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6051 REGNO (reload_reg_rtx[j])))
6052 remove_death (REGNO (reload_reg_rtx[j]),
6053 reload_inheritance_insn[j]);
6054 else
6055 {
6056 rtx prev;
6057
6058 /* We got this register from find_equiv_reg.
6059 Search back for its last death note and get rid of it.
6060 But don't search back too far.
6061 Don't go past a place where this reg is set,
6062 since a death note before that remains valid. */
6063 for (prev = PREV_INSN (insn);
6064 prev && GET_CODE (prev) != CODE_LABEL;
6065 prev = PREV_INSN (prev))
6066 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6067 && dead_or_set_p (prev, reload_reg_rtx[j]))
6068 {
6069 if (find_regno_note (prev, REG_DEAD,
6070 REGNO (reload_reg_rtx[j])))
6071 remove_death (REGNO (reload_reg_rtx[j]), prev);
6072 break;
6073 }
6074 }
6075 }
6076
6077 /* We might have used find_equiv_reg above to choose an alternate
6078 place from which to reload. If so, and it died, we need to remove
6079 that death and move it to one of the insns we just made. */
6080
6081 if (oldequiv_reg != 0
6082 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6083 {
6084 rtx prev, prev1;
6085
6086 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6087 prev = PREV_INSN (prev))
6088 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6089 && dead_or_set_p (prev, oldequiv_reg))
6090 {
6091 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6092 {
6093 for (prev1 = this_reload_insn;
6094 prev1; prev1 = PREV_INSN (prev1))
6095 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
6096 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6097 PATTERN (prev1)))
6098 {
6099 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6100 oldequiv_reg,
6101 REG_NOTES (prev1));
6102 break;
6103 }
6104 remove_death (REGNO (oldequiv_reg), prev);
6105 }
6106 break;
6107 }
6108 }
6109 #endif
6110
6111 /* If we are reloading a register that was recently stored in with an
6112 output-reload, see if we can prove there was
6113 actually no need to store the old value in it. */
6114
6115 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6116 && reload_in[j] != 0
6117 && GET_CODE (reload_in[j]) == REG
6118 #if 0
6119 /* There doesn't seem to be any reason to restrict this to pseudos
6120 and doing so loses in the case where we are copying from a
6121 register of the wrong class. */
6122 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6123 #endif
6124 && spill_reg_store[reload_spill_index[j]] != 0
6125 /* This is unsafe if some other reload uses the same reg first. */
6126 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6127 reload_opnum[j], reload_when_needed[j])
6128 && dead_or_set_p (insn, reload_in[j])
6129 /* This is unsafe if operand occurs more than once in current
6130 insn. Perhaps some occurrences weren't reloaded. */
6131 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6132 delete_output_reload (insn, j,
6133 spill_reg_store[reload_spill_index[j]]);
6134
6135 /* Input-reloading is done. Now do output-reloading,
6136 storing the value from the reload-register after the main insn
6137 if reload_out[j] is nonzero.
6138
6139 ??? At some point we need to support handling output reloads of
6140 JUMP_INSNs or insns that set cc0. */
6141 old = reload_out[j];
6142 if (old != 0
6143 && reload_reg_rtx[j] != old
6144 && reload_reg_rtx[j] != 0)
6145 {
6146 register rtx reloadreg = reload_reg_rtx[j];
6147 register rtx second_reloadreg = 0;
6148 rtx note, p;
6149 enum machine_mode mode;
6150 int special = 0;
6151
6152 /* An output operand that dies right away does need a reload,
6153 but need not be copied from it. Show the new location in the
6154 REG_UNUSED note. */
6155 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6156 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6157 {
6158 XEXP (note, 0) = reload_reg_rtx[j];
6159 continue;
6160 }
6161 else if (GET_CODE (old) == SCRATCH)
6162 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6163 but we don't want to make an output reload. */
6164 continue;
6165
6166 #if 0
6167 /* Strip off of OLD any size-increasing SUBREGs such as
6168 (SUBREG:SI foo:QI 0). */
6169
6170 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6171 && (GET_MODE_SIZE (GET_MODE (old))
6172 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6173 old = SUBREG_REG (old);
6174 #endif
6175
6176 /* If is a JUMP_INSN, we can't support output reloads yet. */
6177 if (GET_CODE (insn) == JUMP_INSN)
6178 abort ();
6179
6180 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6181
6182 /* Determine the mode to reload in.
6183 See comments above (for input reloading). */
6184
6185 mode = GET_MODE (old);
6186 if (mode == VOIDmode)
6187 {
6188 /* VOIDmode should never happen for an output. */
6189 if (asm_noperands (PATTERN (insn)) < 0)
6190 /* It's the compiler's fault. */
6191 abort ();
6192 error_for_asm (insn, "output operand is constant in `asm'");
6193 /* Prevent crash--use something we know is valid. */
6194 mode = word_mode;
6195 old = gen_rtx (REG, mode, REGNO (reloadreg));
6196 }
6197
6198 if (GET_MODE (reloadreg) != mode)
6199 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6200
6201 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6202
6203 /* If we need two reload regs, set RELOADREG to the intermediate
6204 one, since it will be stored into OUT. We might need a secondary
6205 register only for an input reload, so check again here. */
6206
6207 if (reload_secondary_out_reload[j] >= 0)
6208 {
6209 rtx real_old = old;
6210
6211 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6212 && reg_equiv_mem[REGNO (old)] != 0)
6213 real_old = reg_equiv_mem[REGNO (old)];
6214
6215 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6216 mode, real_old)
6217 != NO_REGS))
6218 {
6219 second_reloadreg = reloadreg;
6220 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6221
6222 /* See if RELOADREG is to be used as a scratch register
6223 or as an intermediate register. */
6224 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6225 {
6226 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6227 (real_old, second_reloadreg, reloadreg)));
6228 special = 1;
6229 }
6230 else
6231 {
6232 /* See if we need both a scratch and intermediate reload
6233 register. */
6234 int secondary_reload = reload_secondary_out_reload[j];
6235 enum insn_code tertiary_icode
6236 = reload_secondary_out_icode[secondary_reload];
6237 rtx pat;
6238
6239 if (GET_MODE (reloadreg) != mode)
6240 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6241
6242 if (tertiary_icode != CODE_FOR_nothing)
6243 {
6244 rtx third_reloadreg
6245 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6246 pat = (GEN_FCN (tertiary_icode)
6247 (reloadreg, second_reloadreg, third_reloadreg));
6248 }
6249 #ifdef SECONDARY_MEMORY_NEEDED
6250 /* If we need a memory location to do the move, do it that way. */
6251 else if (GET_CODE (reloadreg) == REG
6252 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
6253 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
6254 REGNO_REG_CLASS (REGNO (second_reloadreg)),
6255 GET_MODE (second_reloadreg)))
6256 {
6257 /* Get the memory to use and rewrite both registers
6258 to its mode. */
6259 rtx loc
6260 = get_secondary_mem (reloadreg,
6261 GET_MODE (second_reloadreg),
6262 reload_opnum[j],
6263 reload_when_needed[j]);
6264 rtx tmp_reloadreg;
6265
6266 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
6267 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
6268 REGNO (second_reloadreg));
6269
6270 if (GET_MODE (loc) != GET_MODE (reloadreg))
6271 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6272 REGNO (reloadreg));
6273 else
6274 tmp_reloadreg = reloadreg;
6275
6276 emit_move_insn (loc, second_reloadreg);
6277 pat = gen_move_insn (tmp_reloadreg, loc);
6278 }
6279 #endif
6280 else
6281 pat = gen_move_insn (reloadreg, second_reloadreg);
6282
6283 emit_insn (pat);
6284 }
6285 }
6286 }
6287 #endif
6288
6289 /* Output the last reload insn. */
6290 if (! special)
6291 {
6292 #ifdef SECONDARY_MEMORY_NEEDED
6293 /* If we need a memory location to do the move, do it that way. */
6294 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
6295 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
6296 REGNO_REG_CLASS (REGNO (reloadreg)),
6297 GET_MODE (reloadreg)))
6298 {
6299 /* Get the memory to use and rewrite both registers to
6300 its mode. */
6301 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
6302 reload_opnum[j],
6303 reload_when_needed[j]);
6304
6305 if (GET_MODE (loc) != GET_MODE (reloadreg))
6306 reloadreg = gen_rtx (REG, GET_MODE (loc),
6307 REGNO (reloadreg));
6308
6309 if (GET_MODE (loc) != GET_MODE (old))
6310 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
6311
6312 emit_insn (gen_move_insn (loc, reloadreg));
6313 emit_insn (gen_move_insn (old, loc));
6314 }
6315 else
6316 #endif
6317 emit_insn (gen_move_insn (old, reloadreg));
6318 }
6319
6320 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6321 /* If final will look at death notes for this reg,
6322 put one on the last output-reload insn to use it. Similarly
6323 for any secondary register. */
6324 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6325 for (p = get_last_insn (); p; p = PREV_INSN (p))
6326 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6327 && reg_overlap_mentioned_for_reload_p (reloadreg,
6328 PATTERN (p)))
6329 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6330 reloadreg, REG_NOTES (p));
6331
6332 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6333 if (! special
6334 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6335 for (p = get_last_insn (); p; p = PREV_INSN (p))
6336 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6337 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6338 PATTERN (p)))
6339 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6340 second_reloadreg, REG_NOTES (p));
6341 #endif
6342 #endif
6343 /* Look at all insns we emitted, just to be safe. */
6344 for (p = get_insns (); p; p = NEXT_INSN (p))
6345 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6346 {
6347 /* If this output reload doesn't come from a spill reg,
6348 clear any memory of reloaded copies of the pseudo reg.
6349 If this output reload comes from a spill reg,
6350 reg_has_output_reload will make this do nothing. */
6351 note_stores (PATTERN (p), forget_old_reloads_1);
6352
6353 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
6354 store_insn = p;
6355 }
6356
6357 output_reload_insns[reload_opnum[j]] = get_insns ();
6358 end_sequence ();
6359
6360 }
6361
6362 if (reload_spill_index[j] >= 0)
6363 new_spill_reg_store[reload_spill_index[j]] = store_insn;
6364 }
6365
6366 /* Now write all the insns we made for reloads in the order expected by
6367 the allocation functions. Prior to the insn being reloaded, we write
6368 the following reloads:
6369
6370 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6371
6372 RELOAD_OTHER reloads.
6373
6374 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6375 the RELOAD_FOR_INPUT reload for the operand.
6376
6377 RELOAD_FOR_OPADDR_ADDRS reloads.
6378
6379 RELOAD_FOR_OPERAND_ADDRESS reloads.
6380
6381 After the insn being reloaded, we write the following:
6382
6383 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6384 the RELOAD_FOR_OUTPUT reload for that operand. */
6385
6386 emit_insns_before (other_input_address_reload_insns, before_insn);
6387 emit_insns_before (other_input_reload_insns, before_insn);
6388
6389 for (j = 0; j < reload_n_operands; j++)
6390 {
6391 emit_insns_before (input_address_reload_insns[j], before_insn);
6392 emit_insns_before (input_reload_insns[j], before_insn);
6393 }
6394
6395 emit_insns_before (other_operand_reload_insns, before_insn);
6396 emit_insns_before (operand_reload_insns, before_insn);
6397
6398 for (j = 0; j < reload_n_operands; j++)
6399 {
6400 emit_insns_before (output_address_reload_insns[j], following_insn);
6401 emit_insns_before (output_reload_insns[j], following_insn);
6402 }
6403
6404 /* Move death notes from INSN
6405 to output-operand-address and output reload insns. */
6406 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6407 {
6408 rtx insn1;
6409 /* Loop over those insns, last ones first. */
6410 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6411 insn1 = PREV_INSN (insn1))
6412 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6413 {
6414 rtx source = SET_SRC (PATTERN (insn1));
6415 rtx dest = SET_DEST (PATTERN (insn1));
6416
6417 /* The note we will examine next. */
6418 rtx reg_notes = REG_NOTES (insn);
6419 /* The place that pointed to this note. */
6420 rtx *prev_reg_note = &REG_NOTES (insn);
6421
6422 /* If the note is for something used in the source of this
6423 reload insn, or in the output address, move the note. */
6424 while (reg_notes)
6425 {
6426 rtx next_reg_notes = XEXP (reg_notes, 1);
6427 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6428 && GET_CODE (XEXP (reg_notes, 0)) == REG
6429 && ((GET_CODE (dest) != REG
6430 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6431 dest))
6432 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6433 source)))
6434 {
6435 *prev_reg_note = next_reg_notes;
6436 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6437 REG_NOTES (insn1) = reg_notes;
6438 }
6439 else
6440 prev_reg_note = &XEXP (reg_notes, 1);
6441
6442 reg_notes = next_reg_notes;
6443 }
6444 }
6445 }
6446 #endif
6447
6448 /* For all the spill regs newly reloaded in this instruction,
6449 record what they were reloaded from, so subsequent instructions
6450 can inherit the reloads.
6451
6452 Update spill_reg_store for the reloads of this insn.
6453 Copy the elements that were updated in the loop above. */
6454
6455 for (j = 0; j < n_reloads; j++)
6456 {
6457 register int r = reload_order[j];
6458 register int i = reload_spill_index[r];
6459
6460 /* I is nonneg if this reload used one of the spill regs.
6461 If reload_reg_rtx[r] is 0, this is an optional reload
6462 that we opted to ignore.
6463
6464 Also ignore reloads that don't reach the end of the insn,
6465 since we will eventually see the one that does. */
6466
6467 if (i >= 0 && reload_reg_rtx[r] != 0
6468 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6469 reload_when_needed[r]))
6470 {
6471 /* First, clear out memory of what used to be in this spill reg.
6472 If consecutive registers are used, clear them all. */
6473 int nr
6474 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6475 int k;
6476
6477 for (k = 0; k < nr; k++)
6478 {
6479 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6480 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6481 }
6482
6483 /* Maybe the spill reg contains a copy of reload_out. */
6484 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6485 {
6486 register int nregno = REGNO (reload_out[r]);
6487 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6488 : HARD_REGNO_NREGS (nregno,
6489 GET_MODE (reload_reg_rtx[r])));
6490
6491 spill_reg_store[i] = new_spill_reg_store[i];
6492 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6493
6494 /* If NREGNO is a hard register, it may occupy more than
6495 one register. If it does, say what is in the
6496 rest of the registers assuming that both registers
6497 agree on how many words the object takes. If not,
6498 invalidate the subsequent registers. */
6499
6500 if (nregno < FIRST_PSEUDO_REGISTER)
6501 for (k = 1; k < nnr; k++)
6502 reg_last_reload_reg[nregno + k]
6503 = (nr == nnr ? gen_rtx (REG,
6504 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6505 REGNO (reload_reg_rtx[r]) + k)
6506 : 0);
6507
6508 /* Now do the inverse operation. */
6509 for (k = 0; k < nr; k++)
6510 {
6511 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6512 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6513 : nregno + k);
6514 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6515 }
6516 }
6517
6518 /* Maybe the spill reg contains a copy of reload_in. Only do
6519 something if there will not be an output reload for
6520 the register being reloaded. */
6521 else if (reload_out[r] == 0
6522 && reload_in[r] != 0
6523 && ((GET_CODE (reload_in[r]) == REG
6524 && ! reg_has_output_reload[REGNO (reload_in[r])]
6525 || (GET_CODE (reload_in_reg[r]) == REG
6526 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
6527 {
6528 register int nregno;
6529 int nnr;
6530
6531 if (GET_CODE (reload_in[r]) == REG)
6532 nregno = REGNO (reload_in[r]);
6533 else
6534 nregno = REGNO (reload_in_reg[r]);
6535
6536 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6537 : HARD_REGNO_NREGS (nregno,
6538 GET_MODE (reload_reg_rtx[r])));
6539
6540 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6541
6542 if (nregno < FIRST_PSEUDO_REGISTER)
6543 for (k = 1; k < nnr; k++)
6544 reg_last_reload_reg[nregno + k]
6545 = (nr == nnr ? gen_rtx (REG,
6546 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6547 REGNO (reload_reg_rtx[r]) + k)
6548 : 0);
6549
6550 /* Unless we inherited this reload, show we haven't
6551 recently done a store. */
6552 if (! reload_inherited[r])
6553 spill_reg_store[i] = 0;
6554
6555 for (k = 0; k < nr; k++)
6556 {
6557 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6558 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6559 : nregno + k);
6560 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6561 = insn;
6562 }
6563 }
6564 }
6565
6566 /* The following if-statement was #if 0'd in 1.34 (or before...).
6567 It's reenabled in 1.35 because supposedly nothing else
6568 deals with this problem. */
6569
6570 /* If a register gets output-reloaded from a non-spill register,
6571 that invalidates any previous reloaded copy of it.
6572 But forget_old_reloads_1 won't get to see it, because
6573 it thinks only about the original insn. So invalidate it here. */
6574 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6575 {
6576 register int nregno = REGNO (reload_out[r]);
6577 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
6578
6579 while (num_regs-- > 0)
6580 reg_last_reload_reg[nregno + num_regs] = 0;
6581 }
6582 }
6583 }
6584 \f
6585 /* Emit code to perform an input reload of IN to RELOADREG. IN is from
6586 operand OPNUM with reload type TYPE.
6587
6588 Returns first insn emitted. */
6589
rtx
gen_input_reload (reloadreg, in, opnum, type)
     rtx reloadreg;
     rtx in;
     int opnum;
     enum reload_type type;
{
  /* Remember where we started emitting; used both to return the first
     insn we emit and to delete failed recognition attempts.  */
  rtx last = get_last_insn ();

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (GET_CODE (XEXP (in, 0)) == REG
	  || GET_CODE (XEXP (in, 0)) == MEM)
      && (GET_CODE (XEXP (in, 1)) == REG
	  || CONSTANT_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == MEM))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `insn_extract' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      int code;

      /* Parts of IN might be being reloaded separately (see the comment
	 above); substitute any pending replacements for the operands.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (GET_CODE (XEXP (in, 1)) == REG
	  && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      /* Only rebuild the PLUS if a replacement or the swap above
	 actually changed an operand.  */
      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx (PLUS, GET_MODE (in), op0, op1);

      insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      /* The three-operand add was not recognized or did not satisfy its
	 constraints; discard it and fall back to a multi-insn sequence.  */
      delete_insns_since (last);

      /* If that failed, we must use a conservative two-insn sequence.
	 Use move to copy constant, MEM, or pseudo register to the reload
	 register since "move" will be able to handle an arbitrary operand,
	 unlike add which can't, in general.  Then add the registers.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
	  || (GET_CODE (op1) == REG
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
	tem = op0, op0 = op1, op1 = tem;

      emit_insn (gen_move_insn (reloadreg, op0));

      /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = reloadreg;

      insn = emit_insn (gen_add2_insn (reloadreg, op1));

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      /* The add2 insn was not valid either; discard the attempt and
	 emit the sequence in the opposite order as a last resort.  */
      delete_insns_since (last);

      emit_insn (gen_move_insn (reloadreg, op1));
      emit_insn (gen_add2_insn (reloadreg, op0));
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
				       REGNO_REG_CLASS (REGNO (reloadreg)),
				       GET_MODE (reloadreg)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);

      if (GET_MODE (loc) != GET_MODE (reloadreg))
	reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx (REG, GET_MODE (loc), REGNO (in));

      /* Move via the secondary memory location.  */
      emit_insn (gen_move_insn (loc, in));
      emit_insn (gen_move_insn (reloadreg, loc));
    }
#endif

  /* If IN is a simple operand, use gen_move_insn.  */
  else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
    emit_insn (gen_move_insn (reloadreg, in));

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (reloadreg, in));
#endif

  /* Otherwise, just write (set RELOADREG IN) and hope for the best.  */
  else
    emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
6768 \f
6769 /* Delete a previously made output-reload
6770 whose result we now believe is not needed.
6771 First we double-check.
6772
6773 INSN is the insn now being processed.
6774 OUTPUT_RELOAD_INSN is the insn of the output reload.
6775 J is the reload-number for this insn. */
6776
static void
delete_output_reload (insn, j, output_reload_insn)
     rtx insn;
     int j;
     rtx output_reload_insn;
{
  register rtx i1;

  /* Get the raw pseudo-register referred to.  */

  rtx reg = reload_in[j];
  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and no jumps or labels intervene, then the value can get
     here through the reload reg alone.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
	return;
      if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
	  && reg_mentioned_p (reg, PATTERN (i1)))
	return;
    }

  /* If stores into this pseudo must be preserved (per the
     cannot_omit_stores flag set elsewhere), give up.  */
  if (cannot_omit_stores[REGNO (reg)])
    return;

  /* If this insn will store in the pseudo again,
     the previous store can be removed.  */
  if (reload_out[j] == reload_in[j])
    delete_insn (output_reload_insn);

  /* See if the pseudo reg has been completely replaced
     with reload regs.  If so, delete the store insn
     and forget we had a stack slot for the pseudo.  */
  else if (reg_n_deaths[REGNO (reg)] == 1
	   && reg_basic_block[REGNO (reg)] >= 0
	   && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here
	 and the beginning of the current basic block.
	 (We also know that the last use before INSN was
	 the output reload we are thinking of deleting, but never mind that.)
	 Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	  if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    /* Some other ref remains;
	       we can't do anything.  */
	    return;
	}

      /* Delete the now-dead stores into this pseudo.  Stop at the
	 basic block boundary, as in the scan above.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    delete_insn (i2);
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	}

      /* For the debugging info,
	 say the pseudo lives in this reload reg.
	 Passing -1 to alter_reg discards the pseudo's stack slot.  */
      reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
      alter_reg (REGNO (reg), -1);
    }
}
6864 \f
6865 /* Output reload-insns to reload VALUE into RELOADREG.
6866 VALUE is an autoincrement or autodecrement RTX whose operand
6867 is a register or memory location;
6868 so reloading involves incrementing that location.
6869
6870 INC_AMOUNT is the number to increment or decrement by (always positive).
6871 This cannot be deduced from VALUE. */
6872
static void
inc_for_reload (reloadreg, value, inc_amount)
     rtx reloadreg;
     rtx value;
     int inc_amount;
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
  rtx last;
  rtx inc;		/* INC_AMOUNT as a CONST_INT, sign included.  */
  rtx add_insn;
  int code;		/* Insn code number from recog_memoized.  */

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were non-zero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (GET_CODE (incloc) == REG)
    reg_last_reload_reg[REGNO (incloc)] = 0;

  /* Decrements are represented as negative increments.  */
  if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
    inc_amount = - inc_amount;

  inc = GEN_INT (inc_amount);

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post)
    emit_insn (gen_move_insn (reloadreg, incloc));

  /* See if we can directly increment INCLOC.  Use a method similar to that
     in gen_input_reload.  */

  last = get_last_insn ();
  add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
				 gen_rtx (PLUS, GET_MODE (incloc),
					  incloc, inc)));

  code = recog_memoized (add_insn);
  if (code >= 0)
    {
      insn_extract (add_insn);
      /* Check the insn the strict way, as it would be checked
	 after reload has completed.  */
      if (constrain_operands (code, 1))
	{
	  /* If this is a pre-increment and we have incremented the value
	     where it lives, copy the incremented value to RELOADREG to
	     be used as an address.  */

	  if (! post)
	    emit_insn (gen_move_insn (reloadreg, incloc));

	  return;
	}
    }

  /* The direct add was not valid; throw it away.  */
  delete_insns_since (last);

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      emit_insn (gen_move_insn (reloadreg, incloc));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied INCLOC to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
    }

  return;
}
6960 \f
6961 /* Return 1 if we are certain that the constraint-string STRING allows
6962 the hard register REG. Return 0 if we can't be sure of this. */
6963
6964 static int
6965 constraint_accepts_reg_p (string, reg)
6966 char *string;
6967 rtx reg;
6968 {
6969 int value = 0;
6970 int regno = true_regnum (reg);
6971 int c;
6972
6973 /* Initialize for first alternative. */
6974 value = 0;
6975 /* Check that each alternative contains `g' or `r'. */
6976 while (1)
6977 switch (c = *string++)
6978 {
6979 case 0:
6980 /* If an alternative lacks `g' or `r', we lose. */
6981 return value;
6982 case ',':
6983 /* If an alternative lacks `g' or `r', we lose. */
6984 if (value == 0)
6985 return 0;
6986 /* Initialize for next alternative. */
6987 value = 0;
6988 break;
6989 case 'g':
6990 case 'r':
6991 /* Any general reg wins for this alternative. */
6992 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
6993 value = 1;
6994 break;
6995 default:
6996 /* Any reg in specified class wins for this alternative. */
6997 {
6998 enum reg_class class = REG_CLASS_FROM_LETTER (c);
6999
7000 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7001 value = 1;
7002 }
7003 }
7004 }
7005 \f
7006 /* Return the number of places FIND appears within X, but don't count
7007 an occurrence if some SET_DEST is FIND. */
7008
7009 static int
7010 count_occurrences (x, find)
7011 register rtx x, find;
7012 {
7013 register int i, j;
7014 register enum rtx_code code;
7015 register char *format_ptr;
7016 int count;
7017
7018 if (x == find)
7019 return 1;
7020 if (x == 0)
7021 return 0;
7022
7023 code = GET_CODE (x);
7024
7025 switch (code)
7026 {
7027 case REG:
7028 case QUEUED:
7029 case CONST_INT:
7030 case CONST_DOUBLE:
7031 case SYMBOL_REF:
7032 case CODE_LABEL:
7033 case PC:
7034 case CC0:
7035 return 0;
7036
7037 case SET:
7038 if (SET_DEST (x) == find)
7039 return count_occurrences (SET_SRC (x), find);
7040 break;
7041 }
7042
7043 format_ptr = GET_RTX_FORMAT (code);
7044 count = 0;
7045
7046 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7047 {
7048 switch (*format_ptr++)
7049 {
7050 case 'e':
7051 count += count_occurrences (XEXP (x, i), find);
7052 break;
7053
7054 case 'E':
7055 if (XVEC (x, i) != NULL)
7056 {
7057 for (j = 0; j < XVECLEN (x, i); j++)
7058 count += count_occurrences (XVECEXP (x, i, j), find);
7059 }
7060 break;
7061 }
7062 }
7063 return count;
7064 }