(last_spill_reg): Move definition to top of file.
[gcc.git] / gcc / reload1.c
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include <stdio.h>
22 #include "config.h"
23 #include "rtl.h"
24 #include "obstack.h"
25 #include "insn-config.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
28 #include "flags.h"
29 #include "expr.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "reload.h"
33 #include "recog.h"
34 #include "basic-block.h"
35 #include "output.h"
36
37 /* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
69
70
71 #ifndef REGISTER_MOVE_COST
72 #define REGISTER_MOVE_COST(x, y) 2
73 #endif
74
75 #ifndef MEMORY_MOVE_COST
76 #define MEMORY_MOVE_COST(x) 4
77 #endif
78 \f
79 /* During reload_as_needed, element N contains a REG rtx for the hard reg
80 into which reg N has been reloaded (perhaps for a previous insn). */
81 static rtx *reg_last_reload_reg;
82
83 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
84 for an output reload that stores into reg N. */
85 static char *reg_has_output_reload;
86
87 /* Indicates which hard regs are reload-registers for an output reload
88 in the current insn. */
89 static HARD_REG_SET reg_is_output_reload;
90
91 /* Element N is the constant value to which pseudo reg N is equivalent,
92 or zero if pseudo reg N is not equivalent to a constant.
93 find_reloads looks at this in order to replace pseudo reg N
94 with the constant it stands for. */
95 rtx *reg_equiv_constant;
96
97 /* Element N is a memory location to which pseudo reg N is equivalent,
98 prior to any register elimination (such as frame pointer to stack
99 pointer). Depending on whether or not it is a valid address, this value
100 is transferred to either reg_equiv_address or reg_equiv_mem. */
101 rtx *reg_equiv_memory_loc;
102
103 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
104 This is used when the address is not valid as a memory address
105 (because its displacement is too big for the machine.) */
106 rtx *reg_equiv_address;
107
108 /* Element N is the memory slot to which pseudo reg N is equivalent,
109 or zero if pseudo reg N is not equivalent to a memory slot. */
110 rtx *reg_equiv_mem;
111
112 /* Widest width in which each pseudo reg is referred to (via subreg). */
113 static int *reg_max_ref_width;
114
115 /* Element N is the insn that initialized reg N from its equivalent
116 constant or memory slot. */
117 static rtx *reg_equiv_init;
118
119 /* During reload_as_needed, element N contains the last pseudo regno
120 reloaded into the Nth reload register. This vector is in parallel
121 with spill_regs. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
125
126 /* During reload_as_needed, element N contains the insn for which
127 the Nth reload register was last used. This vector is in parallel
128 with spill_regs, and its contents are significant only when
129 reg_reloaded_contents is significant. */
130 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
131
132 /* Number of spill-regs so far; number of valid elements of spill_regs. */
133 static int n_spills;
134
135 /* In parallel with spill_regs, contains REG rtx's for those regs.
136 Holds the last rtx used for any given reg, or 0 if it has never
137 been used for spilling yet. This rtx is reused, provided it has
138 the proper mode. */
139 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
140
141 /* In parallel with spill_regs, contains nonzero for a spill reg
142 that was stored after the last time it was used.
143 The precise value is the insn generated to do the store. */
144 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
145
146 /* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs. */
150 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
151
152 /* This reg set indicates registers that may not be used for retrying global
153 allocation. The registers that may not be used include all spill registers
154 and the frame pointer (if we are using one). */
155 HARD_REG_SET forbidden_regs;
156
157 /* This reg set indicates registers that are not good for spill registers.
158 They will not be used to complete groups of spill registers. This includes
159 all fixed registers, registers that may be eliminated, and, if
160 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
161
162 (spill_reg_order prevents these registers from being used to start a
163 group.) */
164 static HARD_REG_SET bad_spill_regs;
165
166 /* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `spills' is the number of
168 elements that are actually valid; new ones are added at the end. */
169 static short spill_regs[FIRST_PSEUDO_REGISTER];
170
171 /* Index of last register assigned as a spill register. We allocate in
172 a round-robin fashion. */
173
174 static int last_spill_reg;
175
176 /* Describes order of preference for putting regs into spill_regs.
177 Contains the numbers of all the hard regs, in order most preferred first.
178 This order is different for each function.
179 It is set up by order_regs_for_reload.
180 Empty elements at the end contain -1. */
181 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
182
183 /* 1 for a hard register that appears explicitly in the rtl
184 (for example, function value registers, special registers
185 used by insns, structure value pointer registers). */
186 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
187
188 /* Indicates if a register was counted against the need for
189 groups. 0 means it can count against max_nongroup instead. */
190 static HARD_REG_SET counted_for_groups;
191
192 /* Indicates if a register was counted against the need for
193 non-groups. 0 means it can become part of a new group.
194 During choose_reload_regs, 1 here means don't use this reg
195 as part of a group, even if it seems to be otherwise ok. */
196 static HARD_REG_SET counted_for_nongroups;
197
198 /* Indexed by pseudo reg number N,
199 says may not delete stores into the real (memory) home of pseudo N.
200 This is set if we already substituted a memory equivalent in some uses,
201 which happens when we have to eliminate the fp from it. */
202 static char *cannot_omit_stores;
203
204 /* Nonzero if indirect addressing is supported on the machine; this means
205 that spilling (REG n) does not require reloading it into a register in
206 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
207 value indicates the level of indirect addressing supported, e.g., two
208 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
209 a hard register. */
210
211 static char spill_indirect_levels;
212
213 /* Nonzero if indirect addressing is supported when the innermost MEM is
214 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
215 which these are valid is the same as spill_indirect_levels, above. */
216
217 char indirect_symref_ok;
218
219 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
220
221 char double_reg_address_ok;
222
223 /* Record the stack slot for each spilled hard register. */
224
225 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
226
227 /* Width allocated so far for that stack slot. */
228
229 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
230
231 /* Indexed by register class and basic block number, nonzero if there is
232 any need for a spill register of that class in that basic block.
233 The pointer is 0 if we did stupid allocation and don't know
234 the structure of basic blocks. */
235
236 char *basic_block_needs[N_REG_CLASSES];
237
238 /* First uid used by insns created by reload in this function.
239 Used in find_equiv_reg. */
240 int reload_first_uid;
241
242 /* Flag set by local-alloc or global-alloc if anything is live in
243 a call-clobbered reg across calls. */
244
245 int caller_save_needed;
246
247 /* Set to 1 while reload_as_needed is operating.
248 Required by some machines to handle any generated moves differently. */
249
250 int reload_in_progress = 0;
251
252 /* These arrays record the insn_code of insns that may be needed to
253 perform input and output reloads of special objects. They provide a
254 place to pass a scratch register. */
255
256 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
257 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
258
259 /* This obstack is used for allocation of rtl during register elimination.
260 The allocated storage can be freed once find_reloads has processed the
261 insn. */
262
263 struct obstack reload_obstack;
264 char *reload_firstobj;
265
266 #define obstack_chunk_alloc xmalloc
267 #define obstack_chunk_free free
268
269 /* List of labels that must never be deleted. */
270 extern rtx forced_labels;
271 \f
272 /* This structure is used to record information about register eliminations.
273 Each array entry describes one possible way of eliminating a register
274 in favor of another. If there is more than one way of eliminating a
275 particular register, the most preferred should be specified first. */
276
277 static struct elim_table
278 {
279 int from; /* Register number to be eliminated. */
280 int to; /* Register number used as replacement. */
281 int initial_offset; /* Initial difference between values. */
282 int can_eliminate; /* Non-zero if this elimination can be done. */
283 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
284 insns made by reload. */
285 int offset; /* Current offset between the two regs. */
286 int max_offset; /* Maximum offset between the two regs. */
287 int previous_offset; /* Offset at end of previous insn. */
288 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
289 rtx from_rtx; /* REG rtx for the register to be eliminated.
290 We cannot simply compare the number since
291 we might then spuriously replace a hard
292 register corresponding to a pseudo
293 assigned to the reg to be eliminated. */
294 rtx to_rtx; /* REG rtx for the replacement. */
295 } reg_eliminate[] =
296
297 /* If a set of eliminable registers was specified, define the table from it.
298 Otherwise, default to the normal case of the frame pointer being
299 replaced by the stack pointer. */
300
301 #ifdef ELIMINABLE_REGS
302 ELIMINABLE_REGS;
303 #else
304 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
305 #endif
306
307 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
308
309 /* Record the number of pending eliminations that have an offset not equal
310 to their initial offset. If non-zero, we use a new copy of each
311 replacement result in any insns encountered. */
312 static int num_not_at_initial_offset;
313
314 /* Count the number of registers that we may be able to eliminate. */
315 static int num_eliminable;
316
317 /* For each label, we record the offset of each elimination. If we reach
318 a label by more than one path and an offset differs, we cannot do the
319 elimination. This information is indexed by the number of the label.
320 The first table is an array of flags that records whether we have yet
321 encountered a label and the second table is an array of arrays, one
322 entry in the latter array for each elimination. */
323
324 static char *offsets_known_at;
325 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
326
327 /* Number of labels in the current function. */
328
329 static int num_labels;
330
331 struct hard_reg_n_uses { int regno; int uses; };
332 \f
333 static int possible_group_p PROTO((int, int *));
334 static void count_possible_groups PROTO((int *, enum machine_mode *,
335 int *, int));
336 static int modes_equiv_for_class_p PROTO((enum machine_mode,
337 enum machine_mode,
338 enum reg_class));
339 static void spill_failure PROTO((rtx));
340 static int new_spill_reg PROTO((int, int, int *, int *, int,
341 FILE *));
342 static void delete_dead_insn PROTO((rtx));
343 static void alter_reg PROTO((int, int));
344 static void mark_scratch_live PROTO((rtx));
345 static void set_label_offsets PROTO((rtx, rtx, int));
346 static int eliminate_regs_in_insn PROTO((rtx, int));
347 static void mark_not_eliminable PROTO((rtx, rtx));
348 static int spill_hard_reg PROTO((int, int, FILE *, int));
349 static void scan_paradoxical_subregs PROTO((rtx));
350 static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
351 struct hard_reg_n_uses *));
352 static void order_regs_for_reload PROTO((void));
353 static int compare_spill_regs PROTO((short *, short *));
354 static void reload_as_needed PROTO((rtx, int));
355 static void forget_old_reloads_1 PROTO((rtx, rtx));
356 static int reload_reg_class_lower PROTO((short *, short *));
357 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
358 enum machine_mode));
359 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
360 enum machine_mode));
361 static int reload_reg_free_p PROTO((int, int, enum reload_type));
362 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
363 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
364 static int reloads_conflict PROTO((int, int));
365 static int allocate_reload_reg PROTO((int, rtx, int, int));
366 static void choose_reload_regs PROTO((rtx, rtx));
367 static void merge_assigned_reloads PROTO((rtx));
368 static void emit_reload_insns PROTO((rtx));
369 static void delete_output_reload PROTO((rtx, int, rtx));
370 static void inc_for_reload PROTO((rtx, rtx, int));
371 static int constraint_accepts_reg_p PROTO((char *, rtx));
372 static int count_occurrences PROTO((rtx, rtx));
373 \f
374 /* Initialize the reload pass once per compilation. */
375
376 void
377 init_reload ()
378 {
379 register int i;
380
381 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
382 Set spill_indirect_levels to the number of levels such addressing is
383 permitted, zero if it is not permitted at all. */
384
385 register rtx tem
386 = gen_rtx (MEM, Pmode,
387 gen_rtx (PLUS, Pmode,
388 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
389 GEN_INT (4)));
390 spill_indirect_levels = 0;
391
392 while (memory_address_p (QImode, tem))
393 {
394 spill_indirect_levels++;
395 tem = gen_rtx (MEM, Pmode, tem);
396 }
397
398 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
399
400 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
401 indirect_symref_ok = memory_address_p (QImode, tem);
402
403 /* See if reg+reg is a valid (and offsettable) address. */
404
405 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
406 {
407 tem = gen_rtx (PLUS, Pmode,
408 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
409 gen_rtx (REG, Pmode, i));
410 /* This way, we make sure that reg+reg is an offsettable address. */
411 tem = plus_constant (tem, 4);
412
413 if (memory_address_p (QImode, tem))
414 {
415 double_reg_address_ok = 1;
416 break;
417 }
418 }
419
420 /* Initialize obstack for our rtl allocation. */
421 gcc_obstack_init (&reload_obstack);
422 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
423 }
424
425 /* Main entry point for the reload pass.
426
427 FIRST is the first insn of the function being compiled.
428
429 GLOBAL nonzero means we were called from global_alloc
430 and should attempt to reallocate any pseudoregs that we
431 displace from hard regs we will use for reloads.
432 If GLOBAL is zero, we do not have enough information to do that,
433 so any pseudo reg that is spilled must go to the stack.
434
435 DUMPFILE is the global-reg debugging dump file stream, or 0.
436 If it is nonzero, messages are written to it to describe
437 which registers are seized as reload regs, which pseudo regs
438 are spilled from them, and where the pseudo regs are reallocated to.
439
440 Return value is nonzero if reload failed
441 and we must not do any more for this function. */
442
443 int
444 reload (first, global, dumpfile)
445 rtx first;
446 int global;
447 FILE *dumpfile;
448 {
449 register int class;
450 register int i, j, k;
451 register rtx insn;
452 register struct elim_table *ep;
453
454 int something_changed;
455 int something_needs_reloads;
456 int something_needs_elimination;
457 int new_basic_block_needs;
458 enum reg_class caller_save_spill_class = NO_REGS;
459 int caller_save_group_size = 1;
460
461 /* Nonzero means we couldn't get enough spill regs. */
462 int failure = 0;
463
464 /* The basic block number currently being processed for INSN. */
465 int this_block;
466
467 /* Make sure even insns with volatile mem refs are recognizable. */
468 init_recog ();
469
470 /* Enable find_equiv_reg to distinguish insns made by reload. */
471 reload_first_uid = get_max_uid ();
472
473 for (i = 0; i < N_REG_CLASSES; i++)
474 basic_block_needs[i] = 0;
475
476 #ifdef SECONDARY_MEMORY_NEEDED
477 /* Initialize the secondary memory table. */
478 clear_secondary_mem ();
479 #endif
480
481 /* Remember which hard regs appear explicitly
482 before we merge into `regs_ever_live' the ones in which
483 pseudo regs have been allocated. */
484 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
485
486 /* We don't have a stack slot for any spill reg yet. */
487 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
488 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
489
490 /* Initialize the save area information for caller-save, in case some
491 are needed. */
492 init_save_areas ();
493
494 /* Compute which hard registers are now in use
495 as homes for pseudo registers.
496 This is done here rather than (eg) in global_alloc
497 because this point is reached even if not optimizing. */
498
499 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
500 mark_home_live (i);
501
502 for (i = 0; i < scratch_list_length; i++)
503 if (scratch_list[i])
504 mark_scratch_live (scratch_list[i]);
505
506 /* Make sure that the last insn in the chain
507 is not something that needs reloading. */
508 emit_note (NULL_PTR, NOTE_INSN_DELETED);
509
510 /* Find all the pseudo registers that didn't get hard regs
511 but do have known equivalent constants or memory slots.
512 These include parameters (known equivalent to parameter slots)
513 and cse'd or loop-moved constant memory addresses.
514
515 Record constant equivalents in reg_equiv_constant
516 so they will be substituted by find_reloads.
517 Record memory equivalents in reg_mem_equiv so they can
518 be substituted eventually by altering the REG-rtx's. */
519
520 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
521 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
522 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
523 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
524 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
525 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
526 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
527 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
528 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
529 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
530 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
531 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
532 cannot_omit_stores = (char *) alloca (max_regno);
533 bzero (cannot_omit_stores, max_regno);
534
535 #ifdef SMALL_REGISTER_CLASSES
536 CLEAR_HARD_REG_SET (forbidden_regs);
537 #endif
538
539 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
540 Also find all paradoxical subregs and find largest such for each pseudo.
541 On machines with small register classes, record hard registers that
542 are used for user variables. These can never be used for spills. */
543
544 for (insn = first; insn; insn = NEXT_INSN (insn))
545 {
546 rtx set = single_set (insn);
547
548 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
549 {
550 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
551 if (note
552 #ifdef LEGITIMATE_PIC_OPERAND_P
553 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
554 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
555 #endif
556 )
557 {
558 rtx x = XEXP (note, 0);
559 i = REGNO (SET_DEST (set));
560 if (i > LAST_VIRTUAL_REGISTER)
561 {
562 if (GET_CODE (x) == MEM)
563 reg_equiv_memory_loc[i] = x;
564 else if (CONSTANT_P (x))
565 {
566 if (LEGITIMATE_CONSTANT_P (x))
567 reg_equiv_constant[i] = x;
568 else
569 reg_equiv_memory_loc[i]
570 = force_const_mem (GET_MODE (SET_DEST (set)), x);
571 }
572 else
573 continue;
574
575 /* If this register is being made equivalent to a MEM
576 and the MEM is not SET_SRC, the equivalencing insn
577 is one with the MEM as a SET_DEST and it occurs later.
578 So don't mark this insn now. */
579 if (GET_CODE (x) != MEM
580 || rtx_equal_p (SET_SRC (set), x))
581 reg_equiv_init[i] = insn;
582 }
583 }
584 }
585
586 /* If this insn is setting a MEM from a register equivalent to it,
587 this is the equivalencing insn. */
588 else if (set && GET_CODE (SET_DEST (set)) == MEM
589 && GET_CODE (SET_SRC (set)) == REG
590 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
591 && rtx_equal_p (SET_DEST (set),
592 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
593 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
594
595 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
596 scan_paradoxical_subregs (PATTERN (insn));
597 }
598
599 /* Does this function require a frame pointer? */
600
601 frame_pointer_needed = (! flag_omit_frame_pointer
602 #ifdef EXIT_IGNORE_STACK
603 /* ?? If EXIT_IGNORE_STACK is set, we will not save
604 and restore sp for alloca. So we can't eliminate
605 the frame pointer in that case. At some point,
606 we should improve this by emitting the
607 sp-adjusting insns for this case. */
608 || (current_function_calls_alloca
609 && EXIT_IGNORE_STACK)
610 #endif
611 || FRAME_POINTER_REQUIRED);
612
613 num_eliminable = 0;
614
615 /* Initialize the table of registers to eliminate. The way we do this
616 depends on how the eliminable registers were defined. */
617 #ifdef ELIMINABLE_REGS
618 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
619 {
620 ep->can_eliminate = ep->can_eliminate_previous
621 = (CAN_ELIMINATE (ep->from, ep->to)
622 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
623 }
624 #else
625 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
626 = ! frame_pointer_needed;
627 #endif
628
629 /* Count the number of eliminable registers and build the FROM and TO
630 REG rtx's. Note that code in gen_rtx will cause, e.g.,
631 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
632 We depend on this. */
633 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
634 {
635 num_eliminable += ep->can_eliminate;
636 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
637 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
638 }
639
640 num_labels = max_label_num () - get_first_label_num ();
641
642 /* Allocate the tables used to store offset information at labels. */
643 offsets_known_at = (char *) alloca (num_labels);
644 offsets_at
645 = (int (*)[NUM_ELIMINABLE_REGS])
646 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
647
648 offsets_known_at -= get_first_label_num ();
649 offsets_at -= get_first_label_num ();
650
651 /* Alter each pseudo-reg rtx to contain its hard reg number.
652 Assign stack slots to the pseudos that lack hard regs or equivalents.
653 Do not touch virtual registers. */
654
655 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
656 alter_reg (i, -1);
657
658 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
659 because the stack size may be a part of the offset computation for
660 register elimination. */
661 assign_stack_local (BLKmode, 0, 0);
662
663 /* If we have some registers we think can be eliminated, scan all insns to
664 see if there is an insn that sets one of these registers to something
665 other than itself plus a constant. If so, the register cannot be
666 eliminated. Doing this scan here eliminates an extra pass through the
667 main reload loop in the most common case where register elimination
668 cannot be done. */
669 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
670 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
671 || GET_CODE (insn) == CALL_INSN)
672 note_stores (PATTERN (insn), mark_not_eliminable);
673
674 #ifndef REGISTER_CONSTRAINTS
675 /* If all the pseudo regs have hard regs,
676 except for those that are never referenced,
677 we know that no reloads are needed. */
678 /* But that is not true if there are register constraints, since
679 in that case some pseudos might be in the wrong kind of hard reg. */
680
681 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
682 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
683 break;
684
685 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
686 return;
687 #endif
688
689 /* Compute the order of preference for hard registers to spill.
690 Store them by decreasing preference in potential_reload_regs. */
691
692 order_regs_for_reload ();
693
694 /* So far, no hard regs have been spilled. */
695 n_spills = 0;
696 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
697 spill_reg_order[i] = -1;
698
699 /* Initialize to -1, which means take the first spill register. */
700 last_spill_reg = -1;
701
702 /* On most machines, we can't use any register explicitly used in the
703 rtl as a spill register. But on some, we have to. Those will have
704 taken care to keep the life of hard regs as short as possible. */
705
706 #ifndef SMALL_REGISTER_CLASSES
707 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
708 #endif
709
710 /* Spill any hard regs that we know we can't eliminate. */
711 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
712 if (! ep->can_eliminate)
713 spill_hard_reg (ep->from, global, dumpfile, 1);
714
715 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
716 if (frame_pointer_needed)
717 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
718 #endif
719
720 if (global)
721 for (i = 0; i < N_REG_CLASSES; i++)
722 {
723 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
724 bzero (basic_block_needs[i], n_basic_blocks);
725 }
726
727 /* From now on, we need to emit any moves without making new pseudos. */
728 reload_in_progress = 1;
729
730 /* This loop scans the entire function each go-round
731 and repeats until one repetition spills no additional hard regs. */
732
733 /* This flag is set when a pseudo reg is spilled,
734 to require another pass. Note that getting an additional reload
735 reg does not necessarily imply any pseudo reg was spilled;
736 sometimes we find a reload reg that no pseudo reg was allocated in. */
737 something_changed = 1;
738 /* This flag is set if there are any insns that require reloading. */
739 something_needs_reloads = 0;
740 /* This flag is set if there are any insns that require register
741 eliminations. */
742 something_needs_elimination = 0;
743 while (something_changed)
744 {
745 rtx after_call = 0;
746
747 /* For each class, number of reload regs needed in that class.
748 This is the maximum over all insns of the needs in that class
749 of the individual insn. */
750 int max_needs[N_REG_CLASSES];
751 /* For each class, size of group of consecutive regs
752 that is needed for the reloads of this class. */
753 int group_size[N_REG_CLASSES];
754 /* For each class, max number of consecutive groups needed.
755 (Each group contains group_size[CLASS] consecutive registers.) */
756 int max_groups[N_REG_CLASSES];
757 /* For each class, max number needed of regs that don't belong
758 to any of the groups. */
759 int max_nongroups[N_REG_CLASSES];
760 /* For each class, the machine mode which requires consecutive
761 groups of regs of that class.
762 If two different modes ever require groups of one class,
763 they must be the same size and equally restrictive for that class,
764 otherwise we can't handle the complexity. */
765 enum machine_mode group_mode[N_REG_CLASSES];
766 /* Record the insn where each maximum need is first found. */
767 rtx max_needs_insn[N_REG_CLASSES];
768 rtx max_groups_insn[N_REG_CLASSES];
769 rtx max_nongroups_insn[N_REG_CLASSES];
770 rtx x;
771 int starting_frame_size = get_frame_size ();
772 int previous_frame_pointer_needed = frame_pointer_needed;
773 static char *reg_class_names[] = REG_CLASS_NAMES;
774
775 something_changed = 0;
776 bzero ((char *) max_needs, sizeof max_needs);
777 bzero ((char *) max_groups, sizeof max_groups);
778 bzero ((char *) max_nongroups, sizeof max_nongroups);
779 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
780 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
781 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
782 bzero ((char *) group_size, sizeof group_size);
783 for (i = 0; i < N_REG_CLASSES; i++)
784 group_mode[i] = VOIDmode;
785
786 /* Keep track of which basic blocks are needing the reloads. */
787 this_block = 0;
788
789 /* Remember whether any element of basic_block_needs
790 changes from 0 to 1 in this pass. */
791 new_basic_block_needs = 0;
792
793 /* Reset all offsets on eliminable registers to their initial values. */
794 #ifdef ELIMINABLE_REGS
795 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
796 {
797 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
798 ep->previous_offset = ep->offset
799 = ep->max_offset = ep->initial_offset;
800 }
801 #else
802 #ifdef INITIAL_FRAME_POINTER_OFFSET
803 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
804 #else
805 if (!FRAME_POINTER_REQUIRED)
806 abort ();
807 reg_eliminate[0].initial_offset = 0;
808 #endif
809 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
810 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
811 #endif
812
813 num_not_at_initial_offset = 0;
814
815 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
816
817 /* Set a known offset for each forced label to be at the initial offset
818 of each elimination. We do this because we assume that all
819 computed jumps occur from a location where each elimination is
820 at its initial offset. */
821
822 for (x = forced_labels; x; x = XEXP (x, 1))
823 if (XEXP (x, 0))
824 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
825
826 /* For each pseudo register that has an equivalent location defined,
827 try to eliminate any eliminable registers (such as the frame pointer)
828 assuming initial offsets for the replacement register, which
829 is the normal case.
830
831 If the resulting location is directly addressable, substitute
832 the MEM we just got directly for the old REG.
833
834 If it is not addressable but is a constant or the sum of a hard reg
835 and constant, it is probably not addressable because the constant is
836 out of range, in that case record the address; we will generate
837 hairy code to compute the address in a register each time it is
838 needed. Similarly if it is a hard register, but one that is not
839 valid as an address register.
840
841 If the location is not addressable, but does not have one of the
842 above forms, assign a stack slot. We have to do this to avoid the
843 potential of producing lots of reloads if, e.g., a location involves
844 a pseudo that didn't get a hard register and has an equivalent memory
845 location that also involves a pseudo that didn't get a hard register.
846
847 Perhaps at some point we will improve reload_when_needed handling
848 so this problem goes away. But that's very hairy. */
849
850 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
851 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
852 {
853 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
854
855 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
856 XEXP (x, 0)))
857 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
858 else if (CONSTANT_P (XEXP (x, 0))
859 || (GET_CODE (XEXP (x, 0)) == REG
860 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
861 || (GET_CODE (XEXP (x, 0)) == PLUS
862 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
863 && (REGNO (XEXP (XEXP (x, 0), 0))
864 < FIRST_PSEUDO_REGISTER)
865 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
866 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
867 else
868 {
869 /* Make a new stack slot. Then indicate that something
870 changed so we go back and recompute offsets for
871 eliminable registers because the allocation of memory
872 below might change some offset. reg_equiv_{mem,address}
873 will be set up for this pseudo on the next pass around
874 the loop. */
875 reg_equiv_memory_loc[i] = 0;
876 reg_equiv_init[i] = 0;
877 alter_reg (i, -1);
878 something_changed = 1;
879 }
880 }
881
882 /* If we allocated another pseudo to the stack, redo elimination
883 bookkeeping. */
884 if (something_changed)
885 continue;
886
887 /* If caller-saves needs a group, initialize the group to include
888 the size and mode required for caller-saves. */
889
890 if (caller_save_group_size > 1)
891 {
892 group_mode[(int) caller_save_spill_class] = Pmode;
893 group_size[(int) caller_save_spill_class] = caller_save_group_size;
894 }
895
896 /* Compute the most additional registers needed by any instruction.
897 Collect information separately for each class of regs. */
898
899 for (insn = first; insn; insn = NEXT_INSN (insn))
900 {
901 if (global && this_block + 1 < n_basic_blocks
902 && insn == basic_block_head[this_block+1])
903 ++this_block;
904
905 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
906 might include REG_LABEL), we need to see what effects this
907 has on the known offsets at labels. */
908
909 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
910 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
911 && REG_NOTES (insn) != 0))
912 set_label_offsets (insn, insn, 0);
913
914 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
915 {
916 /* Nonzero means don't use a reload reg that overlaps
917 the place where a function value can be returned. */
918 rtx avoid_return_reg = 0;
919
920 rtx old_body = PATTERN (insn);
921 int old_code = INSN_CODE (insn);
922 rtx old_notes = REG_NOTES (insn);
923 int did_elimination = 0;
924
925 /* To compute the number of reload registers of each class
 926		 needed for an insn, we must simulate what choose_reload_regs
927 can do. We do this by splitting an insn into an "input" and
928 an "output" part. RELOAD_OTHER reloads are used in both.
929 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
930 which must be live over the entire input section of reloads,
931 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
932 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
933 inputs.
934
935 The registers needed for output are RELOAD_OTHER and
936 RELOAD_FOR_OUTPUT, which are live for the entire output
937 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
938 reloads for each operand.
939
940 The total number of registers needed is the maximum of the
941 inputs and outputs. */
942
943 struct needs
944 {
945 /* [0] is normal, [1] is nongroup. */
946 int regs[2][N_REG_CLASSES];
947 int groups[N_REG_CLASSES];
948 };
949
950 /* Each `struct needs' corresponds to one RELOAD_... type. */
951 struct {
952 struct needs other;
953 struct needs input;
954 struct needs output;
955 struct needs insn;
956 struct needs other_addr;
957 struct needs op_addr;
958 struct needs op_addr_reload;
959 struct needs in_addr[MAX_RECOG_OPERANDS];
960 struct needs out_addr[MAX_RECOG_OPERANDS];
961 } insn_needs;
962
963 /* If needed, eliminate any eliminable registers. */
964 if (num_eliminable)
965 did_elimination = eliminate_regs_in_insn (insn, 0);
966
967 #ifdef SMALL_REGISTER_CLASSES
968 /* Set avoid_return_reg if this is an insn
969 that might use the value of a function call. */
970 if (GET_CODE (insn) == CALL_INSN)
971 {
972 if (GET_CODE (PATTERN (insn)) == SET)
973 after_call = SET_DEST (PATTERN (insn));
974 else if (GET_CODE (PATTERN (insn)) == PARALLEL
975 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
976 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
977 else
978 after_call = 0;
979 }
980 else if (after_call != 0
981 && !(GET_CODE (PATTERN (insn)) == SET
982 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
983 {
984 if (reg_referenced_p (after_call, PATTERN (insn)))
985 avoid_return_reg = after_call;
986 after_call = 0;
987 }
988 #endif /* SMALL_REGISTER_CLASSES */
989
990 /* Analyze the instruction. */
991 find_reloads (insn, 0, spill_indirect_levels, global,
992 spill_reg_order);
993
994 /* Remember for later shortcuts which insns had any reloads or
995 register eliminations.
996
997 One might think that it would be worthwhile to mark insns
998 that need register replacements but not reloads, but this is
999 not safe because find_reloads may do some manipulation of
1000 the insn (such as swapping commutative operands), which would
1001 be lost when we restore the old pattern after register
1002 replacement. So the actions of find_reloads must be redone in
1003 subsequent passes or in reload_as_needed.
1004
1005 However, it is safe to mark insns that need reloads
1006 but not register replacement. */
1007
1008 PUT_MODE (insn, (did_elimination ? QImode
1009 : n_reloads ? HImode
1010 : GET_MODE (insn) == DImode ? DImode
1011 : VOIDmode));
1012
1013 /* Discard any register replacements done. */
1014 if (did_elimination)
1015 {
1016 obstack_free (&reload_obstack, reload_firstobj);
1017 PATTERN (insn) = old_body;
1018 INSN_CODE (insn) = old_code;
1019 REG_NOTES (insn) = old_notes;
1020 something_needs_elimination = 1;
1021 }
1022
1023 /* If this insn has no reloads, we need not do anything except
1024 in the case of a CALL_INSN when we have caller-saves and
1025 caller-save needs reloads. */
1026
1027 if (n_reloads == 0
1028 && ! (GET_CODE (insn) == CALL_INSN
1029 && caller_save_spill_class != NO_REGS))
1030 continue;
1031
1032 something_needs_reloads = 1;
1033 bzero ((char *) &insn_needs, sizeof insn_needs);
1034
1035 /* Count each reload once in every class
1036 containing the reload's own class. */
1037
1038 for (i = 0; i < n_reloads; i++)
1039 {
1040 register enum reg_class *p;
1041 enum reg_class class = reload_reg_class[i];
1042 int size;
1043 enum machine_mode mode;
1044 int nongroup_need;
1045 struct needs *this_needs;
1046
1047 /* Don't count the dummy reloads, for which one of the
1048 regs mentioned in the insn can be used for reloading.
1049 Don't count optional reloads.
1050 Don't count reloads that got combined with others. */
1051 if (reload_reg_rtx[i] != 0
1052 || reload_optional[i] != 0
1053 || (reload_out[i] == 0 && reload_in[i] == 0
1054 && ! reload_secondary_p[i]))
1055 continue;
1056
1057 /* Show that a reload register of this class is needed
1058 in this basic block. We do not use insn_needs and
1059 insn_groups because they are overly conservative for
1060 this purpose. */
1061 if (global && ! basic_block_needs[(int) class][this_block])
1062 {
1063 basic_block_needs[(int) class][this_block] = 1;
1064 new_basic_block_needs = 1;
1065 }
1066
1067
1068 mode = reload_inmode[i];
1069 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1070 mode = reload_outmode[i];
1071 size = CLASS_MAX_NREGS (class, mode);
1072
1073 /* If this class doesn't want a group, determine if we have
1074 a nongroup need or a regular need. We have a nongroup
1075 need if this reload conflicts with a group reload whose
1076 class intersects with this reload's class. */
1077
1078 nongroup_need = 0;
1079 if (size == 1)
1080 for (j = 0; j < n_reloads; j++)
1081 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1082 (GET_MODE_SIZE (reload_outmode[j])
1083 > GET_MODE_SIZE (reload_inmode[j]))
1084 ? reload_outmode[j]
1085 : reload_inmode[j])
1086 > 1)
1087 && (!reload_optional[j])
1088 && (reload_in[j] != 0 || reload_out[j] != 0
1089 || reload_secondary_p[j])
1090 && reloads_conflict (i, j)
1091 && reg_classes_intersect_p (class,
1092 reload_reg_class[j]))
1093 {
1094 nongroup_need = 1;
1095 break;
1096 }
1097
1098 /* Decide which time-of-use to count this reload for. */
1099 switch (reload_when_needed[i])
1100 {
1101 case RELOAD_OTHER:
1102 this_needs = &insn_needs.other;
1103 break;
1104 case RELOAD_FOR_INPUT:
1105 this_needs = &insn_needs.input;
1106 break;
1107 case RELOAD_FOR_OUTPUT:
1108 this_needs = &insn_needs.output;
1109 break;
1110 case RELOAD_FOR_INSN:
1111 this_needs = &insn_needs.insn;
1112 break;
1113 case RELOAD_FOR_OTHER_ADDRESS:
1114 this_needs = &insn_needs.other_addr;
1115 break;
1116 case RELOAD_FOR_INPUT_ADDRESS:
1117 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1118 break;
1119 case RELOAD_FOR_OUTPUT_ADDRESS:
1120 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1121 break;
1122 case RELOAD_FOR_OPERAND_ADDRESS:
1123 this_needs = &insn_needs.op_addr;
1124 break;
1125 case RELOAD_FOR_OPADDR_ADDR:
1126 this_needs = &insn_needs.op_addr_reload;
1127 break;
1128 }
1129
1130 if (size > 1)
1131 {
1132 enum machine_mode other_mode, allocate_mode;
1133
1134 /* Count number of groups needed separately from
1135 number of individual regs needed. */
1136 this_needs->groups[(int) class]++;
1137 p = reg_class_superclasses[(int) class];
1138 while (*p != LIM_REG_CLASSES)
1139 this_needs->groups[(int) *p++]++;
1140
1141 /* Record size and mode of a group of this class. */
1142 /* If more than one size group is needed,
1143 make all groups the largest needed size. */
1144 if (group_size[(int) class] < size)
1145 {
1146 other_mode = group_mode[(int) class];
1147 allocate_mode = mode;
1148
1149 group_size[(int) class] = size;
1150 group_mode[(int) class] = mode;
1151 }
1152 else
1153 {
1154 other_mode = mode;
1155 allocate_mode = group_mode[(int) class];
1156 }
1157
1158 /* Crash if two dissimilar machine modes both need
1159 groups of consecutive regs of the same class. */
1160
1161 if (other_mode != VOIDmode && other_mode != allocate_mode
1162 && ! modes_equiv_for_class_p (allocate_mode,
1163 other_mode, class))
1164 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1165 insn);
1166 }
1167 else if (size == 1)
1168 {
1169 this_needs->regs[nongroup_need][(int) class] += 1;
1170 p = reg_class_superclasses[(int) class];
1171 while (*p != LIM_REG_CLASSES)
1172 this_needs->regs[nongroup_need][(int) *p++] += 1;
1173 }
1174 else
1175 abort ();
1176 }
1177
1178 /* All reloads have been counted for this insn;
1179 now merge the various times of use.
1180 This sets insn_needs, etc., to the maximum total number
1181 of registers needed at any point in this insn. */
1182
1183 for (i = 0; i < N_REG_CLASSES; i++)
1184 {
1185 int in_max, out_max;
1186
1187 /* Compute normal and nongroup needs. */
1188 for (j = 0; j <= 1; j++)
1189 {
1190 for (in_max = 0, out_max = 0, k = 0;
1191 k < reload_n_operands; k++)
1192 {
1193 in_max
1194 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1195 out_max
1196 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1197 }
1198
1199 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1200 and operand addresses but not things used to reload
1201 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1202 don't conflict with things needed to reload inputs or
1203 outputs. */
1204
1205 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1206 insn_needs.op_addr_reload.regs[j][i]),
1207 in_max);
1208
1209 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1210
1211 insn_needs.input.regs[j][i]
1212 = MAX (insn_needs.input.regs[j][i]
1213 + insn_needs.op_addr.regs[j][i]
1214 + insn_needs.insn.regs[j][i],
1215 in_max + insn_needs.input.regs[j][i]);
1216
1217 insn_needs.output.regs[j][i] += out_max;
1218 insn_needs.other.regs[j][i]
1219 += MAX (MAX (insn_needs.input.regs[j][i],
1220 insn_needs.output.regs[j][i]),
1221 insn_needs.other_addr.regs[j][i]);
1222
1223 }
1224
1225 /* Now compute group needs. */
1226 for (in_max = 0, out_max = 0, j = 0;
1227 j < reload_n_operands; j++)
1228 {
1229 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1230 out_max
1231 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1232 }
1233
1234 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1235 insn_needs.op_addr_reload.groups[i]),
1236 in_max);
1237 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1238
1239 insn_needs.input.groups[i]
1240 = MAX (insn_needs.input.groups[i]
1241 + insn_needs.op_addr.groups[i]
1242 + insn_needs.insn.groups[i],
1243 in_max + insn_needs.input.groups[i]);
1244
1245 insn_needs.output.groups[i] += out_max;
1246 insn_needs.other.groups[i]
1247 += MAX (MAX (insn_needs.input.groups[i],
1248 insn_needs.output.groups[i]),
1249 insn_needs.other_addr.groups[i]);
1250 }
1251
1252 /* If this is a CALL_INSN and caller-saves will need
1253 a spill register, act as if the spill register is
1254 needed for this insn. However, the spill register
1255 can be used by any reload of this insn, so we only
1256 need do something if no need for that class has
1257 been recorded.
1258
1259 The assumption that every CALL_INSN will trigger a
1260 caller-save is highly conservative, however, the number
1261 of cases where caller-saves will need a spill register but
1262 a block containing a CALL_INSN won't need a spill register
1263 of that class should be quite rare.
1264
1265 If a group is needed, the size and mode of the group will
1266 have been set up at the beginning of this loop. */
1267
1268 if (GET_CODE (insn) == CALL_INSN
1269 && caller_save_spill_class != NO_REGS)
1270 {
1271 /* See if this register would conflict with any reload
1272 that needs a group. */
1273 int nongroup_need = 0;
1274 int *caller_save_needs;
1275
1276 for (j = 0; j < n_reloads; j++)
1277 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1278 (GET_MODE_SIZE (reload_outmode[j])
1279 > GET_MODE_SIZE (reload_inmode[j]))
1280 ? reload_outmode[j]
1281 : reload_inmode[j])
1282 > 1)
1283 && reg_classes_intersect_p (caller_save_spill_class,
1284 reload_reg_class[j]))
1285 {
1286 nongroup_need = 1;
1287 break;
1288 }
1289
1290 caller_save_needs
1291 = (caller_save_group_size > 1
1292 ? insn_needs.other.groups
1293 : insn_needs.other.regs[nongroup_need]);
1294
1295 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1296 {
1297 register enum reg_class *p
1298 = reg_class_superclasses[(int) caller_save_spill_class];
1299
1300 caller_save_needs[(int) caller_save_spill_class]++;
1301
1302 while (*p != LIM_REG_CLASSES)
1303 caller_save_needs[(int) *p++] += 1;
1304 }
1305
1306 /* Show that this basic block will need a register of
1307 this class. */
1308
1309 if (global
1310 && ! (basic_block_needs[(int) caller_save_spill_class]
1311 [this_block]))
1312 {
1313 basic_block_needs[(int) caller_save_spill_class]
1314 [this_block] = 1;
1315 new_basic_block_needs = 1;
1316 }
1317 }
1318
1319 #ifdef SMALL_REGISTER_CLASSES
1320 /* If this insn stores the value of a function call,
1321 and that value is in a register that has been spilled,
1322 and if the insn needs a reload in a class
1323 that might use that register as the reload register,
1324	     then add an extra need in that class.
1325 This makes sure we have a register available that does
1326 not overlap the return value. */
1327
1328 if (avoid_return_reg)
1329 {
1330 int regno = REGNO (avoid_return_reg);
1331 int nregs
1332 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1333 int r;
1334 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1335
1336 /* First compute the "basic needs", which counts a
1337 need only in the smallest class in which it
1338 is required. */
1339
1340 bcopy ((char *) insn_needs.other.regs[0],
1341 (char *) basic_needs, sizeof basic_needs);
1342 bcopy ((char *) insn_needs.other.groups,
1343 (char *) basic_groups, sizeof basic_groups);
1344
1345 for (i = 0; i < N_REG_CLASSES; i++)
1346 {
1347 enum reg_class *p;
1348
1349 if (basic_needs[i] >= 0)
1350 for (p = reg_class_superclasses[i];
1351 *p != LIM_REG_CLASSES; p++)
1352 basic_needs[(int) *p] -= basic_needs[i];
1353
1354 if (basic_groups[i] >= 0)
1355 for (p = reg_class_superclasses[i];
1356 *p != LIM_REG_CLASSES; p++)
1357 basic_groups[(int) *p] -= basic_groups[i];
1358 }
1359
1360 /* Now count extra regs if there might be a conflict with
1361 the return value register. */
1362
1363 for (r = regno; r < regno + nregs; r++)
1364 if (spill_reg_order[r] >= 0)
1365 for (i = 0; i < N_REG_CLASSES; i++)
1366 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1367 {
1368 if (basic_needs[i] > 0)
1369 {
1370 enum reg_class *p;
1371
1372 insn_needs.other.regs[0][i]++;
1373 p = reg_class_superclasses[i];
1374 while (*p != LIM_REG_CLASSES)
1375 insn_needs.other.regs[0][(int) *p++]++;
1376 }
1377 if (basic_groups[i] > 0)
1378 {
1379 enum reg_class *p;
1380
1381 insn_needs.other.groups[i]++;
1382 p = reg_class_superclasses[i];
1383 while (*p != LIM_REG_CLASSES)
1384 insn_needs.other.groups[(int) *p++]++;
1385 }
1386 }
1387 }
1388 #endif /* SMALL_REGISTER_CLASSES */
1389
1390 /* For each class, collect maximum need of any insn. */
1391
1392 for (i = 0; i < N_REG_CLASSES; i++)
1393 {
1394 if (max_needs[i] < insn_needs.other.regs[0][i])
1395 {
1396 max_needs[i] = insn_needs.other.regs[0][i];
1397 max_needs_insn[i] = insn;
1398 }
1399 if (max_groups[i] < insn_needs.other.groups[i])
1400 {
1401 max_groups[i] = insn_needs.other.groups[i];
1402 max_groups_insn[i] = insn;
1403 }
1404 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1405 {
1406 max_nongroups[i] = insn_needs.other.regs[1][i];
1407 max_nongroups_insn[i] = insn;
1408 }
1409 }
1410 }
1411 /* Note that there is a continue statement above. */
1412 }
1413
1414 /* If we allocated any new memory locations, make another pass
1415 since it might have changed elimination offsets. */
1416 if (starting_frame_size != get_frame_size ())
1417 something_changed = 1;
1418
1419 if (dumpfile)
1420 for (i = 0; i < N_REG_CLASSES; i++)
1421 {
1422 if (max_needs[i] > 0)
1423 fprintf (dumpfile,
1424 ";; Need %d reg%s of class %s (for insn %d).\n",
1425 max_needs[i], max_needs[i] == 1 ? "" : "s",
1426 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1427 if (max_nongroups[i] > 0)
1428 fprintf (dumpfile,
1429 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1430 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1431 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1432 if (max_groups[i] > 0)
1433 fprintf (dumpfile,
1434 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1435 max_groups[i], max_groups[i] == 1 ? "" : "s",
1436 mode_name[(int) group_mode[i]],
1437 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1438 }
1439
1440 /* If we have caller-saves, set up the save areas and see if caller-save
1441 will need a spill register. */
1442
1443 if (caller_save_needed
1444 && ! setup_save_areas (&something_changed)
1445 && caller_save_spill_class == NO_REGS)
1446 {
1447 /* The class we will need depends on whether the machine
1448 supports the sum of two registers for an address; see
1449 find_address_reloads for details. */
1450
1451 caller_save_spill_class
1452 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1453 caller_save_group_size
1454 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1455 something_changed = 1;
1456 }
1457
1458 /* See if anything that happened changes which eliminations are valid.
1459 For example, on the Sparc, whether or not the frame pointer can
1460 be eliminated can depend on what registers have been used. We need
1461 not check some conditions again (such as flag_omit_frame_pointer)
1462 since they can't have changed. */
1463
1464 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1465 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1466 #ifdef ELIMINABLE_REGS
1467 || ! CAN_ELIMINATE (ep->from, ep->to)
1468 #endif
1469 )
1470 ep->can_eliminate = 0;
1471
1472 /* Look for the case where we have discovered that we can't replace
1473 register A with register B and that means that we will now be
1474 trying to replace register A with register C. This means we can
1475 no longer replace register C with register B and we need to disable
1476 such an elimination, if it exists. This occurs often with A == ap,
1477 B == sp, and C == fp. */
1478
1479 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1480 {
1481 struct elim_table *op;
1482 register int new_to = -1;
1483
1484 if (! ep->can_eliminate && ep->can_eliminate_previous)
1485 {
1486 /* Find the current elimination for ep->from, if there is a
1487 new one. */
1488 for (op = reg_eliminate;
1489 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1490 if (op->from == ep->from && op->can_eliminate)
1491 {
1492 new_to = op->to;
1493 break;
1494 }
1495
1496 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1497 disable it. */
1498 for (op = reg_eliminate;
1499 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1500 if (op->from == new_to && op->to == ep->to)
1501 op->can_eliminate = 0;
1502 }
1503 }
1504
1505 /* See if any registers that we thought we could eliminate the previous
1506 time are no longer eliminable. If so, something has changed and we
1507 must spill the register. Also, recompute the number of eliminable
1508 registers and see if the frame pointer is needed; it is if there is
1509 no elimination of the frame pointer that we can perform. */
1510
1511 frame_pointer_needed = 1;
1512 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1513 {
1514 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1515 && ep->to != HARD_FRAME_POINTER_REGNUM)
1516 frame_pointer_needed = 0;
1517
1518 if (! ep->can_eliminate && ep->can_eliminate_previous)
1519 {
1520 ep->can_eliminate_previous = 0;
1521 spill_hard_reg (ep->from, global, dumpfile, 1);
1522 something_changed = 1;
1523 num_eliminable--;
1524 }
1525 }
1526
1527 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1528 /* If we didn't need a frame pointer last time, but we do now, spill
1529 the hard frame pointer. */
1530 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1531 {
1532 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1533 something_changed = 1;
1534 }
1535 #endif
1536
1537 /* If all needs are met, we win. */
1538
1539 for (i = 0; i < N_REG_CLASSES; i++)
1540 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1541 break;
1542 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1543 break;
1544
1545 /* Not all needs are met; must spill some hard regs. */
1546
1547 /* Put all registers spilled so far back in potential_reload_regs, but
1548 put them at the front, since we've already spilled most of the
1549	     pseudos in them (we might have left some pseudos unspilled if they
1550	     were in a block that didn't need any spill registers of a conflicting
1551	     class).  We used to try to mark off the need for those registers,
1552 but doing so properly is very complex and reallocating them is the
1553 simpler approach. First, "pack" potential_reload_regs by pushing
1554 any nonnegative entries towards the end. That will leave room
1555 for the registers we already spilled.
1556
1557 Also, undo the marking of the spill registers from the last time
1558	     around in FORBIDDEN_REGS since we will probably be allocating
1559 them again below.
1560
1561 ??? It is theoretically possible that we might end up not using one
1562 of our previously-spilled registers in this allocation, even though
1563 they are at the head of the list. It's not clear what to do about
1564 this, but it was no better before, when we marked off the needs met
1565 by the previously-spilled registers. With the current code, globals
1566 can be allocated into these registers, but locals cannot. */
1567
1568 if (n_spills)
1569 {
1570 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1571 if (potential_reload_regs[i] != -1)
1572 potential_reload_regs[j--] = potential_reload_regs[i];
1573
1574 for (i = 0; i < n_spills; i++)
1575 {
1576 potential_reload_regs[i] = spill_regs[i];
1577 spill_reg_order[spill_regs[i]] = -1;
1578 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1579 }
1580
1581 n_spills = 0;
1582 }
1583
1584 /* Now find more reload regs to satisfy the remaining need
1585 Do it by ascending class number, since otherwise a reg
1586 might be spilled for a big class and might fail to count
1587 for a smaller class even though it belongs to that class.
1588
1589 Count spilled regs in `spills', and add entries to
1590 `spill_regs' and `spill_reg_order'.
1591
1592 ??? Note there is a problem here.
1593 When there is a need for a group in a high-numbered class,
1594 and also need for non-group regs that come from a lower class,
1595 the non-group regs are chosen first. If there aren't many regs,
1596 they might leave no room for a group.
1597
1598 This was happening on the 386. To fix it, we added the code
1599 that calls possible_group_p, so that the lower class won't
1600 break up the last possible group.
1601
1602 Really fixing the problem would require changes above
1603 in counting the regs already spilled, and in choose_reload_regs.
1604 It might be hard to avoid introducing bugs there. */
1605
1606 CLEAR_HARD_REG_SET (counted_for_groups);
1607 CLEAR_HARD_REG_SET (counted_for_nongroups);
1608
1609 for (class = 0; class < N_REG_CLASSES; class++)
1610 {
1611 /* First get the groups of registers.
1612 If we got single registers first, we might fragment
1613 possible groups. */
1614 while (max_groups[class] > 0)
1615 {
1616 /* If any single spilled regs happen to form groups,
1617 count them now. Maybe we don't really need
1618 to spill another group. */
1619 count_possible_groups (group_size, group_mode, max_groups,
1620 class);
1621
1622 if (max_groups[class] <= 0)
1623 break;
1624
1625 /* Groups of size 2 (the only groups used on most machines)
1626 are treated specially. */
1627 if (group_size[class] == 2)
1628 {
1629 /* First, look for a register that will complete a group. */
1630 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1631 {
1632 int other;
1633
1634 j = potential_reload_regs[i];
1635 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1636 &&
1637 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1638 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1639 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1640 && HARD_REGNO_MODE_OK (other, group_mode[class])
1641 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1642 other)
1643 /* We don't want one part of another group.
1644 We could get "two groups" that overlap! */
1645 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1646 ||
1647 (j < FIRST_PSEUDO_REGISTER - 1
1648 && (other = j + 1, spill_reg_order[other] >= 0)
1649 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1650 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1651 && HARD_REGNO_MODE_OK (j, group_mode[class])
1652 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1653 other)
1654 && ! TEST_HARD_REG_BIT (counted_for_groups,
1655 other))))
1656 {
1657 register enum reg_class *p;
1658
1659 /* We have found one that will complete a group,
1660 so count off one group as provided. */
1661 max_groups[class]--;
1662 p = reg_class_superclasses[class];
1663 while (*p != LIM_REG_CLASSES)
1664 {
1665 if (group_size [(int) *p] <= group_size [class])
1666 max_groups[(int) *p]--;
1667 p++;
1668 }
1669
1670 /* Indicate both these regs are part of a group. */
1671 SET_HARD_REG_BIT (counted_for_groups, j);
1672 SET_HARD_REG_BIT (counted_for_groups, other);
1673 break;
1674 }
1675 }
1676 /* We can't complete a group, so start one. */
1677 #ifdef SMALL_REGISTER_CLASSES
1678 /* Look for a pair neither of which is explicitly used. */
1679 if (i == FIRST_PSEUDO_REGISTER)
1680 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1681 {
1682 int k;
1683 j = potential_reload_regs[i];
1684 /* Verify that J+1 is a potential reload reg. */
1685 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1686 if (potential_reload_regs[k] == j + 1)
1687 break;
1688 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1689 && k < FIRST_PSEUDO_REGISTER
1690 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1691 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1692 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1693 && HARD_REGNO_MODE_OK (j, group_mode[class])
1694 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1695 j + 1)
1696 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1697 /* Reject J at this stage
1698 if J+1 was explicitly used. */
1699 && ! regs_explicitly_used[j + 1])
1700 break;
1701 }
1702 #endif
1703 /* Now try any group at all
1704 whose registers are not in bad_spill_regs. */
1705 if (i == FIRST_PSEUDO_REGISTER)
1706 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1707 {
1708 int k;
1709 j = potential_reload_regs[i];
1710 /* Verify that J+1 is a potential reload reg. */
1711 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1712 if (potential_reload_regs[k] == j + 1)
1713 break;
1714 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1715 && k < FIRST_PSEUDO_REGISTER
1716 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1717 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1718 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1719 && HARD_REGNO_MODE_OK (j, group_mode[class])
1720 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1721 j + 1)
1722 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1723 break;
1724 }
1725
1726 /* I should be the index in potential_reload_regs
1727 of the new reload reg we have found. */
1728
1729 if (i >= FIRST_PSEUDO_REGISTER)
1730 {
1731 /* There are no groups left to spill. */
1732 spill_failure (max_groups_insn[class]);
1733 failure = 1;
1734 goto failed;
1735 }
1736 else
1737 something_changed
1738 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1739 global, dumpfile);
1740 }
1741 else
1742 {
1743 /* For groups of more than 2 registers,
1744 look for a sufficient sequence of unspilled registers,
1745 and spill them all at once. */
1746 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1747 {
1748 int k;
1749
1750 j = potential_reload_regs[i];
1751 if (j >= 0
1752 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1753 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1754 {
1755 /* Check each reg in the sequence. */
1756 for (k = 0; k < group_size[class]; k++)
1757 if (! (spill_reg_order[j + k] < 0
1758 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1759 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1760 break;
1761 /* We got a full sequence, so spill them all. */
1762 if (k == group_size[class])
1763 {
1764 register enum reg_class *p;
1765 for (k = 0; k < group_size[class]; k++)
1766 {
1767 int idx;
1768 SET_HARD_REG_BIT (counted_for_groups, j + k);
1769 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1770 if (potential_reload_regs[idx] == j + k)
1771 break;
1772 something_changed
1773 |= new_spill_reg (idx, class,
1774 max_needs, NULL_PTR,
1775 global, dumpfile);
1776 }
1777
1778 /* We have found one that will complete a group,
1779 so count off one group as provided. */
1780 max_groups[class]--;
1781 p = reg_class_superclasses[class];
1782 while (*p != LIM_REG_CLASSES)
1783 {
1784 if (group_size [(int) *p]
1785 <= group_size [class])
1786 max_groups[(int) *p]--;
1787 p++;
1788 }
1789 break;
1790 }
1791 }
1792 }
1793 /* We couldn't find any registers for this reload.
1794 Avoid going into an infinite loop. */
1795 if (i >= FIRST_PSEUDO_REGISTER)
1796 {
1797 /* There are no groups left. */
1798 spill_failure (max_groups_insn[class]);
1799 failure = 1;
1800 goto failed;
1801 }
1802 }
1803 }
1804
1805 /* Now similarly satisfy all need for single registers. */
1806
1807 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1808 {
1809 #ifdef SMALL_REGISTER_CLASSES
1810 /* This should be right for all machines, but only the 386
1811 is known to need it, so this conditional plays safe.
1812 ??? For 2.5, try making this unconditional. */
1813 /* If we spilled enough regs, but they weren't counted
1814 against the non-group need, see if we can count them now.
1815 If so, we can avoid some actual spilling. */
1816 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1817 for (i = 0; i < n_spills; i++)
1818 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1819 spill_regs[i])
1820 && !TEST_HARD_REG_BIT (counted_for_groups,
1821 spill_regs[i])
1822 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1823 spill_regs[i])
1824 && max_nongroups[class] > 0)
1825 {
1826 register enum reg_class *p;
1827
1828 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1829 max_nongroups[class]--;
1830 p = reg_class_superclasses[class];
1831 while (*p != LIM_REG_CLASSES)
1832 max_nongroups[(int) *p++]--;
1833 }
1834 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1835 break;
1836 #endif
1837
1838 /* Consider the potential reload regs that aren't
1839 yet in use as reload regs, in order of preference.
1840 Find the most preferred one that's in this class. */
1841
1842 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1843 if (potential_reload_regs[i] >= 0
1844 && TEST_HARD_REG_BIT (reg_class_contents[class],
1845 potential_reload_regs[i])
1846 /* If this reg will not be available for groups,
1847 pick one that does not foreclose possible groups.
1848 This is a kludge, and not very general,
1849 but it should be sufficient to make the 386 work,
1850 and the problem should not occur on machines with
1851 more registers. */
1852 && (max_nongroups[class] == 0
1853 || possible_group_p (potential_reload_regs[i], max_groups)))
1854 break;
1855
1856 /* If we couldn't get a register, try to get one even if we
1857 might foreclose possible groups. This may cause problems
1858 later, but that's better than aborting now, since it is
1859 possible that we will, in fact, be able to form the needed
1860 group even with this allocation. */
1861
1862 if (i >= FIRST_PSEUDO_REGISTER
1863 && (asm_noperands (max_needs[class] > 0
1864 ? max_needs_insn[class]
1865 : max_nongroups_insn[class])
1866 < 0))
1867 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1868 if (potential_reload_regs[i] >= 0
1869 && TEST_HARD_REG_BIT (reg_class_contents[class],
1870 potential_reload_regs[i]))
1871 break;
1872
1873 /* I should be the index in potential_reload_regs
1874 of the new reload reg we have found. */
1875
1876 if (i >= FIRST_PSEUDO_REGISTER)
1877 {
1878 /* There are no possible registers left to spill. */
1879 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1880 : max_nongroups_insn[class]);
1881 failure = 1;
1882 goto failed;
1883 }
1884 else
1885 something_changed
1886 |= new_spill_reg (i, class, max_needs, max_nongroups,
1887 global, dumpfile);
1888 }
1889 }
1890 }
1891
1892 /* If global-alloc was run, notify it of any register eliminations we have
1893 done. */
1894 if (global)
1895 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1896 if (ep->can_eliminate)
1897 mark_elimination (ep->from, ep->to);
1898
1899 /* Insert code to save and restore call-clobbered hard regs
1900 around calls. Tell if what mode to use so that we will process
1901 those insns in reload_as_needed if we have to. */
1902
1903 if (caller_save_needed)
1904 save_call_clobbered_regs (num_eliminable ? QImode
1905 : caller_save_spill_class != NO_REGS ? HImode
1906 : VOIDmode);
1907
1908 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1909 If that insn didn't set the register (i.e., it copied the register to
1910 memory), just delete that insn instead of the equivalencing insn plus
1911 anything now dead. If we call delete_dead_insn on that insn, we may
1912 delete the insn that actually sets the register if the register die
1913 there and that is incorrect. */
1914
1915 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1916 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1917 && GET_CODE (reg_equiv_init[i]) != NOTE)
1918 {
1919 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1920 delete_dead_insn (reg_equiv_init[i]);
1921 else
1922 {
1923 PUT_CODE (reg_equiv_init[i], NOTE);
1924 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1925 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1926 }
1927 }
1928
1929 /* Use the reload registers where necessary
1930 by generating move instructions to move the must-be-register
1931 values into or out of the reload registers. */
1932
1933 if (something_needs_reloads || something_needs_elimination
1934 || (caller_save_needed && num_eliminable)
1935 || caller_save_spill_class != NO_REGS)
1936 reload_as_needed (first, global);
1937
1938 /* If we were able to eliminate the frame pointer, show that it is no
   longer live at the start of any basic block.  If it is live by
1940 virtue of being in a pseudo, that pseudo will be marked live
1941 and hence the frame pointer will be known to be live via that
1942 pseudo. */
1943
1944 if (! frame_pointer_needed)
1945 for (i = 0; i < n_basic_blocks; i++)
1946 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1947 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1948 % REGSET_ELT_BITS));
1949
1950 /* Come here (with failure set nonzero) if we can't get enough spill regs
1951 and we decide not to abort about it. */
1952 failed:
1953
1954 reload_in_progress = 0;
1955
1956 /* Now eliminate all pseudo regs by modifying them into
1957 their equivalent memory references.
1958 The REG-rtx's for the pseudos are modified in place,
1959 so all insns that used to refer to them now refer to memory.
1960
1961 For a reg that has a reg_equiv_address, all those insns
1962 were changed by reloading so that no insns refer to it any longer;
1963 but the DECL_RTL of a variable decl may refer to it,
1964 and if so this causes the debugging info to mention the variable. */
1965
1966 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1967 {
1968 rtx addr = 0;
1969 int in_struct = 0;
1970 if (reg_equiv_mem[i])
1971 {
1972 addr = XEXP (reg_equiv_mem[i], 0);
1973 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1974 }
1975 if (reg_equiv_address[i])
1976 addr = reg_equiv_address[i];
1977 if (addr)
1978 {
1979 if (reg_renumber[i] < 0)
1980 {
1981 rtx reg = regno_reg_rtx[i];
1982 XEXP (reg, 0) = addr;
1983 REG_USERVAR_P (reg) = 0;
1984 MEM_IN_STRUCT_P (reg) = in_struct;
1985 PUT_CODE (reg, MEM);
1986 }
1987 else if (reg_equiv_mem[i])
1988 XEXP (reg_equiv_mem[i], 0) = addr;
1989 }
1990 }
1991
1992 #ifdef PRESERVE_DEATH_INFO_REGNO_P
1993 /* Make a pass over all the insns and remove death notes for things that
1994 are no longer registers or no longer die in the insn (e.g., an input
1995 and output pseudo being tied). */
1996
1997 for (insn = first; insn; insn = NEXT_INSN (insn))
1998 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1999 {
2000 rtx note, next;
2001
2002 for (note = REG_NOTES (insn); note; note = next)
2003 {
2004 next = XEXP (note, 1);
2005 if (REG_NOTE_KIND (note) == REG_DEAD
2006 && (GET_CODE (XEXP (note, 0)) != REG
2007 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2008 remove_note (insn, note);
2009 }
2010 }
2011 #endif
2012
2013 /* Indicate that we no longer have known memory locations or constants. */
2014 reg_equiv_constant = 0;
2015 reg_equiv_memory_loc = 0;
2016
2017 if (scratch_list)
2018 free (scratch_list);
2019 scratch_list = 0;
2020 if (scratch_block)
2021 free (scratch_block);
2022 scratch_block = 0;
2023
2024 return failure;
2025 }
2026 \f
/* Nonzero if, after spilling reg REGNO for non-groups,
   it will still be possible to find a group if we still need one.

   REGNO is the hard register about to be counted against the non-group
   need.  MAX_GROUPS is indexed by register class and gives the number
   of register groups still needed for each class.  */

static int
possible_group_p (regno, max_groups)
     int regno;
     int *max_groups;
{
  int i;
  int class = (int) NO_REGS;

  /* Find a class that still needs a group, if any.  */
  for (i = 0; i < (int) N_REG_CLASSES; i++)
    if (max_groups[i] > 0)
      {
	class = i;
	break;
      }

  /* If no class needs a group, spilling REGNO cannot hurt.  */
  if (class == (int) NO_REGS)
    return 1;

  /* Consider each pair of consecutive registers.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
    {
      /* Ignore pairs that include reg REGNO.  */
      if (i == regno || i + 1 == regno)
	continue;

      /* Ignore pairs that are outside the class that needs the group.
	 ??? Here we fail to handle the case where two different classes
	 independently need groups.  But this never happens with our
	 current machine descriptions.  */
      if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
	     && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
	continue;

      /* A pair of consecutive regs we can still spill does the trick.  */
      if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
	return 1;

      /* A pair of one already spilled and one we can spill does it
	 provided the one already spilled is not otherwise reserved
	 (i.e. not already counted against the group or non-group need).  */
      if (spill_reg_order[i] < 0
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
	  && spill_reg_order[i + 1] >= 0
	  && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
	  && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
	return 1;
      /* Likewise with the roles of the two registers swapped.  */
      if (spill_reg_order[i + 1] < 0
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
	  && spill_reg_order[i] >= 0
	  && ! TEST_HARD_REG_BIT (counted_for_groups, i)
	  && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
	return 1;
    }

  /* No usable pair remains, so spilling REGNO would foreclose a group.  */
  return 0;
}
2087 \f
/* Count any groups of CLASS that can be formed from the registers recently
   spilled.

   GROUP_SIZE and GROUP_MODE are indexed by register class and give,
   respectively, the number of consecutive registers a group needs and the
   mode in which the group must be valid.  MAX_GROUPS gives the remaining
   group need per class and is decremented for each group found here.  */

static void
count_possible_groups (group_size, group_mode, max_groups, class)
     int *group_size;
     enum machine_mode *group_mode;
     int *max_groups;
     int class;
{
  HARD_REG_SET new;
  int i, j;

  /* Now find all consecutive groups of spilled registers
     and mark each group off against the need for such groups.
     But don't count them against ordinary need, yet.  */

  if (group_size[class] == 0)
    return;

  CLEAR_HARD_REG_SET (new);

  /* Make a mask of all the regs that are spill regs in class I.
     Regs already committed to a group or to the non-group need
     are not available and so are left out of the mask.  */
  for (i = 0; i < n_spills; i++)
    if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
	&& ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
	&& ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
      SET_HARD_REG_BIT (new, spill_regs[i]);

  /* Find each consecutive group of them.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
    if (TEST_HARD_REG_BIT (new, i)
	&& i + group_size[class] <= FIRST_PSEUDO_REGISTER
	&& HARD_REGNO_MODE_OK (i, group_mode[class]))
      {
	/* Check that every register of the candidate run is in the mask.  */
	for (j = 1; j < group_size[class]; j++)
	  if (! TEST_HARD_REG_BIT (new, i + j))
	    break;

	if (j == group_size[class])
	  {
	    /* We found a group.  Mark it off against this class's need for
	       groups, and against each superclass too.  */
	    register enum reg_class *p;

	    max_groups[class]--;
	    p = reg_class_superclasses[class];
	    while (*p != LIM_REG_CLASSES)
	      {
		/* A group of CLASS also satisfies a superclass only if
		   the superclass's groups are no larger.  */
		if (group_size [(int) *p] <= group_size [class])
		  max_groups[(int) *p]--;
		p++;
	      }

	    /* Don't count these registers again.  */
	    for (j = 0; j < group_size[class]; j++)
	      SET_HARD_REG_BIT (counted_for_groups, i + j);
	  }

	/* Skip to the last reg in this group.  When i is incremented above,
	   it will then point to the first reg of the next possible group.  */
	i += j - 1;
      }
}
2152 \f
2153 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2154 another mode that needs to be reloaded for the same register class CLASS.
2155 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2156 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2157
2158 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2159 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2160 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2161 causes unnecessary failures on machines requiring alignment of register
2162 groups when the two modes are different sizes, because the larger mode has
2163 more strict alignment rules than the smaller mode. */
2164
2165 static int
2166 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2167 enum machine_mode allocate_mode, other_mode;
2168 enum reg_class class;
2169 {
2170 register int regno;
2171 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2172 {
2173 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2174 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2175 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2176 return 0;
2177 }
2178 return 1;
2179 }
2180
2181 /* Handle the failure to find a register to spill.
2182 INSN should be one of the insns which needed this particular spill reg. */
2183
2184 static void
2185 spill_failure (insn)
2186 rtx insn;
2187 {
2188 if (asm_noperands (PATTERN (insn)) >= 0)
2189 error_for_asm (insn, "`asm' needs too many reloads");
2190 else
2191 fatal_insn ("Unable to find a register to spill.", insn);
2192 }
2193
/* Add a new register to the tables of available spill-registers
   (as well as spilling all pseudos allocated to the register).
   I is the index of this register in potential_reload_regs.
   CLASS is the regclass whose need is being satisfied.
   MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
   so that this register can count off against them.
   MAX_NONGROUPS is 0 if this register is part of a group.
   GLOBAL and DUMPFILE are the same as the args that `reload' got.

   The return value is the value returned by spill_hard_reg, forced to 1
   if registers remain to be eliminated and this register was not
   previously live (see the comment near the end of this function).  */

static int
new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
     int i;
     int class;
     int *max_needs;
     int *max_nongroups;
     int global;
     FILE *dumpfile;
{
  register enum reg_class *p;
  int val;
  int regno = potential_reload_regs[i];

  if (i >= FIRST_PSEUDO_REGISTER)
    abort ();	/* Caller failed to find any register.  */

  if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
    fatal ("fixed or forbidden register was spilled.\n\
This may be due to a compiler bug or to impossible asm\n\
statements or clauses.");

  /* Make reg REGNO an additional reload reg.  */

  potential_reload_regs[i] = -1;
  spill_regs[n_spills] = regno;
  spill_reg_order[regno] = n_spills;
  if (dumpfile)
    fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);

  /* Clear off the needs we just satisfied.
     Every superclass of CLASS is also satisfied by this register.  */

  max_needs[class]--;
  p = reg_class_superclasses[class];
  while (*p != LIM_REG_CLASSES)
    max_needs[(int) *p++]--;

  /* If there is a non-group need, count this register against it too,
     and reserve the register so it is not later counted for a group.  */
  if (max_nongroups && max_nongroups[class] > 0)
    {
      SET_HARD_REG_BIT (counted_for_nongroups, regno);
      max_nongroups[class]--;
      p = reg_class_superclasses[class];
      while (*p != LIM_REG_CLASSES)
	max_nongroups[(int) *p++]--;
    }

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);

  /* If there are some registers still to eliminate and this register
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.
     (If new pseudos were spilled, thus requiring more space, VAL would have
     been set non-zero by the call to spill_hard_reg above since additional
     reloads may be needed in that case.

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */
  if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
    val = 1;

  regs_ever_live[spill_regs[n_spills]] = 1;
  n_spills++;

  return val;
}
2274 \f
2275 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2276 data that is dead in INSN. */
2277
2278 static void
2279 delete_dead_insn (insn)
2280 rtx insn;
2281 {
2282 rtx prev = prev_real_insn (insn);
2283 rtx prev_dest;
2284
2285 /* If the previous insn sets a register that dies in our insn, delete it
2286 too. */
2287 if (prev && GET_CODE (PATTERN (prev)) == SET
2288 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2289 && reg_mentioned_p (prev_dest, PATTERN (insn))
2290 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2291 delete_dead_insn (prev);
2292
2293 PUT_CODE (insn, NOTE);
2294 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2295 NOTE_SOURCE_FILE (insn) = 0;
2296 }
2297
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.  */

static void
alter_reg (i, from_reg)
     register int i;
     int from_reg;
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (GET_CODE (regno_reg_rtx[i]) != REG)
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  REGNO (regno_reg_rtx[i])
    = reg_renumber[i] >= 0 ? reg_renumber[i] : i;

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && reg_n_refs[i] > 0
      && reg_equiv_constant[i] == 0
      && reg_equiv_memory_loc[i] == 0)
    {
      register rtx x;
      int inherent_size = PSEUDO_REGNO_BYTES (i);
      int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      /* Byte offset applied to the slot address; see the big-endian
	 corrections below.  */
      int adjust = 0;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, than the previous slot.  */
      if (from_reg == -1)
	{
	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
	  if (BYTES_BIG_ENDIAN)
	    {
	      /* Cancel the big-endian correction done in assign_stack_local.
		 Get the address of the beginning of the slot.
		 This is so we can do a big-endian correction unconditionally
		 below.  */
	      adjust = inherent_size - total_size;
	    }
	}
      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size))
	x = spill_stack_slot[from_reg];
      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  The widest mode and width ever used
	     for this hard reg's slot must be accommodated.  */
	  enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
	  rtx stack_slot;
	  if (spill_stack_slot[from_reg])
	    {
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	    }
	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size, -1);
	  stack_slot = x;
	  if (BYTES_BIG_ENDIAN)
	    {
	      /* Cancel the big-endian correction done in assign_stack_local.
		 Get the address of the beginning of the slot.
		 This is so we can do a big-endian correction unconditionally
		 below.  */
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (adjust)
		{
		  stack_slot = gen_rtx (MEM, mode_for_size (total_size
							    * BITS_PER_UNIT,
							    MODE_INT, 1),
					plus_constant (XEXP (x, 0), adjust));
		  RTX_UNCHANGING_P (stack_slot)
		    = RTX_UNCHANGING_P (regno_reg_rtx[i]);
		}
	    }
	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
	adjust += (total_size - inherent_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
	{
	  x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
		       plus_constant (XEXP (x, 0), adjust));
	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
	}

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc[i] = x;
    }
}
2423
2424 /* Mark the slots in regs_ever_live for the hard regs
2425 used by pseudo-reg number REGNO. */
2426
2427 void
2428 mark_home_live (regno)
2429 int regno;
2430 {
2431 register int i, lim;
2432 i = reg_renumber[regno];
2433 if (i < 0)
2434 return;
2435 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2436 while (i < lim)
2437 regs_ever_live[i++] = 1;
2438 }
2439
2440 /* Mark the registers used in SCRATCH as being live. */
2441
2442 static void
2443 mark_scratch_live (scratch)
2444 rtx scratch;
2445 {
2446 register int i;
2447 int regno = REGNO (scratch);
2448 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2449
2450 for (i = regno; i < lim; i++)
2451 regs_ever_live[i] = 1;
2452 }
2453 \f
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is non-zero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.  */

static void
set_label_offsets (x, insn, initial_p)
     rtx x;
     rtx insn;
     int initial_p;
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      /* Non-local labels are handled elsewhere; ignore them here.  */
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = XEXP (x, 0);

      /* ... fall through ... */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x)][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && GET_CODE (tem) == BARRIER)
	{
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (x)][i];
	      /* Recount how many eliminations are away from their
		 initial offsets under the adopted label offsets.  */
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x)][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_INSN:
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ... */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
	 and hence must have all eliminations at their initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the address vector must be at their initial
	 offsets.  We want the first field for ADDR_VEC and the second
	 field for ADDR_DIFF_VEC.  */

      for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Check both arms; a `break' out of this switch falls through
	     to the code below that disables eliminations.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
    }
}
2608 \f
/* Used for communication between the next two functions to properly share
   the vector for an ASM_OPERANDS.  */

static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2613
2614 /* Scan X and replace any eliminable registers (such as fp) with a
2615 replacement (such as sp), plus an offset.
2616
2617 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2618 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2619 MEM, we are allowed to replace a sum of a register and the constant zero
2620 with the register, which we cannot do outside a MEM. In addition, we need
2621 to record the fact that a register is referenced outside a MEM.
2622
2623 If INSN is an insn, it is the insn containing X. If we replace a REG
2624 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
   CLOBBER of the pseudo after INSN so find_equiv_regs will know
   that the REG is being modified.
2627
2628 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2629 That's used when we eliminate in expressions stored in notes.
2630 This means, do not set ref_outside_mem even if the reference
2631 is outside of MEMs.
2632
2633 If we see a modification to a register we know about, take the
2634 appropriate action (see case SET, below).
2635
2636 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2637 replacements done assuming all offsets are at their initial values. If
2638 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2639 encounter, return the actual location so that find_reloads will do
2640 the proper thing. */
2641
2642 rtx
2643 eliminate_regs (x, mem_mode, insn)
2644 rtx x;
2645 enum machine_mode mem_mode;
2646 rtx insn;
2647 {
2648 enum rtx_code code = GET_CODE (x);
2649 struct elim_table *ep;
2650 int regno;
2651 rtx new;
2652 int i, j;
2653 char *fmt;
2654 int copied = 0;
2655
2656 switch (code)
2657 {
2658 case CONST_INT:
2659 case CONST_DOUBLE:
2660 case CONST:
2661 case SYMBOL_REF:
2662 case CODE_LABEL:
2663 case PC:
2664 case CC0:
2665 case ASM_INPUT:
2666 case ADDR_VEC:
2667 case ADDR_DIFF_VEC:
2668 case RETURN:
2669 return x;
2670
2671 case REG:
2672 regno = REGNO (x);
2673
2674 /* First handle the case where we encounter a bare register that
2675 is eliminable. Replace it with a PLUS. */
2676 if (regno < FIRST_PSEUDO_REGISTER)
2677 {
2678 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2679 ep++)
2680 if (ep->from_rtx == x && ep->can_eliminate)
2681 {
2682 if (! mem_mode
2683 /* Refs inside notes don't count for this purpose. */
2684 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2685 || GET_CODE (insn) == INSN_LIST)))
2686 ep->ref_outside_mem = 1;
2687 return plus_constant (ep->to_rtx, ep->previous_offset);
2688 }
2689
2690 }
2691 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2692 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2693 {
2694 /* In this case, find_reloads would attempt to either use an
2695 incorrect address (if something is not at its initial offset)
2696 or substitute an replaced address into an insn (which loses
2697 if the offset is changed by some later action). So we simply
2698 return the replaced stack slot (assuming it is changed by
2699 elimination) and ignore the fact that this is actually a
2700 reference to the pseudo. Ensure we make a copy of the
2701 address in case it is shared. */
2702 new = eliminate_regs (reg_equiv_memory_loc[regno],
2703 mem_mode, insn);
2704 if (new != reg_equiv_memory_loc[regno])
2705 {
2706 cannot_omit_stores[regno] = 1;
2707 return copy_rtx (new);
2708 }
2709 }
2710 return x;
2711
2712 case PLUS:
2713 /* If this is the sum of an eliminable register and a constant, rework
2714 the sum. */
2715 if (GET_CODE (XEXP (x, 0)) == REG
2716 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2717 && CONSTANT_P (XEXP (x, 1)))
2718 {
2719 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2720 ep++)
2721 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2722 {
2723 if (! mem_mode
2724 /* Refs inside notes don't count for this purpose. */
2725 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2726 || GET_CODE (insn) == INSN_LIST)))
2727 ep->ref_outside_mem = 1;
2728
2729 /* The only time we want to replace a PLUS with a REG (this
2730 occurs when the constant operand of the PLUS is the negative
2731 of the offset) is when we are inside a MEM. We won't want
2732 to do so at other times because that would change the
2733 structure of the insn in a way that reload can't handle.
2734 We special-case the commonest situation in
2735 eliminate_regs_in_insn, so just replace a PLUS with a
2736 PLUS here, unless inside a MEM. */
2737 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2738 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2739 return ep->to_rtx;
2740 else
2741 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2742 plus_constant (XEXP (x, 1),
2743 ep->previous_offset));
2744 }
2745
2746 /* If the register is not eliminable, we are done since the other
2747 operand is a constant. */
2748 return x;
2749 }
2750
2751 /* If this is part of an address, we want to bring any constant to the
2752 outermost PLUS. We will do this by doing register replacement in
2753 our operands and seeing if a constant shows up in one of them.
2754
2755 We assume here this is part of an address (or a "load address" insn)
2756 since an eliminable register is not likely to appear in any other
2757 context.
2758
2759 If we have (plus (eliminable) (reg)), we want to produce
2760 (plus (plus (replacement) (reg) (const))). If this was part of a
2761 normal add insn, (plus (replacement) (reg)) will be pushed as a
2762 reload. This is the desired action. */
2763
2764 {
2765 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2766 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2767
2768 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2769 {
2770 /* If one side is a PLUS and the other side is a pseudo that
2771 didn't get a hard register but has a reg_equiv_constant,
2772 we must replace the constant here since it may no longer
2773 be in the position of any operand. */
2774 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2775 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2776 && reg_renumber[REGNO (new1)] < 0
2777 && reg_equiv_constant != 0
2778 && reg_equiv_constant[REGNO (new1)] != 0)
2779 new1 = reg_equiv_constant[REGNO (new1)];
2780 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2781 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2782 && reg_renumber[REGNO (new0)] < 0
2783 && reg_equiv_constant[REGNO (new0)] != 0)
2784 new0 = reg_equiv_constant[REGNO (new0)];
2785
2786 new = form_sum (new0, new1);
2787
2788 /* As above, if we are not inside a MEM we do not want to
2789 turn a PLUS into something else. We might try to do so here
2790 for an addition of 0 if we aren't optimizing. */
2791 if (! mem_mode && GET_CODE (new) != PLUS)
2792 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2793 else
2794 return new;
2795 }
2796 }
2797 return x;
2798
2799 case MULT:
2800 /* If this is the product of an eliminable register and a
2801 constant, apply the distribute law and move the constant out
2802 so that we have (plus (mult ..) ..). This is needed in order
2803 to keep load-address insns valid. This case is pathalogical.
2804 We ignore the possibility of overflow here. */
2805 if (GET_CODE (XEXP (x, 0)) == REG
2806 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2807 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2808 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2809 ep++)
2810 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2811 {
2812 if (! mem_mode
2813 /* Refs inside notes don't count for this purpose. */
2814 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2815 || GET_CODE (insn) == INSN_LIST)))
2816 ep->ref_outside_mem = 1;
2817
2818 return
2819 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2820 ep->previous_offset * INTVAL (XEXP (x, 1)));
2821 }
2822
2823 /* ... fall through ... */
2824
2825 case CALL:
2826 case COMPARE:
2827 case MINUS:
2828 case DIV: case UDIV:
2829 case MOD: case UMOD:
2830 case AND: case IOR: case XOR:
2831 case ROTATERT: case ROTATE:
2832 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2833 case NE: case EQ:
2834 case GE: case GT: case GEU: case GTU:
2835 case LE: case LT: case LEU: case LTU:
2836 {
2837 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2838 rtx new1
2839 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2840
2841 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2842 return gen_rtx (code, GET_MODE (x), new0, new1);
2843 }
2844 return x;
2845
2846 case EXPR_LIST:
2847 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2848 if (XEXP (x, 0))
2849 {
2850 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2851 if (new != XEXP (x, 0))
2852 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2853 }
2854
2855 /* ... fall through ... */
2856
2857 case INSN_LIST:
2858 /* Now do eliminations in the rest of the chain. If this was
2859 an EXPR_LIST, this might result in allocating more memory than is
2860 strictly needed, but it simplifies the code. */
2861 if (XEXP (x, 1))
2862 {
2863 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2864 if (new != XEXP (x, 1))
2865 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2866 }
2867 return x;
2868
2869 case PRE_INC:
2870 case POST_INC:
2871 case PRE_DEC:
2872 case POST_DEC:
2873 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2874 if (ep->to_rtx == XEXP (x, 0))
2875 {
2876 int size = GET_MODE_SIZE (mem_mode);
2877
2878 /* If more bytes than MEM_MODE are pushed, account for them. */
2879 #ifdef PUSH_ROUNDING
2880 if (ep->to_rtx == stack_pointer_rtx)
2881 size = PUSH_ROUNDING (size);
2882 #endif
2883 if (code == PRE_DEC || code == POST_DEC)
2884 ep->offset += size;
2885 else
2886 ep->offset -= size;
2887 }
2888
2889 /* Fall through to generic unary operation case. */
2890 case USE:
2891 case STRICT_LOW_PART:
2892 case NEG: case NOT:
2893 case SIGN_EXTEND: case ZERO_EXTEND:
2894 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2895 case FLOAT: case FIX:
2896 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2897 case ABS:
2898 case SQRT:
2899 case FFS:
2900 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2901 if (new != XEXP (x, 0))
2902 return gen_rtx (code, GET_MODE (x), new);
2903 return x;
2904
2905 case SUBREG:
2906 /* Similar to above processing, but preserve SUBREG_WORD.
2907 Convert (subreg (mem)) to (mem) if not paradoxical.
2908 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2909 pseudo didn't get a hard reg, we must replace this with the
2910 eliminated version of the memory location because push_reloads
2911 may do the replacement in certain circumstances. */
2912 if (GET_CODE (SUBREG_REG (x)) == REG
2913 && (GET_MODE_SIZE (GET_MODE (x))
2914 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2915 && reg_equiv_memory_loc != 0
2916 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2917 {
2918 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2919 mem_mode, insn);
2920
2921 /* If we didn't change anything, we must retain the pseudo. */
2922 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2923 new = SUBREG_REG (x);
2924 else
2925 {
2926 /* Otherwise, ensure NEW isn't shared in case we have to reload
2927 it. */
2928 new = copy_rtx (new);
2929
2930 /* In this case, we must show that the pseudo is used in this
2931 insn so that delete_output_reload will do the right thing. */
2932 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2933 && GET_CODE (insn) != INSN_LIST)
2934 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
2935 insn);
2936 }
2937 }
2938 else
2939 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2940
2941 if (new != XEXP (x, 0))
2942 {
2943 if (GET_CODE (new) == MEM
2944 && (GET_MODE_SIZE (GET_MODE (x))
2945 <= GET_MODE_SIZE (GET_MODE (new)))
2946 #ifdef LOAD_EXTEND_OP
2947 /* On these machines we will be reloading what is
2948 inside the SUBREG if it originally was a pseudo and
2949 the inner and outer modes are both a word or
2950 smaller. So leave the SUBREG then. */
2951 && ! (GET_CODE (SUBREG_REG (x)) == REG
2952 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2953 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD
2954 && (GET_MODE_SIZE (GET_MODE (x))
2955 > GET_MODE_SIZE (GET_MODE (new)))
2956 && INTEGRAL_MODE_P (GET_MODE (new))
2957 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
2958 #endif
2959 )
2960 {
2961 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2962 enum machine_mode mode = GET_MODE (x);
2963
2964 if (BYTES_BIG_ENDIAN)
2965 offset += (MIN (UNITS_PER_WORD,
2966 GET_MODE_SIZE (GET_MODE (new)))
2967 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2968
2969 PUT_MODE (new, mode);
2970 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2971 return new;
2972 }
2973 else
2974 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2975 }
2976
2977 return x;
2978
2979 case CLOBBER:
2980 /* If clobbering a register that is the replacement register for an
2981 elimination we still think can be performed, note that it cannot
2982 be performed. Otherwise, we need not be concerned about it. */
2983 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2984 if (ep->to_rtx == XEXP (x, 0))
2985 ep->can_eliminate = 0;
2986
2987 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2988 if (new != XEXP (x, 0))
2989 return gen_rtx (code, GET_MODE (x), new);
2990 return x;
2991
2992 case ASM_OPERANDS:
2993 {
2994 rtx *temp_vec;
2995 /* Properly handle sharing input and constraint vectors. */
2996 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2997 {
2998 /* When we come to a new vector not seen before,
2999 scan all its elements; keep the old vector if none
3000 of them changes; otherwise, make a copy. */
3001 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3002 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3003 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3004 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
3005 mem_mode, insn);
3006
3007 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3008 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3009 break;
3010
3011 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3012 new_asm_operands_vec = old_asm_operands_vec;
3013 else
3014 new_asm_operands_vec
3015 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3016 }
3017
3018 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3019 if (new_asm_operands_vec == old_asm_operands_vec)
3020 return x;
3021
3022 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3023 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3024 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
3025 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3026 ASM_OPERANDS_SOURCE_FILE (x),
3027 ASM_OPERANDS_SOURCE_LINE (x));
3028 new->volatil = x->volatil;
3029 return new;
3030 }
3031
3032 case SET:
3033 /* Check for setting a register that we know about. */
3034 if (GET_CODE (SET_DEST (x)) == REG)
3035 {
3036 /* See if this is setting the replacement register for an
3037 elimination.
3038
3039 If DEST is the hard frame pointer, we do nothing because we
3040 assume that all assignments to the frame pointer are for
3041 non-local gotos and are being done at a time when they are valid
3042 and do not disturb anything else. Some machines want to
3043 eliminate a fake argument pointer (or even a fake frame pointer)
3044 with either the real frame or the stack pointer. Assignments to
3045 the hard frame pointer must not prevent this elimination. */
3046
3047 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3048 ep++)
3049 if (ep->to_rtx == SET_DEST (x)
3050 && SET_DEST (x) != hard_frame_pointer_rtx)
3051 {
3052 /* If it is being incremented, adjust the offset. Otherwise,
3053 this elimination can't be done. */
3054 rtx src = SET_SRC (x);
3055
3056 if (GET_CODE (src) == PLUS
3057 && XEXP (src, 0) == SET_DEST (x)
3058 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3059 ep->offset -= INTVAL (XEXP (src, 1));
3060 else
3061 ep->can_eliminate = 0;
3062 }
3063
3064 /* Now check to see we are assigning to a register that can be
3065 eliminated. If so, it must be as part of a PARALLEL, since we
3066 will not have been called if this is a single SET. So indicate
3067 that we can no longer eliminate this reg. */
3068 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3069 ep++)
3070 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3071 ep->can_eliminate = 0;
3072 }
3073
3074 /* Now avoid the loop below in this common case. */
3075 {
3076 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3077 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3078
3079 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3080 write a CLOBBER insn. */
3081 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3082 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3083 && GET_CODE (insn) != INSN_LIST)
3084 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3085
3086 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3087 return gen_rtx (SET, VOIDmode, new0, new1);
3088 }
3089
3090 return x;
3091
3092 case MEM:
3093 /* Our only special processing is to pass the mode of the MEM to our
3094 recursive call and copy the flags. While we are here, handle this
3095 case more efficiently. */
3096 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3097 if (new != XEXP (x, 0))
3098 {
3099 new = gen_rtx (MEM, GET_MODE (x), new);
3100 new->volatil = x->volatil;
3101 new->unchanging = x->unchanging;
3102 new->in_struct = x->in_struct;
3103 return new;
3104 }
3105 else
3106 return x;
3107 }
3108
3109 /* Process each of our operands recursively. If any have changed, make a
3110 copy of the rtx. */
3111 fmt = GET_RTX_FORMAT (code);
3112 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3113 {
3114 if (*fmt == 'e')
3115 {
3116 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3117 if (new != XEXP (x, i) && ! copied)
3118 {
3119 rtx new_x = rtx_alloc (code);
3120 bcopy ((char *) x, (char *) new_x,
3121 (sizeof (*new_x) - sizeof (new_x->fld)
3122 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3123 x = new_x;
3124 copied = 1;
3125 }
3126 XEXP (x, i) = new;
3127 }
3128 else if (*fmt == 'E')
3129 {
3130 int copied_vec = 0;
3131 for (j = 0; j < XVECLEN (x, i); j++)
3132 {
3133 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3134 if (new != XVECEXP (x, i, j) && ! copied_vec)
3135 {
3136 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3137 &XVECEXP (x, i, 0));
3138 if (! copied)
3139 {
3140 rtx new_x = rtx_alloc (code);
3141 bcopy ((char *) x, (char *) new_x,
3142 (sizeof (*new_x) - sizeof (new_x->fld)
3143 + (sizeof (new_x->fld[0])
3144 * GET_RTX_LENGTH (code))));
3145 x = new_x;
3146 copied = 1;
3147 }
3148 XVEC (x, i) = new_v;
3149 copied_vec = 1;
3150 }
3151 XVECEXP (x, i, j) = new;
3152 }
3153 }
3154 }
3155
3156 return x;
3157 }
3158 \f
/* Scan INSN and eliminate all eliminable registers in it.

   If REPLACE is nonzero, do the replacement destructively.  Also
   delete the insn as dead if it is setting an eliminable register.

   If REPLACE is zero, do all our allocations in reload_obstack.

   If no eliminations were done and this insn doesn't require any elimination
   processing (these are not identical conditions: it might be updating sp,
   but not referencing fp; this needs to be seen during reload_as_needed so
   that the offset between fp and sp can be taken into consideration), zero
   is returned.  Otherwise, 1 is returned.  */

static int
eliminate_regs_in_insn (insn, replace)
     rtx insn;
     int replace;
{
  rtx old_body = PATTERN (insn);
  rtx old_set = single_set (insn);
  rtx new_body;
  int val = 0;			/* Return value: nonzero if anything changed.  */
  struct elim_table *ep;

  if (! replace)
    push_obstacks (&reload_obstack, &reload_obstack);

  if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  {
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	    /* If this is setting the frame pointer register to the
	       hardware frame pointer register and this is an elimination
	       that will be done (tested above), this insn is really
	       adjusting the frame pointer downward to compensate for
	       the adjustment done before a nonlocal goto.  */
	    if (ep->from == FRAME_POINTER_REGNUM
		&& ep->to == HARD_FRAME_POINTER_REGNUM)
	      {
		rtx src = SET_SRC (old_set);
		int offset, ok = 0;

		if (src == ep->to_rtx)
		  offset = 0, ok = 1;
		else if (GET_CODE (src) == PLUS
			 && GET_CODE (XEXP (src, 0)) == CONST_INT)
		  offset = INTVAL (XEXP (src, 0)), ok = 1;

		if (ok)
		  {
		    if (replace)
		      {
			/* Note: this inner SRC deliberately shadows the
			   outer one; it is the rewritten source.  */
			rtx src
			  = plus_constant (ep->to_rtx, offset - ep->offset);

			/* First see if this insn remains valid when we
			   make the change.  If not, keep the INSN_CODE
			   the same and let reload fix it up.  */
			validate_change (insn, &SET_SRC (old_set), src, 1);
			validate_change (insn, &SET_DEST (old_set),
					 ep->to_rtx, 1);
			if (! apply_change_group ())
			  {
			    SET_SRC (old_set) = src;
			    SET_DEST (old_set) = ep->to_rtx;
			  }
		      }

		    val = 1;
		    goto done;
		  }
	      }
#endif

	    /* In this case this insn isn't serving a useful purpose.  We
	       will delete it in reload_as_needed once we know that this
	       elimination is, in fact, being done.

	       If REPLACE isn't set, we can't delete this insn, but needn't
	       process it since it won't be used unless something changes.  */
	    if (replace)
	      delete_dead_insn (insn);
	    val = 1;
	    goto done;
	  }

      /* Check for (set (reg) (plus (reg from) (offset))) where the offset
	 in the insn is the negative of the offset in FROM.  Substitute
	 (set (reg) (reg to)) for the insn and change its code.

	 We have to do this here, rather than in eliminate_regs, so that we can
	 change the insn code.  */

      if (GET_CODE (SET_SRC (old_set)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
	  && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
	      && ep->can_eliminate)
	    {
	      /* We must stop at the first elimination that will be used.
		 If this one would replace the PLUS with a REG, do it
		 now.  Otherwise, quit the loop and let eliminate_regs
		 do its normal replacement.  */
	      if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
		{
		  /* We assume here that we don't need a PARALLEL of
		     any CLOBBERs for this assignment.  There's not
		     much we can do if we do need it.  */
		  PATTERN (insn) = gen_rtx (SET, VOIDmode,
					    SET_DEST (old_set), ep->to_rtx);
		  INSN_CODE (insn) = -1;
		  val = 1;
		  goto done;
		}

	      break;
	    }
    }

  /* Reset the sharing state used by the ASM_OPERANDS case of
     eliminate_regs before substituting in this insn's body.  */
  old_asm_operands_vec = 0;

  /* Replace the body of this insn with a substituted form.  If we changed
     something, return non-zero.

     If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.  */

  new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
  if (new_body != old_body)
    {
      /* If we aren't replacing things permanently and we changed something,
	 make another copy to ensure that all the RTL is new.  Otherwise
	 things can go wrong if find_reloads swaps commutative operands
	 and one is inside RTL that has been copied while the other is not.  */

      /* Don't copy an asm_operands because (1) there's no need and (2)
	 copy_rtx can't do it properly when there are multiple outputs.  */
      if (! replace && asm_noperands (old_body) < 0)
	new_body = copy_rtx (new_body);

      /* If we had a move insn but now we don't, rerecognize it.  This will
	 cause spurious re-recognition if the old move had a PARALLEL since
	 the new one still will, but we can't call single_set without
	 having put NEW_BODY into the insn and the re-recognition won't
	 hurt in this rare case.  */
      if (old_set != 0
	  && ((GET_CODE (SET_SRC (old_set)) == REG
	       && (GET_CODE (new_body) != SET
		   || GET_CODE (SET_SRC (new_body)) != REG))
	      /* If this was a load from or store to memory, compare
		 the MEM in recog_operand to the one in the insn.  If they
		 are not equal, then rerecognize the insn.  */
	      || (old_set != 0
		  && ((GET_CODE (SET_SRC (old_set)) == MEM
		       && SET_SRC (old_set) != recog_operand[1])
		      || (GET_CODE (SET_DEST (old_set)) == MEM
			  && SET_DEST (old_set) != recog_operand[0])))
	      /* If this was an add insn before, rerecognize.  */
	      || GET_CODE (SET_SRC (old_set)) == PLUS))
	{
	  if (! validate_change (insn, &PATTERN (insn), new_body, 0))
	    /* If recognition fails, store the new body anyway.
	       It's normal to have recognition failures here
	       due to bizarre memory addresses; reloading will fix them.  */
	    PATTERN (insn) = new_body;
	}
      else
	PATTERN (insn) = new_body;

      val = 1;
    }

  /* Loop through all elimination pairs.  See if any have changed and
     recalculate the number not at initial offset.

     Compute the maximum offset (minimum offset if the stack does not
     grow downward) for each elimination pair.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */

  num_not_at_initial_offset = 0;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;

      if (ep->previous_offset != ep->offset)
	val = 1;

      ep->previous_offset = ep->offset;
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
	num_not_at_initial_offset++;

#ifdef STACK_GROWS_DOWNWARD
      ep->max_offset = MAX (ep->max_offset, ep->offset);
#else
      ep->max_offset = MIN (ep->max_offset, ep->offset);
#endif
    }

 done:
  /* If we changed something, perform elimination in REG_NOTES.  This is
     needed even when REPLACE is zero because a REG_DEAD note might refer
     to a register that we eliminate and could cause a different number
     of spill registers to be needed in the final reload pass than in
     the pre-passes.  Passing the note list itself as INSN tells
     eliminate_regs that these are note references (see its comment).  */
  if (val && REG_NOTES (insn) != 0)
    REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));

  if (! replace)
    pop_obstacks ();

  return val;
}
3390
3391 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3392 replacement we currently believe is valid, mark it as not eliminable if X
3393 modifies DEST in any way other than by adding a constant integer to it.
3394
3395 If DEST is the frame pointer, we do nothing because we assume that
3396 all assignments to the hard frame pointer are nonlocal gotos and are being
3397 done at a time when they are valid and do not disturb anything else.
3398 Some machines want to eliminate a fake argument pointer with either the
3399 frame or stack pointer. Assignments to the hard frame pointer must not
3400 prevent this elimination.
3401
3402 Called via note_stores from reload before starting its passes to scan
3403 the insns of the function. */
3404
3405 static void
3406 mark_not_eliminable (dest, x)
3407 rtx dest;
3408 rtx x;
3409 {
3410 register int i;
3411
3412 /* A SUBREG of a hard register here is just changing its mode. We should
3413 not see a SUBREG of an eliminable hard register, but check just in
3414 case. */
3415 if (GET_CODE (dest) == SUBREG)
3416 dest = SUBREG_REG (dest);
3417
3418 if (dest == hard_frame_pointer_rtx)
3419 return;
3420
3421 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3422 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3423 && (GET_CODE (x) != SET
3424 || GET_CODE (SET_SRC (x)) != PLUS
3425 || XEXP (SET_SRC (x), 0) != dest
3426 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3427 {
3428 reg_eliminate[i].can_eliminate_previous
3429 = reg_eliminate[i].can_eliminate = 0;
3430 num_eliminable--;
3431 }
3432 }
3433 \f
/* Kick all pseudos out of hard register REGNO.
   If GLOBAL is nonzero, try to find someplace else to put them.
   If DUMPFILE is nonzero, log actions taken on that file.

   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In that case, no
   pseudos are allowed to be in the register, even if they are only in a
   block that doesn't require spill registers, unlike the case when we are
   spilling this hard reg to produce another spill register.

   Return nonzero if any pseudos needed to be kicked out.  */

static int
spill_hard_reg (regno, global, dumpfile, cant_eliminate)
     register int regno;
     int global;
     FILE *dumpfile;
     int cant_eliminate;
{
  enum reg_class class = REGNO_REG_CLASS (regno);
  int something_changed = 0;
  register int i;

  /* Never allocate this register as a reload register for anyone else.  */
  SET_HARD_REG_BIT (forbidden_regs, regno);

  if (cant_eliminate)
    regs_ever_live[regno] = 1;

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  The three-part test below
     checks that the pseudo's hard-reg span [reg_renumber[i],
     reg_renumber[i] + nregs) contains REGNO.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] >= 0
	&& reg_renumber[i] <= regno
	&& (reg_renumber[i]
	    + HARD_REGNO_NREGS (reg_renumber[i],
				PSEUDO_REGNO_MODE (i))
	    > regno))
      {
	/* If this register belongs solely to a basic block which needed no
	   spilling of any class that this register is contained in,
	   leave it be, unless we are spilling this register because
	   it was a hard register that can't be eliminated.  */

	if (! cant_eliminate
	    && basic_block_needs[0]
	    && reg_basic_block[i] >= 0
	    && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
	  {
	    enum reg_class *p;

	    /* Also make sure no superclass of CLASS needed spills in
	       that block before deciding to leave the pseudo alone.  */
	    for (p = reg_class_superclasses[(int) class];
		 *p != LIM_REG_CLASSES; p++)
	      if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
		break;

	    if (*p == LIM_REG_CLASSES)
	      continue;
	  }

	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	/* We will need to scan everything again.  */
	something_changed = 1;
	if (global)
	  retry_global_alloc (i, forbidden_regs);

	alter_reg (i, regno);
	if (dumpfile)
	  {
	    if (reg_renumber[i] == -1)
	      fprintf (dumpfile, " Register %d now on stack.\n\n", i);
	    else
	      fprintf (dumpfile, " Register %d now in %d.\n\n",
		       i, reg_renumber[i]);
	  }
      }

  /* Also give up any SCRATCH rtxes that had been allocated to this hard
     register, subject to the same per-block exemption as above.  */
  for (i = 0; i < scratch_list_length; i++)
    {
      if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
	{
	  if (! cant_eliminate && basic_block_needs[0]
	      && ! basic_block_needs[(int) class][scratch_block[i]])
	    {
	      enum reg_class *p;

	      for (p = reg_class_superclasses[(int) class];
		   *p != LIM_REG_CLASSES; p++)
		if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
		  break;

	      if (*p == LIM_REG_CLASSES)
		continue;
	    }
	  /* Turn the rtx back into a bare SCRATCH and forget it.  */
	  PUT_CODE (scratch_list[i], SCRATCH);
	  scratch_list[i] = 0;
	  something_changed = 1;
	  continue;
	}
    }

  return something_changed;
}
3537 \f
3538 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3539 Also mark any hard registers used to store user variables as
3540 forbidden from being used for spill registers. */
3541
3542 static void
3543 scan_paradoxical_subregs (x)
3544 register rtx x;
3545 {
3546 register int i;
3547 register char *fmt;
3548 register enum rtx_code code = GET_CODE (x);
3549
3550 switch (code)
3551 {
3552 case REG:
3553 #ifdef SMALL_REGISTER_CLASSES
3554 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3555 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3556 #endif
3557 return;
3558
3559 case CONST_INT:
3560 case CONST:
3561 case SYMBOL_REF:
3562 case LABEL_REF:
3563 case CONST_DOUBLE:
3564 case CC0:
3565 case PC:
3566 case USE:
3567 case CLOBBER:
3568 return;
3569
3570 case SUBREG:
3571 if (GET_CODE (SUBREG_REG (x)) == REG
3572 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3573 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3574 = GET_MODE_SIZE (GET_MODE (x));
3575 return;
3576 }
3577
3578 fmt = GET_RTX_FORMAT (code);
3579 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3580 {
3581 if (fmt[i] == 'e')
3582 scan_paradoxical_subregs (XEXP (x, i));
3583 else if (fmt[i] == 'E')
3584 {
3585 register int j;
3586 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3587 scan_paradoxical_subregs (XVECEXP (x, i, j));
3588 }
3589 }
3590 }
3591 \f
3592 static int
3593 hard_reg_use_compare (p1, p2)
3594 struct hard_reg_n_uses *p1, *p2;
3595 {
3596 int tem = p1->uses - p2->uses;
3597 if (tem != 0) return tem;
3598 /* If regs are equally good, sort by regno,
3599 so that the results of qsort leave nothing to chance. */
3600 return p1->regno - p2->regno;
3601 }
3602
/* Choose the order to consider regs for use as reload registers
   based on how much trouble would be caused by spilling one.
   Store them in order of decreasing preference in potential_reload_regs.
   Also initializes bad_spill_regs with the regs that must never be
   used for spilling (fixed regs, the frame pointer, eliminable regs).  */

static void
order_regs_for_reload ()
{
  register int i;
  register int o = 0;		/* Next free slot in potential_reload_regs.  */
  /* Total pseudo-reg reference count; used as a "huge" increment to push
     fixed and eliminable regs to the very end of the ordering.  */
  int large = 0;

  struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];

  CLEAR_HARD_REG_SET (bad_spill_regs);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    potential_reload_regs[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      hard_reg_n_uses[i].uses = 0;
      hard_reg_n_uses[i].regno = i;
    }

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (regno >= 0)
	{
	  /* Charge this pseudo's reference count to every hard reg
	     in the group it occupies.  */
	  int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
	  while (regno < lim)
	    hard_reg_n_uses[regno++].uses += reg_n_refs[i];
	}
      large += reg_n_refs[i];
    }

  /* Now fixed registers (which cannot safely be used for reloading)
     get a very high use count so they will be considered least desirable.
     Registers used explicitly in the rtl code are almost as bad.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (fixed_regs[i])
	{
	  hard_reg_n_uses[i].uses += 2 * large + 2;
	  SET_HARD_REG_BIT (bad_spill_regs, i);
	}
      else if (regs_explicitly_used[i])
	{
	  hard_reg_n_uses[i].uses += large + 1;
#ifndef SMALL_REGISTER_CLASSES
	  /* ??? We are doing this here because of the potential that
	     bad code may be generated if a register explicitly used in
	     an insn was used as a spill register for that insn.  But
	     not using these as spill registers may lose on some machine.
	     We'll have to see how this works out.  */
	  SET_HARD_REG_BIT (bad_spill_regs, i);
#endif
	}
    }
  /* The frame pointer is never available for spilling.  */
  hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
  SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);

#ifdef ELIMINABLE_REGS
  /* If registers other than the frame pointer are eliminable, mark them as
     poor choices.  */
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
      SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
    }
#endif

  /* Prefer registers not so far used, for use in temporary loading.
     Among them, if REG_ALLOC_ORDER is defined, use that order.
     Otherwise, prefer registers not preserved by calls.  */

#ifdef REG_ALLOC_ORDER
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int regno = reg_alloc_order[i];

      if (hard_reg_n_uses[regno].uses == 0)
	potential_reload_regs[o++] = regno;
    }
#else
  /* Call-clobbered regs first: spilling them costs nothing across calls.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
#endif

  qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
	 sizeof hard_reg_n_uses[0], hard_reg_use_compare);

  /* Now add the regs that are already used,
     preferring those used less often.  The fixed and otherwise forbidden
     registers will be at the end of this list.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (hard_reg_n_uses[i].uses != 0)
      potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
}
3715 \f
/* Used in reload_as_needed to sort the spill regs: qsort comparison
   function giving increasing register-number order.  */

static int
compare_spill_regs (r1, r2)
     short *r1, *r2;
{
  int a = *r1;
  int b = *r2;

  return a - b;
}
3724
/* Reload pseudo-registers into hard regs around each insn as needed.
   Additional register load insns are output before the insn that needs it
   and perhaps store insns after insns that modify the reloaded pseudo reg.

   reg_last_reload_reg and reg_reloaded_contents keep track of
   which registers are already available in reload registers.
   We update these for the reloads that we perform,
   as the insns are scanned.

   FIRST is the first insn of the function.  LIVE_KNOWN is nonzero when
   basic-block liveness info (basic_block_needs) is valid.  */

static void
reload_as_needed (first, live_known)
     rtx first;
     int live_known;
{
  register rtx insn;
  register int i;
  int this_block = 0;		/* Index of the basic block INSN is in.  */
  rtx x;
  /* Last function-call value destination seen; used to keep the return
     register out of reloads for the insn that consumes it.  */
  rtx after_call = 0;

  bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
  bzero ((char *) spill_reg_store, sizeof spill_reg_store);
  reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
  reg_has_output_reload = (char *) alloca (max_regno);
  /* No spill reg holds anything useful yet.  */
  for (i = 0; i < n_spills; i++)
    {
      reg_reloaded_contents[i] = -1;
      reg_reloaded_insn[i] = 0;
    }

  /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
				  reg_eliminate[i].initial_offset);
      reg_eliminate[i].previous_offset
	= reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
    }
#else
  INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
  reg_eliminate[0].previous_offset
    = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

  num_not_at_initial_offset = 0;

  /* Order the spilled regs, so that allocate_reload_regs can guarantee to
     pack registers with group needs.  */
  if (n_spills > 1)
    {
      qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
      for (i = 0; i < n_spills; i++)
	spill_reg_order[spill_regs[i]] = i;
    }

  for (insn = first; insn;)
    {
      register rtx next = NEXT_INSN (insn);

      /* Notice when we move to a new basic block.  */
      if (live_known && this_block + 1 < n_basic_blocks
	  && insn == basic_block_head[this_block+1])
	++this_block;

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.  */
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (insn)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  rtx avoid_return_reg = 0;

#ifdef SMALL_REGISTER_CLASSES
	  /* Set avoid_return_reg if this is an insn
	     that might use the value of a function call.  */
	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      if (GET_CODE (PATTERN (insn)) == SET)
		after_call = SET_DEST (PATTERN (insn));
	      else if (GET_CODE (PATTERN (insn)) == PARALLEL
		       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
	      else
		after_call = 0;
	    }
	  /* A stack-pointer adjustment between the call and the use of its
	     value does not consume the return reg, so keep looking.  */
	  else if (after_call != 0
		   && !(GET_CODE (PATTERN (insn)) == SET
			&& SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
	    {
	      if (reg_referenced_p (after_call, PATTERN (insn)))
		avoid_return_reg = after_call;
	      after_call = 0;
	    }
#endif /* SMALL_REGISTER_CLASSES */

	  /* If this is a USE and CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.
	     NOTE(review): the insn's mode appears to be used here as a flag
	     set earlier in the pass (QImode = needs elimination,
	     VOIDmode below = needs no reloads) -- confirm against the
	     code that scans insns before this function runs.  */
	  if (num_eliminable && GET_MODE (insn) == QImode)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      if (GET_CODE (insn) == NOTE)
		{
		  insn = next;
		  continue;
		}
	    }

	  if (GET_MODE (insn) == VOIDmode)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      bzero (reg_has_output_reload, max_regno);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (n_reloads > 0)
	    {
	      rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
	      rtx p;
	      int class;

	      /* If this block has not had spilling done for a
		 particular class and we have any non-optionals that need a
		 spill reg in that class, abort.  */

	      for (class = 0; class < N_REG_CLASSES; class++)
		if (basic_block_needs[class] != 0
		    && basic_block_needs[class][this_block] == 0)
		  for (i = 0; i < n_reloads; i++)
		    if (class == (int) reload_reg_class[i]
			&& reload_reg_rtx[i] == 0
			&& ! reload_optional[i]
			&& (reload_in[i] != 0 || reload_out[i] != 0
			    || reload_secondary_p[i] != 0))
		      fatal_insn ("Non-optional registers need a spill register", insn);

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (insn, avoid_return_reg);

#ifdef SMALL_REGISTER_CLASSES
	      /* Merge any reloads that we didn't combine for fear of
		 increasing the number of spill registers needed but now
		 discover can be safely merged.  */
	      merge_assigned_reloads (insn);
#endif

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (insn);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insn that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads ();

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */

	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
		  if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
		      && (recog_memoized (p) < 0
			  || (insn_extract (p),
			      ! constrain_operands (INSN_CODE (p), 1))))
		    {
		      error_for_asm (insn,
				     "`asm' operand requires impossible reload");
		      /* Turn the offending reload insn into a deleted note.  */
		      PUT_CODE (p, NOTE);
		      NOTE_SOURCE_FILE (p) = 0;
		      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
		    }
	    }
	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in it
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.  */
	  note_stores (PATTERN (insn), forget_old_reloads_1);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
	    if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (PATTERN (x), forget_old_reloads_1);

#ifdef AUTO_INC_DEC
	  /* Likewise for regs altered by auto-increment in this insn.
	     But note that the reg-notes are not changed by reloading:
	     they still contain the pseudo-regs, not the spill regs.  */
	  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (reload_out[i] == XEXP (x, 0))
		    break;

		if (i == n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
	      }
#endif
	}
      /* A reload reg's contents are unknown after a label.  */
      if (GET_CODE (insn) == CODE_LABEL)
	for (i = 0; i < n_spills; i++)
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg.  */
      else if (GET_CODE (insn) == CALL_INSN)
	for (i = 0; i < n_spills; i++)
	  if (call_used_regs[spill_regs[i]])
	    {
	      reg_reloaded_contents[i] = -1;
	      reg_reloaded_insn[i] = 0;
	    }

      /* In case registers overlap, allow certain insns to invalidate
	 particular hard registers.  */

#ifdef INSN_CLOBBERS_REGNO_P
      for (i = 0 ; i < n_spills ; i++)
	if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }
#endif

      insn = next;

#ifdef USE_C_ALLOCA
      /* With the C alloca, free the garbage accumulated so far.  */
      alloca (0);
#endif
    }
}
4003
/* Discard all record of any value reloaded from X,
   or reloaded in X from someplace else;
   unless X is an output reload reg of the current insn.

   X may be a hard reg (the reload reg)
   or it may be a pseudo reg that was reloaded from.

   Called directly and also passed as the function argument to
   note_stores, so the second argument (the SET/CLOBBER body there)
   is accepted but IGNORED.  */

static void
forget_old_reloads_1 (x, ignored)
     rtx x;
     rtx ignored;
{
  register int regno;
  int nr;			/* Number of hard regs X occupies.  */
  int offset = 0;		/* Word offset accumulated from subregs.  */

  /* note_stores does give us subregs of hard regs.  */
  while (GET_CODE (x) == SUBREG)
    {
      offset += SUBREG_WORD (x);
      x = SUBREG_REG (x);
    }

  if (GET_CODE (x) != REG)
    return;

  regno = REGNO (x) + offset;

  if (regno >= FIRST_PSEUDO_REGISTER)
    nr = 1;
  else
    {
      int i;
      nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
      /* Storing into a spilled-reg invalidates its contents.
	 This can happen if a block-local pseudo is allocated to that reg
	 and it wasn't spilled because this block's total need is 0.
	 Then some insn might have an optional reload and use this reg.  */
      for (i = 0; i < nr; i++)
	if (spill_reg_order[regno + i] >= 0
	    /* But don't do this if the reg actually serves as an output
	       reload reg in the current instruction.  */
	    && (n_reloads == 0
		|| ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
	  {
	    reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
	    reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
	  }
    }

  /* Since value of X has changed,
     forget any value previously copied from it.  */

  while (nr-- > 0)
    /* But don't forget a copy if this is the output reload
       that establishes the copy's validity.  */
    if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
      reg_last_reload_reg[regno + nr] = 0;
}
4063 \f
/* For each reload, the mode of the reload register.  */
static enum machine_mode reload_mode[MAX_RELOADS];

/* For each reload, the largest number of registers it will require.
   Consulted by reload_reg_class_lower so multi-reg groups are
   allocated before single-reg reloads.  */
static int reload_nregs[MAX_RELOADS];
4069
4070 /* Comparison function for qsort to decide which of two reloads
4071 should be handled first. *P1 and *P2 are the reload numbers. */
4072
4073 static int
4074 reload_reg_class_lower (p1, p2)
4075 short *p1, *p2;
4076 {
4077 register int r1 = *p1, r2 = *p2;
4078 register int t;
4079
4080 /* Consider required reloads before optional ones. */
4081 t = reload_optional[r1] - reload_optional[r2];
4082 if (t != 0)
4083 return t;
4084
4085 /* Count all solitary classes before non-solitary ones. */
4086 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4087 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4088 if (t != 0)
4089 return t;
4090
4091 /* Aside from solitaires, consider all multi-reg groups first. */
4092 t = reload_nregs[r2] - reload_nregs[r1];
4093 if (t != 0)
4094 return t;
4095
4096 /* Consider reloads in order of increasing reg-class number. */
4097 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4098 if (t != 0)
4099 return t;
4100
4101 /* If reloads are equally urgent, sort by reload number,
4102 so that the results of qsort leave nothing to chance. */
4103 return r1 - r2;
4104 }
4105 \f
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  They are
   set by mark_reload_reg_in_use and cleared by clear_reload_reg_in_use;
   the reload_reg_free_* predicates below consult them.  */

/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is used as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;
4134
4135 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4136 TYPE. MODE is used to indicate how many consecutive regs are
4137 actually used. */
4138
4139 static void
4140 mark_reload_reg_in_use (regno, opnum, type, mode)
4141 int regno;
4142 int opnum;
4143 enum reload_type type;
4144 enum machine_mode mode;
4145 {
4146 int nregs = HARD_REGNO_NREGS (regno, mode);
4147 int i;
4148
4149 for (i = regno; i < nregs + regno; i++)
4150 {
4151 switch (type)
4152 {
4153 case RELOAD_OTHER:
4154 SET_HARD_REG_BIT (reload_reg_used, i);
4155 break;
4156
4157 case RELOAD_FOR_INPUT_ADDRESS:
4158 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4159 break;
4160
4161 case RELOAD_FOR_OUTPUT_ADDRESS:
4162 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4163 break;
4164
4165 case RELOAD_FOR_OPERAND_ADDRESS:
4166 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4167 break;
4168
4169 case RELOAD_FOR_OPADDR_ADDR:
4170 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4171 break;
4172
4173 case RELOAD_FOR_OTHER_ADDRESS:
4174 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4175 break;
4176
4177 case RELOAD_FOR_INPUT:
4178 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4179 break;
4180
4181 case RELOAD_FOR_OUTPUT:
4182 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4183 break;
4184
4185 case RELOAD_FOR_INSN:
4186 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4187 break;
4188 }
4189
4190 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4191 }
4192 }
4193
4194 /* Similarly, but show REGNO is no longer in use for a reload. */
4195
4196 static void
4197 clear_reload_reg_in_use (regno, opnum, type, mode)
4198 int regno;
4199 int opnum;
4200 enum reload_type type;
4201 enum machine_mode mode;
4202 {
4203 int nregs = HARD_REGNO_NREGS (regno, mode);
4204 int i;
4205
4206 for (i = regno; i < nregs + regno; i++)
4207 {
4208 switch (type)
4209 {
4210 case RELOAD_OTHER:
4211 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4212 break;
4213
4214 case RELOAD_FOR_INPUT_ADDRESS:
4215 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4216 break;
4217
4218 case RELOAD_FOR_OUTPUT_ADDRESS:
4219 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4220 break;
4221
4222 case RELOAD_FOR_OPERAND_ADDRESS:
4223 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4224 break;
4225
4226 case RELOAD_FOR_OPADDR_ADDR:
4227 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4228 break;
4229
4230 case RELOAD_FOR_OTHER_ADDRESS:
4231 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4232 break;
4233
4234 case RELOAD_FOR_INPUT:
4235 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4236 break;
4237
4238 case RELOAD_FOR_OUTPUT:
4239 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4240 break;
4241
4242 case RELOAD_FOR_INSN:
4243 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4244 break;
4245 }
4246 }
4247 }
4248
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.  Encodes the conflict rules between
   the various reload types, based on the order in which the parts
   of an insn are processed.  */

static int
reload_reg_free_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything except
     RELOAD_FOR_OTHER_ADDRESS.  Recall that RELOAD_FOR_OTHER_ADDRESS is known
     to be used only for inputs.  */

  if (type != RELOAD_FOR_OTHER_ADDRESS
      && TEST_HARD_REG_BIT (reload_reg_used, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything except RELOAD_FOR_OTHER_ADDRESS means
	 we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }
  /* All reload_type values are handled above; getting here means a
     bogus TYPE was passed in.  */
  abort ();
}
4376
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is not in use for a reload in any prior part of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_free_before_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* These always come first.  */
      return 1;

    case RELOAD_OTHER:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

      /* If this use is for part of the insn,
	 check the reg is not in use for any prior part.  It is tempting
	 to try to do this by falling through from objects that occur
	 later in the insn to ones that occur earlier, but that will not
	 correctly take into account the fact that here we MUST ignore
	 things that would prevent the register from being allocated in
	 the first place, since we know that it was allocated.  */

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Earlier reloads are for earlier outputs or their addresses,
	 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
	 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
	 RELOAD_OTHER).  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OUTPUT:
      /* This can't be used in the output address for this operand and
	 anything that can't be used for it, except that we've already
	 tested for RELOAD_FOR_INSN objects.  */

      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_OPERAND_ADDRESS:
    case RELOAD_FOR_OPADDR_ADDR:
    case RELOAD_FOR_INSN:
      /* These can't conflict with inputs, or each other, so all we have to
	 test is input addresses and the addresses of OTHER items.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT:
      /* The only things earlier are the address for this and
	 earlier inputs, other inputs (which we know we don't conflict
	 with), and addresses of RELOAD_OTHER objects.  */

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similarly, all we have to check is for use in earlier inputs'
	 addresses.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }
  /* All reload_type values are handled above; getting here means a
     bogus TYPE was passed in.  */
  abort ();
}
4487
4488 /* Return 1 if the value in reload reg REGNO, as used by a reload
4489 needed for the part of the insn specified by OPNUM and TYPE,
4490 is still available in REGNO at the end of the insn.
4491
4492 We can assume that the reload reg was already tested for availability
4493 at the time it is needed, and we should not check this again,
4494 in case the reg has already been marked in use. */
4495
static int
reload_reg_reaches_end_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  /* Dispatch on the kind of reload.  Each case tests only the usage
     sets for the parts of the insn that are processed AFTER the part
     denoted by (OPNUM, TYPE); if REGNO appears in none of them, its
     value survives to the end of the insn.  */
  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      /* Inputs for operand OPNUM itself and later come after this
	 address reload.  */
      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* But only address reloads of STRICTLY later inputs conflict.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      /* All outputs and their addresses come after every input.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPADDR_ADDR:
      /* Same output checks as above, but the op-addr usage set itself
	 must also be free since those reloads come after this one.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      /* Start the output-address scan below at operand 0.  */
      opnum = -1;

      /* ... fall through ...  */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
	  return 0;

      return 1;
    }

  /* An unhandled reload type would be a bug in this pass.  */
  abort ();
}
4609 \f
4610 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4611 Return 0 otherwise.
4612
4613 This function uses the same algorithm as reload_reg_free_p above. */
4614
4615 static int
4616 reloads_conflict (r1, r2)
4617 int r1, r2;
4618 {
4619 enum reload_type r1_type = reload_when_needed[r1];
4620 enum reload_type r2_type = reload_when_needed[r2];
4621 int r1_opnum = reload_opnum[r1];
4622 int r2_opnum = reload_opnum[r2];
4623
4624 /* RELOAD_OTHER conflicts with everything except RELOAD_FOR_OTHER_ADDRESS. */
4625
4626 if (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS)
4627 return 1;
4628
4629 /* Otherwise, check conflicts differently for each type. */
4630
4631 switch (r1_type)
4632 {
4633 case RELOAD_FOR_INPUT:
4634 return (r2_type == RELOAD_FOR_INSN
4635 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4636 || r2_type == RELOAD_FOR_OPADDR_ADDR
4637 || r2_type == RELOAD_FOR_INPUT
4638 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4639
4640 case RELOAD_FOR_INPUT_ADDRESS:
4641 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4642 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4643
4644 case RELOAD_FOR_OUTPUT_ADDRESS:
4645 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4646 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4647
4648 case RELOAD_FOR_OPERAND_ADDRESS:
4649 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4650 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4651
4652 case RELOAD_FOR_OPADDR_ADDR:
4653 return (r2_type == RELOAD_FOR_INPUT
4654 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4655
4656 case RELOAD_FOR_OUTPUT:
4657 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4658 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4659 && r2_opnum >= r1_opnum));
4660
4661 case RELOAD_FOR_INSN:
4662 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4663 || r2_type == RELOAD_FOR_INSN
4664 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4665
4666 case RELOAD_FOR_OTHER_ADDRESS:
4667 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4668
4669 case RELOAD_OTHER:
4670 return r2_type != RELOAD_FOR_OTHER_ADDRESS;
4671
4672 default:
4673 abort ();
4674 }
4675 }
4676 \f
/* Vector of reload-numbers showing the order in which the reloads should
   be processed; filled in and sorted by choose_reload_regs below.  */
short reload_order[MAX_RELOADS];

/* Indexed by reload number, 1 if incoming value
   inherited from previous insns (so no load need be emitted for it).  */
char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
rtx reload_inheritance_insn[MAX_RELOADS];

/* If non-zero, this is a place to get the value of the reload,
   rather than using reload_in.  */
rtx reload_override_in[MAX_RELOADS];

/* For each reload, the index in spill_regs of the spill register used,
   or -1 if we did not need one of the spill registers for this reload.  */
int reload_spill_index[MAX_RELOADS];
4696
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is non-zero if this is the last reload for the insn being
   processed.

   Set reload_reg_rtx[R] to the register allocated.

   If NOERROR is nonzero, we return 1 if successful,
   or 0 if we couldn't find a spill reg and we didn't change anything.
   If NOERROR is zero, a failure is reported with fatal_insn or
   error_for_asm and the reload is disabled instead.  */

static int
allocate_reload_reg (r, insn, last_reload, noerror)
     int r;
     rtx insn;
     int last_reload;
     int noerror;
{
  int i;
  int pass;
  int count;
  rtx new;
  int regno;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = reload_nregs[r] > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use two passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers.
     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 2; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  Don't do this, however, when we have
	 group needs and failure would be fatal; if we only have a relatively
	 small number of spill registers, and more than one of them has
	 group needs, then by starting in the middle, we may end up
	 allocating the first one in such a way that we are not left with
	 sufficient groups to handle the rest.  */

      if (noerror || ! force_group)
	i = last_spill_reg;
      else
	i = -1;

      for (count = 0; count < n_spills; count++)
	{
	  int class = (int) reload_reg_class[r];

	  /* Round-robin step: wrap around the spill_regs vector.  */
	  i = (i + 1) % n_spills;

	  if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
				 reload_when_needed[r])
	      && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
	      && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 spill_regs[i])
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      spill_regs[i]))))
	    {
	      int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.
		 Also, don't use for a group registers that are
		 needed for nongroups.  */
	      if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
		while (nr > 1)
		  {
		    /* Walk backward from the last reg of the candidate
		       group; stop at the first one that is unusable.  */
		    regno = spill_regs[i] + nr - 1;
		    if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
			  && spill_reg_order[regno] >= 0
			  && reload_reg_free_p (regno, reload_opnum[r],
						reload_when_needed[r])
			  && ! TEST_HARD_REG_BIT (counted_for_nongroups,
						  regno)))
		      break;
		    nr--;
		  }
	      /* NR == 1 here means the whole group checked out.  */
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on pass 1, omit pass 2.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count == n_spills)
    {
      if (noerror)
	return 0;
      goto failure;
    }

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  new = spill_reg_rtx[i];

  /* Reuse the cached rtx if its mode matches; otherwise make a fresh
     REG in the mode this reload needs and cache it.  */
  if (new == 0 || GET_MODE (new) != reload_mode[r])
    spill_reg_rtx[i] = new
      = gen_rtx (REG, reload_mode[r], spill_regs[i]);

  regno = true_regnum (new);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
    {
      enum machine_mode test_mode = VOIDmode;
      if (reload_in[r])
	test_mode = GET_MODE (reload_in[r]);
      /* If reload_in[r] has VOIDmode, it means we will load it
	 in whatever mode the reload reg has: to wit, reload_mode[r].
	 We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
	 to reload from or into have modes which are valid for this
	 reload register.  Otherwise the reload insns would be invalid.  */
      if (! (reload_in[r] != 0 && test_mode != VOIDmode
	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
	if (! (reload_out[r] != 0
	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
	  {
	    /* The reg is OK.  Remember where the round-robin scan
	       stopped so the next insn starts after this reg.  */
	    last_spill_reg = i;

	    /* Mark as in use for this insn the reload regs we use
	       for this.  */
	    mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
				    reload_when_needed[r], reload_mode[r]);

	    reload_reg_rtx[r] = new;
	    reload_spill_index[r] = i;
	    return 1;
	  }
    }

  /* The reg is not OK.  */
  if (noerror)
    return 0;

 failure:
  if (asm_noperands (PATTERN (insn)) < 0)
    /* It's the compiler's fault.  */
    fatal_insn ("Could not find a spill register", insn);

  /* It's the user's fault; the operand's mode and constraint
     don't match.  Disable this reload so we don't crash in final.  */
  error_for_asm (insn,
		 "`asm' operand constraint incompatible with operand size");
  reload_in[r] = 0;
  reload_out[r] = 0;
  reload_reg_rtx[r] = 0;
  reload_optional[r] = 1;
  reload_secondary_p[r] = 1;

  return 1;
}
4895 \f
4896 /* Assign hard reg targets for the pseudo-registers we must reload
4897 into hard regs for this insn.
4898 Also output the instructions to copy them in and out of the hard regs.
4899
4900 For machines with register classes, we are responsible for
4901 finding a reload reg in the proper class. */
4902
4903 static void
4904 choose_reload_regs (insn, avoid_return_reg)
4905 rtx insn;
4906 rtx avoid_return_reg;
4907 {
4908 register int i, j;
4909 int max_group_size = 1;
4910 enum reg_class group_class = NO_REGS;
4911 int inheritance;
4912
4913 rtx save_reload_reg_rtx[MAX_RELOADS];
4914 char save_reload_inherited[MAX_RELOADS];
4915 rtx save_reload_inheritance_insn[MAX_RELOADS];
4916 rtx save_reload_override_in[MAX_RELOADS];
4917 int save_reload_spill_index[MAX_RELOADS];
4918 HARD_REG_SET save_reload_reg_used;
4919 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4920 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4921 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4922 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4923 HARD_REG_SET save_reload_reg_used_in_op_addr;
4924 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
4925 HARD_REG_SET save_reload_reg_used_in_insn;
4926 HARD_REG_SET save_reload_reg_used_in_other_addr;
4927 HARD_REG_SET save_reload_reg_used_at_all;
4928
4929 bzero (reload_inherited, MAX_RELOADS);
4930 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4931 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
4932
4933 CLEAR_HARD_REG_SET (reload_reg_used);
4934 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4935 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4936 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
4937 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4938 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
4939
4940 for (i = 0; i < reload_n_operands; i++)
4941 {
4942 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4943 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4944 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4945 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4946 }
4947
4948 #ifdef SMALL_REGISTER_CLASSES
4949 /* Don't bother with avoiding the return reg
4950 if we have no mandatory reload that could use it. */
4951 if (avoid_return_reg)
4952 {
4953 int do_avoid = 0;
4954 int regno = REGNO (avoid_return_reg);
4955 int nregs
4956 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4957 int r;
4958
4959 for (r = regno; r < regno + nregs; r++)
4960 if (spill_reg_order[r] >= 0)
4961 for (j = 0; j < n_reloads; j++)
4962 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4963 && (reload_in[j] != 0 || reload_out[j] != 0
4964 || reload_secondary_p[j])
4965 &&
4966 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4967 do_avoid = 1;
4968 if (!do_avoid)
4969 avoid_return_reg = 0;
4970 }
4971 #endif /* SMALL_REGISTER_CLASSES */
4972
4973 #if 0 /* Not needed, now that we can always retry without inheritance. */
4974 /* See if we have more mandatory reloads than spill regs.
4975 If so, then we cannot risk optimizations that could prevent
4976 reloads from sharing one spill register.
4977
4978 Since we will try finding a better register than reload_reg_rtx
4979 unless it is equal to reload_in or reload_out, count such reloads. */
4980
4981 {
4982 int tem = 0;
4983 #ifdef SMALL_REGISTER_CLASSES
4984 int tem = (avoid_return_reg != 0);
4985 #endif
4986 for (j = 0; j < n_reloads; j++)
4987 if (! reload_optional[j]
4988 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4989 && (reload_reg_rtx[j] == 0
4990 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4991 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4992 tem++;
4993 if (tem > n_spills)
4994 must_reuse = 1;
4995 }
4996 #endif
4997
4998 #ifdef SMALL_REGISTER_CLASSES
4999 /* Don't use the subroutine call return reg for a reload
5000 if we are supposed to avoid it. */
5001 if (avoid_return_reg)
5002 {
5003 int regno = REGNO (avoid_return_reg);
5004 int nregs
5005 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5006 int r;
5007
5008 for (r = regno; r < regno + nregs; r++)
5009 if (spill_reg_order[r] >= 0)
5010 SET_HARD_REG_BIT (reload_reg_used, r);
5011 }
5012 #endif /* SMALL_REGISTER_CLASSES */
5013
5014 /* In order to be certain of getting the registers we need,
5015 we must sort the reloads into order of increasing register class.
5016 Then our grabbing of reload registers will parallel the process
5017 that provided the reload registers.
5018
5019 Also note whether any of the reloads wants a consecutive group of regs.
5020 If so, record the maximum size of the group desired and what
5021 register class contains all the groups needed by this insn. */
5022
5023 for (j = 0; j < n_reloads; j++)
5024 {
5025 reload_order[j] = j;
5026 reload_spill_index[j] = -1;
5027
5028 reload_mode[j]
5029 = (reload_inmode[j] == VOIDmode
5030 || (GET_MODE_SIZE (reload_outmode[j])
5031 > GET_MODE_SIZE (reload_inmode[j])))
5032 ? reload_outmode[j] : reload_inmode[j];
5033
5034 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5035
5036 if (reload_nregs[j] > 1)
5037 {
5038 max_group_size = MAX (reload_nregs[j], max_group_size);
5039 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5040 }
5041
5042 /* If we have already decided to use a certain register,
5043 don't use it in another way. */
5044 if (reload_reg_rtx[j])
5045 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5046 reload_when_needed[j], reload_mode[j]);
5047 }
5048
5049 if (n_reloads > 1)
5050 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5051
5052 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5053 sizeof reload_reg_rtx);
5054 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5055 bcopy ((char *) reload_inheritance_insn,
5056 (char *) save_reload_inheritance_insn,
5057 sizeof reload_inheritance_insn);
5058 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5059 sizeof reload_override_in);
5060 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5061 sizeof reload_spill_index);
5062 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5063 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5064 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5065 reload_reg_used_in_op_addr);
5066
5067 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5068 reload_reg_used_in_op_addr_reload);
5069
5070 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5071 reload_reg_used_in_insn);
5072 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5073 reload_reg_used_in_other_addr);
5074
5075 for (i = 0; i < reload_n_operands; i++)
5076 {
5077 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5078 reload_reg_used_in_output[i]);
5079 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5080 reload_reg_used_in_input[i]);
5081 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5082 reload_reg_used_in_input_addr[i]);
5083 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5084 reload_reg_used_in_output_addr[i]);
5085 }
5086
5087 /* If -O, try first with inheritance, then turning it off.
5088 If not -O, don't do inheritance.
5089 Using inheritance when not optimizing leads to paradoxes
5090 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5091 because one side of the comparison might be inherited. */
5092
5093 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5094 {
5095 /* Process the reloads in order of preference just found.
5096 Beyond this point, subregs can be found in reload_reg_rtx.
5097
5098 This used to look for an existing reloaded home for all
5099 of the reloads, and only then perform any new reloads.
5100 But that could lose if the reloads were done out of reg-class order
5101 because a later reload with a looser constraint might have an old
5102 home in a register needed by an earlier reload with a tighter constraint.
5103
5104 To solve this, we make two passes over the reloads, in the order
5105 described above. In the first pass we try to inherit a reload
5106 from a previous insn. If there is a later reload that needs a
5107 class that is a proper subset of the class being processed, we must
5108 also allocate a spill register during the first pass.
5109
5110 Then make a second pass over the reloads to allocate any reloads
5111 that haven't been given registers yet. */
5112
5113 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5114
5115 for (j = 0; j < n_reloads; j++)
5116 {
5117 register int r = reload_order[j];
5118
5119 /* Ignore reloads that got marked inoperative. */
5120 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5121 continue;
5122
5123 /* If find_reloads chose a to use reload_in or reload_out as a reload
5124 register, we don't need to chose one. Otherwise, try even if it found
5125 one since we might save an insn if we find the value lying around. */
5126 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5127 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5128 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5129 continue;
5130
5131 #if 0 /* No longer needed for correct operation.
5132 It might give better code, or might not; worth an experiment? */
5133 /* If this is an optional reload, we can't inherit from earlier insns
5134 until we are sure that any non-optional reloads have been allocated.
5135 The following code takes advantage of the fact that optional reloads
5136 are at the end of reload_order. */
5137 if (reload_optional[r] != 0)
5138 for (i = 0; i < j; i++)
5139 if ((reload_out[reload_order[i]] != 0
5140 || reload_in[reload_order[i]] != 0
5141 || reload_secondary_p[reload_order[i]])
5142 && ! reload_optional[reload_order[i]]
5143 && reload_reg_rtx[reload_order[i]] == 0)
5144 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5145 #endif
5146
5147 /* First see if this pseudo is already available as reloaded
5148 for a previous insn. We cannot try to inherit for reloads
5149 that are smaller than the maximum number of registers needed
5150 for groups unless the register we would allocate cannot be used
5151 for the groups.
5152
5153 We could check here to see if this is a secondary reload for
5154 an object that is already in a register of the desired class.
5155 This would avoid the need for the secondary reload register.
5156 But this is complex because we can't easily determine what
5157 objects might want to be loaded via this reload. So let a register
5158 be allocated here. In `emit_reload_insns' we suppress one of the
5159 loads in the case described above. */
5160
5161 if (inheritance)
5162 {
5163 register int regno = -1;
5164 enum machine_mode mode;
5165
5166 if (reload_in[r] == 0)
5167 ;
5168 else if (GET_CODE (reload_in[r]) == REG)
5169 {
5170 regno = REGNO (reload_in[r]);
5171 mode = GET_MODE (reload_in[r]);
5172 }
5173 else if (GET_CODE (reload_in_reg[r]) == REG)
5174 {
5175 regno = REGNO (reload_in_reg[r]);
5176 mode = GET_MODE (reload_in_reg[r]);
5177 }
5178 #if 0
5179 /* This won't work, since REGNO can be a pseudo reg number.
5180 Also, it takes much more hair to keep track of all the things
5181 that can invalidate an inherited reload of part of a pseudoreg. */
5182 else if (GET_CODE (reload_in[r]) == SUBREG
5183 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5184 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5185 #endif
5186
5187 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5188 {
5189 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5190
5191 if (reg_reloaded_contents[i] == regno
5192 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5193 >= GET_MODE_SIZE (mode))
5194 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5195 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5196 spill_regs[i])
5197 && (reload_nregs[r] == max_group_size
5198 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5199 spill_regs[i]))
5200 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5201 reload_when_needed[r])
5202 && reload_reg_free_before_p (spill_regs[i],
5203 reload_opnum[r],
5204 reload_when_needed[r]))
5205 {
5206 /* If a group is needed, verify that all the subsequent
5207 registers still have their values intact. */
5208 int nr
5209 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5210 int k;
5211
5212 for (k = 1; k < nr; k++)
5213 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5214 != regno)
5215 break;
5216
5217 if (k == nr)
5218 {
5219 int i1;
5220
5221 /* We found a register that contains the
5222 value we need. If this register is the
5223 same as an `earlyclobber' operand of the
5224 current insn, just mark it as a place to
5225 reload from since we can't use it as the
5226 reload register itself. */
5227
5228 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5229 if (reg_overlap_mentioned_for_reload_p
5230 (reg_last_reload_reg[regno],
5231 reload_earlyclobbers[i1]))
5232 break;
5233
5234 if (i1 != n_earlyclobbers
5235 /* Don't really use the inherited spill reg
5236 if we need it wider than we've got it. */
5237 || (GET_MODE_SIZE (reload_mode[r])
5238 > GET_MODE_SIZE (mode)))
5239 reload_override_in[r] = reg_last_reload_reg[regno];
5240 else
5241 {
5242 int k;
5243 /* We can use this as a reload reg. */
5244 /* Mark the register as in use for this part of
5245 the insn. */
5246 mark_reload_reg_in_use (spill_regs[i],
5247 reload_opnum[r],
5248 reload_when_needed[r],
5249 reload_mode[r]);
5250 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5251 reload_inherited[r] = 1;
5252 reload_inheritance_insn[r]
5253 = reg_reloaded_insn[i];
5254 reload_spill_index[r] = i;
5255 for (k = 0; k < nr; k++)
5256 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5257 spill_regs[i + k]);
5258 }
5259 }
5260 }
5261 }
5262 }
5263
5264 /* Here's another way to see if the value is already lying around. */
5265 if (inheritance
5266 && reload_in[r] != 0
5267 && ! reload_inherited[r]
5268 && reload_out[r] == 0
5269 && (CONSTANT_P (reload_in[r])
5270 || GET_CODE (reload_in[r]) == PLUS
5271 || GET_CODE (reload_in[r]) == REG
5272 || GET_CODE (reload_in[r]) == MEM)
5273 && (reload_nregs[r] == max_group_size
5274 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5275 {
5276 register rtx equiv
5277 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5278 -1, NULL_PTR, 0, reload_mode[r]);
5279 int regno;
5280
5281 if (equiv != 0)
5282 {
5283 if (GET_CODE (equiv) == REG)
5284 regno = REGNO (equiv);
5285 else if (GET_CODE (equiv) == SUBREG)
5286 {
5287 /* This must be a SUBREG of a hard register.
5288 Make a new REG since this might be used in an
5289 address and not all machines support SUBREGs
5290 there. */
5291 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5292 equiv = gen_rtx (REG, reload_mode[r], regno);
5293 }
5294 else
5295 abort ();
5296 }
5297
5298 /* If we found a spill reg, reject it unless it is free
5299 and of the desired class. */
5300 if (equiv != 0
5301 && ((spill_reg_order[regno] >= 0
5302 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5303 reload_when_needed[r]))
5304 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5305 regno)))
5306 equiv = 0;
5307
5308 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5309 equiv = 0;
5310
5311 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5312 equiv = 0;
5313
5314 /* We found a register that contains the value we need.
5315 If this register is the same as an `earlyclobber' operand
5316 of the current insn, just mark it as a place to reload from
5317 since we can't use it as the reload register itself. */
5318
5319 if (equiv != 0)
5320 for (i = 0; i < n_earlyclobbers; i++)
5321 if (reg_overlap_mentioned_for_reload_p (equiv,
5322 reload_earlyclobbers[i]))
5323 {
5324 reload_override_in[r] = equiv;
5325 equiv = 0;
5326 break;
5327 }
5328
5329 /* JRV: If the equiv register we have found is explicitly
5330 clobbered in the current insn, mark but don't use, as above. */
5331
5332 if (equiv != 0 && regno_clobbered_p (regno, insn))
5333 {
5334 reload_override_in[r] = equiv;
5335 equiv = 0;
5336 }
5337
5338 /* If we found an equivalent reg, say no code need be generated
5339 to load it, and use it as our reload reg. */
5340 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5341 {
5342 reload_reg_rtx[r] = equiv;
5343 reload_inherited[r] = 1;
5344 /* If it is a spill reg,
5345 mark the spill reg as in use for this insn. */
5346 i = spill_reg_order[regno];
5347 if (i >= 0)
5348 {
5349 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5350 int k;
5351 mark_reload_reg_in_use (regno, reload_opnum[r],
5352 reload_when_needed[r],
5353 reload_mode[r]);
5354 for (k = 0; k < nr; k++)
5355 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
5356 }
5357 }
5358 }
5359
5360 /* If we found a register to use already, or if this is an optional
5361 reload, we are done. */
5362 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5363 continue;
5364
5365 #if 0 /* No longer needed for correct operation. Might or might not
5366 give better code on the average. Want to experiment? */
5367
5368 /* See if there is a later reload that has a class different from our
5369 class that intersects our class or that requires less register
5370 than our reload. If so, we must allocate a register to this
5371 reload now, since that reload might inherit a previous reload
5372 and take the only available register in our class. Don't do this
5373 for optional reloads since they will force all previous reloads
5374 to be allocated. Also don't do this for reloads that have been
5375 turned off. */
5376
5377 for (i = j + 1; i < n_reloads; i++)
5378 {
5379 int s = reload_order[i];
5380
5381 if ((reload_in[s] == 0 && reload_out[s] == 0
5382 && ! reload_secondary_p[s])
5383 || reload_optional[s])
5384 continue;
5385
5386 if ((reload_reg_class[s] != reload_reg_class[r]
5387 && reg_classes_intersect_p (reload_reg_class[r],
5388 reload_reg_class[s]))
5389 || reload_nregs[s] < reload_nregs[r])
5390 break;
5391 }
5392
5393 if (i == n_reloads)
5394 continue;
5395
5396 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5397 #endif
5398 }
5399
5400 /* Now allocate reload registers for anything non-optional that
5401 didn't get one yet. */
5402 for (j = 0; j < n_reloads; j++)
5403 {
5404 register int r = reload_order[j];
5405
5406 /* Ignore reloads that got marked inoperative. */
5407 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5408 continue;
5409
5410 /* Skip reloads that already have a register allocated or are
5411 optional. */
5412 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5413 continue;
5414
5415 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5416 break;
5417 }
5418
5419 /* If that loop got all the way, we have won. */
5420 if (j == n_reloads)
5421 break;
5422
5423 fail:
5424 /* Loop around and try without any inheritance. */
5425 /* First undo everything done by the failed attempt
5426 to allocate with inheritance. */
5427 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5428 sizeof reload_reg_rtx);
5429 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5430 sizeof reload_inherited);
5431 bcopy ((char *) save_reload_inheritance_insn,
5432 (char *) reload_inheritance_insn,
5433 sizeof reload_inheritance_insn);
5434 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5435 sizeof reload_override_in);
5436 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5437 sizeof reload_spill_index);
5438 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5439 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5440 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5441 save_reload_reg_used_in_op_addr);
5442 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5443 save_reload_reg_used_in_op_addr_reload);
5444 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5445 save_reload_reg_used_in_insn);
5446 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5447 save_reload_reg_used_in_other_addr);
5448
5449 for (i = 0; i < reload_n_operands; i++)
5450 {
5451 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5452 save_reload_reg_used_in_input[i]);
5453 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5454 save_reload_reg_used_in_output[i]);
5455 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5456 save_reload_reg_used_in_input_addr[i]);
5457 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5458 save_reload_reg_used_in_output_addr[i]);
5459 }
5460 }
5461
5462 /* If we thought we could inherit a reload, because it seemed that
5463 nothing else wanted the same reload register earlier in the insn,
5464 verify that assumption, now that all reloads have been assigned. */
5465
5466 for (j = 0; j < n_reloads; j++)
5467 {
5468 register int r = reload_order[j];
5469
5470 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5471 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5472 reload_opnum[r],
5473 reload_when_needed[r]))
5474 reload_inherited[r] = 0;
5475
5476 /* If we found a better place to reload from,
5477 validate it in the same fashion, if it is a reload reg. */
5478 if (reload_override_in[r]
5479 && (GET_CODE (reload_override_in[r]) == REG
5480 || GET_CODE (reload_override_in[r]) == SUBREG))
5481 {
5482 int regno = true_regnum (reload_override_in[r]);
5483 if (spill_reg_order[regno] >= 0
5484 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5485 reload_when_needed[r]))
5486 reload_override_in[r] = 0;
5487 }
5488 }
5489
5490 /* Now that reload_override_in is known valid,
5491 actually override reload_in. */
5492 for (j = 0; j < n_reloads; j++)
5493 if (reload_override_in[j])
5494 reload_in[j] = reload_override_in[j];
5495
5496 /* If this reload won't be done because it has been cancelled or is
5497 optional and not inherited, clear reload_reg_rtx so other
5498 routines (such as subst_reloads) don't get confused. */
5499 for (j = 0; j < n_reloads; j++)
5500 if (reload_reg_rtx[j] != 0
5501 && ((reload_optional[j] && ! reload_inherited[j])
5502 || (reload_in[j] == 0 && reload_out[j] == 0
5503 && ! reload_secondary_p[j])))
5504 {
5505 int regno = true_regnum (reload_reg_rtx[j]);
5506
5507 if (spill_reg_order[regno] >= 0)
5508 clear_reload_reg_in_use (regno, reload_opnum[j],
5509 reload_when_needed[j], reload_mode[j]);
5510 reload_reg_rtx[j] = 0;
5511 }
5512
5513 /* Record which pseudos and which spill regs have output reloads. */
5514 for (j = 0; j < n_reloads; j++)
5515 {
5516 register int r = reload_order[j];
5517
5518 i = reload_spill_index[r];
5519
5520 /* I is nonneg if this reload used one of the spill regs.
5521 If reload_reg_rtx[r] is 0, this is an optional reload
5522 that we opted to ignore. */
5523 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5524 && reload_reg_rtx[r] != 0)
5525 {
5526 register int nregno = REGNO (reload_out[r]);
5527 int nr = 1;
5528
5529 if (nregno < FIRST_PSEUDO_REGISTER)
5530 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5531
5532 while (--nr >= 0)
5533 reg_has_output_reload[nregno + nr] = 1;
5534
5535 if (i >= 0)
5536 {
5537 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5538 while (--nr >= 0)
5539 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5540 }
5541
5542 if (reload_when_needed[r] != RELOAD_OTHER
5543 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5544 && reload_when_needed[r] != RELOAD_FOR_INSN)
5545 abort ();
5546 }
5547 }
5548 }
5549 \f
/* If SMALL_REGISTER_CLASSES is defined, we may not have merged two
5551 reloads of the same item for fear that we might not have enough reload
5552 registers. However, normally they will get the same reload register
5553 and hence actually need not be loaded twice.
5554
5555 Here we check for the most common case of this phenomenon: when we have
5556 a number of reloads for the same object, each of which were allocated
5557 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5558 reload, and is not modified in the insn itself. If we find such,
5559 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5560 This will not increase the number of spill registers needed and will
5561 prevent redundant code. */
5562
5563 #ifdef SMALL_REGISTER_CLASSES
5564
5565 static void
5566 merge_assigned_reloads (insn)
5567 rtx insn;
5568 {
5569 int i, j;
5570
5571 /* Scan all the reloads looking for ones that only load values and
5572 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5573 assigned and not modified by INSN. */
5574
5575 for (i = 0; i < n_reloads; i++)
5576 {
5577 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5578 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5579 || reg_set_p (reload_reg_rtx[i], insn))
5580 continue;
5581
5582 /* Look at all other reloads. Ensure that the only use of this
5583 reload_reg_rtx is in a reload that just loads the same value
5584 as we do. Note that any secondary reloads must be of the identical
5585 class since the values, modes, and result registers are the
5586 same, so we need not do anything with any secondary reloads. */
5587
5588 for (j = 0; j < n_reloads; j++)
5589 {
5590 if (i == j || reload_reg_rtx[j] == 0
5591 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5592 reload_reg_rtx[i]))
5593 continue;
5594
5595 /* If the reload regs aren't exactly the same (e.g, different modes)
5596 or if the values are different, we can't merge anything with this
5597 reload register. */
5598
5599 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5600 || reload_out[j] != 0 || reload_in[j] == 0
5601 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5602 break;
5603 }
5604
5605 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5606 we, in fact, found any matching reloads. */
5607
5608 if (j == n_reloads)
5609 {
5610 for (j = 0; j < n_reloads; j++)
5611 if (i != j && reload_reg_rtx[j] != 0
5612 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5613 {
5614 reload_when_needed[i] = RELOAD_OTHER;
5615 reload_in[j] = 0;
5616 transfer_replacements (i, j);
5617 }
5618
5619 /* If this is now RELOAD_OTHER, look for any reloads that load
5620 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5621 if they were for inputs, RELOAD_OTHER for outputs. Note that
5622 this test is equivalent to looking for reloads for this operand
5623 number. */
5624
5625 if (reload_when_needed[i] == RELOAD_OTHER)
5626 for (j = 0; j < n_reloads; j++)
5627 if (reload_in[j] != 0
5628 && reload_when_needed[i] != RELOAD_OTHER
5629 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5630 reload_in[i]))
5631 reload_when_needed[j]
5632 = reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5633 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5634 }
5635 }
5636 }
#endif /* SMALL_REGISTER_CLASSES */
5638 \f
5639 /* Output insns to reload values in and out of the chosen reload regs. */
5640
5641 static void
5642 emit_reload_insns (insn)
5643 rtx insn;
5644 {
5645 register int j;
5646 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5647 rtx other_input_address_reload_insns = 0;
5648 rtx other_input_reload_insns = 0;
5649 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5650 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5651 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5652 rtx operand_reload_insns = 0;
5653 rtx other_operand_reload_insns = 0;
5654 rtx following_insn = NEXT_INSN (insn);
5655 rtx before_insn = insn;
5656 int special;
5657 /* Values to be put in spill_reg_store are put here first. */
5658 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5659
5660 for (j = 0; j < reload_n_operands; j++)
5661 input_reload_insns[j] = input_address_reload_insns[j]
5662 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5663
5664 /* Now output the instructions to copy the data into and out of the
5665 reload registers. Do these in the order that the reloads were reported,
5666 since reloads of base and index registers precede reloads of operands
5667 and the operands may need the base and index registers reloaded. */
5668
5669 for (j = 0; j < n_reloads; j++)
5670 {
5671 register rtx old;
5672 rtx oldequiv_reg = 0;
5673
5674 if (reload_spill_index[j] >= 0)
5675 new_spill_reg_store[reload_spill_index[j]] = 0;
5676
5677 old = reload_in[j];
5678 if (old != 0 && ! reload_inherited[j]
5679 && ! rtx_equal_p (reload_reg_rtx[j], old)
5680 && reload_reg_rtx[j] != 0)
5681 {
5682 register rtx reloadreg = reload_reg_rtx[j];
5683 rtx oldequiv = 0;
5684 enum machine_mode mode;
5685 rtx *where;
5686
5687 /* Determine the mode to reload in.
5688 This is very tricky because we have three to choose from.
5689 There is the mode the insn operand wants (reload_inmode[J]).
5690 There is the mode of the reload register RELOADREG.
5691 There is the intrinsic mode of the operand, which we could find
5692 by stripping some SUBREGs.
5693 It turns out that RELOADREG's mode is irrelevant:
5694 we can change that arbitrarily.
5695
5696 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5697 then the reload reg may not support QImode moves, so use SImode.
5698 If foo is in memory due to spilling a pseudo reg, this is safe,
5699 because the QImode value is in the least significant part of a
5700 slot big enough for a SImode. If foo is some other sort of
5701 memory reference, then it is impossible to reload this case,
5702 so previous passes had better make sure this never happens.
5703
5704 Then consider a one-word union which has SImode and one of its
5705 members is a float, being fetched as (SUBREG:SF union:SI).
5706 We must fetch that as SFmode because we could be loading into
5707 a float-only register. In this case OLD's mode is correct.
5708
5709 Consider an immediate integer: it has VOIDmode. Here we need
5710 to get a mode from something else.
5711
5712 In some cases, there is a fourth mode, the operand's
5713 containing mode. If the insn specifies a containing mode for
5714 this operand, it overrides all others.
5715
5716 I am not sure whether the algorithm here is always right,
5717 but it does the right things in those cases. */
5718
5719 mode = GET_MODE (old);
5720 if (mode == VOIDmode)
5721 mode = reload_inmode[j];
5722
5723 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5724 /* If we need a secondary register for this operation, see if
5725 the value is already in a register in that class. Don't
5726 do this if the secondary register will be used as a scratch
5727 register. */
5728
5729 if (reload_secondary_in_reload[j] >= 0
5730 && reload_secondary_in_icode[j] == CODE_FOR_nothing
5731 && optimize)
5732 oldequiv
5733 = find_equiv_reg (old, insn,
5734 reload_reg_class[reload_secondary_in_reload[j]],
5735 -1, NULL_PTR, 0, mode);
5736 #endif
5737
5738 /* If reloading from memory, see if there is a register
5739 that already holds the same value. If so, reload from there.
5740 We can pass 0 as the reload_reg_p argument because
5741 any other reload has either already been emitted,
5742 in which case find_equiv_reg will see the reload-insn,
5743 or has yet to be emitted, in which case it doesn't matter
5744 because we will use this equiv reg right away. */
5745
5746 if (oldequiv == 0 && optimize
5747 && (GET_CODE (old) == MEM
5748 || (GET_CODE (old) == REG
5749 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5750 && reg_renumber[REGNO (old)] < 0)))
5751 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5752 -1, NULL_PTR, 0, mode);
5753
5754 if (oldequiv)
5755 {
5756 int regno = true_regnum (oldequiv);
5757
5758 /* If OLDEQUIV is a spill register, don't use it for this
5759 if any other reload needs it at an earlier stage of this insn
5760 or at this stage. */
5761 if (spill_reg_order[regno] >= 0
5762 && (! reload_reg_free_p (regno, reload_opnum[j],
5763 reload_when_needed[j])
5764 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5765 reload_when_needed[j])))
5766 oldequiv = 0;
5767
5768 /* If OLDEQUIV is not a spill register,
5769 don't use it if any other reload wants it. */
5770 if (spill_reg_order[regno] < 0)
5771 {
5772 int k;
5773 for (k = 0; k < n_reloads; k++)
5774 if (reload_reg_rtx[k] != 0 && k != j
5775 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5776 oldequiv))
5777 {
5778 oldequiv = 0;
5779 break;
5780 }
5781 }
5782
5783 /* If it is no cheaper to copy from OLDEQUIV into the
5784 reload register than it would be to move from memory,
5785 don't use it. Likewise, if we need a secondary register
5786 or memory. */
5787
5788 if (oldequiv != 0
5789 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5790 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5791 reload_reg_class[j])
5792 >= MEMORY_MOVE_COST (mode)))
5793 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5794 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5795 mode, oldequiv)
5796 != NO_REGS)
5797 #endif
5798 #ifdef SECONDARY_MEMORY_NEEDED
5799 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5800 REGNO_REG_CLASS (regno),
5801 mode)
5802 #endif
5803 ))
5804 oldequiv = 0;
5805 }
5806
5807 if (oldequiv == 0)
5808 oldequiv = old;
5809 else if (GET_CODE (oldequiv) == REG)
5810 oldequiv_reg = oldequiv;
5811 else if (GET_CODE (oldequiv) == SUBREG)
5812 oldequiv_reg = SUBREG_REG (oldequiv);
5813
5814 /* If we are reloading from a register that was recently stored in
5815 with an output-reload, see if we can prove there was
5816 actually no need to store the old value in it. */
5817
5818 if (optimize && GET_CODE (oldequiv) == REG
5819 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5820 && spill_reg_order[REGNO (oldequiv)] >= 0
5821 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
5822 && find_reg_note (insn, REG_DEAD, reload_in[j])
5823 /* This is unsafe if operand occurs more than once in current
5824 insn. Perhaps some occurrences weren't reloaded. */
5825 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5826 delete_output_reload
5827 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5828
5829 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5830 then load RELOADREG from OLDEQUIV. Note that we cannot use
5831 gen_lowpart_common since it can do the wrong thing when
5832 RELOADREG has a multi-word mode. Note that RELOADREG
5833 must always be a REG here. */
5834
5835 if (GET_MODE (reloadreg) != mode)
5836 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5837 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5838 oldequiv = SUBREG_REG (oldequiv);
5839 if (GET_MODE (oldequiv) != VOIDmode
5840 && mode != GET_MODE (oldequiv))
5841 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5842
5843 /* Switch to the right place to emit the reload insns. */
5844 switch (reload_when_needed[j])
5845 {
5846 case RELOAD_OTHER:
5847 where = &other_input_reload_insns;
5848 break;
5849 case RELOAD_FOR_INPUT:
5850 where = &input_reload_insns[reload_opnum[j]];
5851 break;
5852 case RELOAD_FOR_INPUT_ADDRESS:
5853 where = &input_address_reload_insns[reload_opnum[j]];
5854 break;
5855 case RELOAD_FOR_OUTPUT_ADDRESS:
5856 where = &output_address_reload_insns[reload_opnum[j]];
5857 break;
5858 case RELOAD_FOR_OPERAND_ADDRESS:
5859 where = &operand_reload_insns;
5860 break;
5861 case RELOAD_FOR_OPADDR_ADDR:
5862 where = &other_operand_reload_insns;
5863 break;
5864 case RELOAD_FOR_OTHER_ADDRESS:
5865 where = &other_input_address_reload_insns;
5866 break;
5867 default:
5868 abort ();
5869 }
5870
5871 push_to_sequence (*where);
5872 special = 0;
5873
5874 /* Auto-increment addresses must be reloaded in a special way. */
5875 if (GET_CODE (oldequiv) == POST_INC
5876 || GET_CODE (oldequiv) == POST_DEC
5877 || GET_CODE (oldequiv) == PRE_INC
5878 || GET_CODE (oldequiv) == PRE_DEC)
5879 {
5880 /* We are not going to bother supporting the case where a
5881 incremented register can't be copied directly from
5882 OLDEQUIV since this seems highly unlikely. */
5883 if (reload_secondary_in_reload[j] >= 0)
5884 abort ();
5885 /* Prevent normal processing of this reload. */
5886 special = 1;
5887 /* Output a special code sequence for this case. */
5888 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5889 }
5890
5891 /* If we are reloading a pseudo-register that was set by the previous
5892 insn, see if we can get rid of that pseudo-register entirely
5893 by redirecting the previous insn into our reload register. */
5894
5895 else if (optimize && GET_CODE (old) == REG
5896 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5897 && dead_or_set_p (insn, old)
5898 /* This is unsafe if some other reload
5899 uses the same reg first. */
5900 && reload_reg_free_before_p (REGNO (reloadreg),
5901 reload_opnum[j],
5902 reload_when_needed[j]))
5903 {
5904 rtx temp = PREV_INSN (insn);
5905 while (temp && GET_CODE (temp) == NOTE)
5906 temp = PREV_INSN (temp);
5907 if (temp
5908 && GET_CODE (temp) == INSN
5909 && GET_CODE (PATTERN (temp)) == SET
5910 && SET_DEST (PATTERN (temp)) == old
5911 /* Make sure we can access insn_operand_constraint. */
5912 && asm_noperands (PATTERN (temp)) < 0
5913 /* This is unsafe if prev insn rejects our reload reg. */
5914 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5915 reloadreg)
5916 /* This is unsafe if operand occurs more than once in current
5917 insn. Perhaps some occurrences aren't reloaded. */
5918 && count_occurrences (PATTERN (insn), old) == 1
5919 /* Don't risk splitting a matching pair of operands. */
5920 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5921 {
5922 /* Store into the reload register instead of the pseudo. */
5923 SET_DEST (PATTERN (temp)) = reloadreg;
5924 /* If these are the only uses of the pseudo reg,
5925 pretend for GDB it lives in the reload reg we used. */
5926 if (reg_n_deaths[REGNO (old)] == 1
5927 && reg_n_sets[REGNO (old)] == 1)
5928 {
5929 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5930 alter_reg (REGNO (old), -1);
5931 }
5932 special = 1;
5933 }
5934 }
5935
5936 /* We can't do that, so output an insn to load RELOADREG. */
5937
5938 if (! special)
5939 {
5940 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5941 rtx second_reload_reg = 0;
5942 enum insn_code icode;
5943
5944 /* If we have a secondary reload, pick up the secondary register
5945 and icode, if any. If OLDEQUIV and OLD are different or
5946 if this is an in-out reload, recompute whether or not we
5947 still need a secondary register and what the icode should
5948 be. If we still need a secondary register and the class or
5949 icode is different, go back to reloading from OLD if using
5950 OLDEQUIV means that we got the wrong type of register. We
5951 cannot have different class or icode due to an in-out reload
5952 because we don't make such reloads when both the input and
5953 output need secondary reload registers. */
5954
5955 if (reload_secondary_in_reload[j] >= 0)
5956 {
5957 int secondary_reload = reload_secondary_in_reload[j];
5958 rtx real_oldequiv = oldequiv;
5959 rtx real_old = old;
5960
5961 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5962 and similarly for OLD.
5963 See comments in get_secondary_reload in reload.c. */
5964 if (GET_CODE (oldequiv) == REG
5965 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5966 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5967 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5968
5969 if (GET_CODE (old) == REG
5970 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5971 && reg_equiv_mem[REGNO (old)] != 0)
5972 real_old = reg_equiv_mem[REGNO (old)];
5973
5974 second_reload_reg = reload_reg_rtx[secondary_reload];
5975 icode = reload_secondary_in_icode[j];
5976
5977 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5978 || (reload_in[j] != 0 && reload_out[j] != 0))
5979 {
5980 enum reg_class new_class
5981 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5982 mode, real_oldequiv);
5983
5984 if (new_class == NO_REGS)
5985 second_reload_reg = 0;
5986 else
5987 {
5988 enum insn_code new_icode;
5989 enum machine_mode new_mode;
5990
5991 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5992 REGNO (second_reload_reg)))
5993 oldequiv = old, real_oldequiv = real_old;
5994 else
5995 {
5996 new_icode = reload_in_optab[(int) mode];
5997 if (new_icode != CODE_FOR_nothing
5998 && ((insn_operand_predicate[(int) new_icode][0]
5999 && ! ((*insn_operand_predicate[(int) new_icode][0])
6000 (reloadreg, mode)))
6001 || (insn_operand_predicate[(int) new_icode][1]
6002 && ! ((*insn_operand_predicate[(int) new_icode][1])
6003 (real_oldequiv, mode)))))
6004 new_icode = CODE_FOR_nothing;
6005
6006 if (new_icode == CODE_FOR_nothing)
6007 new_mode = mode;
6008 else
6009 new_mode = insn_operand_mode[(int) new_icode][2];
6010
6011 if (GET_MODE (second_reload_reg) != new_mode)
6012 {
6013 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6014 new_mode))
6015 oldequiv = old, real_oldequiv = real_old;
6016 else
6017 second_reload_reg
6018 = gen_rtx (REG, new_mode,
6019 REGNO (second_reload_reg));
6020 }
6021 }
6022 }
6023 }
6024
6025 /* If we still need a secondary reload register, check
6026 to see if it is being used as a scratch or intermediate
6027 register and generate code appropriately. If we need
6028 a scratch register, use REAL_OLDEQUIV since the form of
6029 the insn may depend on the actual address if it is
6030 a MEM. */
6031
6032 if (second_reload_reg)
6033 {
6034 if (icode != CODE_FOR_nothing)
6035 {
6036 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6037 second_reload_reg));
6038 special = 1;
6039 }
6040 else
6041 {
6042 /* See if we need a scratch register to load the
6043 intermediate register (a tertiary reload). */
6044 enum insn_code tertiary_icode
6045 = reload_secondary_in_icode[secondary_reload];
6046
6047 if (tertiary_icode != CODE_FOR_nothing)
6048 {
6049 rtx third_reload_reg
6050 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6051
6052 emit_insn ((GEN_FCN (tertiary_icode)
6053 (second_reload_reg, real_oldequiv,
6054 third_reload_reg)));
6055 }
6056 else
6057 gen_reload (second_reload_reg, oldequiv,
6058 reload_opnum[j],
6059 reload_when_needed[j]);
6060
6061 oldequiv = second_reload_reg;
6062 }
6063 }
6064 }
6065 #endif
6066
6067 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6068 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6069 reload_when_needed[j]);
6070
6071 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6072 /* We may have to make a REG_DEAD note for the secondary reload
6073 register in the insns we just made. Find the last insn that
6074 mentioned the register. */
6075 if (! special && second_reload_reg
6076 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6077 {
6078 rtx prev;
6079
6080 for (prev = get_last_insn (); prev;
6081 prev = PREV_INSN (prev))
6082 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
6083 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6084 PATTERN (prev)))
6085 {
6086 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6087 second_reload_reg,
6088 REG_NOTES (prev));
6089 break;
6090 }
6091 }
6092 #endif
6093 }
6094
6095 /* End this sequence. */
6096 *where = get_insns ();
6097 end_sequence ();
6098 }
6099
6100 /* Add a note saying the input reload reg
6101 dies in this insn, if anyone cares. */
6102 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6103 if (old != 0
6104 && reload_reg_rtx[j] != old
6105 && reload_reg_rtx[j] != 0
6106 && reload_out[j] == 0
6107 && ! reload_inherited[j]
6108 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6109 {
6110 register rtx reloadreg = reload_reg_rtx[j];
6111
6112 #if 0
6113 /* We can't abort here because we need to support this for sched.c.
6114 It's not terrible to miss a REG_DEAD note, but we should try
6115 to figure out how to do this correctly. */
6116 /* The code below is incorrect for address-only reloads. */
6117 if (reload_when_needed[j] != RELOAD_OTHER
6118 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6119 abort ();
6120 #endif
6121
6122 /* Add a death note to this insn, for an input reload. */
6123
6124 if ((reload_when_needed[j] == RELOAD_OTHER
6125 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6126 && ! dead_or_set_p (insn, reloadreg))
6127 REG_NOTES (insn)
6128 = gen_rtx (EXPR_LIST, REG_DEAD,
6129 reloadreg, REG_NOTES (insn));
6130 }
6131
6132 /* When we inherit a reload, the last marked death of the reload reg
6133 may no longer really be a death. */
6134 if (reload_reg_rtx[j] != 0
6135 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6136 && reload_inherited[j])
6137 {
6138 /* Handle inheriting an output reload.
6139 Remove the death note from the output reload insn. */
6140 if (reload_spill_index[j] >= 0
6141 && GET_CODE (reload_in[j]) == REG
6142 && spill_reg_store[reload_spill_index[j]] != 0
6143 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6144 REG_DEAD, REGNO (reload_reg_rtx[j])))
6145 remove_death (REGNO (reload_reg_rtx[j]),
6146 spill_reg_store[reload_spill_index[j]]);
6147 /* Likewise for input reloads that were inherited. */
6148 else if (reload_spill_index[j] >= 0
6149 && GET_CODE (reload_in[j]) == REG
6150 && spill_reg_store[reload_spill_index[j]] == 0
6151 && reload_inheritance_insn[j] != 0
6152 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6153 REGNO (reload_reg_rtx[j])))
6154 remove_death (REGNO (reload_reg_rtx[j]),
6155 reload_inheritance_insn[j]);
6156 else
6157 {
6158 rtx prev;
6159
6160 /* We got this register from find_equiv_reg.
6161 Search back for its last death note and get rid of it.
6162 But don't search back too far.
6163 Don't go past a place where this reg is set,
6164 since a death note before that remains valid. */
6165 for (prev = PREV_INSN (insn);
6166 prev && GET_CODE (prev) != CODE_LABEL;
6167 prev = PREV_INSN (prev))
6168 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6169 && dead_or_set_p (prev, reload_reg_rtx[j]))
6170 {
6171 if (find_regno_note (prev, REG_DEAD,
6172 REGNO (reload_reg_rtx[j])))
6173 remove_death (REGNO (reload_reg_rtx[j]), prev);
6174 break;
6175 }
6176 }
6177 }
6178
6179 /* We might have used find_equiv_reg above to choose an alternate
6180 place from which to reload. If so, and it died, we need to remove
6181 that death and move it to one of the insns we just made. */
6182
6183 if (oldequiv_reg != 0
6184 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6185 {
6186 rtx prev, prev1;
6187
6188 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6189 prev = PREV_INSN (prev))
6190 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6191 && dead_or_set_p (prev, oldequiv_reg))
6192 {
6193 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6194 {
6195 for (prev1 = this_reload_insn;
6196 prev1; prev1 = PREV_INSN (prev1))
6197 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
6198 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6199 PATTERN (prev1)))
6200 {
6201 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6202 oldequiv_reg,
6203 REG_NOTES (prev1));
6204 break;
6205 }
6206 remove_death (REGNO (oldequiv_reg), prev);
6207 }
6208 break;
6209 }
6210 }
6211 #endif
6212
6213 /* If we are reloading a register that was recently stored in with an
6214 output-reload, see if we can prove there was
6215 actually no need to store the old value in it. */
6216
6217 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6218 && reload_in[j] != 0
6219 && GET_CODE (reload_in[j]) == REG
6220 #if 0
6221 /* There doesn't seem to be any reason to restrict this to pseudos
6222 and doing so loses in the case where we are copying from a
6223 register of the wrong class. */
6224 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6225 #endif
6226 && spill_reg_store[reload_spill_index[j]] != 0
6227 /* This is unsafe if some other reload uses the same reg first. */
6228 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6229 reload_opnum[j], reload_when_needed[j])
6230 && dead_or_set_p (insn, reload_in[j])
6231 /* This is unsafe if operand occurs more than once in current
6232 insn. Perhaps some occurrences weren't reloaded. */
6233 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6234 delete_output_reload (insn, j,
6235 spill_reg_store[reload_spill_index[j]]);
6236
6237 /* Input-reloading is done. Now do output-reloading,
6238 storing the value from the reload-register after the main insn
6239 if reload_out[j] is nonzero.
6240
6241 ??? At some point we need to support handling output reloads of
6242 JUMP_INSNs or insns that set cc0. */
6243 old = reload_out[j];
6244 if (old != 0
6245 && reload_reg_rtx[j] != old
6246 && reload_reg_rtx[j] != 0)
6247 {
6248 register rtx reloadreg = reload_reg_rtx[j];
6249 register rtx second_reloadreg = 0;
6250 rtx note, p;
6251 enum machine_mode mode;
6252 int special = 0;
6253
6254 /* An output operand that dies right away does need a reload,
6255 but need not be copied from it. Show the new location in the
6256 REG_UNUSED note. */
6257 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6258 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6259 {
6260 XEXP (note, 0) = reload_reg_rtx[j];
6261 continue;
6262 }
6263 else if (GET_CODE (old) == SCRATCH)
6264 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6265 but we don't want to make an output reload. */
6266 continue;
6267
6268 #if 0
6269 /* Strip off of OLD any size-increasing SUBREGs such as
6270 (SUBREG:SI foo:QI 0). */
6271
6272 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6273 && (GET_MODE_SIZE (GET_MODE (old))
6274 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6275 old = SUBREG_REG (old);
6276 #endif
6277
6278 /* If is a JUMP_INSN, we can't support output reloads yet. */
6279 if (GET_CODE (insn) == JUMP_INSN)
6280 abort ();
6281
6282 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6283
6284 /* Determine the mode to reload in.
6285 See comments above (for input reloading). */
6286
6287 mode = GET_MODE (old);
6288 if (mode == VOIDmode)
6289 {
6290 /* VOIDmode should never happen for an output. */
6291 if (asm_noperands (PATTERN (insn)) < 0)
6292 /* It's the compiler's fault. */
6293 fatal_insn ("VOIDmode on an output", insn);
6294 error_for_asm (insn, "output operand is constant in `asm'");
6295 /* Prevent crash--use something we know is valid. */
6296 mode = word_mode;
6297 old = gen_rtx (REG, mode, REGNO (reloadreg));
6298 }
6299
6300 if (GET_MODE (reloadreg) != mode)
6301 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6302
6303 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6304
6305 /* If we need two reload regs, set RELOADREG to the intermediate
6306 one, since it will be stored into OLD. We might need a secondary
6307 register only for an input reload, so check again here. */
6308
6309 if (reload_secondary_out_reload[j] >= 0)
6310 {
6311 rtx real_old = old;
6312
6313 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6314 && reg_equiv_mem[REGNO (old)] != 0)
6315 real_old = reg_equiv_mem[REGNO (old)];
6316
6317 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6318 mode, real_old)
6319 != NO_REGS))
6320 {
6321 second_reloadreg = reloadreg;
6322 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6323
6324 /* See if RELOADREG is to be used as a scratch register
6325 or as an intermediate register. */
6326 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6327 {
6328 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6329 (real_old, second_reloadreg, reloadreg)));
6330 special = 1;
6331 }
6332 else
6333 {
6334 /* See if we need both a scratch and intermediate reload
6335 register. */
6336
6337 int secondary_reload = reload_secondary_out_reload[j];
6338 enum insn_code tertiary_icode
6339 = reload_secondary_out_icode[secondary_reload];
6340
6341 if (GET_MODE (reloadreg) != mode)
6342 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6343
6344 if (tertiary_icode != CODE_FOR_nothing)
6345 {
6346 rtx third_reloadreg
6347 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6348
6349 /* Copy primary reload reg to secondary reload reg.
6350 (Note that these have been swapped above, then
6351 secondary reload reg to OLD using our insn. */
6352
6353 gen_reload (reloadreg, second_reloadreg,
6354 reload_opnum[j], reload_when_needed[j]);
6355 emit_insn ((GEN_FCN (tertiary_icode)
6356 (real_old, reloadreg, third_reloadreg)));
6357 special = 1;
6358 }
6359
6360 else
6361 /* Copy between the reload regs here and then to
6362 OUT later. */
6363
6364 gen_reload (reloadreg, second_reloadreg,
6365 reload_opnum[j], reload_when_needed[j]);
6366 }
6367 }
6368 }
6369 #endif
6370
6371 /* Output the last reload insn. */
6372 if (! special)
6373 gen_reload (old, reloadreg, reload_opnum[j],
6374 reload_when_needed[j]);
6375
6376 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6377 /* If final will look at death notes for this reg,
6378 put one on the last output-reload insn to use it. Similarly
6379 for any secondary register. */
6380 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6381 for (p = get_last_insn (); p; p = PREV_INSN (p))
6382 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6383 && reg_overlap_mentioned_for_reload_p (reloadreg,
6384 PATTERN (p)))
6385 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6386 reloadreg, REG_NOTES (p));
6387
6388 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6389 if (! special
6390 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6391 for (p = get_last_insn (); p; p = PREV_INSN (p))
6392 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6393 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6394 PATTERN (p)))
6395 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6396 second_reloadreg, REG_NOTES (p));
6397 #endif
6398 #endif
6399 /* Look at all insns we emitted, just to be safe. */
6400 for (p = get_insns (); p; p = NEXT_INSN (p))
6401 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6402 {
6403 /* If this output reload doesn't come from a spill reg,
6404 clear any memory of reloaded copies of the pseudo reg.
6405 If this output reload comes from a spill reg,
6406 reg_has_output_reload will make this do nothing. */
6407 note_stores (PATTERN (p), forget_old_reloads_1);
6408
6409 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6410 && reload_spill_index[j] >= 0)
6411 new_spill_reg_store[reload_spill_index[j]] = p;
6412 }
6413
6414 output_reload_insns[reload_opnum[j]] = get_insns ();
6415 end_sequence ();
6416 }
6417 }
6418
6419 /* Now write all the insns we made for reloads in the order expected by
6420 the allocation functions. Prior to the insn being reloaded, we write
6421 the following reloads:
6422
6423 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6424
6425 RELOAD_OTHER reloads.
6426
6427 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6428 the RELOAD_FOR_INPUT reload for the operand.
6429
6430 RELOAD_FOR_OPADDR_ADDRS reloads.
6431
6432 RELOAD_FOR_OPERAND_ADDRESS reloads.
6433
6434 After the insn being reloaded, we write the following:
6435
6436 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6437 the RELOAD_FOR_OUTPUT reload for that operand. */
6438
6439 emit_insns_before (other_input_address_reload_insns, before_insn);
6440 emit_insns_before (other_input_reload_insns, before_insn);
6441
6442 for (j = 0; j < reload_n_operands; j++)
6443 {
6444 emit_insns_before (input_address_reload_insns[j], before_insn);
6445 emit_insns_before (input_reload_insns[j], before_insn);
6446 }
6447
6448 emit_insns_before (other_operand_reload_insns, before_insn);
6449 emit_insns_before (operand_reload_insns, before_insn);
6450
6451 for (j = 0; j < reload_n_operands; j++)
6452 {
6453 emit_insns_before (output_address_reload_insns[j], following_insn);
6454 emit_insns_before (output_reload_insns[j], following_insn);
6455 }
6456
6457 /* Move death notes from INSN
6458 to output-operand-address and output reload insns. */
6459 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6460 {
6461 rtx insn1;
6462 /* Loop over those insns, last ones first. */
6463 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6464 insn1 = PREV_INSN (insn1))
6465 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6466 {
6467 rtx source = SET_SRC (PATTERN (insn1));
6468 rtx dest = SET_DEST (PATTERN (insn1));
6469
6470 /* The note we will examine next. */
6471 rtx reg_notes = REG_NOTES (insn);
6472 /* The place that pointed to this note. */
6473 rtx *prev_reg_note = &REG_NOTES (insn);
6474
6475 /* If the note is for something used in the source of this
6476 reload insn, or in the output address, move the note. */
6477 while (reg_notes)
6478 {
6479 rtx next_reg_notes = XEXP (reg_notes, 1);
6480 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6481 && GET_CODE (XEXP (reg_notes, 0)) == REG
6482 && ((GET_CODE (dest) != REG
6483 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6484 dest))
6485 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6486 source)))
6487 {
6488 *prev_reg_note = next_reg_notes;
6489 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6490 REG_NOTES (insn1) = reg_notes;
6491 }
6492 else
6493 prev_reg_note = &XEXP (reg_notes, 1);
6494
6495 reg_notes = next_reg_notes;
6496 }
6497 }
6498 }
6499 #endif
6500
6501 /* For all the spill regs newly reloaded in this instruction,
6502 record what they were reloaded from, so subsequent instructions
6503 can inherit the reloads.
6504
6505 Update spill_reg_store for the reloads of this insn.
6506 Copy the elements that were updated in the loop above. */
6507
6508 for (j = 0; j < n_reloads; j++)
6509 {
6510 register int r = reload_order[j];
6511 register int i = reload_spill_index[r];
6512
6513 /* I is nonneg if this reload used one of the spill regs.
6514 If reload_reg_rtx[r] is 0, this is an optional reload
6515 that we opted to ignore.
6516
6517 Also ignore reloads that don't reach the end of the insn,
6518 since we will eventually see the one that does. */
6519
6520 if (i >= 0 && reload_reg_rtx[r] != 0
6521 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6522 reload_when_needed[r]))
6523 {
6524 /* First, clear out memory of what used to be in this spill reg.
6525 If consecutive registers are used, clear them all. */
6526 int nr
6527 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6528 int k;
6529
6530 for (k = 0; k < nr; k++)
6531 {
6532 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6533 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6534 }
6535
6536 /* Maybe the spill reg contains a copy of reload_out. */
6537 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6538 {
6539 register int nregno = REGNO (reload_out[r]);
6540 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6541 : HARD_REGNO_NREGS (nregno,
6542 GET_MODE (reload_reg_rtx[r])));
6543
6544 spill_reg_store[i] = new_spill_reg_store[i];
6545 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6546
6547 /* If NREGNO is a hard register, it may occupy more than
6548 one register. If it does, say what is in the
6549 rest of the registers assuming that both registers
6550 agree on how many words the object takes. If not,
6551 invalidate the subsequent registers. */
6552
6553 if (nregno < FIRST_PSEUDO_REGISTER)
6554 for (k = 1; k < nnr; k++)
6555 reg_last_reload_reg[nregno + k]
6556 = (nr == nnr ? gen_rtx (REG,
6557 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6558 REGNO (reload_reg_rtx[r]) + k)
6559 : 0);
6560
6561 /* Now do the inverse operation. */
6562 for (k = 0; k < nr; k++)
6563 {
6564 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6565 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6566 : nregno + k);
6567 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6568 }
6569 }
6570
6571 /* Maybe the spill reg contains a copy of reload_in. Only do
6572 something if there will not be an output reload for
6573 the register being reloaded. */
6574 else if (reload_out[r] == 0
6575 && reload_in[r] != 0
6576 && ((GET_CODE (reload_in[r]) == REG
6577 && ! reg_has_output_reload[REGNO (reload_in[r])]
6578 || (GET_CODE (reload_in_reg[r]) == REG
6579 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
6580 {
6581 register int nregno;
6582 int nnr;
6583
6584 if (GET_CODE (reload_in[r]) == REG)
6585 nregno = REGNO (reload_in[r]);
6586 else
6587 nregno = REGNO (reload_in_reg[r]);
6588
6589 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6590 : HARD_REGNO_NREGS (nregno,
6591 GET_MODE (reload_reg_rtx[r])));
6592
6593 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6594
6595 if (nregno < FIRST_PSEUDO_REGISTER)
6596 for (k = 1; k < nnr; k++)
6597 reg_last_reload_reg[nregno + k]
6598 = (nr == nnr ? gen_rtx (REG,
6599 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6600 REGNO (reload_reg_rtx[r]) + k)
6601 : 0);
6602
6603 /* Unless we inherited this reload, show we haven't
6604 recently done a store. */
6605 if (! reload_inherited[r])
6606 spill_reg_store[i] = 0;
6607
6608 for (k = 0; k < nr; k++)
6609 {
6610 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6611 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6612 : nregno + k);
6613 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6614 = insn;
6615 }
6616 }
6617 }
6618
6619 /* The following if-statement was #if 0'd in 1.34 (or before...).
6620 It's reenabled in 1.35 because supposedly nothing else
6621 deals with this problem. */
6622
6623 /* If a register gets output-reloaded from a non-spill register,
6624 that invalidates any previous reloaded copy of it.
6625 But forget_old_reloads_1 won't get to see it, because
6626 it thinks only about the original insn. So invalidate it here. */
6627 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6628 {
6629 register int nregno = REGNO (reload_out[r]);
6630 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
6631
6632 while (num_regs-- > 0)
6633 reg_last_reload_reg[nregno + num_regs] = 0;
6634 }
6635 }
6636 }
6637 \f
/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.

   Returns first insn emitted.  */

rtx
gen_reload (out, in, opnum, type)
     rtx out;
     rtx in;
     int opnum;
     enum reload_type type;
{
  /* Remember where we were, so we can both find the first insn we emit
     (the return value) and delete failed attempts with
     delete_insns_since.  */
  rtx last = get_last_insn ();

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (GET_CODE (XEXP (in, 0)) == REG
	  || GET_CODE (XEXP (in, 0)) == MEM)
      && (GET_CODE (XEXP (in, 1)) == REG
	  || CONSTANT_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == MEM))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `insn_extract' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      int code;

      /* Substitute any replacements reload has queued for the operands,
	 so the insn we build reflects the post-reload form.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      /* NOTE(review): this takes REGNO (out), i.e. assumes OUT is a REG
	 whenever IN is such a PLUS -- presumably guaranteed by the
	 callers; confirm.  */
      if (GET_CODE (XEXP (in, 1)) == REG
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx (PLUS, GET_MODE (in), op0, op1);

      /* Emit the three-operand add and see if the machine accepts it.  */
      insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      delete_insns_since (last);

      /* If that failed, we must use a conservative two-insn sequence.
	 use move to copy constant, MEM, or pseudo register to the reload
	 register since "move" will be able to handle an arbitrary operand,
	 unlike add which can't, in general.  Then add the registers.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      /* Make OP0 the operand that the move can certainly handle
	 (constant, MEM, or pseudo), leaving OP1 for the add.  */
      if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
	  || (GET_CODE (op1) == REG
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
	tem = op0, op0 = op1, op1 = tem;

      emit_insn (gen_move_insn (out, op0));

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn (gen_add2_insn (out, op1));

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      delete_insns_since (last);

      /* Last resort: move the other operand in first, then add.  */
      emit_insn (gen_move_insn (out, op1));
      emit_insn (gen_add2_insn (out, op0));
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
	   && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
				       REGNO_REG_CLASS (REGNO (out)),
				       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx (REG, GET_MODE (loc), REGNO (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx (REG, GET_MODE (loc), REGNO (in));

      /* Bounce the value through the secondary memory location.  */
      emit_insn (gen_move_insn (loc, in));
      emit_insn (gen_move_insn (out, loc));
    }
#endif

  /* If IN is a simple operand, use gen_move_insn.  */
  else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
    emit_insn (gen_move_insn (out, in));

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (out, in));
#endif

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx (SET, VOIDmode, out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
6823 \f
6824 /* Delete a previously made output-reload
6825 whose result we now believe is not needed.
6826 First we double-check.
6827
6828 INSN is the insn now being processed.
6829 OUTPUT_RELOAD_INSN is the insn of the output reload.
6830 J is the reload-number for this insn. */
6831
6832 static void
6833 delete_output_reload (insn, j, output_reload_insn)
6834 rtx insn;
6835 int j;
6836 rtx output_reload_insn;
6837 {
6838 register rtx i1;
6839
6840 /* Get the raw pseudo-register referred to. */
6841
6842 rtx reg = reload_in[j];
6843 while (GET_CODE (reg) == SUBREG)
6844 reg = SUBREG_REG (reg);
6845
6846 /* If the pseudo-reg we are reloading is no longer referenced
6847 anywhere between the store into it and here,
6848 and no jumps or labels intervene, then the value can get
6849 here through the reload reg alone.
6850 Otherwise, give up--return. */
6851 for (i1 = NEXT_INSN (output_reload_insn);
6852 i1 != insn; i1 = NEXT_INSN (i1))
6853 {
6854 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6855 return;
6856 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6857 && reg_mentioned_p (reg, PATTERN (i1)))
6858 return;
6859 }
6860
6861 if (cannot_omit_stores[REGNO (reg)])
6862 return;
6863
6864 /* If this insn will store in the pseudo again,
6865 the previous store can be removed. */
6866 if (reload_out[j] == reload_in[j])
6867 delete_insn (output_reload_insn);
6868
6869 /* See if the pseudo reg has been completely replaced
6870 with reload regs. If so, delete the store insn
6871 and forget we had a stack slot for the pseudo. */
6872 else if (reg_n_deaths[REGNO (reg)] == 1
6873 && reg_basic_block[REGNO (reg)] >= 0
6874 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
6875 {
6876 rtx i2;
6877
6878 /* We know that it was used only between here
6879 and the beginning of the current basic block.
6880 (We also know that the last use before INSN was
6881 the output reload we are thinking of deleting, but never mind that.)
6882 Search that range; see if any ref remains. */
6883 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6884 {
6885 rtx set = single_set (i2);
6886
6887 /* Uses which just store in the pseudo don't count,
6888 since if they are the only uses, they are dead. */
6889 if (set != 0 && SET_DEST (set) == reg)
6890 continue;
6891 if (GET_CODE (i2) == CODE_LABEL
6892 || GET_CODE (i2) == JUMP_INSN)
6893 break;
6894 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
6895 && reg_mentioned_p (reg, PATTERN (i2)))
6896 /* Some other ref remains;
6897 we can't do anything. */
6898 return;
6899 }
6900
6901 /* Delete the now-dead stores into this pseudo. */
6902 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6903 {
6904 rtx set = single_set (i2);
6905
6906 if (set != 0 && SET_DEST (set) == reg)
6907 delete_insn (i2);
6908 if (GET_CODE (i2) == CODE_LABEL
6909 || GET_CODE (i2) == JUMP_INSN)
6910 break;
6911 }
6912
6913 /* For the debugging info,
6914 say the pseudo lives in this reload reg. */
6915 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
6916 alter_reg (REGNO (reg), -1);
6917 }
6918 }
6919 \f
6920 /* Output reload-insns to reload VALUE into RELOADREG.
6921 VALUE is an autoincrement or autodecrement RTX whose operand
6922 is a register or memory location;
6923 so reloading involves incrementing that location.
6924
6925 INC_AMOUNT is the number to increment or decrement by (always positive).
6926 This cannot be deduced from VALUE. */
6927
6928 static void
6929 inc_for_reload (reloadreg, value, inc_amount)
6930 rtx reloadreg;
6931 rtx value;
6932 int inc_amount;
6933 {
6934 /* REG or MEM to be copied and incremented. */
6935 rtx incloc = XEXP (value, 0);
6936 /* Nonzero if increment after copying. */
6937 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
6938 rtx last;
6939 rtx inc;
6940 rtx add_insn;
6941 int code;
6942
6943 /* No hard register is equivalent to this register after
6944 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
6945 we could inc/dec that register as well (maybe even using it for
6946 the source), but I'm not sure it's worth worrying about. */
6947 if (GET_CODE (incloc) == REG)
6948 reg_last_reload_reg[REGNO (incloc)] = 0;
6949
6950 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
6951 inc_amount = - inc_amount;
6952
6953 inc = GEN_INT (inc_amount);
6954
6955 /* If this is post-increment, first copy the location to the reload reg. */
6956 if (post)
6957 emit_insn (gen_move_insn (reloadreg, incloc));
6958
6959 /* See if we can directly increment INCLOC. Use a method similar to that
6960 in gen_reload. */
6961
6962 last = get_last_insn ();
6963 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
6964 gen_rtx (PLUS, GET_MODE (incloc),
6965 incloc, inc)));
6966
6967 code = recog_memoized (add_insn);
6968 if (code >= 0)
6969 {
6970 insn_extract (add_insn);
6971 if (constrain_operands (code, 1))
6972 {
6973 /* If this is a pre-increment and we have incremented the value
6974 where it lives, copy the incremented value to RELOADREG to
6975 be used as an address. */
6976
6977 if (! post)
6978 emit_insn (gen_move_insn (reloadreg, incloc));
6979
6980 return;
6981 }
6982 }
6983
6984 delete_insns_since (last);
6985
6986 /* If couldn't do the increment directly, must increment in RELOADREG.
6987 The way we do this depends on whether this is pre- or post-increment.
6988 For pre-increment, copy INCLOC to the reload register, increment it
6989 there, then save back. */
6990
6991 if (! post)
6992 {
6993 emit_insn (gen_move_insn (reloadreg, incloc));
6994 emit_insn (gen_add2_insn (reloadreg, inc));
6995 emit_insn (gen_move_insn (incloc, reloadreg));
6996 }
6997 else
6998 {
6999 /* Postincrement.
7000 Because this might be a jump insn or a compare, and because RELOADREG
7001 may not be available after the insn in an input reload, we must do
7002 the incrementation before the insn being reloaded for.
7003
7004 We have already copied INCLOC to RELOADREG. Increment the copy in
7005 RELOADREG, save that back, then decrement RELOADREG so it has
7006 the original value. */
7007
7008 emit_insn (gen_add2_insn (reloadreg, inc));
7009 emit_insn (gen_move_insn (incloc, reloadreg));
7010 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7011 }
7012
7013 return;
7014 }
7015 \f
7016 /* Return 1 if we are certain that the constraint-string STRING allows
7017 the hard register REG. Return 0 if we can't be sure of this. */
7018
7019 static int
7020 constraint_accepts_reg_p (string, reg)
7021 char *string;
7022 rtx reg;
7023 {
7024 int value = 0;
7025 int regno = true_regnum (reg);
7026 int c;
7027
7028 /* Initialize for first alternative. */
7029 value = 0;
7030 /* Check that each alternative contains `g' or `r'. */
7031 while (1)
7032 switch (c = *string++)
7033 {
7034 case 0:
7035 /* If an alternative lacks `g' or `r', we lose. */
7036 return value;
7037 case ',':
7038 /* If an alternative lacks `g' or `r', we lose. */
7039 if (value == 0)
7040 return 0;
7041 /* Initialize for next alternative. */
7042 value = 0;
7043 break;
7044 case 'g':
7045 case 'r':
7046 /* Any general reg wins for this alternative. */
7047 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7048 value = 1;
7049 break;
7050 default:
7051 /* Any reg in specified class wins for this alternative. */
7052 {
7053 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7054
7055 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7056 value = 1;
7057 }
7058 }
7059 }
7060 \f
7061 /* Return the number of places FIND appears within X, but don't count
7062 an occurrence if some SET_DEST is FIND. */
7063
7064 static int
7065 count_occurrences (x, find)
7066 register rtx x, find;
7067 {
7068 register int i, j;
7069 register enum rtx_code code;
7070 register char *format_ptr;
7071 int count;
7072
7073 if (x == find)
7074 return 1;
7075 if (x == 0)
7076 return 0;
7077
7078 code = GET_CODE (x);
7079
7080 switch (code)
7081 {
7082 case REG:
7083 case QUEUED:
7084 case CONST_INT:
7085 case CONST_DOUBLE:
7086 case SYMBOL_REF:
7087 case CODE_LABEL:
7088 case PC:
7089 case CC0:
7090 return 0;
7091
7092 case SET:
7093 if (SET_DEST (x) == find)
7094 return count_occurrences (SET_SRC (x), find);
7095 break;
7096 }
7097
7098 format_ptr = GET_RTX_FORMAT (code);
7099 count = 0;
7100
7101 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7102 {
7103 switch (*format_ptr++)
7104 {
7105 case 'e':
7106 count += count_occurrences (XEXP (x, i), find);
7107 break;
7108
7109 case 'E':
7110 if (XVEC (x, i) != NULL)
7111 {
7112 for (j = 0; j < XVECLEN (x, i); j++)
7113 count += count_occurrences (XVECEXP (x, i, j), find);
7114 }
7115 break;
7116 }
7117 }
7118 return count;
7119 }