(reload_as_needed): Call forget_old_reloads_1 on pattern before reg
[gcc.git] / gcc / reload1.c
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92, 93, 94, 1995 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include <stdio.h>
22 #include "config.h"
23 #include "rtl.h"
24 #include "obstack.h"
25 #include "insn-config.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
28 #include "flags.h"
29 #include "expr.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "reload.h"
33 #include "recog.h"
34 #include "basic-block.h"
35 #include "output.h"
36
37 /* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
69
70
/* Default move costs if the target machine description does not define
   them: a register-register move costs 2 and a memory access costs 4,
   in units where a simple reg-reg move is the baseline. */

#ifndef REGISTER_MOVE_COST
#define REGISTER_MOVE_COST(x, y) 2
#endif

#ifndef MEMORY_MOVE_COST
#define MEMORY_MOVE_COST(x) 4
#endif
78 \f
/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which reg N has been reloaded (perhaps for a previous insn). */
static rtx *reg_last_reload_reg;

/* Element N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N. */
static char *reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn. */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for. */
rtx *reg_equiv_constant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer).  Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem. */
rtx *reg_equiv_memory_loc;

/* Element N is the address of the stack slot to which pseudo reg N is
   equivalent.  This is used when the address is not valid as a memory
   address (because its displacement is too big for the machine). */
rtx *reg_equiv_address;

/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot. */
rtx *reg_equiv_mem;

/* Widest width in which each pseudo reg is referred to (via subreg). */
static int *reg_max_ref_width;

/* Element N is the insn that initialized reg N from its equivalent
   constant or memory slot. */
static rtx *reg_equiv_init;

/* During reload_as_needed, element N contains the last pseudo regno
   reloaded into the Nth reload register.  This vector is in parallel
   with spill_regs.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur. */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   the Nth reload register was last used.  This vector is in parallel
   with spill_regs, and its contents are significant only when
   reg_reloaded_contents is significant. */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Number of spill-regs so far; number of valid elements of spill_regs. */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode. */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store. */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];

/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs. */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];

/* This reg set indicates registers that may not be used for retrying global
   allocation.  The registers that may not be used include all spill registers
   and the frame pointer (if we are using one). */
HARD_REG_SET forbidden_regs;

/* This reg set indicates registers that are not good for spill registers.
   They will not be used to complete groups of spill registers.  This includes
   all fixed registers, registers that may be eliminated, and, if
   SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.

   (spill_reg_order prevents these registers from being used to start a
   group.) */
static HARD_REG_SET bad_spill_regs;

/* Describes order of use of registers for reloading of spilled
   pseudo-registers.  n_spills (above) is the number of elements that
   are actually valid; new ones are added at the end. */
static short spill_regs[FIRST_PSEUDO_REGISTER];

/* Index of last register assigned as a spill register.  We allocate in
   a round-robin fashion. */

static int last_spill_reg;

/* Describes order of preference for putting regs into spill_regs.
   Contains the numbers of all the hard regs, in order most preferred first.
   This order is different for each function.
   It is set up by order_regs_for_reload.
   Empty elements at the end contain -1. */
static short potential_reload_regs[FIRST_PSEUDO_REGISTER];

/* 1 for a hard register that appears explicitly in the rtl
   (for example, function value registers, special registers
   used by insns, structure value pointer registers). */
static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];

/* Indicates if a register was counted against the need for
   groups.  0 means it can count against max_nongroup instead. */
static HARD_REG_SET counted_for_groups;

/* Indicates if a register was counted against the need for
   non-groups.  0 means it can become part of a new group.
   During choose_reload_regs, 1 here means don't use this reg
   as part of a group, even if it seems to be otherwise ok. */
static HARD_REG_SET counted_for_nongroups;

/* Indexed by pseudo reg number N, nonzero means we may not delete stores
   into the real (memory) home of pseudo N.  This is set if we already
   substituted a memory equivalent in some uses, which happens when we
   have to eliminate the fp from it. */
static char *cannot_omit_stores;

/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register. */

static char spill_indirect_levels;

/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above. */

char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */

char double_reg_address_ok;

/* Record the stack slot for each spilled hard register. */

static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot. */

static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Indexed by register class and basic block number, nonzero if there is
   any need for a spill register of that class in that basic block.
   The pointer is 0 if we did stupid allocation and don't know
   the structure of basic blocks. */

char *basic_block_needs[N_REG_CLASSES];

/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg. */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls. */

int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently. */

int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects.  They provide a
   place to pass a scratch register. */

enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];

/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn. */

struct obstack reload_obstack;
char *reload_firstobj;

/* Allocation hooks required by the obstack machinery. */
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* List of labels that must never be deleted. */
extern rtx forced_labels;
271 \f
272 /* This structure is used to record information about register eliminations.
273 Each array entry describes one possible way of eliminating a register
274 in favor of another. If there is more than one way of eliminating a
275 particular register, the most preferred should be specified first. */
276
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.  If there is more than one way of eliminating a
   particular register, the most preferred should be specified first. */

static struct elim_table
{
  int from;			/* Register number to be eliminated. */
  int to;			/* Register number used as replacement. */
  int initial_offset;		/* Initial difference between values. */
  int can_eliminate;		/* Non-zero if this elimination can be done. */
  int can_eliminate_previous;	/* Value of CAN_ELIMINATE in previous scan over
				   insns made by reload. */
  int offset;			/* Current offset between the two regs. */
  int max_offset;		/* Maximum offset between the two regs. */
  int previous_offset;		/* Offset at end of previous insn. */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM. */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated. */
  rtx to_rtx;			/* REG rtx for the replacement. */
} reg_eliminate[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer. */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

/* Number of entries in the elimination table above. */
#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
308
/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If non-zero, we use a new copy of each
   replacement result in any insns encountered. */
static int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate. */
static int num_eliminable;

/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the number of the label.
   The first table is an array of flags that records whether we have yet
   encountered a label and the second table is an array of arrays, one
   entry in the latter array for each elimination. */

static char *offsets_known_at;
static int (*offsets_at)[NUM_ELIMINABLE_REGS];

/* Number of labels in the current function. */

static int num_labels;

/* Pair of a hard register number and its use count, used when ordering
   candidate spill registers. */
struct hard_reg_n_uses { int regno; int uses; };
332 \f
/* Forward declarations for the static helper functions in this file. */

static int possible_group_p		PROTO((int, int *));
static void count_possible_groups	PROTO((int *, enum machine_mode *,
					       int *, int));
static int modes_equiv_for_class_p	PROTO((enum machine_mode,
					       enum machine_mode,
					       enum reg_class));
static void spill_failure		PROTO((rtx));
static int new_spill_reg		PROTO((int, int, int *, int *, int,
					       FILE *));
static void delete_dead_insn		PROTO((rtx));
static void alter_reg			PROTO((int, int));
static void mark_scratch_live		PROTO((rtx));
static void set_label_offsets		PROTO((rtx, rtx, int));
static int eliminate_regs_in_insn	PROTO((rtx, int));
static void mark_not_eliminable		PROTO((rtx, rtx));
static int spill_hard_reg		PROTO((int, int, FILE *, int));
static void scan_paradoxical_subregs	PROTO((rtx));
static int hard_reg_use_compare		PROTO((struct hard_reg_n_uses *,
					       struct hard_reg_n_uses *));
static void order_regs_for_reload	PROTO((void));
static int compare_spill_regs		PROTO((short *, short *));
static void reload_as_needed		PROTO((rtx, int));
static void forget_old_reloads_1	PROTO((rtx, rtx));
static int reload_reg_class_lower	PROTO((short *, short *));
static void mark_reload_reg_in_use	PROTO((int, int, enum reload_type,
					       enum machine_mode));
static void clear_reload_reg_in_use	PROTO((int, int, enum reload_type,
					       enum machine_mode));
static int reload_reg_free_p		PROTO((int, int, enum reload_type));
static int reload_reg_free_before_p	PROTO((int, int, enum reload_type));
static int reload_reg_reaches_end_p	PROTO((int, int, enum reload_type));
static int reloads_conflict		PROTO((int, int));
static int allocate_reload_reg		PROTO((int, rtx, int, int));
static void choose_reload_regs		PROTO((rtx, rtx));
static void merge_assigned_reloads	PROTO((rtx));
static void emit_reload_insns		PROTO((rtx));
static void delete_output_reload	PROTO((rtx, int, rtx));
static void inc_for_reload		PROTO((rtx, rtx, int));
static int constraint_accepts_reg_p	PROTO((char *, rtx));
static int count_occurrences		PROTO((rtx, rtx));
373 \f
374 /* Initialize the reload pass once per compilation. */
375
376 void
377 init_reload ()
378 {
379 register int i;
380
381 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
382 Set spill_indirect_levels to the number of levels such addressing is
383 permitted, zero if it is not permitted at all. */
384
385 register rtx tem
386 = gen_rtx (MEM, Pmode,
387 gen_rtx (PLUS, Pmode,
388 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
389 GEN_INT (4)));
390 spill_indirect_levels = 0;
391
392 while (memory_address_p (QImode, tem))
393 {
394 spill_indirect_levels++;
395 tem = gen_rtx (MEM, Pmode, tem);
396 }
397
398 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
399
400 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
401 indirect_symref_ok = memory_address_p (QImode, tem);
402
403 /* See if reg+reg is a valid (and offsettable) address. */
404
405 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
406 {
407 tem = gen_rtx (PLUS, Pmode,
408 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
409 gen_rtx (REG, Pmode, i));
410 /* This way, we make sure that reg+reg is an offsettable address. */
411 tem = plus_constant (tem, 4);
412
413 if (memory_address_p (QImode, tem))
414 {
415 double_reg_address_ok = 1;
416 break;
417 }
418 }
419
420 /* Initialize obstack for our rtl allocation. */
421 gcc_obstack_init (&reload_obstack);
422 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
423 }
424
425 /* Main entry point for the reload pass.
426
427 FIRST is the first insn of the function being compiled.
428
429 GLOBAL nonzero means we were called from global_alloc
430 and should attempt to reallocate any pseudoregs that we
431 displace from hard regs we will use for reloads.
432 If GLOBAL is zero, we do not have enough information to do that,
433 so any pseudo reg that is spilled must go to the stack.
434
435 DUMPFILE is the global-reg debugging dump file stream, or 0.
436 If it is nonzero, messages are written to it to describe
437 which registers are seized as reload regs, which pseudo regs
438 are spilled from them, and where the pseudo regs are reallocated to.
439
440 Return value is nonzero if reload failed
441 and we must not do any more for this function. */
442
443 int
444 reload (first, global, dumpfile)
445 rtx first;
446 int global;
447 FILE *dumpfile;
448 {
449 register int class;
450 register int i, j, k;
451 register rtx insn;
452 register struct elim_table *ep;
453
454 int something_changed;
455 int something_needs_reloads;
456 int something_needs_elimination;
457 int new_basic_block_needs;
458 enum reg_class caller_save_spill_class = NO_REGS;
459 int caller_save_group_size = 1;
460
461 /* Nonzero means we couldn't get enough spill regs. */
462 int failure = 0;
463
464 /* The basic block number currently being processed for INSN. */
465 int this_block;
466
467 /* Make sure even insns with volatile mem refs are recognizable. */
468 init_recog ();
469
470 /* Enable find_equiv_reg to distinguish insns made by reload. */
471 reload_first_uid = get_max_uid ();
472
473 for (i = 0; i < N_REG_CLASSES; i++)
474 basic_block_needs[i] = 0;
475
476 #ifdef SECONDARY_MEMORY_NEEDED
477 /* Initialize the secondary memory table. */
478 clear_secondary_mem ();
479 #endif
480
481 /* Remember which hard regs appear explicitly
482 before we merge into `regs_ever_live' the ones in which
483 pseudo regs have been allocated. */
484 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
485
486 /* We don't have a stack slot for any spill reg yet. */
487 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
488 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
489
490 /* Initialize the save area information for caller-save, in case some
491 are needed. */
492 init_save_areas ();
493
494 /* Compute which hard registers are now in use
495 as homes for pseudo registers.
496 This is done here rather than (eg) in global_alloc
497 because this point is reached even if not optimizing. */
498
499 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
500 mark_home_live (i);
501
502 for (i = 0; i < scratch_list_length; i++)
503 if (scratch_list[i])
504 mark_scratch_live (scratch_list[i]);
505
506 /* Make sure that the last insn in the chain
507 is not something that needs reloading. */
508 emit_note (NULL_PTR, NOTE_INSN_DELETED);
509
510 /* Find all the pseudo registers that didn't get hard regs
511 but do have known equivalent constants or memory slots.
512 These include parameters (known equivalent to parameter slots)
513 and cse'd or loop-moved constant memory addresses.
514
515 Record constant equivalents in reg_equiv_constant
516 so they will be substituted by find_reloads.
517 Record memory equivalents in reg_mem_equiv so they can
518 be substituted eventually by altering the REG-rtx's. */
519
520 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
521 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
522 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
523 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
524 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
525 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
526 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
527 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
528 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
529 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
530 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
531 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
532 cannot_omit_stores = (char *) alloca (max_regno);
533 bzero (cannot_omit_stores, max_regno);
534
535 #ifdef SMALL_REGISTER_CLASSES
536 CLEAR_HARD_REG_SET (forbidden_regs);
537 #endif
538
539 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
540 Also find all paradoxical subregs and find largest such for each pseudo.
541 On machines with small register classes, record hard registers that
542 are used for user variables. These can never be used for spills. */
543
544 for (insn = first; insn; insn = NEXT_INSN (insn))
545 {
546 rtx set = single_set (insn);
547
548 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
549 {
550 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
551 if (note
552 #ifdef LEGITIMATE_PIC_OPERAND_P
553 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
554 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
555 #endif
556 )
557 {
558 rtx x = XEXP (note, 0);
559 i = REGNO (SET_DEST (set));
560 if (i > LAST_VIRTUAL_REGISTER)
561 {
562 if (GET_CODE (x) == MEM)
563 reg_equiv_memory_loc[i] = x;
564 else if (CONSTANT_P (x))
565 {
566 if (LEGITIMATE_CONSTANT_P (x))
567 reg_equiv_constant[i] = x;
568 else
569 reg_equiv_memory_loc[i]
570 = force_const_mem (GET_MODE (SET_DEST (set)), x);
571 }
572 else
573 continue;
574
575 /* If this register is being made equivalent to a MEM
576 and the MEM is not SET_SRC, the equivalencing insn
577 is one with the MEM as a SET_DEST and it occurs later.
578 So don't mark this insn now. */
579 if (GET_CODE (x) != MEM
580 || rtx_equal_p (SET_SRC (set), x))
581 reg_equiv_init[i] = insn;
582 }
583 }
584 }
585
586 /* If this insn is setting a MEM from a register equivalent to it,
587 this is the equivalencing insn. */
588 else if (set && GET_CODE (SET_DEST (set)) == MEM
589 && GET_CODE (SET_SRC (set)) == REG
590 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
591 && rtx_equal_p (SET_DEST (set),
592 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
593 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
594
595 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
596 scan_paradoxical_subregs (PATTERN (insn));
597 }
598
599 /* Does this function require a frame pointer? */
600
601 frame_pointer_needed = (! flag_omit_frame_pointer
602 #ifdef EXIT_IGNORE_STACK
603 /* ?? If EXIT_IGNORE_STACK is set, we will not save
604 and restore sp for alloca. So we can't eliminate
605 the frame pointer in that case. At some point,
606 we should improve this by emitting the
607 sp-adjusting insns for this case. */
608 || (current_function_calls_alloca
609 && EXIT_IGNORE_STACK)
610 #endif
611 || FRAME_POINTER_REQUIRED);
612
613 num_eliminable = 0;
614
615 /* Initialize the table of registers to eliminate. The way we do this
616 depends on how the eliminable registers were defined. */
617 #ifdef ELIMINABLE_REGS
618 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
619 {
620 ep->can_eliminate = ep->can_eliminate_previous
621 = (CAN_ELIMINATE (ep->from, ep->to)
622 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
623 }
624 #else
625 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
626 = ! frame_pointer_needed;
627 #endif
628
629 /* Count the number of eliminable registers and build the FROM and TO
630 REG rtx's. Note that code in gen_rtx will cause, e.g.,
631 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
632 We depend on this. */
633 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
634 {
635 num_eliminable += ep->can_eliminate;
636 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
637 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
638 }
639
640 num_labels = max_label_num () - get_first_label_num ();
641
642 /* Allocate the tables used to store offset information at labels. */
643 offsets_known_at = (char *) alloca (num_labels);
644 offsets_at
645 = (int (*)[NUM_ELIMINABLE_REGS])
646 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
647
648 offsets_known_at -= get_first_label_num ();
649 offsets_at -= get_first_label_num ();
650
651 /* Alter each pseudo-reg rtx to contain its hard reg number.
652 Assign stack slots to the pseudos that lack hard regs or equivalents.
653 Do not touch virtual registers. */
654
655 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
656 alter_reg (i, -1);
657
658 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
659 because the stack size may be a part of the offset computation for
660 register elimination. */
661 assign_stack_local (BLKmode, 0, 0);
662
663 /* If we have some registers we think can be eliminated, scan all insns to
664 see if there is an insn that sets one of these registers to something
665 other than itself plus a constant. If so, the register cannot be
666 eliminated. Doing this scan here eliminates an extra pass through the
667 main reload loop in the most common case where register elimination
668 cannot be done. */
669 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
670 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
671 || GET_CODE (insn) == CALL_INSN)
672 note_stores (PATTERN (insn), mark_not_eliminable);
673
674 #ifndef REGISTER_CONSTRAINTS
675 /* If all the pseudo regs have hard regs,
676 except for those that are never referenced,
677 we know that no reloads are needed. */
678 /* But that is not true if there are register constraints, since
679 in that case some pseudos might be in the wrong kind of hard reg. */
680
681 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
682 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
683 break;
684
685 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
686 return;
687 #endif
688
689 /* Compute the order of preference for hard registers to spill.
690 Store them by decreasing preference in potential_reload_regs. */
691
692 order_regs_for_reload ();
693
694 /* So far, no hard regs have been spilled. */
695 n_spills = 0;
696 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
697 spill_reg_order[i] = -1;
698
699 /* Initialize to -1, which means take the first spill register. */
700 last_spill_reg = -1;
701
702 /* On most machines, we can't use any register explicitly used in the
703 rtl as a spill register. But on some, we have to. Those will have
704 taken care to keep the life of hard regs as short as possible. */
705
706 #ifndef SMALL_REGISTER_CLASSES
707 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
708 #endif
709
710 /* Spill any hard regs that we know we can't eliminate. */
711 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
712 if (! ep->can_eliminate)
713 spill_hard_reg (ep->from, global, dumpfile, 1);
714
715 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
716 if (frame_pointer_needed)
717 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
718 #endif
719
720 if (global)
721 for (i = 0; i < N_REG_CLASSES; i++)
722 {
723 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
724 bzero (basic_block_needs[i], n_basic_blocks);
725 }
726
727 /* From now on, we need to emit any moves without making new pseudos. */
728 reload_in_progress = 1;
729
730 /* This loop scans the entire function each go-round
731 and repeats until one repetition spills no additional hard regs. */
732
733 /* This flag is set when a pseudo reg is spilled,
734 to require another pass. Note that getting an additional reload
735 reg does not necessarily imply any pseudo reg was spilled;
736 sometimes we find a reload reg that no pseudo reg was allocated in. */
737 something_changed = 1;
738 /* This flag is set if there are any insns that require reloading. */
739 something_needs_reloads = 0;
740 /* This flag is set if there are any insns that require register
741 eliminations. */
742 something_needs_elimination = 0;
743 while (something_changed)
744 {
745 rtx after_call = 0;
746
747 /* For each class, number of reload regs needed in that class.
748 This is the maximum over all insns of the needs in that class
749 of the individual insn. */
750 int max_needs[N_REG_CLASSES];
751 /* For each class, size of group of consecutive regs
752 that is needed for the reloads of this class. */
753 int group_size[N_REG_CLASSES];
754 /* For each class, max number of consecutive groups needed.
755 (Each group contains group_size[CLASS] consecutive registers.) */
756 int max_groups[N_REG_CLASSES];
757 /* For each class, max number needed of regs that don't belong
758 to any of the groups. */
759 int max_nongroups[N_REG_CLASSES];
760 /* For each class, the machine mode which requires consecutive
761 groups of regs of that class.
762 If two different modes ever require groups of one class,
763 they must be the same size and equally restrictive for that class,
764 otherwise we can't handle the complexity. */
765 enum machine_mode group_mode[N_REG_CLASSES];
766 /* Record the insn where each maximum need is first found. */
767 rtx max_needs_insn[N_REG_CLASSES];
768 rtx max_groups_insn[N_REG_CLASSES];
769 rtx max_nongroups_insn[N_REG_CLASSES];
770 rtx x;
771 int starting_frame_size = get_frame_size ();
772 int previous_frame_pointer_needed = frame_pointer_needed;
773 static char *reg_class_names[] = REG_CLASS_NAMES;
774
775 something_changed = 0;
776 bzero ((char *) max_needs, sizeof max_needs);
777 bzero ((char *) max_groups, sizeof max_groups);
778 bzero ((char *) max_nongroups, sizeof max_nongroups);
779 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
780 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
781 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
782 bzero ((char *) group_size, sizeof group_size);
783 for (i = 0; i < N_REG_CLASSES; i++)
784 group_mode[i] = VOIDmode;
785
786 /* Keep track of which basic blocks are needing the reloads. */
787 this_block = 0;
788
789 /* Remember whether any element of basic_block_needs
790 changes from 0 to 1 in this pass. */
791 new_basic_block_needs = 0;
792
793 /* Reset all offsets on eliminable registers to their initial values. */
794 #ifdef ELIMINABLE_REGS
795 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
796 {
797 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
798 ep->previous_offset = ep->offset
799 = ep->max_offset = ep->initial_offset;
800 }
801 #else
802 #ifdef INITIAL_FRAME_POINTER_OFFSET
803 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
804 #else
805 if (!FRAME_POINTER_REQUIRED)
806 abort ();
807 reg_eliminate[0].initial_offset = 0;
808 #endif
809 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
810 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
811 #endif
812
813 num_not_at_initial_offset = 0;
814
815 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
816
817 /* Set a known offset for each forced label to be at the initial offset
818 of each elimination. We do this because we assume that all
819 computed jumps occur from a location where each elimination is
820 at its initial offset. */
821
822 for (x = forced_labels; x; x = XEXP (x, 1))
823 if (XEXP (x, 0))
824 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
825
826 /* For each pseudo register that has an equivalent location defined,
827 try to eliminate any eliminable registers (such as the frame pointer)
828 assuming initial offsets for the replacement register, which
829 is the normal case.
830
831 If the resulting location is directly addressable, substitute
832 the MEM we just got directly for the old REG.
833
834 If it is not addressable but is a constant or the sum of a hard reg
835 and constant, it is probably not addressable because the constant is
836 out of range, in that case record the address; we will generate
837 hairy code to compute the address in a register each time it is
838 needed. Similarly if it is a hard register, but one that is not
839 valid as an address register.
840
841 If the location is not addressable, but does not have one of the
842 above forms, assign a stack slot. We have to do this to avoid the
843 potential of producing lots of reloads if, e.g., a location involves
844 a pseudo that didn't get a hard register and has an equivalent memory
845 location that also involves a pseudo that didn't get a hard register.
846
847 Perhaps at some point we will improve reload_when_needed handling
848 so this problem goes away. But that's very hairy. */
849
850 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
851 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
852 {
853 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
854
855 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
856 XEXP (x, 0)))
857 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
858 else if (CONSTANT_P (XEXP (x, 0))
859 || (GET_CODE (XEXP (x, 0)) == REG
860 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
861 || (GET_CODE (XEXP (x, 0)) == PLUS
862 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
863 && (REGNO (XEXP (XEXP (x, 0), 0))
864 < FIRST_PSEUDO_REGISTER)
865 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
866 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
867 else
868 {
869 /* Make a new stack slot. Then indicate that something
870 changed so we go back and recompute offsets for
871 eliminable registers because the allocation of memory
872 below might change some offset. reg_equiv_{mem,address}
873 will be set up for this pseudo on the next pass around
874 the loop. */
875 reg_equiv_memory_loc[i] = 0;
876 reg_equiv_init[i] = 0;
877 alter_reg (i, -1);
878 something_changed = 1;
879 }
880 }
881
882 /* If we allocated another pseudo to the stack, redo elimination
883 bookkeeping. */
884 if (something_changed)
885 continue;
886
887 /* If caller-saves needs a group, initialize the group to include
888 the size and mode required for caller-saves. */
889
890 if (caller_save_group_size > 1)
891 {
892 group_mode[(int) caller_save_spill_class] = Pmode;
893 group_size[(int) caller_save_spill_class] = caller_save_group_size;
894 }
895
896 /* Compute the most additional registers needed by any instruction.
897 Collect information separately for each class of regs. */
898
899 for (insn = first; insn; insn = NEXT_INSN (insn))
900 {
901 if (global && this_block + 1 < n_basic_blocks
902 && insn == basic_block_head[this_block+1])
903 ++this_block;
904
905 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
906 might include REG_LABEL), we need to see what effects this
907 has on the known offsets at labels. */
908
909 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
910 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
911 && REG_NOTES (insn) != 0))
912 set_label_offsets (insn, insn, 0);
913
914 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
915 {
916 /* Nonzero means don't use a reload reg that overlaps
917 the place where a function value can be returned. */
918 rtx avoid_return_reg = 0;
919
920 rtx old_body = PATTERN (insn);
921 int old_code = INSN_CODE (insn);
922 rtx old_notes = REG_NOTES (insn);
923 int did_elimination = 0;
924
925 /* To compute the number of reload registers of each class
926 needed for an insn, we must simulate what choose_reload_regs
927 can do. We do this by splitting an insn into an "input" and
928 an "output" part. RELOAD_OTHER reloads are used in both.
929 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
930 which must be live over the entire input section of reloads,
931 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
932 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
933 inputs.
934
935 The registers needed for output are RELOAD_OTHER and
936 RELOAD_FOR_OUTPUT, which are live for the entire output
937 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
938 reloads for each operand.
939
940 The total number of registers needed is the maximum of the
941 inputs and outputs. */
942
943 struct needs
944 {
945 /* [0] is normal, [1] is nongroup. */
946 int regs[2][N_REG_CLASSES];
947 int groups[N_REG_CLASSES];
948 };
949
950 /* Each `struct needs' corresponds to one RELOAD_... type. */
951 struct {
952 struct needs other;
953 struct needs input;
954 struct needs output;
955 struct needs insn;
956 struct needs other_addr;
957 struct needs op_addr;
958 struct needs op_addr_reload;
959 struct needs in_addr[MAX_RECOG_OPERANDS];
960 struct needs out_addr[MAX_RECOG_OPERANDS];
961 } insn_needs;
962
963 /* If needed, eliminate any eliminable registers. */
964 if (num_eliminable)
965 did_elimination = eliminate_regs_in_insn (insn, 0);
966
967 #ifdef SMALL_REGISTER_CLASSES
968 /* Set avoid_return_reg if this is an insn
969 that might use the value of a function call. */
970 if (GET_CODE (insn) == CALL_INSN)
971 {
972 if (GET_CODE (PATTERN (insn)) == SET)
973 after_call = SET_DEST (PATTERN (insn));
974 else if (GET_CODE (PATTERN (insn)) == PARALLEL
975 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
976 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
977 else
978 after_call = 0;
979 }
980 else if (after_call != 0
981 && !(GET_CODE (PATTERN (insn)) == SET
982 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
983 {
984 if (reg_referenced_p (after_call, PATTERN (insn)))
985 avoid_return_reg = after_call;
986 after_call = 0;
987 }
988 #endif /* SMALL_REGISTER_CLASSES */
989
990 /* Analyze the instruction. */
991 find_reloads (insn, 0, spill_indirect_levels, global,
992 spill_reg_order);
993
994 /* Remember for later shortcuts which insns had any reloads or
995 register eliminations.
996
997 One might think that it would be worthwhile to mark insns
998 that need register replacements but not reloads, but this is
999 not safe because find_reloads may do some manipulation of
1000 the insn (such as swapping commutative operands), which would
1001 be lost when we restore the old pattern after register
1002 replacement. So the actions of find_reloads must be redone in
1003 subsequent passes or in reload_as_needed.
1004
1005 However, it is safe to mark insns that need reloads
1006 but not register replacement. */
1007
1008 PUT_MODE (insn, (did_elimination ? QImode
1009 : n_reloads ? HImode
1010 : GET_MODE (insn) == DImode ? DImode
1011 : VOIDmode));
1012
1013 /* Discard any register replacements done. */
1014 if (did_elimination)
1015 {
1016 obstack_free (&reload_obstack, reload_firstobj);
1017 PATTERN (insn) = old_body;
1018 INSN_CODE (insn) = old_code;
1019 REG_NOTES (insn) = old_notes;
1020 something_needs_elimination = 1;
1021 }
1022
1023 /* If this insn has no reloads, we need not do anything except
1024 in the case of a CALL_INSN when we have caller-saves and
1025 caller-save needs reloads. */
1026
1027 if (n_reloads == 0
1028 && ! (GET_CODE (insn) == CALL_INSN
1029 && caller_save_spill_class != NO_REGS))
1030 continue;
1031
1032 something_needs_reloads = 1;
1033 bzero ((char *) &insn_needs, sizeof insn_needs);
1034
1035 /* Count each reload once in every class
1036 containing the reload's own class. */
1037
1038 for (i = 0; i < n_reloads; i++)
1039 {
1040 register enum reg_class *p;
1041 enum reg_class class = reload_reg_class[i];
1042 int size;
1043 enum machine_mode mode;
1044 int nongroup_need;
1045 struct needs *this_needs;
1046
1047 /* Don't count the dummy reloads, for which one of the
1048 regs mentioned in the insn can be used for reloading.
1049 Don't count optional reloads.
1050 Don't count reloads that got combined with others. */
1051 if (reload_reg_rtx[i] != 0
1052 || reload_optional[i] != 0
1053 || (reload_out[i] == 0 && reload_in[i] == 0
1054 && ! reload_secondary_p[i]))
1055 continue;
1056
1057 /* Show that a reload register of this class is needed
1058 in this basic block. We do not use insn_needs and
1059 insn_groups because they are overly conservative for
1060 this purpose. */
1061 if (global && ! basic_block_needs[(int) class][this_block])
1062 {
1063 basic_block_needs[(int) class][this_block] = 1;
1064 new_basic_block_needs = 1;
1065 }
1066
1067
1068 mode = reload_inmode[i];
1069 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1070 mode = reload_outmode[i];
1071 size = CLASS_MAX_NREGS (class, mode);
1072
1073 /* If this class doesn't want a group, determine if we have
1074 a nongroup need or a regular need. We have a nongroup
1075 need if this reload conflicts with a group reload whose
1076 class intersects with this reload's class. */
1077
1078 nongroup_need = 0;
1079 if (size == 1)
1080 for (j = 0; j < n_reloads; j++)
1081 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1082 (GET_MODE_SIZE (reload_outmode[j])
1083 > GET_MODE_SIZE (reload_inmode[j]))
1084 ? reload_outmode[j]
1085 : reload_inmode[j])
1086 > 1)
1087 && (!reload_optional[j])
1088 && (reload_in[j] != 0 || reload_out[j] != 0
1089 || reload_secondary_p[j])
1090 && reloads_conflict (i, j)
1091 && reg_classes_intersect_p (class,
1092 reload_reg_class[j]))
1093 {
1094 nongroup_need = 1;
1095 break;
1096 }
1097
1098 /* Decide which time-of-use to count this reload for. */
1099 switch (reload_when_needed[i])
1100 {
1101 case RELOAD_OTHER:
1102 this_needs = &insn_needs.other;
1103 break;
1104 case RELOAD_FOR_INPUT:
1105 this_needs = &insn_needs.input;
1106 break;
1107 case RELOAD_FOR_OUTPUT:
1108 this_needs = &insn_needs.output;
1109 break;
1110 case RELOAD_FOR_INSN:
1111 this_needs = &insn_needs.insn;
1112 break;
1113 case RELOAD_FOR_OTHER_ADDRESS:
1114 this_needs = &insn_needs.other_addr;
1115 break;
1116 case RELOAD_FOR_INPUT_ADDRESS:
1117 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1118 break;
1119 case RELOAD_FOR_OUTPUT_ADDRESS:
1120 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1121 break;
1122 case RELOAD_FOR_OPERAND_ADDRESS:
1123 this_needs = &insn_needs.op_addr;
1124 break;
1125 case RELOAD_FOR_OPADDR_ADDR:
1126 this_needs = &insn_needs.op_addr_reload;
1127 break;
1128 }
1129
1130 if (size > 1)
1131 {
1132 enum machine_mode other_mode, allocate_mode;
1133
1134 /* Count number of groups needed separately from
1135 number of individual regs needed. */
1136 this_needs->groups[(int) class]++;
1137 p = reg_class_superclasses[(int) class];
1138 while (*p != LIM_REG_CLASSES)
1139 this_needs->groups[(int) *p++]++;
1140
1141 /* Record size and mode of a group of this class. */
1142 /* If more than one size group is needed,
1143 make all groups the largest needed size. */
1144 if (group_size[(int) class] < size)
1145 {
1146 other_mode = group_mode[(int) class];
1147 allocate_mode = mode;
1148
1149 group_size[(int) class] = size;
1150 group_mode[(int) class] = mode;
1151 }
1152 else
1153 {
1154 other_mode = mode;
1155 allocate_mode = group_mode[(int) class];
1156 }
1157
1158 /* Crash if two dissimilar machine modes both need
1159 groups of consecutive regs of the same class. */
1160
1161 if (other_mode != VOIDmode && other_mode != allocate_mode
1162 && ! modes_equiv_for_class_p (allocate_mode,
1163 other_mode, class))
1164 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1165 insn);
1166 }
1167 else if (size == 1)
1168 {
1169 this_needs->regs[nongroup_need][(int) class] += 1;
1170 p = reg_class_superclasses[(int) class];
1171 while (*p != LIM_REG_CLASSES)
1172 this_needs->regs[nongroup_need][(int) *p++] += 1;
1173 }
1174 else
1175 abort ();
1176 }
1177
1178 /* All reloads have been counted for this insn;
1179 now merge the various times of use.
1180 This sets insn_needs, etc., to the maximum total number
1181 of registers needed at any point in this insn. */
1182
1183 for (i = 0; i < N_REG_CLASSES; i++)
1184 {
1185 int in_max, out_max;
1186
1187 /* Compute normal and nongroup needs. */
1188 for (j = 0; j <= 1; j++)
1189 {
1190 for (in_max = 0, out_max = 0, k = 0;
1191 k < reload_n_operands; k++)
1192 {
1193 in_max
1194 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1195 out_max
1196 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1197 }
1198
1199 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1200 and operand addresses but not things used to reload
1201 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1202 don't conflict with things needed to reload inputs or
1203 outputs. */
1204
1205 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1206 insn_needs.op_addr_reload.regs[j][i]),
1207 in_max);
1208
1209 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1210
1211 insn_needs.input.regs[j][i]
1212 = MAX (insn_needs.input.regs[j][i]
1213 + insn_needs.op_addr.regs[j][i]
1214 + insn_needs.insn.regs[j][i],
1215 in_max + insn_needs.input.regs[j][i]);
1216
1217 insn_needs.output.regs[j][i] += out_max;
1218 insn_needs.other.regs[j][i]
1219 += MAX (MAX (insn_needs.input.regs[j][i],
1220 insn_needs.output.regs[j][i]),
1221 insn_needs.other_addr.regs[j][i]);
1222
1223 }
1224
1225 /* Now compute group needs. */
1226 for (in_max = 0, out_max = 0, j = 0;
1227 j < reload_n_operands; j++)
1228 {
1229 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1230 out_max
1231 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1232 }
1233
1234 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1235 insn_needs.op_addr_reload.groups[i]),
1236 in_max);
1237 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1238
1239 insn_needs.input.groups[i]
1240 = MAX (insn_needs.input.groups[i]
1241 + insn_needs.op_addr.groups[i]
1242 + insn_needs.insn.groups[i],
1243 in_max + insn_needs.input.groups[i]);
1244
1245 insn_needs.output.groups[i] += out_max;
1246 insn_needs.other.groups[i]
1247 += MAX (MAX (insn_needs.input.groups[i],
1248 insn_needs.output.groups[i]),
1249 insn_needs.other_addr.groups[i]);
1250 }
1251
1252 /* If this is a CALL_INSN and caller-saves will need
1253 a spill register, act as if the spill register is
1254 needed for this insn. However, the spill register
1255 can be used by any reload of this insn, so we only
1256 need do something if no need for that class has
1257 been recorded.
1258
1259 The assumption that every CALL_INSN will trigger a
1260 caller-save is highly conservative, however, the number
1261 of cases where caller-saves will need a spill register but
1262 a block containing a CALL_INSN won't need a spill register
1263 of that class should be quite rare.
1264
1265 If a group is needed, the size and mode of the group will
1266 have been set up at the beginning of this loop. */
1267
1268 if (GET_CODE (insn) == CALL_INSN
1269 && caller_save_spill_class != NO_REGS)
1270 {
1271 /* See if this register would conflict with any reload
1272 that needs a group. */
1273 int nongroup_need = 0;
1274 int *caller_save_needs;
1275
1276 for (j = 0; j < n_reloads; j++)
1277 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1278 (GET_MODE_SIZE (reload_outmode[j])
1279 > GET_MODE_SIZE (reload_inmode[j]))
1280 ? reload_outmode[j]
1281 : reload_inmode[j])
1282 > 1)
1283 && reg_classes_intersect_p (caller_save_spill_class,
1284 reload_reg_class[j]))
1285 {
1286 nongroup_need = 1;
1287 break;
1288 }
1289
1290 caller_save_needs
1291 = (caller_save_group_size > 1
1292 ? insn_needs.other.groups
1293 : insn_needs.other.regs[nongroup_need]);
1294
1295 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1296 {
1297 register enum reg_class *p
1298 = reg_class_superclasses[(int) caller_save_spill_class];
1299
1300 caller_save_needs[(int) caller_save_spill_class]++;
1301
1302 while (*p != LIM_REG_CLASSES)
1303 caller_save_needs[(int) *p++] += 1;
1304 }
1305
1306 /* Show that this basic block will need a register of
1307 this class. */
1308
1309 if (global
1310 && ! (basic_block_needs[(int) caller_save_spill_class]
1311 [this_block]))
1312 {
1313 basic_block_needs[(int) caller_save_spill_class]
1314 [this_block] = 1;
1315 new_basic_block_needs = 1;
1316 }
1317 }
1318
1319 #ifdef SMALL_REGISTER_CLASSES
1320 /* If this insn stores the value of a function call,
1321 and that value is in a register that has been spilled,
1322 and if the insn needs a reload in a class
1323 that might use that register as the reload register,
1324 then add an extra need in that class.
1325 This makes sure we have a register available that does
1326 not overlap the return value. */
1327
1328 if (avoid_return_reg)
1329 {
1330 int regno = REGNO (avoid_return_reg);
1331 int nregs
1332 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1333 int r;
1334 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1335
1336 /* First compute the "basic needs", which counts a
1337 need only in the smallest class in which it
1338 is required. */
1339
1340 bcopy ((char *) insn_needs.other.regs[0],
1341 (char *) basic_needs, sizeof basic_needs);
1342 bcopy ((char *) insn_needs.other.groups,
1343 (char *) basic_groups, sizeof basic_groups);
1344
1345 for (i = 0; i < N_REG_CLASSES; i++)
1346 {
1347 enum reg_class *p;
1348
1349 if (basic_needs[i] >= 0)
1350 for (p = reg_class_superclasses[i];
1351 *p != LIM_REG_CLASSES; p++)
1352 basic_needs[(int) *p] -= basic_needs[i];
1353
1354 if (basic_groups[i] >= 0)
1355 for (p = reg_class_superclasses[i];
1356 *p != LIM_REG_CLASSES; p++)
1357 basic_groups[(int) *p] -= basic_groups[i];
1358 }
1359
1360 /* Now count extra regs if there might be a conflict with
1361 the return value register. */
1362
1363 for (r = regno; r < regno + nregs; r++)
1364 if (spill_reg_order[r] >= 0)
1365 for (i = 0; i < N_REG_CLASSES; i++)
1366 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1367 {
1368 if (basic_needs[i] > 0)
1369 {
1370 enum reg_class *p;
1371
1372 insn_needs.other.regs[0][i]++;
1373 p = reg_class_superclasses[i];
1374 while (*p != LIM_REG_CLASSES)
1375 insn_needs.other.regs[0][(int) *p++]++;
1376 }
1377 if (basic_groups[i] > 0)
1378 {
1379 enum reg_class *p;
1380
1381 insn_needs.other.groups[i]++;
1382 p = reg_class_superclasses[i];
1383 while (*p != LIM_REG_CLASSES)
1384 insn_needs.other.groups[(int) *p++]++;
1385 }
1386 }
1387 }
1388 #endif /* SMALL_REGISTER_CLASSES */
1389
1390 /* For each class, collect maximum need of any insn. */
1391
1392 for (i = 0; i < N_REG_CLASSES; i++)
1393 {
1394 if (max_needs[i] < insn_needs.other.regs[0][i])
1395 {
1396 max_needs[i] = insn_needs.other.regs[0][i];
1397 max_needs_insn[i] = insn;
1398 }
1399 if (max_groups[i] < insn_needs.other.groups[i])
1400 {
1401 max_groups[i] = insn_needs.other.groups[i];
1402 max_groups_insn[i] = insn;
1403 }
1404 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1405 {
1406 max_nongroups[i] = insn_needs.other.regs[1][i];
1407 max_nongroups_insn[i] = insn;
1408 }
1409 }
1410 }
1411 /* Note that there is a continue statement above. */
1412 }
1413
1414 /* If we allocated any new memory locations, make another pass
1415 since it might have changed elimination offsets. */
1416 if (starting_frame_size != get_frame_size ())
1417 something_changed = 1;
1418
1419 if (dumpfile)
1420 for (i = 0; i < N_REG_CLASSES; i++)
1421 {
1422 if (max_needs[i] > 0)
1423 fprintf (dumpfile,
1424 ";; Need %d reg%s of class %s (for insn %d).\n",
1425 max_needs[i], max_needs[i] == 1 ? "" : "s",
1426 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1427 if (max_nongroups[i] > 0)
1428 fprintf (dumpfile,
1429 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1430 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1431 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1432 if (max_groups[i] > 0)
1433 fprintf (dumpfile,
1434 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1435 max_groups[i], max_groups[i] == 1 ? "" : "s",
1436 mode_name[(int) group_mode[i]],
1437 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1438 }
1439
1440 /* If we have caller-saves, set up the save areas and see if caller-save
1441 will need a spill register. */
1442
1443 if (caller_save_needed
1444 && ! setup_save_areas (&something_changed)
1445 && caller_save_spill_class == NO_REGS)
1446 {
1447 /* The class we will need depends on whether the machine
1448 supports the sum of two registers for an address; see
1449 find_address_reloads for details. */
1450
1451 caller_save_spill_class
1452 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1453 caller_save_group_size
1454 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1455 something_changed = 1;
1456 }
1457
1458 /* See if anything that happened changes which eliminations are valid.
1459 For example, on the Sparc, whether or not the frame pointer can
1460 be eliminated can depend on what registers have been used. We need
1461 not check some conditions again (such as flag_omit_frame_pointer)
1462 since they can't have changed. */
1463
1464 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1465 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1466 #ifdef ELIMINABLE_REGS
1467 || ! CAN_ELIMINATE (ep->from, ep->to)
1468 #endif
1469 )
1470 ep->can_eliminate = 0;
1471
1472 /* Look for the case where we have discovered that we can't replace
1473 register A with register B and that means that we will now be
1474 trying to replace register A with register C. This means we can
1475 no longer replace register C with register B and we need to disable
1476 such an elimination, if it exists. This occurs often with A == ap,
1477 B == sp, and C == fp. */
1478
1479 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1480 {
1481 struct elim_table *op;
1482 register int new_to = -1;
1483
1484 if (! ep->can_eliminate && ep->can_eliminate_previous)
1485 {
1486 /* Find the current elimination for ep->from, if there is a
1487 new one. */
1488 for (op = reg_eliminate;
1489 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1490 if (op->from == ep->from && op->can_eliminate)
1491 {
1492 new_to = op->to;
1493 break;
1494 }
1495
1496 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1497 disable it. */
1498 for (op = reg_eliminate;
1499 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1500 if (op->from == new_to && op->to == ep->to)
1501 op->can_eliminate = 0;
1502 }
1503 }
1504
1505 /* See if any registers that we thought we could eliminate the previous
1506 time are no longer eliminable. If so, something has changed and we
1507 must spill the register. Also, recompute the number of eliminable
1508 registers and see if the frame pointer is needed; it is if there is
1509 no elimination of the frame pointer that we can perform. */
1510
1511 frame_pointer_needed = 1;
1512 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1513 {
1514 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1515 && ep->to != HARD_FRAME_POINTER_REGNUM)
1516 frame_pointer_needed = 0;
1517
1518 if (! ep->can_eliminate && ep->can_eliminate_previous)
1519 {
1520 ep->can_eliminate_previous = 0;
1521 spill_hard_reg (ep->from, global, dumpfile, 1);
1522 something_changed = 1;
1523 num_eliminable--;
1524 }
1525 }
1526
1527 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1528 /* If we didn't need a frame pointer last time, but we do now, spill
1529 the hard frame pointer. */
1530 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1531 {
1532 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1533 something_changed = 1;
1534 }
1535 #endif
1536
1537 /* If all needs are met, we win. */
1538
1539 for (i = 0; i < N_REG_CLASSES; i++)
1540 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1541 break;
1542 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1543 break;
1544
1545 /* Not all needs are met; must spill some hard regs. */
1546
1547 /* Put all registers spilled so far back in potential_reload_regs, but
1548 put them at the front, since we've already spilled most of the
1549 pseudos in them (we might have left some pseudos unspilled if they
1550 were in a block that didn't need any spill registers of a conflicting
1551 class). We used to try to mark off the need for those registers,
1552 but doing so properly is very complex and reallocating them is the
1553 simpler approach. First, "pack" potential_reload_regs by pushing
1554 any nonnegative entries towards the end. That will leave room
1555 for the registers we already spilled.
1556
1557 Also, undo the marking of the spill registers from the last time
1558 around in FORBIDDEN_REGS since we will probably be allocating
1559 them again below.
1560
1561 ??? It is theoretically possible that we might end up not using one
1562 of our previously-spilled registers in this allocation, even though
1563 they are at the head of the list. It's not clear what to do about
1564 this, but it was no better before, when we marked off the needs met
1565 by the previously-spilled registers. With the current code, globals
1566 can be allocated into these registers, but locals cannot. */
1567
1568 if (n_spills)
1569 {
1570 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1571 if (potential_reload_regs[i] != -1)
1572 potential_reload_regs[j--] = potential_reload_regs[i];
1573
1574 for (i = 0; i < n_spills; i++)
1575 {
1576 potential_reload_regs[i] = spill_regs[i];
1577 spill_reg_order[spill_regs[i]] = -1;
1578 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1579 }
1580
1581 n_spills = 0;
1582 }
1583
1584 /* Now find more reload regs to satisfy the remaining need
1585 Do it by ascending class number, since otherwise a reg
1586 might be spilled for a big class and might fail to count
1587 for a smaller class even though it belongs to that class.
1588
1589 Count spilled regs in `spills', and add entries to
1590 `spill_regs' and `spill_reg_order'.
1591
1592 ??? Note there is a problem here.
1593 When there is a need for a group in a high-numbered class,
1594 and also need for non-group regs that come from a lower class,
1595 the non-group regs are chosen first. If there aren't many regs,
1596 they might leave no room for a group.
1597
1598 This was happening on the 386. To fix it, we added the code
1599 that calls possible_group_p, so that the lower class won't
1600 break up the last possible group.
1601
1602 Really fixing the problem would require changes above
1603 in counting the regs already spilled, and in choose_reload_regs.
1604 It might be hard to avoid introducing bugs there. */
1605
1606 CLEAR_HARD_REG_SET (counted_for_groups);
1607 CLEAR_HARD_REG_SET (counted_for_nongroups);
1608
1609 for (class = 0; class < N_REG_CLASSES; class++)
1610 {
1611 /* First get the groups of registers.
1612 If we got single registers first, we might fragment
1613 possible groups. */
1614 while (max_groups[class] > 0)
1615 {
1616 /* If any single spilled regs happen to form groups,
1617 count them now. Maybe we don't really need
1618 to spill another group. */
1619 count_possible_groups (group_size, group_mode, max_groups,
1620 class);
1621
1622 if (max_groups[class] <= 0)
1623 break;
1624
1625 /* Groups of size 2 (the only groups used on most machines)
1626 are treated specially. */
1627 if (group_size[class] == 2)
1628 {
1629 /* First, look for a register that will complete a group. */
1630 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1631 {
1632 int other;
1633
1634 j = potential_reload_regs[i];
1635 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1636 &&
1637 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1638 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1639 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1640 && HARD_REGNO_MODE_OK (other, group_mode[class])
1641 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1642 other)
1643 /* We don't want one part of another group.
1644 We could get "two groups" that overlap! */
1645 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1646 ||
1647 (j < FIRST_PSEUDO_REGISTER - 1
1648 && (other = j + 1, spill_reg_order[other] >= 0)
1649 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1650 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1651 && HARD_REGNO_MODE_OK (j, group_mode[class])
1652 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1653 other)
1654 && ! TEST_HARD_REG_BIT (counted_for_groups,
1655 other))))
1656 {
1657 register enum reg_class *p;
1658
1659 /* We have found one that will complete a group,
1660 so count off one group as provided. */
1661 max_groups[class]--;
1662 p = reg_class_superclasses[class];
1663 while (*p != LIM_REG_CLASSES)
1664 {
1665 if (group_size [(int) *p] <= group_size [class])
1666 max_groups[(int) *p]--;
1667 p++;
1668 }
1669
1670 /* Indicate both these regs are part of a group. */
1671 SET_HARD_REG_BIT (counted_for_groups, j);
1672 SET_HARD_REG_BIT (counted_for_groups, other);
1673 break;
1674 }
1675 }
1676 /* We can't complete a group, so start one. */
1677 #ifdef SMALL_REGISTER_CLASSES
1678 /* Look for a pair neither of which is explicitly used. */
1679 if (i == FIRST_PSEUDO_REGISTER)
1680 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1681 {
1682 int k;
1683 j = potential_reload_regs[i];
1684 /* Verify that J+1 is a potential reload reg. */
1685 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1686 if (potential_reload_regs[k] == j + 1)
1687 break;
1688 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1689 && k < FIRST_PSEUDO_REGISTER
1690 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1691 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1692 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1693 && HARD_REGNO_MODE_OK (j, group_mode[class])
1694 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1695 j + 1)
1696 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1697 /* Reject J at this stage
1698 if J+1 was explicitly used. */
1699 && ! regs_explicitly_used[j + 1])
1700 break;
1701 }
1702 #endif
1703 /* Now try any group at all
1704 whose registers are not in bad_spill_regs. */
1705 if (i == FIRST_PSEUDO_REGISTER)
1706 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1707 {
1708 int k;
1709 j = potential_reload_regs[i];
1710 /* Verify that J+1 is a potential reload reg. */
1711 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1712 if (potential_reload_regs[k] == j + 1)
1713 break;
1714 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1715 && k < FIRST_PSEUDO_REGISTER
1716 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1717 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1718 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1719 && HARD_REGNO_MODE_OK (j, group_mode[class])
1720 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1721 j + 1)
1722 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1723 break;
1724 }
1725
1726 /* I should be the index in potential_reload_regs
1727 of the new reload reg we have found. */
1728
1729 if (i >= FIRST_PSEUDO_REGISTER)
1730 {
1731 /* There are no groups left to spill. */
1732 spill_failure (max_groups_insn[class]);
1733 failure = 1;
1734 goto failed;
1735 }
1736 else
1737 something_changed
1738 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1739 global, dumpfile);
1740 }
1741 else
1742 {
1743 /* For groups of more than 2 registers,
1744 look for a sufficient sequence of unspilled registers,
1745 and spill them all at once. */
1746 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1747 {
1748 int k;
1749
1750 j = potential_reload_regs[i];
1751 if (j >= 0
1752 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1753 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1754 {
1755 /* Check each reg in the sequence. */
1756 for (k = 0; k < group_size[class]; k++)
1757 if (! (spill_reg_order[j + k] < 0
1758 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1759 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1760 break;
1761 /* We got a full sequence, so spill them all. */
1762 if (k == group_size[class])
1763 {
1764 register enum reg_class *p;
1765 for (k = 0; k < group_size[class]; k++)
1766 {
1767 int idx;
1768 SET_HARD_REG_BIT (counted_for_groups, j + k);
1769 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1770 if (potential_reload_regs[idx] == j + k)
1771 break;
1772 something_changed
1773 |= new_spill_reg (idx, class,
1774 max_needs, NULL_PTR,
1775 global, dumpfile);
1776 }
1777
1778 /* We have found one that will complete a group,
1779 so count off one group as provided. */
1780 max_groups[class]--;
1781 p = reg_class_superclasses[class];
1782 while (*p != LIM_REG_CLASSES)
1783 {
1784 if (group_size [(int) *p]
1785 <= group_size [class])
1786 max_groups[(int) *p]--;
1787 p++;
1788 }
1789 break;
1790 }
1791 }
1792 }
1793 /* We couldn't find any registers for this reload.
1794 Avoid going into an infinite loop. */
1795 if (i >= FIRST_PSEUDO_REGISTER)
1796 {
1797 /* There are no groups left. */
1798 spill_failure (max_groups_insn[class]);
1799 failure = 1;
1800 goto failed;
1801 }
1802 }
1803 }
1804
1805 /* Now similarly satisfy all need for single registers. */
1806
1807 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1808 {
1809 #ifdef SMALL_REGISTER_CLASSES
1810 /* This should be right for all machines, but only the 386
1811 is known to need it, so this conditional plays safe.
1812 ??? For 2.5, try making this unconditional. */
1813 /* If we spilled enough regs, but they weren't counted
1814 against the non-group need, see if we can count them now.
1815 If so, we can avoid some actual spilling. */
1816 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1817 for (i = 0; i < n_spills; i++)
1818 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1819 spill_regs[i])
1820 && !TEST_HARD_REG_BIT (counted_for_groups,
1821 spill_regs[i])
1822 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1823 spill_regs[i])
1824 && max_nongroups[class] > 0)
1825 {
1826 register enum reg_class *p;
1827
1828 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1829 max_nongroups[class]--;
1830 p = reg_class_superclasses[class];
1831 while (*p != LIM_REG_CLASSES)
1832 max_nongroups[(int) *p++]--;
1833 }
1834 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1835 break;
1836 #endif
1837
1838 /* Consider the potential reload regs that aren't
1839 yet in use as reload regs, in order of preference.
1840 Find the most preferred one that's in this class. */
1841
1842 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1843 if (potential_reload_regs[i] >= 0
1844 && TEST_HARD_REG_BIT (reg_class_contents[class],
1845 potential_reload_regs[i])
1846 /* If this reg will not be available for groups,
1847 pick one that does not foreclose possible groups.
1848 This is a kludge, and not very general,
1849 but it should be sufficient to make the 386 work,
1850 and the problem should not occur on machines with
1851 more registers. */
1852 && (max_nongroups[class] == 0
1853 || possible_group_p (potential_reload_regs[i], max_groups)))
1854 break;
1855
1856 /* If we couldn't get a register, try to get one even if we
1857 might foreclose possible groups. This may cause problems
1858 later, but that's better than aborting now, since it is
1859 possible that we will, in fact, be able to form the needed
1860 group even with this allocation. */
1861
1862 if (i >= FIRST_PSEUDO_REGISTER
1863 && (asm_noperands (max_needs[class] > 0
1864 ? max_needs_insn[class]
1865 : max_nongroups_insn[class])
1866 < 0))
1867 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1868 if (potential_reload_regs[i] >= 0
1869 && TEST_HARD_REG_BIT (reg_class_contents[class],
1870 potential_reload_regs[i]))
1871 break;
1872
1873 /* I should be the index in potential_reload_regs
1874 of the new reload reg we have found. */
1875
1876 if (i >= FIRST_PSEUDO_REGISTER)
1877 {
1878 /* There are no possible registers left to spill. */
1879 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1880 : max_nongroups_insn[class]);
1881 failure = 1;
1882 goto failed;
1883 }
1884 else
1885 something_changed
1886 |= new_spill_reg (i, class, max_needs, max_nongroups,
1887 global, dumpfile);
1888 }
1889 }
1890 }
1891
1892 /* If global-alloc was run, notify it of any register eliminations we have
1893 done. */
1894 if (global)
1895 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1896 if (ep->can_eliminate)
1897 mark_elimination (ep->from, ep->to);
1898
1899 /* Insert code to save and restore call-clobbered hard regs
   around calls.  Tell what mode to use so that we will process
1901 those insns in reload_as_needed if we have to. */
1902
1903 if (caller_save_needed)
1904 save_call_clobbered_regs (num_eliminable ? QImode
1905 : caller_save_spill_class != NO_REGS ? HImode
1906 : VOIDmode);
1907
1908 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1909 If that insn didn't set the register (i.e., it copied the register to
1910 memory), just delete that insn instead of the equivalencing insn plus
1911 anything now dead. If we call delete_dead_insn on that insn, we may
1912 delete the insn that actually sets the register if the register die
1913 there and that is incorrect. */
1914
1915 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1916 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1917 && GET_CODE (reg_equiv_init[i]) != NOTE)
1918 {
1919 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1920 delete_dead_insn (reg_equiv_init[i]);
1921 else
1922 {
1923 PUT_CODE (reg_equiv_init[i], NOTE);
1924 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1925 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1926 }
1927 }
1928
1929 /* Use the reload registers where necessary
1930 by generating move instructions to move the must-be-register
1931 values into or out of the reload registers. */
1932
1933 if (something_needs_reloads || something_needs_elimination
1934 || (caller_save_needed && num_eliminable)
1935 || caller_save_spill_class != NO_REGS)
1936 reload_as_needed (first, global);
1937
1938 /* If we were able to eliminate the frame pointer, show that it is no
   longer live at the start of any basic block.  If it is live by
1940 virtue of being in a pseudo, that pseudo will be marked live
1941 and hence the frame pointer will be known to be live via that
1942 pseudo. */
1943
1944 if (! frame_pointer_needed)
1945 for (i = 0; i < n_basic_blocks; i++)
1946 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1947 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1948 % REGSET_ELT_BITS));
1949
1950 /* Come here (with failure set nonzero) if we can't get enough spill regs
1951 and we decide not to abort about it. */
1952 failed:
1953
1954 reload_in_progress = 0;
1955
1956 /* Now eliminate all pseudo regs by modifying them into
1957 their equivalent memory references.
1958 The REG-rtx's for the pseudos are modified in place,
1959 so all insns that used to refer to them now refer to memory.
1960
1961 For a reg that has a reg_equiv_address, all those insns
1962 were changed by reloading so that no insns refer to it any longer;
1963 but the DECL_RTL of a variable decl may refer to it,
1964 and if so this causes the debugging info to mention the variable. */
1965
1966 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1967 {
1968 rtx addr = 0;
1969 int in_struct = 0;
1970 if (reg_equiv_mem[i])
1971 {
1972 addr = XEXP (reg_equiv_mem[i], 0);
1973 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1974 }
1975 if (reg_equiv_address[i])
1976 addr = reg_equiv_address[i];
1977 if (addr)
1978 {
1979 if (reg_renumber[i] < 0)
1980 {
1981 rtx reg = regno_reg_rtx[i];
1982 XEXP (reg, 0) = addr;
1983 REG_USERVAR_P (reg) = 0;
1984 MEM_IN_STRUCT_P (reg) = in_struct;
1985 PUT_CODE (reg, MEM);
1986 }
1987 else if (reg_equiv_mem[i])
1988 XEXP (reg_equiv_mem[i], 0) = addr;
1989 }
1990 }
1991
1992 #ifdef PRESERVE_DEATH_INFO_REGNO_P
1993 /* Make a pass over all the insns and remove death notes for things that
1994 are no longer registers or no longer die in the insn (e.g., an input
1995 and output pseudo being tied). */
1996
1997 for (insn = first; insn; insn = NEXT_INSN (insn))
1998 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1999 {
2000 rtx note, next;
2001
2002 for (note = REG_NOTES (insn); note; note = next)
2003 {
2004 next = XEXP (note, 1);
2005 if (REG_NOTE_KIND (note) == REG_DEAD
2006 && (GET_CODE (XEXP (note, 0)) != REG
2007 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2008 remove_note (insn, note);
2009 }
2010 }
2011 #endif
2012
2013 /* Indicate that we no longer have known memory locations or constants. */
2014 reg_equiv_constant = 0;
2015 reg_equiv_memory_loc = 0;
2016
2017 if (scratch_list)
2018 free (scratch_list);
2019 scratch_list = 0;
2020 if (scratch_block)
2021 free (scratch_block);
2022 scratch_block = 0;
2023
2024 return failure;
2025 }
2026 \f
2027 /* Nonzero if, after spilling reg REGNO for non-groups,
2028 it will still be possible to find a group if we still need one. */
2029
2030 static int
2031 possible_group_p (regno, max_groups)
2032 int regno;
2033 int *max_groups;
2034 {
2035 int i;
2036 int class = (int) NO_REGS;
2037
2038 for (i = 0; i < (int) N_REG_CLASSES; i++)
2039 if (max_groups[i] > 0)
2040 {
2041 class = i;
2042 break;
2043 }
2044
2045 if (class == (int) NO_REGS)
2046 return 1;
2047
2048 /* Consider each pair of consecutive registers. */
2049 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2050 {
2051 /* Ignore pairs that include reg REGNO. */
2052 if (i == regno || i + 1 == regno)
2053 continue;
2054
2055 /* Ignore pairs that are outside the class that needs the group.
2056 ??? Here we fail to handle the case where two different classes
2057 independently need groups. But this never happens with our
2058 current machine descriptions. */
2059 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2060 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2061 continue;
2062
2063 /* A pair of consecutive regs we can still spill does the trick. */
2064 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2065 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2066 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2067 return 1;
2068
2069 /* A pair of one already spilled and one we can spill does it
2070 provided the one already spilled is not otherwise reserved. */
2071 if (spill_reg_order[i] < 0
2072 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2073 && spill_reg_order[i + 1] >= 0
2074 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2075 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2076 return 1;
2077 if (spill_reg_order[i + 1] < 0
2078 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2079 && spill_reg_order[i] >= 0
2080 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2081 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2082 return 1;
2083 }
2084
2085 return 0;
2086 }
2087 \f
2088 /* Count any groups of CLASS that can be formed from the registers recently
2089 spilled. */
2090
2091 static void
2092 count_possible_groups (group_size, group_mode, max_groups, class)
2093 int *group_size;
2094 enum machine_mode *group_mode;
2095 int *max_groups;
2096 int class;
2097 {
2098 HARD_REG_SET new;
2099 int i, j;
2100
2101 /* Now find all consecutive groups of spilled registers
2102 and mark each group off against the need for such groups.
2103 But don't count them against ordinary need, yet. */
2104
2105 if (group_size[class] == 0)
2106 return;
2107
2108 CLEAR_HARD_REG_SET (new);
2109
2110 /* Make a mask of all the regs that are spill regs in class I. */
2111 for (i = 0; i < n_spills; i++)
2112 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2113 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2114 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2115 SET_HARD_REG_BIT (new, spill_regs[i]);
2116
2117 /* Find each consecutive group of them. */
2118 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2119 if (TEST_HARD_REG_BIT (new, i)
2120 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2121 && HARD_REGNO_MODE_OK (i, group_mode[class]))
2122 {
2123 for (j = 1; j < group_size[class]; j++)
2124 if (! TEST_HARD_REG_BIT (new, i + j))
2125 break;
2126
2127 if (j == group_size[class])
2128 {
2129 /* We found a group. Mark it off against this class's need for
2130 groups, and against each superclass too. */
2131 register enum reg_class *p;
2132
2133 max_groups[class]--;
2134 p = reg_class_superclasses[class];
2135 while (*p != LIM_REG_CLASSES)
2136 {
2137 if (group_size [(int) *p] <= group_size [class])
2138 max_groups[(int) *p]--;
2139 p++;
2140 }
2141
2142 /* Don't count these registers again. */
2143 for (j = 0; j < group_size[class]; j++)
2144 SET_HARD_REG_BIT (counted_for_groups, i + j);
2145 }
2146
2147 /* Skip to the last reg in this group. When i is incremented above,
2148 it will then point to the first reg of the next possible group. */
2149 i += j - 1;
2150 }
2151 }
2152 \f
2153 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2154 another mode that needs to be reloaded for the same register class CLASS.
2155 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2156 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2157
2158 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2159 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2160 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2161 causes unnecessary failures on machines requiring alignment of register
2162 groups when the two modes are different sizes, because the larger mode has
2163 more strict alignment rules than the smaller mode. */
2164
2165 static int
2166 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2167 enum machine_mode allocate_mode, other_mode;
2168 enum reg_class class;
2169 {
2170 register int regno;
2171 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2172 {
2173 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2174 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2175 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2176 return 0;
2177 }
2178 return 1;
2179 }
2180
2181 /* Handle the failure to find a register to spill.
2182 INSN should be one of the insns which needed this particular spill reg. */
2183
2184 static void
2185 spill_failure (insn)
2186 rtx insn;
2187 {
2188 if (asm_noperands (PATTERN (insn)) >= 0)
2189 error_for_asm (insn, "`asm' needs too many reloads");
2190 else
2191 fatal_insn ("Unable to find a register to spill.", insn);
2192 }
2193
2194 /* Add a new register to the tables of available spill-registers
2195 (as well as spilling all pseudos allocated to the register).
2196 I is the index of this register in potential_reload_regs.
2197 CLASS is the regclass whose need is being satisfied.
2198 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2199 so that this register can count off against them.
2200 MAX_NONGROUPS is 0 if this register is part of a group.
2201 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2202
2203 static int
2204 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2205 int i;
2206 int class;
2207 int *max_needs;
2208 int *max_nongroups;
2209 int global;
2210 FILE *dumpfile;
2211 {
2212 register enum reg_class *p;
2213 int val;
2214 int regno = potential_reload_regs[i];
2215
2216 if (i >= FIRST_PSEUDO_REGISTER)
2217 abort (); /* Caller failed to find any register. */
2218
2219 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2220 fatal ("fixed or forbidden register was spilled.\n\
2221 This may be due to a compiler bug or to impossible asm\n\
2222 statements or clauses.");
2223
2224 /* Make reg REGNO an additional reload reg. */
2225
2226 potential_reload_regs[i] = -1;
2227 spill_regs[n_spills] = regno;
2228 spill_reg_order[regno] = n_spills;
2229 if (dumpfile)
2230 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2231
2232 /* Clear off the needs we just satisfied. */
2233
2234 max_needs[class]--;
2235 p = reg_class_superclasses[class];
2236 while (*p != LIM_REG_CLASSES)
2237 max_needs[(int) *p++]--;
2238
2239 if (max_nongroups && max_nongroups[class] > 0)
2240 {
2241 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2242 max_nongroups[class]--;
2243 p = reg_class_superclasses[class];
2244 while (*p != LIM_REG_CLASSES)
2245 max_nongroups[(int) *p++]--;
2246 }
2247
2248 /* Spill every pseudo reg that was allocated to this reg
2249 or to something that overlaps this reg. */
2250
2251 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2252
2253 /* If there are some registers still to eliminate and this register
2254 wasn't ever used before, additional stack space may have to be
2255 allocated to store this register. Thus, we may have changed the offset
2256 between the stack and frame pointers, so mark that something has changed.
2257 (If new pseudos were spilled, thus requiring more space, VAL would have
2258 been set non-zero by the call to spill_hard_reg above since additional
2259 reloads may be needed in that case.
2260
2261 One might think that we need only set VAL to 1 if this is a call-used
2262 register. However, the set of registers that must be saved by the
2263 prologue is not identical to the call-used set. For example, the
2264 register used by the call insn for the return PC is a call-used register,
2265 but must be saved by the prologue. */
2266 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2267 val = 1;
2268
2269 regs_ever_live[spill_regs[n_spills]] = 1;
2270 n_spills++;
2271
2272 return val;
2273 }
2274 \f
2275 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2276 data that is dead in INSN. */
2277
2278 static void
2279 delete_dead_insn (insn)
2280 rtx insn;
2281 {
2282 rtx prev = prev_real_insn (insn);
2283 rtx prev_dest;
2284
2285 /* If the previous insn sets a register that dies in our insn, delete it
2286 too. */
2287 if (prev && GET_CODE (PATTERN (prev)) == SET
2288 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2289 && reg_mentioned_p (prev_dest, PATTERN (insn))
2290 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2291 delete_dead_insn (prev);
2292
2293 PUT_CODE (insn, NOTE);
2294 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2295 NOTE_SOURCE_FILE (insn) = 0;
2296 }
2297
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.  */

static void
alter_reg (i, from_reg)
     register int i;
     int from_reg;
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (GET_CODE (regno_reg_rtx[i]) != REG)
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  REGNO (regno_reg_rtx[i])
    = reg_renumber[i] >= 0 ? reg_renumber[i] : i;

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && reg_n_refs[i] > 0
      && reg_equiv_constant[i] == 0
      && reg_equiv_memory_loc[i] == 0)
    {
      register rtx x;
      /* Size implied by the pseudo's own mode.  */
      int inherent_size = PSEUDO_REGNO_BYTES (i);
      /* Size including room for paradoxical subregs of this pseudo.  */
      int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      /* Byte offset from the raw slot address to the address we will
	 record; nonzero only for big-endian corrections below.  */
      int adjust = 0;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, then the previous slot.  */
      if (from_reg == -1)
	{
	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
	  if (BYTES_BIG_ENDIAN)
	    {
	      /* Cancel the big-endian correction done in assign_stack_local.
		 Get the address of the beginning of the slot.
		 This is so we can do a big-endian correction unconditionally
		 below.  */
	      adjust = inherent_size - total_size;
	    }
	}
      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size))
	x = spill_stack_slot[from_reg];
      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  The new slot must be at least as big
	     as anything previously spilled from FROM_REG, so that all
	     such pseudos can keep sharing it.  */
	  enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
	  rtx stack_slot;
	  if (spill_stack_slot[from_reg])
	    {
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	    }
	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size, -1);
	  stack_slot = x;
	  if (BYTES_BIG_ENDIAN)
	    {
	      /* Cancel the big-endian correction done in assign_stack_local.
		 Get the address of the beginning of the slot.
		 This is so we can do a big-endian correction unconditionally
		 below.  */
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (adjust)
		{
		  /* Record the slot under a mode covering exactly
		     TOTAL_SIZE bytes, based at the start of the slot.  */
		  stack_slot = gen_rtx (MEM, mode_for_size (total_size
							    * BITS_PER_UNIT,
							    MODE_INT, 1),
					plus_constant (XEXP (x, 0), adjust));
		  RTX_UNCHANGING_P (stack_slot)
		    = RTX_UNCHANGING_P (regno_reg_rtx[i]);
		}
	    }
	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
	adjust += (total_size - inherent_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
	{
	  x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
		       plus_constant (XEXP (x, 0), adjust));
	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
	}

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc[i] = x;
    }
}
2423
2424 /* Mark the slots in regs_ever_live for the hard regs
2425 used by pseudo-reg number REGNO. */
2426
2427 void
2428 mark_home_live (regno)
2429 int regno;
2430 {
2431 register int i, lim;
2432 i = reg_renumber[regno];
2433 if (i < 0)
2434 return;
2435 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2436 while (i < lim)
2437 regs_ever_live[i++] = 1;
2438 }
2439
2440 /* Mark the registers used in SCRATCH as being live. */
2441
2442 static void
2443 mark_scratch_live (scratch)
2444 rtx scratch;
2445 {
2446 register int i;
2447 int regno = REGNO (scratch);
2448 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2449
2450 for (i = regno; i < lim; i++)
2451 regs_ever_live[i] = 1;
2452 }
2453 \f
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is non-zero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.  */

static void
set_label_offsets (x, insn, initial_p)
     rtx x;
     rtx insn;
     int initial_p;
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      /* A non-local label can be reached from anywhere, so we can't
	 record anything useful about offsets at it.  */
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = XEXP (x, 0);

      /* ... fall through ... */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x)][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && GET_CODE (tem) == BARRIER)
	{
	  /* Control can only reach this label by a jump, so adopt the
	     offsets recorded for the label and recount how many
	     eliminations are away from their initial offsets.  */
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (x)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x)][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_INSN:
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ... */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
	 and hence must have all eliminations at their initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the address vector must be at their initial
	 offsets.  We want the first field for ADDR_VEC and the second
	 field for ADDR_DIFF_VEC.  */

      for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Recurse into each label arm; a non-label, non-PC, non-RETURN
	     arm breaks out to the variable-jump handling below.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
    }
}
2608 \f
2609 /* Used for communication between the next two function to properly share
2610 the vector for an ASM_OPERANDS. */
2611
2612 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2613
2614 /* Scan X and replace any eliminable registers (such as fp) with a
2615 replacement (such as sp), plus an offset.
2616
2617 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2618 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2619 MEM, we are allowed to replace a sum of a register and the constant zero
2620 with the register, which we cannot do outside a MEM. In addition, we need
2621 to record the fact that a register is referenced outside a MEM.
2622
2623 If INSN is an insn, it is the insn containing X. If we replace a REG
2624 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2625 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2626 that the REG is being modified.
2627
2628 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2629 That's used when we eliminate in expressions stored in notes.
2630 This means, do not set ref_outside_mem even if the reference
2631 is outside of MEMs.
2632
2633 If we see a modification to a register we know about, take the
2634 appropriate action (see case SET, below).
2635
2636 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2637 replacements done assuming all offsets are at their initial values. If
2638 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2639 encounter, return the actual location so that find_reloads will do
2640 the proper thing. */
2641
2642 rtx
2643 eliminate_regs (x, mem_mode, insn)
2644 rtx x;
2645 enum machine_mode mem_mode;
2646 rtx insn;
2647 {
2648 enum rtx_code code = GET_CODE (x);
2649 struct elim_table *ep;
2650 int regno;
2651 rtx new;
2652 int i, j;
2653 char *fmt;
2654 int copied = 0;
2655
2656 switch (code)
2657 {
2658 case CONST_INT:
2659 case CONST_DOUBLE:
2660 case CONST:
2661 case SYMBOL_REF:
2662 case CODE_LABEL:
2663 case PC:
2664 case CC0:
2665 case ASM_INPUT:
2666 case ADDR_VEC:
2667 case ADDR_DIFF_VEC:
2668 case RETURN:
2669 return x;
2670
2671 case REG:
2672 regno = REGNO (x);
2673
2674 /* First handle the case where we encounter a bare register that
2675 is eliminable. Replace it with a PLUS. */
2676 if (regno < FIRST_PSEUDO_REGISTER)
2677 {
2678 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2679 ep++)
2680 if (ep->from_rtx == x && ep->can_eliminate)
2681 {
2682 if (! mem_mode
2683 /* Refs inside notes don't count for this purpose. */
2684 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2685 || GET_CODE (insn) == INSN_LIST)))
2686 ep->ref_outside_mem = 1;
2687 return plus_constant (ep->to_rtx, ep->previous_offset);
2688 }
2689
2690 }
2691 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2692 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2693 {
2694 /* In this case, find_reloads would attempt to either use an
2695 incorrect address (if something is not at its initial offset)
2696 or substitute an replaced address into an insn (which loses
2697 if the offset is changed by some later action). So we simply
2698 return the replaced stack slot (assuming it is changed by
2699 elimination) and ignore the fact that this is actually a
2700 reference to the pseudo. Ensure we make a copy of the
2701 address in case it is shared. */
2702 new = eliminate_regs (reg_equiv_memory_loc[regno],
2703 mem_mode, insn);
2704 if (new != reg_equiv_memory_loc[regno])
2705 {
2706 cannot_omit_stores[regno] = 1;
2707 return copy_rtx (new);
2708 }
2709 }
2710 return x;
2711
2712 case PLUS:
2713 /* If this is the sum of an eliminable register and a constant, rework
2714 the sum. */
2715 if (GET_CODE (XEXP (x, 0)) == REG
2716 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2717 && CONSTANT_P (XEXP (x, 1)))
2718 {
2719 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2720 ep++)
2721 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2722 {
2723 if (! mem_mode
2724 /* Refs inside notes don't count for this purpose. */
2725 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2726 || GET_CODE (insn) == INSN_LIST)))
2727 ep->ref_outside_mem = 1;
2728
2729 /* The only time we want to replace a PLUS with a REG (this
2730 occurs when the constant operand of the PLUS is the negative
2731 of the offset) is when we are inside a MEM. We won't want
2732 to do so at other times because that would change the
2733 structure of the insn in a way that reload can't handle.
2734 We special-case the commonest situation in
2735 eliminate_regs_in_insn, so just replace a PLUS with a
2736 PLUS here, unless inside a MEM. */
2737 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2738 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2739 return ep->to_rtx;
2740 else
2741 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2742 plus_constant (XEXP (x, 1),
2743 ep->previous_offset));
2744 }
2745
2746 /* If the register is not eliminable, we are done since the other
2747 operand is a constant. */
2748 return x;
2749 }
2750
2751 /* If this is part of an address, we want to bring any constant to the
2752 outermost PLUS. We will do this by doing register replacement in
2753 our operands and seeing if a constant shows up in one of them.
2754
2755 We assume here this is part of an address (or a "load address" insn)
2756 since an eliminable register is not likely to appear in any other
2757 context.
2758
2759 If we have (plus (eliminable) (reg)), we want to produce
2760 (plus (plus (replacement) (reg) (const))). If this was part of a
2761 normal add insn, (plus (replacement) (reg)) will be pushed as a
2762 reload. This is the desired action. */
2763
2764 {
2765 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2766 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2767
2768 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2769 {
2770 /* If one side is a PLUS and the other side is a pseudo that
2771 didn't get a hard register but has a reg_equiv_constant,
2772 we must replace the constant here since it may no longer
2773 be in the position of any operand. */
2774 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2775 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2776 && reg_renumber[REGNO (new1)] < 0
2777 && reg_equiv_constant != 0
2778 && reg_equiv_constant[REGNO (new1)] != 0)
2779 new1 = reg_equiv_constant[REGNO (new1)];
2780 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2781 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2782 && reg_renumber[REGNO (new0)] < 0
2783 && reg_equiv_constant[REGNO (new0)] != 0)
2784 new0 = reg_equiv_constant[REGNO (new0)];
2785
2786 new = form_sum (new0, new1);
2787
2788 /* As above, if we are not inside a MEM we do not want to
2789 turn a PLUS into something else. We might try to do so here
2790 for an addition of 0 if we aren't optimizing. */
2791 if (! mem_mode && GET_CODE (new) != PLUS)
2792 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2793 else
2794 return new;
2795 }
2796 }
2797 return x;
2798
2799 case MULT:
2800 /* If this is the product of an eliminable register and a
2801 constant, apply the distribute law and move the constant out
2802 so that we have (plus (mult ..) ..). This is needed in order
2803 to keep load-address insns valid. This case is pathalogical.
2804 We ignore the possibility of overflow here. */
2805 if (GET_CODE (XEXP (x, 0)) == REG
2806 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2807 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2808 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2809 ep++)
2810 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2811 {
2812 if (! mem_mode
2813 /* Refs inside notes don't count for this purpose. */
2814 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2815 || GET_CODE (insn) == INSN_LIST)))
2816 ep->ref_outside_mem = 1;
2817
2818 return
2819 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2820 ep->previous_offset * INTVAL (XEXP (x, 1)));
2821 }
2822
2823 /* ... fall through ... */
2824
2825 case CALL:
2826 case COMPARE:
2827 case MINUS:
2828 case DIV: case UDIV:
2829 case MOD: case UMOD:
2830 case AND: case IOR: case XOR:
2831 case ROTATERT: case ROTATE:
2832 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2833 case NE: case EQ:
2834 case GE: case GT: case GEU: case GTU:
2835 case LE: case LT: case LEU: case LTU:
2836 {
2837 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2838 rtx new1
2839 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2840
2841 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2842 return gen_rtx (code, GET_MODE (x), new0, new1);
2843 }
2844 return x;
2845
2846 case EXPR_LIST:
2847 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2848 if (XEXP (x, 0))
2849 {
2850 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2851 if (new != XEXP (x, 0))
2852 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2853 }
2854
2855 /* ... fall through ... */
2856
2857 case INSN_LIST:
2858 /* Now do eliminations in the rest of the chain. If this was
2859 an EXPR_LIST, this might result in allocating more memory than is
2860 strictly needed, but it simplifies the code. */
2861 if (XEXP (x, 1))
2862 {
2863 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2864 if (new != XEXP (x, 1))
2865 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2866 }
2867 return x;
2868
2869 case PRE_INC:
2870 case POST_INC:
2871 case PRE_DEC:
2872 case POST_DEC:
2873 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2874 if (ep->to_rtx == XEXP (x, 0))
2875 {
2876 int size = GET_MODE_SIZE (mem_mode);
2877
2878 /* If more bytes than MEM_MODE are pushed, account for them. */
2879 #ifdef PUSH_ROUNDING
2880 if (ep->to_rtx == stack_pointer_rtx)
2881 size = PUSH_ROUNDING (size);
2882 #endif
2883 if (code == PRE_DEC || code == POST_DEC)
2884 ep->offset += size;
2885 else
2886 ep->offset -= size;
2887 }
2888
2889 /* Fall through to generic unary operation case. */
2890 case USE:
2891 case STRICT_LOW_PART:
2892 case NEG: case NOT:
2893 case SIGN_EXTEND: case ZERO_EXTEND:
2894 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2895 case FLOAT: case FIX:
2896 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2897 case ABS:
2898 case SQRT:
2899 case FFS:
2900 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2901 if (new != XEXP (x, 0))
2902 return gen_rtx (code, GET_MODE (x), new);
2903 return x;
2904
2905 case SUBREG:
2906 /* Similar to above processing, but preserve SUBREG_WORD.
2907 Convert (subreg (mem)) to (mem) if not paradoxical.
2908 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2909 pseudo didn't get a hard reg, we must replace this with the
2910 eliminated version of the memory location because push_reloads
2911 may do the replacement in certain circumstances. */
2912 if (GET_CODE (SUBREG_REG (x)) == REG
2913 && (GET_MODE_SIZE (GET_MODE (x))
2914 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2915 && reg_equiv_memory_loc != 0
2916 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2917 {
2918 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2919 mem_mode, insn);
2920
2921 /* If we didn't change anything, we must retain the pseudo. */
2922 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2923 new = SUBREG_REG (x);
2924 else
2925 {
2926 /* Otherwise, ensure NEW isn't shared in case we have to reload
2927 it. */
2928 new = copy_rtx (new);
2929
2930 /* In this case, we must show that the pseudo is used in this
2931 insn so that delete_output_reload will do the right thing. */
2932 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2933 && GET_CODE (insn) != INSN_LIST)
2934 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
2935 insn);
2936 }
2937 }
2938 else
2939 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2940
2941 if (new != XEXP (x, 0))
2942 {
2943 if (GET_CODE (new) == MEM
2944 && (GET_MODE_SIZE (GET_MODE (x))
2945 <= GET_MODE_SIZE (GET_MODE (new)))
2946 #ifdef LOAD_EXTEND_OP
2947 /* On these machines we will be reloading what is
2948 inside the SUBREG if it originally was a pseudo and
2949 the inner and outer modes are both a word or
2950 smaller. So leave the SUBREG then. */
2951 && ! (GET_CODE (SUBREG_REG (x)) == REG
2952 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2953 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD
2954 && (GET_MODE_SIZE (GET_MODE (x))
2955 > GET_MODE_SIZE (GET_MODE (new)))
2956 && INTEGRAL_MODE_P (GET_MODE (new))
2957 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
2958 #endif
2959 )
2960 {
2961 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2962 enum machine_mode mode = GET_MODE (x);
2963
2964 if (BYTES_BIG_ENDIAN)
2965 offset += (MIN (UNITS_PER_WORD,
2966 GET_MODE_SIZE (GET_MODE (new)))
2967 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2968
2969 PUT_MODE (new, mode);
2970 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2971 return new;
2972 }
2973 else
2974 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2975 }
2976
2977 return x;
2978
2979 case CLOBBER:
2980 /* If clobbering a register that is the replacement register for an
2981 elimination we still think can be performed, note that it cannot
2982 be performed. Otherwise, we need not be concerned about it. */
2983 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2984 if (ep->to_rtx == XEXP (x, 0))
2985 ep->can_eliminate = 0;
2986
2987 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2988 if (new != XEXP (x, 0))
2989 return gen_rtx (code, GET_MODE (x), new);
2990 return x;
2991
2992 case ASM_OPERANDS:
2993 {
2994 rtx *temp_vec;
2995 /* Properly handle sharing input and constraint vectors. */
2996 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2997 {
2998 /* When we come to a new vector not seen before,
2999 scan all its elements; keep the old vector if none
3000 of them changes; otherwise, make a copy. */
3001 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3002 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3003 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3004 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
3005 mem_mode, insn);
3006
3007 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3008 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3009 break;
3010
3011 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3012 new_asm_operands_vec = old_asm_operands_vec;
3013 else
3014 new_asm_operands_vec
3015 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3016 }
3017
3018 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3019 if (new_asm_operands_vec == old_asm_operands_vec)
3020 return x;
3021
3022 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3023 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3024 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
3025 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3026 ASM_OPERANDS_SOURCE_FILE (x),
3027 ASM_OPERANDS_SOURCE_LINE (x));
3028 new->volatil = x->volatil;
3029 return new;
3030 }
3031
3032 case SET:
3033 /* Check for setting a register that we know about. */
3034 if (GET_CODE (SET_DEST (x)) == REG)
3035 {
3036 /* See if this is setting the replacement register for an
3037 elimination.
3038
3039 If DEST is the hard frame pointer, we do nothing because we
3040 assume that all assignments to the frame pointer are for
3041 non-local gotos and are being done at a time when they are valid
3042 and do not disturb anything else. Some machines want to
3043 eliminate a fake argument pointer (or even a fake frame pointer)
3044 with either the real frame or the stack pointer. Assignments to
3045 the hard frame pointer must not prevent this elimination. */
3046
3047 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3048 ep++)
3049 if (ep->to_rtx == SET_DEST (x)
3050 && SET_DEST (x) != hard_frame_pointer_rtx)
3051 {
3052 /* If it is being incremented, adjust the offset. Otherwise,
3053 this elimination can't be done. */
3054 rtx src = SET_SRC (x);
3055
3056 if (GET_CODE (src) == PLUS
3057 && XEXP (src, 0) == SET_DEST (x)
3058 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3059 ep->offset -= INTVAL (XEXP (src, 1));
3060 else
3061 ep->can_eliminate = 0;
3062 }
3063
3064 /* Now check to see we are assigning to a register that can be
3065 eliminated. If so, it must be as part of a PARALLEL, since we
3066 will not have been called if this is a single SET. So indicate
3067 that we can no longer eliminate this reg. */
3068 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3069 ep++)
3070 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3071 ep->can_eliminate = 0;
3072 }
3073
3074 /* Now avoid the loop below in this common case. */
3075 {
3076 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3077 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3078
3079 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3080 write a CLOBBER insn. */
3081 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3082 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3083 && GET_CODE (insn) != INSN_LIST)
3084 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3085
3086 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3087 return gen_rtx (SET, VOIDmode, new0, new1);
3088 }
3089
3090 return x;
3091
3092 case MEM:
3093 /* Our only special processing is to pass the mode of the MEM to our
3094 recursive call and copy the flags. While we are here, handle this
3095 case more efficiently. */
3096 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3097 if (new != XEXP (x, 0))
3098 {
3099 new = gen_rtx (MEM, GET_MODE (x), new);
3100 new->volatil = x->volatil;
3101 new->unchanging = x->unchanging;
3102 new->in_struct = x->in_struct;
3103 return new;
3104 }
3105 else
3106 return x;
3107 }
3108
3109 /* Process each of our operands recursively. If any have changed, make a
3110 copy of the rtx. */
3111 fmt = GET_RTX_FORMAT (code);
3112 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3113 {
3114 if (*fmt == 'e')
3115 {
3116 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3117 if (new != XEXP (x, i) && ! copied)
3118 {
3119 rtx new_x = rtx_alloc (code);
3120 bcopy ((char *) x, (char *) new_x,
3121 (sizeof (*new_x) - sizeof (new_x->fld)
3122 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3123 x = new_x;
3124 copied = 1;
3125 }
3126 XEXP (x, i) = new;
3127 }
3128 else if (*fmt == 'E')
3129 {
3130 int copied_vec = 0;
3131 for (j = 0; j < XVECLEN (x, i); j++)
3132 {
3133 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3134 if (new != XVECEXP (x, i, j) && ! copied_vec)
3135 {
3136 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3137 &XVECEXP (x, i, 0));
3138 if (! copied)
3139 {
3140 rtx new_x = rtx_alloc (code);
3141 bcopy ((char *) x, (char *) new_x,
3142 (sizeof (*new_x) - sizeof (new_x->fld)
3143 + (sizeof (new_x->fld[0])
3144 * GET_RTX_LENGTH (code))));
3145 x = new_x;
3146 copied = 1;
3147 }
3148 XVEC (x, i) = new_v;
3149 copied_vec = 1;
3150 }
3151 XVECEXP (x, i, j) = new;
3152 }
3153 }
3154 }
3155
3156 return x;
3157 }
3158 \f
3159 /* Scan INSN and eliminate all eliminable registers in it.
3160
3161 If REPLACE is nonzero, do the replacement destructively. Also
3162 delete the insn as dead it if it is setting an eliminable register.
3163
3164 If REPLACE is zero, do all our allocations in reload_obstack.
3165
3166 If no eliminations were done and this insn doesn't require any elimination
3167 processing (these are not identical conditions: it might be updating sp,
3168 but not referencing fp; this needs to be seen during reload_as_needed so
3169 that the offset between fp and sp can be taken into consideration), zero
3170 is returned. Otherwise, 1 is returned. */
3171
static int
eliminate_regs_in_insn (insn, replace)
     rtx insn;
     int replace;
{
  rtx old_body = PATTERN (insn);
  rtx old_set = single_set (insn);
  rtx new_body;
  int val = 0;			/* Return value: nonzero if anything changed.  */
  struct elim_table *ep;

  /* When not replacing destructively, allocate everything we build on
     reload_obstack so it can be discarded later.  */
  if (! replace)
      push_obstacks (&reload_obstack, &reload_obstack);

  if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  {
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	    /* If this is setting the frame pointer register to the
	       hardware frame pointer register and this is an elimination
	       that will be done (tested above), this insn is really
	       adjusting the frame pointer downward to compensate for
	       the adjustment done before a nonlocal goto.  */
	    if (ep->from == FRAME_POINTER_REGNUM
		&& ep->to == HARD_FRAME_POINTER_REGNUM)
	      {
		rtx src = SET_SRC (old_set);
		int offset, ok = 0;

		if (src == ep->to_rtx)
		  offset = 0, ok = 1;
		else if (GET_CODE (src) == PLUS
			 && GET_CODE (XEXP (src, 0)) == CONST_INT)
		  offset = INTVAL (XEXP (src, 0)), ok = 1;

		if (ok)
		  {
		    if (replace)
		      {
			/* NOTE: this SRC intentionally shadows the outer
			   SRC; it is the rewritten source expression.  */
			rtx src
			  = plus_constant (ep->to_rtx, offset - ep->offset);

			/* First see if this insn remains valid when we
			   make the change.  If not, keep the INSN_CODE
			   the same and let reload fix it up.  */
			validate_change (insn, &SET_SRC (old_set), src, 1);
			validate_change (insn, &SET_DEST (old_set),
					 ep->to_rtx, 1);
			if (! apply_change_group ())
			  {
			    /* Recognition failed; store the new body
			       anyway and leave INSN_CODE stale.  */
			    SET_SRC (old_set) = src;
			    SET_DEST (old_set) = ep->to_rtx;
			  }
		      }

		    val = 1;
		    goto done;
		  }
	      }
#endif

	    /* In this case this insn isn't serving a useful purpose.  We
	       will delete it in reload_as_needed once we know that this
	       elimination is, in fact, being done.

	       If REPLACE isn't set, we can't delete this insn, but needn't
	       process it since it won't be used unless something changes.  */
	    if (replace)
	      delete_dead_insn (insn);
	    val = 1;
	    goto done;
	  }

      /* Check for (set (reg) (plus (reg from) (offset))) where the offset
	 in the insn is the negative of the offset in FROM.  Substitute
	 (set (reg) (reg to)) for the insn and change its code.

	 We have to do this here, rather than in eliminate_regs, so that we
	 can change the insn code.  */

      if (GET_CODE (SET_SRC (old_set)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
	  && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
	      && ep->can_eliminate)
	    {
	      /* We must stop at the first elimination that will be used.
		 If this one would replace the PLUS with a REG, do it
		 now.  Otherwise, quit the loop and let eliminate_regs
		 do its normal replacement.  */
	      if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
		{
		  /* We assume here that we don't need a PARALLEL of
		     any CLOBBERs for this assignment.  There's not
		     much we can do if we do need it.  */
		  PATTERN (insn) = gen_rtx (SET, VOIDmode,
					    SET_DEST (old_set), ep->to_rtx);
		  INSN_CODE (insn) = -1;
		  val = 1;
		  goto done;
		}

	      break;
	    }
    }

  /* Reset the ASM_OPERANDS sharing cache before scanning a new insn.  */
  old_asm_operands_vec = 0;

  /* Replace the body of this insn with a substituted form.  If we changed
     something, return non-zero.

     If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.  */

  new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
  if (new_body != old_body)
    {
      /* If we aren't replacing things permanently and we changed something,
	 make another copy to ensure that all the RTL is new.  Otherwise
	 things can go wrong if find_reload swaps commutative operands
	 and one is inside RTL that has been copied while the other is not.  */

      /* Don't copy an asm_operands because (1) there's no need and (2)
	 copy_rtx can't do it properly when there are multiple outputs.  */
      if (! replace && asm_noperands (old_body) < 0)
	new_body = copy_rtx (new_body);

      /* If we had a move insn but now we don't, rerecognize it.  This will
	 cause spurious re-recognition if the old move had a PARALLEL since
	 the new one still will, but we can't call single_set without
	 having put NEW_BODY into the insn and the re-recognition won't
	 hurt in this rare case.  */
      if (old_set != 0
	  && ((GET_CODE (SET_SRC (old_set)) == REG
	       && (GET_CODE (new_body) != SET
		   || GET_CODE (SET_SRC (new_body)) != REG))
	      /* If this was a load from or store to memory, compare
		 the MEM in recog_operand to the one in the insn.  If they
		 are not equal, then rerecognize the insn.  */
	      || (old_set != 0
		  && ((GET_CODE (SET_SRC (old_set)) == MEM
		       && SET_SRC (old_set) != recog_operand[1])
		      || (GET_CODE (SET_DEST (old_set)) == MEM
			  && SET_DEST (old_set) != recog_operand[0])))
	      /* If this was an add insn before, rerecognize.  */
	      || GET_CODE (SET_SRC (old_set)) == PLUS))
	{
	  if (! validate_change (insn, &PATTERN (insn), new_body, 0))
	    /* If recognition fails, store the new body anyway.
	       It's normal to have recognition failures here
	       due to bizarre memory addresses; reloading will fix them.  */
	    PATTERN (insn) = new_body;
	}
      else
	PATTERN (insn) = new_body;

      val = 1;
    }

  /* Loop through all elimination pairs.  See if any have changed and
     recalculate the number not at initial offset.

     Compute the maximum offset (minimum offset if the stack does not
     grow downward) for each elimination pair.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */

  num_not_at_initial_offset = 0;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;

      if (ep->previous_offset != ep->offset)
	val = 1;

      ep->previous_offset = ep->offset;
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
	num_not_at_initial_offset++;

#ifdef STACK_GROWS_DOWNWARD
      ep->max_offset = MAX (ep->max_offset, ep->offset);
#else
      ep->max_offset = MIN (ep->max_offset, ep->offset);
#endif
    }

 done:
  /* If we changed something, perform elimination in REG_NOTES.  This is
     needed even when REPLACE is zero because a REG_DEAD note might refer
     to a register that we eliminate and could cause a different number
     of spill registers to be needed in the final reload pass than in
     the pre-passes.  (The note list is passed as the INSN argument so
     eliminate_regs treats the references as being inside notes.)  */
  if (val && REG_NOTES (insn) != 0)
    REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));

  if (! replace)
    pop_obstacks ();

  return val;
}
3390
3391 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3392 replacement we currently believe is valid, mark it as not eliminable if X
3393 modifies DEST in any way other than by adding a constant integer to it.
3394
3395 If DEST is the frame pointer, we do nothing because we assume that
3396 all assignments to the hard frame pointer are nonlocal gotos and are being
3397 done at a time when they are valid and do not disturb anything else.
3398 Some machines want to eliminate a fake argument pointer with either the
3399 frame or stack pointer. Assignments to the hard frame pointer must not
3400 prevent this elimination.
3401
3402 Called via note_stores from reload before starting its passes to scan
3403 the insns of the function. */
3404
3405 static void
3406 mark_not_eliminable (dest, x)
3407 rtx dest;
3408 rtx x;
3409 {
3410 register int i;
3411
3412 /* A SUBREG of a hard register here is just changing its mode. We should
3413 not see a SUBREG of an eliminable hard register, but check just in
3414 case. */
3415 if (GET_CODE (dest) == SUBREG)
3416 dest = SUBREG_REG (dest);
3417
3418 if (dest == hard_frame_pointer_rtx)
3419 return;
3420
3421 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3422 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3423 && (GET_CODE (x) != SET
3424 || GET_CODE (SET_SRC (x)) != PLUS
3425 || XEXP (SET_SRC (x), 0) != dest
3426 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3427 {
3428 reg_eliminate[i].can_eliminate_previous
3429 = reg_eliminate[i].can_eliminate = 0;
3430 num_eliminable--;
3431 }
3432 }
3433 \f
3434 /* Kick all pseudos out of hard register REGNO.
3435 If GLOBAL is nonzero, try to find someplace else to put them.
3436 If DUMPFILE is nonzero, log actions taken on that file.
3437
3438 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In that case, no pseudos
3440 are allowed to be in the register, even if they are only in a block that
3441 doesn't require spill registers, unlike the case when we are spilling this
3442 hard reg to produce another spill register.
3443
3444 Return nonzero if any pseudos needed to be kicked out. */
3445
static int
spill_hard_reg (regno, global, dumpfile, cant_eliminate)
     register int regno;
     int global;
     FILE *dumpfile;
     int cant_eliminate;
{
  enum reg_class class = REGNO_REG_CLASS (regno);
  int something_changed = 0;	/* Nonzero if any pseudo or scratch moved.  */
  register int i;

  /* This hard reg may no longer be used to hold pseudos or user data.  */
  SET_HARD_REG_BIT (forbidden_regs, regno);

  if (cant_eliminate)
    regs_ever_live[regno] = 1;

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    /* The pseudo overlaps REGNO when its first hard reg is <= REGNO and
       its hard-reg span (first + number of regs its mode needs) extends
       past REGNO.  */
    if (reg_renumber[i] >= 0
	&& reg_renumber[i] <= regno
	&& (reg_renumber[i]
	    + HARD_REGNO_NREGS (reg_renumber[i],
				PSEUDO_REGNO_MODE (i))
	    > regno))
      {
	/* If this register belongs solely to a basic block which needed no
	   spilling of any class that this register is contained in,
	   leave it be, unless we are spilling this register because
	   it was a hard register that can't be eliminated.  */

	if (! cant_eliminate
	    && basic_block_needs[0]
	    && reg_basic_block[i] >= 0
	    && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
	  {
	    enum reg_class *p;

	    /* Also check every superclass of CLASS; only if none of
	       them needs spills in this block can we keep the pseudo.  */
	    for (p = reg_class_superclasses[(int) class];
		 *p != LIM_REG_CLASSES; p++)
	      if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
		break;

	    if (*p == LIM_REG_CLASSES)
	      continue;
	  }

	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	/* We will need to scan everything again.  */
	something_changed = 1;
	if (global)
	  retry_global_alloc (i, forbidden_regs);

	/* Assign the pseudo a new home (stack slot or other hard reg).  */
	alter_reg (i, regno);
	if (dumpfile)
	  {
	    if (reg_renumber[i] == -1)
	      fprintf (dumpfile, " Register %d now on stack.\n\n", i);
	    else
	      fprintf (dumpfile, " Register %d now in %d.\n\n",
		       i, reg_renumber[i]);
	  }
      }
  /* Likewise evict any SCRATCH rtx that was given this hard reg, using
     the same per-basic-block exemption as above.  */
  for (i = 0; i < scratch_list_length; i++)
    {
      if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
	{
	  if (! cant_eliminate && basic_block_needs[0]
	      && ! basic_block_needs[(int) class][scratch_block[i]])
	    {
	      enum reg_class *p;

	      for (p = reg_class_superclasses[(int) class];
		   *p != LIM_REG_CLASSES; p++)
		if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
		  break;

	      if (*p == LIM_REG_CLASSES)
		continue;
	    }
	  /* Turn the rtx back into an unassigned SCRATCH and drop it
	     from the list.  */
	  PUT_CODE (scratch_list[i], SCRATCH);
	  scratch_list[i] = 0;
	  something_changed = 1;
	  continue;
	}
    }

  return something_changed;
}
3537 \f
3538 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3539 Also mark any hard registers used to store user variables as
3540 forbidden from being used for spill registers. */
3541
3542 static void
3543 scan_paradoxical_subregs (x)
3544 register rtx x;
3545 {
3546 register int i;
3547 register char *fmt;
3548 register enum rtx_code code = GET_CODE (x);
3549
3550 switch (code)
3551 {
3552 case REG:
3553 #ifdef SMALL_REGISTER_CLASSES
3554 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3555 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3556 #endif
3557 return;
3558
3559 case CONST_INT:
3560 case CONST:
3561 case SYMBOL_REF:
3562 case LABEL_REF:
3563 case CONST_DOUBLE:
3564 case CC0:
3565 case PC:
3566 case USE:
3567 case CLOBBER:
3568 return;
3569
3570 case SUBREG:
3571 if (GET_CODE (SUBREG_REG (x)) == REG
3572 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3573 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3574 = GET_MODE_SIZE (GET_MODE (x));
3575 return;
3576 }
3577
3578 fmt = GET_RTX_FORMAT (code);
3579 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3580 {
3581 if (fmt[i] == 'e')
3582 scan_paradoxical_subregs (XEXP (x, i));
3583 else if (fmt[i] == 'E')
3584 {
3585 register int j;
3586 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3587 scan_paradoxical_subregs (XVECEXP (x, i, j));
3588 }
3589 }
3590 }
3591 \f
3592 static int
3593 hard_reg_use_compare (p1, p2)
3594 struct hard_reg_n_uses *p1, *p2;
3595 {
3596 int tem = p1->uses - p2->uses;
3597 if (tem != 0) return tem;
3598 /* If regs are equally good, sort by regno,
3599 so that the results of qsort leave nothing to chance. */
3600 return p1->regno - p2->regno;
3601 }
3602
3603 /* Choose the order to consider regs for use as reload registers
3604 based on how much trouble would be caused by spilling one.
3605 Store them in order of decreasing preference in potential_reload_regs. */
3606
static void
order_regs_for_reload ()
{
  register int i;
  /* Next free slot in potential_reload_regs.  */
  register int o = 0;
  /* Total number of pseudo-reg references; used below as a penalty
     larger than any legitimate use count, so that adding it (or twice
     it) pushes a register to the end of the sort order.  */
  int large = 0;

  struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];

  CLEAR_HARD_REG_SET (bad_spill_regs);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    potential_reload_regs[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      hard_reg_n_uses[i].uses = 0;
      hard_reg_n_uses[i].regno = i;
    }

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (regno >= 0)
	{
	  /* Charge this pseudo's reference count to every hard reg
	     of the group it occupies.  */
	  int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
	  while (regno < lim)
	    hard_reg_n_uses[regno++].uses += reg_n_refs[i];
	}
      large += reg_n_refs[i];
    }

  /* Now fixed registers (which cannot safely be used for reloading)
     get a very high use count so they will be considered least desirable.
     Registers used explicitly in the rtl code are almost as bad.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (fixed_regs[i])
	{
	  hard_reg_n_uses[i].uses += 2 * large + 2;
	  SET_HARD_REG_BIT (bad_spill_regs, i);
	}
      else if (regs_explicitly_used[i])
	{
	  hard_reg_n_uses[i].uses += large + 1;
#ifndef SMALL_REGISTER_CLASSES
	  /* ??? We are doing this here because of the potential that
	     bad code may be generated if a register explicitly used in
	     an insn was used as a spill register for that insn.  But
	     not using these as spill registers may lose on some machine.
	     We'll have to see how this works out.  */
	  SET_HARD_REG_BIT (bad_spill_regs, i);
#endif
	}
    }
  /* The hard frame pointer is never a good spill choice either.  */
  hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
  SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);

#ifdef ELIMINABLE_REGS
  /* If registers other than the frame pointer are eliminable, mark them as
     poor choices.  */
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
      SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
    }
#endif

  /* Prefer registers not so far used, for use in temporary loading.
     Among them, if REG_ALLOC_ORDER is defined, use that order.
     Otherwise, prefer registers not preserved by calls.  */

#ifdef REG_ALLOC_ORDER
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int regno = reg_alloc_order[i];

      if (hard_reg_n_uses[regno].uses == 0)
	potential_reload_regs[o++] = regno;
    }
#else
  /* Unused call-clobbered regs first, then unused call-saved regs.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
#endif

  qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
	 sizeof hard_reg_n_uses[0], hard_reg_use_compare);

  /* Now add the regs that are already used,
     preferring those used less often.  The fixed and otherwise forbidden
     registers will be at the end of this list.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (hard_reg_n_uses[i].uses != 0)
      potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
}
3715 \f
3716 /* Used in reload_as_needed to sort the spilled regs. */
3717
static int
compare_spill_regs (r1, r2)
     short *r1, *r2;
{
  /* Sort spill regs into ascending hard-reg-number order.  The
     difference carries the sign qsort needs; register numbers are
     small, so the int subtraction cannot overflow.  */
  register int a = *r1;
  register int b = *r2;

  return a - b;
}
3724
3725 /* Reload pseudo-registers into hard regs around each insn as needed.
3726 Additional register load insns are output before the insn that needs it
3727 and perhaps store insns after insns that modify the reloaded pseudo reg.
3728
3729 reg_last_reload_reg and reg_reloaded_contents keep track of
3730 which registers are already available in reload registers.
3731 We update these for the reloads that we perform,
3732 as the insns are scanned. */
3733
static void
reload_as_needed (first, live_known)
     rtx first;
     int live_known;
{
  register rtx insn;
  register int i;
  int this_block = 0;
  rtx x;
  rtx after_call = 0;

  /* No reload register holds anything useful yet.  */
  bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
  bzero ((char *) spill_reg_store, sizeof spill_reg_store);
  reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
  reg_has_output_reload = (char *) alloca (max_regno);
  for (i = 0; i < n_spills; i++)
    {
      reg_reloaded_contents[i] = -1;
      reg_reloaded_insn[i] = 0;
    }

  /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
				  reg_eliminate[i].initial_offset);
      reg_eliminate[i].previous_offset
	= reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
    }
#else
  INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
  reg_eliminate[0].previous_offset
    = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

  num_not_at_initial_offset = 0;

  /* Order the spilled regs, so that allocate_reload_regs can guarantee to
     pack registers with group needs.  */
  if (n_spills > 1)
    {
      qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
      for (i = 0; i < n_spills; i++)
	spill_reg_order[spill_regs[i]] = i;
    }

  for (insn = first; insn;)
    {
      register rtx next = NEXT_INSN (insn);

      /* Notice when we move to a new basic block.  */
      if (live_known && this_block + 1 < n_basic_blocks
	  && insn == basic_block_head[this_block+1])
	++this_block;

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.  */
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (insn)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  rtx avoid_return_reg = 0;
	  /* Save the pattern now, before find_reloads/subst_reloads
	     substitute reload regs into the insn body; the note_stores
	     call below wants the stores in terms of the original regs.  */
	  rtx oldpat = PATTERN (insn);

#ifdef SMALL_REGISTER_CLASSES
	  /* Set avoid_return_reg if this is an insn
	     that might use the value of a function call.  */
	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      if (GET_CODE (PATTERN (insn)) == SET)
		after_call = SET_DEST (PATTERN (insn));
	      else if (GET_CODE (PATTERN (insn)) == PARALLEL
		       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
	      else
		after_call = 0;
	    }
	  else if (after_call != 0
		   && !(GET_CODE (PATTERN (insn)) == SET
			&& SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
	    {
	      if (reg_referenced_p (after_call, PATTERN (insn)))
		avoid_return_reg = after_call;
	      after_call = 0;
	    }
#endif /* SMALL_REGISTER_CLASSES */

	  /* If this is a USE and CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.
	     NOTE(review): the insn's mode field is apparently used as a
	     per-insn flag here (QImode/VOIDmode) — confirm against the
	     pass that sets it.  */
	  if (num_eliminable && GET_MODE (insn) == QImode)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      if (GET_CODE (insn) == NOTE)
		{
		  insn = next;
		  continue;
		}
	    }

	  if (GET_MODE (insn) == VOIDmode)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      bzero (reg_has_output_reload, max_regno);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (n_reloads > 0)
	    {
	      /* This inner `next' shadows the outer one; both equal
		 NEXT_INSN (insn) at this point, and this copy bounds the
		 asm-validation scan below.  */
	      rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
	      rtx p;
	      int class;

	      /* If this block has not had spilling done for a
		 particular class and we have any non-optionals that need a
		 spill reg in that class, abort.  */

	      for (class = 0; class < N_REG_CLASSES; class++)
		if (basic_block_needs[class] != 0
		    && basic_block_needs[class][this_block] == 0)
		  for (i = 0; i < n_reloads; i++)
		    if (class == (int) reload_reg_class[i]
			&& reload_reg_rtx[i] == 0
			&& ! reload_optional[i]
			&& (reload_in[i] != 0 || reload_out[i] != 0
			    || reload_secondary_p[i] != 0))
		      fatal_insn ("Non-optional registers need a spill register", insn);

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (insn, avoid_return_reg);

#ifdef SMALL_REGISTER_CLASSES
	      /* Merge any reloads that we didn't combine for fear of
		 increasing the number of spill registers needed but now
		 discover can be safely merged.  */
	      merge_assigned_reloads (insn);
#endif

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (insn);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insn that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads ();

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */

	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
		  if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
		      && (recog_memoized (p) < 0
			  || (insn_extract (p),
			      ! constrain_operands (INSN_CODE (p), 1))))
		    {
		      error_for_asm (insn,
				     "`asm' operand requires impossible reload");
		      /* Turn the offending reload insn into a deleted note.  */
		      PUT_CODE (p, NOTE);
		      NOTE_SOURCE_FILE (p) = 0;
		      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
		    }
	    }
	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.
	     Use OLDPAT, the pattern as it was before reload substitution.  */
	  note_stores (oldpat, forget_old_reloads_1);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
	    if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (PATTERN (x), forget_old_reloads_1);

#ifdef AUTO_INC_DEC
	  /* Likewise for regs altered by auto-increment in this insn.
	     But note that the reg-notes are not changed by reloading:
	     they still contain the pseudo-regs, not the spill regs.  */
	  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (reload_out[i] == XEXP (x, 0))
		    break;

		if (i == n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
	      }
#endif
	}
      /* A reload reg's contents are unknown after a label.  */
      if (GET_CODE (insn) == CODE_LABEL)
	for (i = 0; i < n_spills; i++)
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg.  */
      else if (GET_CODE (insn) == CALL_INSN)
	for (i = 0; i < n_spills; i++)
	  if (call_used_regs[spill_regs[i]])
	    {
	      reg_reloaded_contents[i] = -1;
	      reg_reloaded_insn[i] = 0;
	    }

      /* In case registers overlap, allow certain insns to invalidate
	 particular hard registers.  */

#ifdef INSN_CLOBBERS_REGNO_P
      for (i = 0 ; i < n_spills ; i++)
	if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }
#endif

      insn = next;

#ifdef USE_C_ALLOCA
      /* With the C alloca, free the memory alloca'd above now and then.  */
      alloca (0);
#endif
    }
}
4004
4005 /* Discard all record of any value reloaded from X,
4006 or reloaded in X from someplace else;
4007 unless X is an output reload reg of the current insn.
4008
4009 X may be a hard reg (the reload reg)
4010 or it may be a pseudo reg that was reloaded from. */
4011
static void
forget_old_reloads_1 (x, ignored)
     rtx x;
     /* Second argument exists only so this matches the note_stores
	callback interface; always unused here.  */
     rtx ignored;
{
  register int regno;
  int nr;
  int offset = 0;

  /* note_stores does give us subregs of hard regs.  */
  while (GET_CODE (x) == SUBREG)
    {
      /* Accumulate the word offset so REGNO below names the hard reg
	 actually stored into.  */
      offset += SUBREG_WORD (x);
      x = SUBREG_REG (x);
    }

  if (GET_CODE (x) != REG)
    return;

  regno = REGNO (x) + offset;

  if (regno >= FIRST_PSEUDO_REGISTER)
    nr = 1;
  else
    {
      int i;
      nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
      /* Storing into a spilled-reg invalidates its contents.
	 This can happen if a block-local pseudo is allocated to that reg
	 and it wasn't spilled because this block's total need is 0.
	 Then some insn might have an optional reload and use this reg.  */
      for (i = 0; i < nr; i++)
	if (spill_reg_order[regno + i] >= 0
	    /* But don't do this if the reg actually serves as an output
	       reload reg in the current instruction.  */
	    && (n_reloads == 0
		|| ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
	  {
	    reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
	    reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
	  }
    }

  /* Since value of X has changed,
     forget any value previously copied from it.  */

  while (nr-- > 0)
    /* But don't forget a copy if this is the output reload
       that establishes the copy's validity.  */
    if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
      reg_last_reload_reg[regno + nr] = 0;
}
4064 \f
4065 /* For each reload, the mode of the reload register. */
4066 static enum machine_mode reload_mode[MAX_RELOADS];
4067
4068 /* For each reload, the largest number of registers it will require. */
4069 static int reload_nregs[MAX_RELOADS];
4070
4071 /* Comparison function for qsort to decide which of two reloads
4072 should be handled first. *P1 and *P2 are the reload numbers. */
4073
4074 static int
4075 reload_reg_class_lower (p1, p2)
4076 short *p1, *p2;
4077 {
4078 register int r1 = *p1, r2 = *p2;
4079 register int t;
4080
4081 /* Consider required reloads before optional ones. */
4082 t = reload_optional[r1] - reload_optional[r2];
4083 if (t != 0)
4084 return t;
4085
4086 /* Count all solitary classes before non-solitary ones. */
4087 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4088 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4089 if (t != 0)
4090 return t;
4091
4092 /* Aside from solitaires, consider all multi-reg groups first. */
4093 t = reload_nregs[r2] - reload_nregs[r1];
4094 if (t != 0)
4095 return t;
4096
4097 /* Consider reloads in order of increasing reg-class number. */
4098 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4099 if (t != 0)
4100 return t;
4101
4102 /* If reloads are equally urgent, sort by reload number,
4103 so that the results of qsort leave nothing to chance. */
4104 return r1 - r2;
4105 }
4106 \f
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  */

/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is in use as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;
4135
4136 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4137 TYPE. MODE is used to indicate how many consecutive regs are
4138 actually used. */
4139
4140 static void
4141 mark_reload_reg_in_use (regno, opnum, type, mode)
4142 int regno;
4143 int opnum;
4144 enum reload_type type;
4145 enum machine_mode mode;
4146 {
4147 int nregs = HARD_REGNO_NREGS (regno, mode);
4148 int i;
4149
4150 for (i = regno; i < nregs + regno; i++)
4151 {
4152 switch (type)
4153 {
4154 case RELOAD_OTHER:
4155 SET_HARD_REG_BIT (reload_reg_used, i);
4156 break;
4157
4158 case RELOAD_FOR_INPUT_ADDRESS:
4159 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4160 break;
4161
4162 case RELOAD_FOR_OUTPUT_ADDRESS:
4163 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4164 break;
4165
4166 case RELOAD_FOR_OPERAND_ADDRESS:
4167 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4168 break;
4169
4170 case RELOAD_FOR_OPADDR_ADDR:
4171 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4172 break;
4173
4174 case RELOAD_FOR_OTHER_ADDRESS:
4175 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4176 break;
4177
4178 case RELOAD_FOR_INPUT:
4179 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4180 break;
4181
4182 case RELOAD_FOR_OUTPUT:
4183 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4184 break;
4185
4186 case RELOAD_FOR_INSN:
4187 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4188 break;
4189 }
4190
4191 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4192 }
4193 }
4194
4195 /* Similarly, but show REGNO is no longer in use for a reload. */
4196
4197 static void
4198 clear_reload_reg_in_use (regno, opnum, type, mode)
4199 int regno;
4200 int opnum;
4201 enum reload_type type;
4202 enum machine_mode mode;
4203 {
4204 int nregs = HARD_REGNO_NREGS (regno, mode);
4205 int i;
4206
4207 for (i = regno; i < nregs + regno; i++)
4208 {
4209 switch (type)
4210 {
4211 case RELOAD_OTHER:
4212 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4213 break;
4214
4215 case RELOAD_FOR_INPUT_ADDRESS:
4216 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4217 break;
4218
4219 case RELOAD_FOR_OUTPUT_ADDRESS:
4220 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4221 break;
4222
4223 case RELOAD_FOR_OPERAND_ADDRESS:
4224 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4225 break;
4226
4227 case RELOAD_FOR_OPADDR_ADDR:
4228 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4229 break;
4230
4231 case RELOAD_FOR_OTHER_ADDRESS:
4232 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4233 break;
4234
4235 case RELOAD_FOR_INPUT:
4236 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4237 break;
4238
4239 case RELOAD_FOR_OUTPUT:
4240 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4241 break;
4242
4243 case RELOAD_FOR_INSN:
4244 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4245 break;
4246 }
4247 }
4248 }
4249
4250 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4251 specified by OPNUM and TYPE. */
4252
static int
reload_reg_free_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything except
     RELOAD_FOR_OTHER_ADDRESS.  Recall that RELOAD_FOR_OTHER_ADDRESS is known
     to be used only for inputs.  */

  if (type != RELOAD_FOR_OTHER_ADDRESS
      && TEST_HARD_REG_BIT (reload_reg_used, regno))
    return 0;

  /* Each case below checks REGNO against every reload use that can
     overlap in time with a reload of this TYPE for operand OPNUM.  */
  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything except RELOAD_FOR_OTHER_ADDRESS means
	 we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Conflicts with any input, any RELOAD_FOR_INSN reload, or another
	 operand-address reload.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Conflicts with any input or another opaddr-address reload.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      /* Conflicts with any input or output reload, another
	 RELOAD_FOR_INSN reload, or an operand-address reload.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }
  /* Unknown reload type.  */
  abort ();
}
4377
4378 /* Return 1 if the value in reload reg REGNO, as used by a reload
4379 needed for the part of the insn specified by OPNUM and TYPE,
4380 is not in use for a reload in any prior part of the insn.
4381
4382 We can assume that the reload reg was already tested for availability
4383 at the time it is needed, and we should not check this again,
4384 in case the reg has already been marked in use. */
4385
static int
reload_reg_free_before_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* These always come first.  */
      return 1;

    case RELOAD_OTHER:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

      /* If this use is for part of the insn,
	 check the reg is not in use for any prior part.  It is tempting
	 to try to do this by falling through from objects that occur
	 later in the insn to ones that occur earlier, but that will not
	 correctly take into account the fact that here we MUST ignore
	 things that would prevent the register from being allocated in
	 the first place, since we know that it was allocated.  */

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Earlier reloads are for earlier outputs or their addresses,
	 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
	 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
	 RELOAD_OTHER).  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OUTPUT:
      /* This can't be used in the output address for this operand and
	 anything that can't be used for it, except that we've already
	 tested for RELOAD_FOR_INSN objects.  */

      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_OPERAND_ADDRESS:
    case RELOAD_FOR_OPADDR_ADDR:
    case RELOAD_FOR_INSN:
      /* These can't conflict with inputs, or each other, so all we have to
	 test is input addresses and the addresses of OTHER items.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT:
      /* The only things earlier are the address for this and
	 earlier inputs, other inputs (which we know we don't conflict
	 with), and addresses of RELOAD_OTHER objects.  */

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similarly, all we have to check is for use in earlier inputs'
	 addresses.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }
  /* Unknown reload type.  */
  abort ();
}
4488
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.

   The answer depends only on whether some LATER part of the insn uses
   REGNO for another reload; each case below tests exactly the
   reload_reg_used_in_* sets that correspond to parts of the insn that
   come after the part denoted by TYPE.  */

static int
reload_reg_reaches_end_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register. 
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      /* This input (operand OPNUM) and every later input clobber REGNO.  */
      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* Addresses of later inputs only (this operand's own address reload
	 is the use we are asking about).  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for 
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  (outputs and output addresses are checked
	 by the RELOAD_FOR_OPERAND_ADDRESS case below).  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      /* Force the fall-through loop below to scan ALL output addresses,
	 since a RELOAD_FOR_INSN reload precedes every output.  */
      opnum = -1;

      /* ... fall through ...  */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
	  return 0;

      return 1;
    }

  /* Unknown reload type; this should never be reached.  */
  abort ();
}
4610 \f
4611 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4612 Return 0 otherwise.
4613
4614 This function uses the same algorithm as reload_reg_free_p above. */
4615
4616 static int
4617 reloads_conflict (r1, r2)
4618 int r1, r2;
4619 {
4620 enum reload_type r1_type = reload_when_needed[r1];
4621 enum reload_type r2_type = reload_when_needed[r2];
4622 int r1_opnum = reload_opnum[r1];
4623 int r2_opnum = reload_opnum[r2];
4624
4625 /* RELOAD_OTHER conflicts with everything except RELOAD_FOR_OTHER_ADDRESS. */
4626
4627 if (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS)
4628 return 1;
4629
4630 /* Otherwise, check conflicts differently for each type. */
4631
4632 switch (r1_type)
4633 {
4634 case RELOAD_FOR_INPUT:
4635 return (r2_type == RELOAD_FOR_INSN
4636 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4637 || r2_type == RELOAD_FOR_OPADDR_ADDR
4638 || r2_type == RELOAD_FOR_INPUT
4639 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4640
4641 case RELOAD_FOR_INPUT_ADDRESS:
4642 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4643 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4644
4645 case RELOAD_FOR_OUTPUT_ADDRESS:
4646 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4647 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4648
4649 case RELOAD_FOR_OPERAND_ADDRESS:
4650 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4651 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4652
4653 case RELOAD_FOR_OPADDR_ADDR:
4654 return (r2_type == RELOAD_FOR_INPUT
4655 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4656
4657 case RELOAD_FOR_OUTPUT:
4658 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4659 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4660 && r2_opnum >= r1_opnum));
4661
4662 case RELOAD_FOR_INSN:
4663 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4664 || r2_type == RELOAD_FOR_INSN
4665 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4666
4667 case RELOAD_FOR_OTHER_ADDRESS:
4668 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4669
4670 case RELOAD_OTHER:
4671 return r2_type != RELOAD_FOR_OTHER_ADDRESS;
4672
4673 default:
4674 abort ();
4675 }
4676 }
4677 \f
/* Vector of reload-numbers showing the order in which the reloads should
   be processed.  reload_order[0] is handled first; set up by
   choose_reload_regs below.  */
short reload_order[MAX_RELOADS];

/* Indexed by reload number, 1 if incoming value
   inherited from previous insns (so no load insn need be emitted).  */
char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
rtx reload_inheritance_insn[MAX_RELOADS];

/* If non-zero, this is a place to get the value of the reload,
   rather than using reload_in.  */
rtx reload_override_in[MAX_RELOADS];

/* For each reload, the index in spill_regs of the spill register used,
   or -1 if we did not need one of the spill registers for this reload.  */
int reload_spill_index[MAX_RELOADS];
4697
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is non-zero if this is the last reload for the insn being
   processed.

   Set reload_reg_rtx[R] to the register allocated.

   If NOERROR is nonzero, we return 1 if successful,
   or 0 if we couldn't find a spill reg and we didn't change anything.
   If NOERROR is zero, failure is fatal for compiler-generated insns
   (fatal_insn); for bad asm operands the reload is disabled instead
   and we still return 1.  */

static int
allocate_reload_reg (r, insn, last_reload, noerror)
     int r;
     rtx insn;
     int last_reload;
     int noerror;
{
  int i;
  int pass;
  int count;
  rtx new;
  int regno;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = reload_nregs[r] > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use two passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers.
     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 2; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  Don't do this, however, when we have
	 group needs and failure would be fatal; if we only have a relatively
	 small number of spill registers, and more than one of them has
	 group needs, then by starting in the middle, we may end up 
	 allocating the first one in such a way that we are not left with
	 sufficient groups to handle the rest.  */

      if (noerror || ! force_group)
	i = last_spill_reg;
      else
	i = -1;		/* scan from the start when a group is mandatory */

      for (count = 0; count < n_spills; count++)
	{
	  int class = (int) reload_reg_class[r];

	  i = (i + 1) % n_spills;

	  if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
				 reload_when_needed[r])
	      && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
	      && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 spill_regs[i])
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      spill_regs[i]))))
	    {
	      int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.
		 Also, don't use for a group registers that are
		 needed for nongroups.  */
	      if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
		while (nr > 1)
		  {
		    /* Check the registers from the top of the group down;
		       NR counts down to 1 as each one proves usable.  */
		    regno = spill_regs[i] + nr - 1;
		    if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
			  && spill_reg_order[regno] >= 0
			  && reload_reg_free_p (regno, reload_opnum[r],
						reload_when_needed[r])
			  && ! TEST_HARD_REG_BIT (counted_for_nongroups,
						  regno)))
		      break;
		    nr--;
		  }
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on pass 1, omit pass 2.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count == n_spills)
    {
      if (noerror)
	return 0;
      goto failure;
    }

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  new = spill_reg_rtx[i];

  /* Reuse the cached REG rtx if it has the mode we need; otherwise
     make a fresh one in reload_mode[r] and cache it.  */
  if (new == 0 || GET_MODE (new) != reload_mode[r])
    spill_reg_rtx[i] = new
      = gen_rtx (REG, reload_mode[r], spill_regs[i]);

  regno = true_regnum (new);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
    {
      enum machine_mode test_mode = VOIDmode;
      if (reload_in[r])
	test_mode = GET_MODE (reload_in[r]);
      /* If reload_in[r] has VOIDmode, it means we will load it
	 in whatever mode the reload reg has: to wit, reload_mode[r].
	 We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
	 to reload from or into have modes which are valid for this
	 reload register.  Otherwise the reload insns would be invalid.  */
      if (! (reload_in[r] != 0 && test_mode != VOIDmode
	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
	if (! (reload_out[r] != 0
	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
	  {
	    /* The reg is OK.  */
	    last_spill_reg = i;

	    /* Mark as in use for this insn the reload regs we use
	       for this.  */
	    mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
				    reload_when_needed[r], reload_mode[r]);

	    reload_reg_rtx[r] = new;
	    reload_spill_index[r] = i;
	    return 1;
	  }
    }

  /* The reg is not OK.  */
  if (noerror)
    return 0;

 failure:
  if (asm_noperands (PATTERN (insn)) < 0)
    /* It's the compiler's fault.  */
    fatal_insn ("Could not find a spill register", insn);

  /* It's the user's fault; the operand's mode and constraint
     don't match.  Disable this reload so we don't crash in final.  */
  error_for_asm (insn,
		 "`asm' operand constraint incompatible with operand size");
  reload_in[r] = 0;
  reload_out[r] = 0;
  reload_reg_rtx[r] = 0;
  reload_optional[r] = 1;
  reload_secondary_p[r] = 1;

  return 1;
}
4896 \f
4897 /* Assign hard reg targets for the pseudo-registers we must reload
4898 into hard regs for this insn.
4899 Also output the instructions to copy them in and out of the hard regs.
4900
4901 For machines with register classes, we are responsible for
4902 finding a reload reg in the proper class. */
4903
4904 static void
4905 choose_reload_regs (insn, avoid_return_reg)
4906 rtx insn;
4907 rtx avoid_return_reg;
4908 {
4909 register int i, j;
4910 int max_group_size = 1;
4911 enum reg_class group_class = NO_REGS;
4912 int inheritance;
4913
4914 rtx save_reload_reg_rtx[MAX_RELOADS];
4915 char save_reload_inherited[MAX_RELOADS];
4916 rtx save_reload_inheritance_insn[MAX_RELOADS];
4917 rtx save_reload_override_in[MAX_RELOADS];
4918 int save_reload_spill_index[MAX_RELOADS];
4919 HARD_REG_SET save_reload_reg_used;
4920 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4921 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4922 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4923 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4924 HARD_REG_SET save_reload_reg_used_in_op_addr;
4925 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
4926 HARD_REG_SET save_reload_reg_used_in_insn;
4927 HARD_REG_SET save_reload_reg_used_in_other_addr;
4928 HARD_REG_SET save_reload_reg_used_at_all;
4929
4930 bzero (reload_inherited, MAX_RELOADS);
4931 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4932 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
4933
4934 CLEAR_HARD_REG_SET (reload_reg_used);
4935 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4936 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4937 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
4938 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4939 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
4940
4941 for (i = 0; i < reload_n_operands; i++)
4942 {
4943 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4944 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4945 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4946 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4947 }
4948
4949 #ifdef SMALL_REGISTER_CLASSES
4950 /* Don't bother with avoiding the return reg
4951 if we have no mandatory reload that could use it. */
4952 if (avoid_return_reg)
4953 {
4954 int do_avoid = 0;
4955 int regno = REGNO (avoid_return_reg);
4956 int nregs
4957 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4958 int r;
4959
4960 for (r = regno; r < regno + nregs; r++)
4961 if (spill_reg_order[r] >= 0)
4962 for (j = 0; j < n_reloads; j++)
4963 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4964 && (reload_in[j] != 0 || reload_out[j] != 0
4965 || reload_secondary_p[j])
4966 &&
4967 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4968 do_avoid = 1;
4969 if (!do_avoid)
4970 avoid_return_reg = 0;
4971 }
4972 #endif /* SMALL_REGISTER_CLASSES */
4973
4974 #if 0 /* Not needed, now that we can always retry without inheritance. */
4975 /* See if we have more mandatory reloads than spill regs.
4976 If so, then we cannot risk optimizations that could prevent
4977 reloads from sharing one spill register.
4978
4979 Since we will try finding a better register than reload_reg_rtx
4980 unless it is equal to reload_in or reload_out, count such reloads. */
4981
4982 {
4983 int tem = 0;
4984 #ifdef SMALL_REGISTER_CLASSES
4985 int tem = (avoid_return_reg != 0);
4986 #endif
4987 for (j = 0; j < n_reloads; j++)
4988 if (! reload_optional[j]
4989 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4990 && (reload_reg_rtx[j] == 0
4991 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4992 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4993 tem++;
4994 if (tem > n_spills)
4995 must_reuse = 1;
4996 }
4997 #endif
4998
4999 #ifdef SMALL_REGISTER_CLASSES
5000 /* Don't use the subroutine call return reg for a reload
5001 if we are supposed to avoid it. */
5002 if (avoid_return_reg)
5003 {
5004 int regno = REGNO (avoid_return_reg);
5005 int nregs
5006 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5007 int r;
5008
5009 for (r = regno; r < regno + nregs; r++)
5010 if (spill_reg_order[r] >= 0)
5011 SET_HARD_REG_BIT (reload_reg_used, r);
5012 }
5013 #endif /* SMALL_REGISTER_CLASSES */
5014
5015 /* In order to be certain of getting the registers we need,
5016 we must sort the reloads into order of increasing register class.
5017 Then our grabbing of reload registers will parallel the process
5018 that provided the reload registers.
5019
5020 Also note whether any of the reloads wants a consecutive group of regs.
5021 If so, record the maximum size of the group desired and what
5022 register class contains all the groups needed by this insn. */
5023
5024 for (j = 0; j < n_reloads; j++)
5025 {
5026 reload_order[j] = j;
5027 reload_spill_index[j] = -1;
5028
5029 reload_mode[j]
5030 = (reload_inmode[j] == VOIDmode
5031 || (GET_MODE_SIZE (reload_outmode[j])
5032 > GET_MODE_SIZE (reload_inmode[j])))
5033 ? reload_outmode[j] : reload_inmode[j];
5034
5035 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5036
5037 if (reload_nregs[j] > 1)
5038 {
5039 max_group_size = MAX (reload_nregs[j], max_group_size);
5040 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5041 }
5042
5043 /* If we have already decided to use a certain register,
5044 don't use it in another way. */
5045 if (reload_reg_rtx[j])
5046 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5047 reload_when_needed[j], reload_mode[j]);
5048 }
5049
5050 if (n_reloads > 1)
5051 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5052
5053 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5054 sizeof reload_reg_rtx);
5055 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5056 bcopy ((char *) reload_inheritance_insn,
5057 (char *) save_reload_inheritance_insn,
5058 sizeof reload_inheritance_insn);
5059 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5060 sizeof reload_override_in);
5061 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5062 sizeof reload_spill_index);
5063 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5064 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5065 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5066 reload_reg_used_in_op_addr);
5067
5068 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5069 reload_reg_used_in_op_addr_reload);
5070
5071 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5072 reload_reg_used_in_insn);
5073 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5074 reload_reg_used_in_other_addr);
5075
5076 for (i = 0; i < reload_n_operands; i++)
5077 {
5078 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5079 reload_reg_used_in_output[i]);
5080 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5081 reload_reg_used_in_input[i]);
5082 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5083 reload_reg_used_in_input_addr[i]);
5084 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5085 reload_reg_used_in_output_addr[i]);
5086 }
5087
5088 /* If -O, try first with inheritance, then turning it off.
5089 If not -O, don't do inheritance.
5090 Using inheritance when not optimizing leads to paradoxes
5091 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5092 because one side of the comparison might be inherited. */
5093
5094 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5095 {
5096 /* Process the reloads in order of preference just found.
5097 Beyond this point, subregs can be found in reload_reg_rtx.
5098
5099 This used to look for an existing reloaded home for all
5100 of the reloads, and only then perform any new reloads.
5101 But that could lose if the reloads were done out of reg-class order
5102 because a later reload with a looser constraint might have an old
5103 home in a register needed by an earlier reload with a tighter constraint.
5104
5105 To solve this, we make two passes over the reloads, in the order
5106 described above. In the first pass we try to inherit a reload
5107 from a previous insn. If there is a later reload that needs a
5108 class that is a proper subset of the class being processed, we must
5109 also allocate a spill register during the first pass.
5110
5111 Then make a second pass over the reloads to allocate any reloads
5112 that haven't been given registers yet. */
5113
5114 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5115
5116 for (j = 0; j < n_reloads; j++)
5117 {
5118 register int r = reload_order[j];
5119
5120 /* Ignore reloads that got marked inoperative. */
5121 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5122 continue;
5123
5124 /* If find_reloads chose a to use reload_in or reload_out as a reload
5125 register, we don't need to chose one. Otherwise, try even if it found
5126 one since we might save an insn if we find the value lying around. */
5127 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5128 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5129 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5130 continue;
5131
5132 #if 0 /* No longer needed for correct operation.
5133 It might give better code, or might not; worth an experiment? */
5134 /* If this is an optional reload, we can't inherit from earlier insns
5135 until we are sure that any non-optional reloads have been allocated.
5136 The following code takes advantage of the fact that optional reloads
5137 are at the end of reload_order. */
5138 if (reload_optional[r] != 0)
5139 for (i = 0; i < j; i++)
5140 if ((reload_out[reload_order[i]] != 0
5141 || reload_in[reload_order[i]] != 0
5142 || reload_secondary_p[reload_order[i]])
5143 && ! reload_optional[reload_order[i]]
5144 && reload_reg_rtx[reload_order[i]] == 0)
5145 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5146 #endif
5147
5148 /* First see if this pseudo is already available as reloaded
5149 for a previous insn. We cannot try to inherit for reloads
5150 that are smaller than the maximum number of registers needed
5151 for groups unless the register we would allocate cannot be used
5152 for the groups.
5153
5154 We could check here to see if this is a secondary reload for
5155 an object that is already in a register of the desired class.
5156 This would avoid the need for the secondary reload register.
5157 But this is complex because we can't easily determine what
5158 objects might want to be loaded via this reload. So let a register
5159 be allocated here. In `emit_reload_insns' we suppress one of the
5160 loads in the case described above. */
5161
5162 if (inheritance)
5163 {
5164 register int regno = -1;
5165 enum machine_mode mode;
5166
5167 if (reload_in[r] == 0)
5168 ;
5169 else if (GET_CODE (reload_in[r]) == REG)
5170 {
5171 regno = REGNO (reload_in[r]);
5172 mode = GET_MODE (reload_in[r]);
5173 }
5174 else if (GET_CODE (reload_in_reg[r]) == REG)
5175 {
5176 regno = REGNO (reload_in_reg[r]);
5177 mode = GET_MODE (reload_in_reg[r]);
5178 }
5179 #if 0
5180 /* This won't work, since REGNO can be a pseudo reg number.
5181 Also, it takes much more hair to keep track of all the things
5182 that can invalidate an inherited reload of part of a pseudoreg. */
5183 else if (GET_CODE (reload_in[r]) == SUBREG
5184 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5185 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5186 #endif
5187
5188 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5189 {
5190 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5191
5192 if (reg_reloaded_contents[i] == regno
5193 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5194 >= GET_MODE_SIZE (mode))
5195 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5196 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5197 spill_regs[i])
5198 && (reload_nregs[r] == max_group_size
5199 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5200 spill_regs[i]))
5201 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5202 reload_when_needed[r])
5203 && reload_reg_free_before_p (spill_regs[i],
5204 reload_opnum[r],
5205 reload_when_needed[r]))
5206 {
5207 /* If a group is needed, verify that all the subsequent
5208 registers still have their values intact. */
5209 int nr
5210 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5211 int k;
5212
5213 for (k = 1; k < nr; k++)
5214 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5215 != regno)
5216 break;
5217
5218 if (k == nr)
5219 {
5220 int i1;
5221
5222 /* We found a register that contains the
5223 value we need. If this register is the
5224 same as an `earlyclobber' operand of the
5225 current insn, just mark it as a place to
5226 reload from since we can't use it as the
5227 reload register itself. */
5228
5229 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5230 if (reg_overlap_mentioned_for_reload_p
5231 (reg_last_reload_reg[regno],
5232 reload_earlyclobbers[i1]))
5233 break;
5234
5235 if (i1 != n_earlyclobbers
5236 /* Don't really use the inherited spill reg
5237 if we need it wider than we've got it. */
5238 || (GET_MODE_SIZE (reload_mode[r])
5239 > GET_MODE_SIZE (mode)))
5240 reload_override_in[r] = reg_last_reload_reg[regno];
5241 else
5242 {
5243 int k;
5244 /* We can use this as a reload reg. */
5245 /* Mark the register as in use for this part of
5246 the insn. */
5247 mark_reload_reg_in_use (spill_regs[i],
5248 reload_opnum[r],
5249 reload_when_needed[r],
5250 reload_mode[r]);
5251 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5252 reload_inherited[r] = 1;
5253 reload_inheritance_insn[r]
5254 = reg_reloaded_insn[i];
5255 reload_spill_index[r] = i;
5256 for (k = 0; k < nr; k++)
5257 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5258 spill_regs[i + k]);
5259 }
5260 }
5261 }
5262 }
5263 }
5264
5265 /* Here's another way to see if the value is already lying around. */
5266 if (inheritance
5267 && reload_in[r] != 0
5268 && ! reload_inherited[r]
5269 && reload_out[r] == 0
5270 && (CONSTANT_P (reload_in[r])
5271 || GET_CODE (reload_in[r]) == PLUS
5272 || GET_CODE (reload_in[r]) == REG
5273 || GET_CODE (reload_in[r]) == MEM)
5274 && (reload_nregs[r] == max_group_size
5275 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5276 {
5277 register rtx equiv
5278 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5279 -1, NULL_PTR, 0, reload_mode[r]);
5280 int regno;
5281
5282 if (equiv != 0)
5283 {
5284 if (GET_CODE (equiv) == REG)
5285 regno = REGNO (equiv);
5286 else if (GET_CODE (equiv) == SUBREG)
5287 {
5288 /* This must be a SUBREG of a hard register.
5289 Make a new REG since this might be used in an
5290 address and not all machines support SUBREGs
5291 there. */
5292 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5293 equiv = gen_rtx (REG, reload_mode[r], regno);
5294 }
5295 else
5296 abort ();
5297 }
5298
5299 /* If we found a spill reg, reject it unless it is free
5300 and of the desired class. */
5301 if (equiv != 0
5302 && ((spill_reg_order[regno] >= 0
5303 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5304 reload_when_needed[r]))
5305 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5306 regno)))
5307 equiv = 0;
5308
5309 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5310 equiv = 0;
5311
5312 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5313 equiv = 0;
5314
5315 /* We found a register that contains the value we need.
5316 If this register is the same as an `earlyclobber' operand
5317 of the current insn, just mark it as a place to reload from
5318 since we can't use it as the reload register itself. */
5319
5320 if (equiv != 0)
5321 for (i = 0; i < n_earlyclobbers; i++)
5322 if (reg_overlap_mentioned_for_reload_p (equiv,
5323 reload_earlyclobbers[i]))
5324 {
5325 reload_override_in[r] = equiv;
5326 equiv = 0;
5327 break;
5328 }
5329
5330 /* JRV: If the equiv register we have found is explicitly
5331 clobbered in the current insn, mark but don't use, as above. */
5332
5333 if (equiv != 0 && regno_clobbered_p (regno, insn))
5334 {
5335 reload_override_in[r] = equiv;
5336 equiv = 0;
5337 }
5338
5339 /* If we found an equivalent reg, say no code need be generated
5340 to load it, and use it as our reload reg. */
5341 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5342 {
5343 reload_reg_rtx[r] = equiv;
5344 reload_inherited[r] = 1;
5345 /* If it is a spill reg,
5346 mark the spill reg as in use for this insn. */
5347 i = spill_reg_order[regno];
5348 if (i >= 0)
5349 {
5350 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5351 int k;
5352 mark_reload_reg_in_use (regno, reload_opnum[r],
5353 reload_when_needed[r],
5354 reload_mode[r]);
5355 for (k = 0; k < nr; k++)
5356 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
5357 }
5358 }
5359 }
5360
5361 /* If we found a register to use already, or if this is an optional
5362 reload, we are done. */
5363 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5364 continue;
5365
5366 #if 0 /* No longer needed for correct operation. Might or might not
5367 give better code on the average. Want to experiment? */
5368
5369 /* See if there is a later reload that has a class different from our
5370 class that intersects our class or that requires less register
5371 than our reload. If so, we must allocate a register to this
5372 reload now, since that reload might inherit a previous reload
5373 and take the only available register in our class. Don't do this
5374 for optional reloads since they will force all previous reloads
5375 to be allocated. Also don't do this for reloads that have been
5376 turned off. */
5377
5378 for (i = j + 1; i < n_reloads; i++)
5379 {
5380 int s = reload_order[i];
5381
5382 if ((reload_in[s] == 0 && reload_out[s] == 0
5383 && ! reload_secondary_p[s])
5384 || reload_optional[s])
5385 continue;
5386
5387 if ((reload_reg_class[s] != reload_reg_class[r]
5388 && reg_classes_intersect_p (reload_reg_class[r],
5389 reload_reg_class[s]))
5390 || reload_nregs[s] < reload_nregs[r])
5391 break;
5392 }
5393
5394 if (i == n_reloads)
5395 continue;
5396
5397 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5398 #endif
5399 }
5400
5401 /* Now allocate reload registers for anything non-optional that
5402 didn't get one yet. */
5403 for (j = 0; j < n_reloads; j++)
5404 {
5405 register int r = reload_order[j];
5406
5407 /* Ignore reloads that got marked inoperative. */
5408 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5409 continue;
5410
5411 /* Skip reloads that already have a register allocated or are
5412 optional. */
5413 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5414 continue;
5415
5416 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5417 break;
5418 }
5419
5420 /* If that loop got all the way, we have won. */
5421 if (j == n_reloads)
5422 break;
5423
5424 fail:
5425 /* Loop around and try without any inheritance. */
5426 /* First undo everything done by the failed attempt
5427 to allocate with inheritance. */
5428 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5429 sizeof reload_reg_rtx);
5430 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5431 sizeof reload_inherited);
5432 bcopy ((char *) save_reload_inheritance_insn,
5433 (char *) reload_inheritance_insn,
5434 sizeof reload_inheritance_insn);
5435 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5436 sizeof reload_override_in);
5437 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5438 sizeof reload_spill_index);
5439 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5440 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5441 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5442 save_reload_reg_used_in_op_addr);
5443 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5444 save_reload_reg_used_in_op_addr_reload);
5445 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5446 save_reload_reg_used_in_insn);
5447 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5448 save_reload_reg_used_in_other_addr);
5449
5450 for (i = 0; i < reload_n_operands; i++)
5451 {
5452 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5453 save_reload_reg_used_in_input[i]);
5454 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5455 save_reload_reg_used_in_output[i]);
5456 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5457 save_reload_reg_used_in_input_addr[i]);
5458 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5459 save_reload_reg_used_in_output_addr[i]);
5460 }
5461 }
5462
5463 /* If we thought we could inherit a reload, because it seemed that
5464 nothing else wanted the same reload register earlier in the insn,
5465 verify that assumption, now that all reloads have been assigned. */
5466
5467 for (j = 0; j < n_reloads; j++)
5468 {
5469 register int r = reload_order[j];
5470
5471 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5472 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5473 reload_opnum[r],
5474 reload_when_needed[r]))
5475 reload_inherited[r] = 0;
5476
5477 /* If we found a better place to reload from,
5478 validate it in the same fashion, if it is a reload reg. */
5479 if (reload_override_in[r]
5480 && (GET_CODE (reload_override_in[r]) == REG
5481 || GET_CODE (reload_override_in[r]) == SUBREG))
5482 {
5483 int regno = true_regnum (reload_override_in[r]);
5484 if (spill_reg_order[regno] >= 0
5485 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5486 reload_when_needed[r]))
5487 reload_override_in[r] = 0;
5488 }
5489 }
5490
5491 /* Now that reload_override_in is known valid,
5492 actually override reload_in. */
5493 for (j = 0; j < n_reloads; j++)
5494 if (reload_override_in[j])
5495 reload_in[j] = reload_override_in[j];
5496
5497 /* If this reload won't be done because it has been cancelled or is
5498 optional and not inherited, clear reload_reg_rtx so other
5499 routines (such as subst_reloads) don't get confused. */
5500 for (j = 0; j < n_reloads; j++)
5501 if (reload_reg_rtx[j] != 0
5502 && ((reload_optional[j] && ! reload_inherited[j])
5503 || (reload_in[j] == 0 && reload_out[j] == 0
5504 && ! reload_secondary_p[j])))
5505 {
5506 int regno = true_regnum (reload_reg_rtx[j]);
5507
5508 if (spill_reg_order[regno] >= 0)
5509 clear_reload_reg_in_use (regno, reload_opnum[j],
5510 reload_when_needed[j], reload_mode[j]);
5511 reload_reg_rtx[j] = 0;
5512 }
5513
5514 /* Record which pseudos and which spill regs have output reloads. */
5515 for (j = 0; j < n_reloads; j++)
5516 {
5517 register int r = reload_order[j];
5518
5519 i = reload_spill_index[r];
5520
5521 /* I is nonneg if this reload used one of the spill regs.
5522 If reload_reg_rtx[r] is 0, this is an optional reload
5523 that we opted to ignore. */
5524 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5525 && reload_reg_rtx[r] != 0)
5526 {
5527 register int nregno = REGNO (reload_out[r]);
5528 int nr = 1;
5529
5530 if (nregno < FIRST_PSEUDO_REGISTER)
5531 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5532
5533 while (--nr >= 0)
5534 reg_has_output_reload[nregno + nr] = 1;
5535
5536 if (i >= 0)
5537 {
5538 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5539 while (--nr >= 0)
5540 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5541 }
5542
5543 if (reload_when_needed[r] != RELOAD_OTHER
5544 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5545 && reload_when_needed[r] != RELOAD_FOR_INSN)
5546 abort ();
5547 }
5548 }
5549 }
5550 \f
5551 /* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5552 reloads of the same item for fear that we might not have enough reload
5553 registers. However, normally they will get the same reload register
5554 and hence actually need not be loaded twice.
5555
5556 Here we check for the most common case of this phenomenon: when we have
5557 a number of reloads for the same object, each of which were allocated
5558 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5559 reload, and is not modified in the insn itself. If we find such,
5560 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5561 This will not increase the number of spill registers needed and will
5562 prevent redundant code. */
5563
5564 #ifdef SMALL_REGISTER_CLASSES
5565
5566 static void
5567 merge_assigned_reloads (insn)
5568 rtx insn;
5569 {
5570 int i, j;
5571
5572 /* Scan all the reloads looking for ones that only load values and
5573 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5574 assigned and not modified by INSN. */
5575
5576 for (i = 0; i < n_reloads; i++)
5577 {
5578 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5579 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5580 || reg_set_p (reload_reg_rtx[i], insn))
5581 continue;
5582
5583 /* Look at all other reloads. Ensure that the only use of this
5584 reload_reg_rtx is in a reload that just loads the same value
5585 as we do. Note that any secondary reloads must be of the identical
5586 class since the values, modes, and result registers are the
5587 same, so we need not do anything with any secondary reloads. */
5588
5589 for (j = 0; j < n_reloads; j++)
5590 {
5591 if (i == j || reload_reg_rtx[j] == 0
5592 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5593 reload_reg_rtx[i]))
5594 continue;
5595
5596 /* If the reload regs aren't exactly the same (e.g, different modes)
5597 or if the values are different, we can't merge anything with this
5598 reload register. */
5599
5600 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5601 || reload_out[j] != 0 || reload_in[j] == 0
5602 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5603 break;
5604 }
5605
5606 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5607 we, in fact, found any matching reloads. */
5608
5609 if (j == n_reloads)
5610 {
5611 for (j = 0; j < n_reloads; j++)
5612 if (i != j && reload_reg_rtx[j] != 0
5613 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5614 {
5615 reload_when_needed[i] = RELOAD_OTHER;
5616 reload_in[j] = 0;
5617 transfer_replacements (i, j);
5618 }
5619
5620 /* If this is now RELOAD_OTHER, look for any reloads that load
5621 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5622 if they were for inputs, RELOAD_OTHER for outputs. Note that
5623 this test is equivalent to looking for reloads for this operand
5624 number. */
5625
5626 if (reload_when_needed[i] == RELOAD_OTHER)
5627 for (j = 0; j < n_reloads; j++)
5628 if (reload_in[j] != 0
5629 && reload_when_needed[i] != RELOAD_OTHER
5630 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5631 reload_in[i]))
5632 reload_when_needed[j]
5633 = reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5634 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5635 }
5636 }
5637 }
#endif /* SMALL_REGISTER_CLASSES */
5639 \f
5640 /* Output insns to reload values in and out of the chosen reload regs. */
5641
5642 static void
5643 emit_reload_insns (insn)
5644 rtx insn;
5645 {
5646 register int j;
5647 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5648 rtx other_input_address_reload_insns = 0;
5649 rtx other_input_reload_insns = 0;
5650 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5651 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5652 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5653 rtx operand_reload_insns = 0;
5654 rtx other_operand_reload_insns = 0;
5655 rtx following_insn = NEXT_INSN (insn);
5656 rtx before_insn = insn;
5657 int special;
5658 /* Values to be put in spill_reg_store are put here first. */
5659 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5660
5661 for (j = 0; j < reload_n_operands; j++)
5662 input_reload_insns[j] = input_address_reload_insns[j]
5663 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5664
5665 /* Now output the instructions to copy the data into and out of the
5666 reload registers. Do these in the order that the reloads were reported,
5667 since reloads of base and index registers precede reloads of operands
5668 and the operands may need the base and index registers reloaded. */
5669
5670 for (j = 0; j < n_reloads; j++)
5671 {
5672 register rtx old;
5673 rtx oldequiv_reg = 0;
5674
5675 if (reload_spill_index[j] >= 0)
5676 new_spill_reg_store[reload_spill_index[j]] = 0;
5677
5678 old = reload_in[j];
5679 if (old != 0 && ! reload_inherited[j]
5680 && ! rtx_equal_p (reload_reg_rtx[j], old)
5681 && reload_reg_rtx[j] != 0)
5682 {
5683 register rtx reloadreg = reload_reg_rtx[j];
5684 rtx oldequiv = 0;
5685 enum machine_mode mode;
5686 rtx *where;
5687
5688 /* Determine the mode to reload in.
5689 This is very tricky because we have three to choose from.
5690 There is the mode the insn operand wants (reload_inmode[J]).
5691 There is the mode of the reload register RELOADREG.
5692 There is the intrinsic mode of the operand, which we could find
5693 by stripping some SUBREGs.
5694 It turns out that RELOADREG's mode is irrelevant:
5695 we can change that arbitrarily.
5696
5697 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5698 then the reload reg may not support QImode moves, so use SImode.
5699 If foo is in memory due to spilling a pseudo reg, this is safe,
5700 because the QImode value is in the least significant part of a
5701 slot big enough for a SImode. If foo is some other sort of
5702 memory reference, then it is impossible to reload this case,
5703 so previous passes had better make sure this never happens.
5704
5705 Then consider a one-word union which has SImode and one of its
5706 members is a float, being fetched as (SUBREG:SF union:SI).
5707 We must fetch that as SFmode because we could be loading into
5708 a float-only register. In this case OLD's mode is correct.
5709
5710 Consider an immediate integer: it has VOIDmode. Here we need
5711 to get a mode from something else.
5712
5713 In some cases, there is a fourth mode, the operand's
5714 containing mode. If the insn specifies a containing mode for
5715 this operand, it overrides all others.
5716
5717 I am not sure whether the algorithm here is always right,
5718 but it does the right things in those cases. */
5719
5720 mode = GET_MODE (old);
5721 if (mode == VOIDmode)
5722 mode = reload_inmode[j];
5723
5724 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5725 /* If we need a secondary register for this operation, see if
5726 the value is already in a register in that class. Don't
5727 do this if the secondary register will be used as a scratch
5728 register. */
5729
5730 if (reload_secondary_in_reload[j] >= 0
5731 && reload_secondary_in_icode[j] == CODE_FOR_nothing
5732 && optimize)
5733 oldequiv
5734 = find_equiv_reg (old, insn,
5735 reload_reg_class[reload_secondary_in_reload[j]],
5736 -1, NULL_PTR, 0, mode);
5737 #endif
5738
5739 /* If reloading from memory, see if there is a register
5740 that already holds the same value. If so, reload from there.
5741 We can pass 0 as the reload_reg_p argument because
5742 any other reload has either already been emitted,
5743 in which case find_equiv_reg will see the reload-insn,
5744 or has yet to be emitted, in which case it doesn't matter
5745 because we will use this equiv reg right away. */
5746
5747 if (oldequiv == 0 && optimize
5748 && (GET_CODE (old) == MEM
5749 || (GET_CODE (old) == REG
5750 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5751 && reg_renumber[REGNO (old)] < 0)))
5752 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5753 -1, NULL_PTR, 0, mode);
5754
5755 if (oldequiv)
5756 {
5757 int regno = true_regnum (oldequiv);
5758
5759 /* If OLDEQUIV is a spill register, don't use it for this
5760 if any other reload needs it at an earlier stage of this insn
5761 or at this stage. */
5762 if (spill_reg_order[regno] >= 0
5763 && (! reload_reg_free_p (regno, reload_opnum[j],
5764 reload_when_needed[j])
5765 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5766 reload_when_needed[j])))
5767 oldequiv = 0;
5768
5769 /* If OLDEQUIV is not a spill register,
5770 don't use it if any other reload wants it. */
5771 if (spill_reg_order[regno] < 0)
5772 {
5773 int k;
5774 for (k = 0; k < n_reloads; k++)
5775 if (reload_reg_rtx[k] != 0 && k != j
5776 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5777 oldequiv))
5778 {
5779 oldequiv = 0;
5780 break;
5781 }
5782 }
5783
5784 /* If it is no cheaper to copy from OLDEQUIV into the
5785 reload register than it would be to move from memory,
5786 don't use it. Likewise, if we need a secondary register
5787 or memory. */
5788
5789 if (oldequiv != 0
5790 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5791 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5792 reload_reg_class[j])
5793 >= MEMORY_MOVE_COST (mode)))
5794 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5795 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5796 mode, oldequiv)
5797 != NO_REGS)
5798 #endif
5799 #ifdef SECONDARY_MEMORY_NEEDED
5800 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5801 REGNO_REG_CLASS (regno),
5802 mode)
5803 #endif
5804 ))
5805 oldequiv = 0;
5806 }
5807
5808 if (oldequiv == 0)
5809 oldequiv = old;
5810 else if (GET_CODE (oldequiv) == REG)
5811 oldequiv_reg = oldequiv;
5812 else if (GET_CODE (oldequiv) == SUBREG)
5813 oldequiv_reg = SUBREG_REG (oldequiv);
5814
5815 /* If we are reloading from a register that was recently stored in
5816 with an output-reload, see if we can prove there was
5817 actually no need to store the old value in it. */
5818
5819 if (optimize && GET_CODE (oldequiv) == REG
5820 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5821 && spill_reg_order[REGNO (oldequiv)] >= 0
5822 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
5823 && find_reg_note (insn, REG_DEAD, reload_in[j])
5824 /* This is unsafe if operand occurs more than once in current
5825 insn. Perhaps some occurrences weren't reloaded. */
5826 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5827 delete_output_reload
5828 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5829
5830 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5831 then load RELOADREG from OLDEQUIV. Note that we cannot use
5832 gen_lowpart_common since it can do the wrong thing when
5833 RELOADREG has a multi-word mode. Note that RELOADREG
5834 must always be a REG here. */
5835
5836 if (GET_MODE (reloadreg) != mode)
5837 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5838 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5839 oldequiv = SUBREG_REG (oldequiv);
5840 if (GET_MODE (oldequiv) != VOIDmode
5841 && mode != GET_MODE (oldequiv))
5842 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5843
5844 /* Switch to the right place to emit the reload insns. */
5845 switch (reload_when_needed[j])
5846 {
5847 case RELOAD_OTHER:
5848 where = &other_input_reload_insns;
5849 break;
5850 case RELOAD_FOR_INPUT:
5851 where = &input_reload_insns[reload_opnum[j]];
5852 break;
5853 case RELOAD_FOR_INPUT_ADDRESS:
5854 where = &input_address_reload_insns[reload_opnum[j]];
5855 break;
5856 case RELOAD_FOR_OUTPUT_ADDRESS:
5857 where = &output_address_reload_insns[reload_opnum[j]];
5858 break;
5859 case RELOAD_FOR_OPERAND_ADDRESS:
5860 where = &operand_reload_insns;
5861 break;
5862 case RELOAD_FOR_OPADDR_ADDR:
5863 where = &other_operand_reload_insns;
5864 break;
5865 case RELOAD_FOR_OTHER_ADDRESS:
5866 where = &other_input_address_reload_insns;
5867 break;
5868 default:
5869 abort ();
5870 }
5871
5872 push_to_sequence (*where);
5873 special = 0;
5874
5875 /* Auto-increment addresses must be reloaded in a special way. */
5876 if (GET_CODE (oldequiv) == POST_INC
5877 || GET_CODE (oldequiv) == POST_DEC
5878 || GET_CODE (oldequiv) == PRE_INC
5879 || GET_CODE (oldequiv) == PRE_DEC)
5880 {
5881 /* We are not going to bother supporting the case where a
5882 incremented register can't be copied directly from
5883 OLDEQUIV since this seems highly unlikely. */
5884 if (reload_secondary_in_reload[j] >= 0)
5885 abort ();
5886 /* Prevent normal processing of this reload. */
5887 special = 1;
5888 /* Output a special code sequence for this case. */
5889 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5890 }
5891
5892 /* If we are reloading a pseudo-register that was set by the previous
5893 insn, see if we can get rid of that pseudo-register entirely
5894 by redirecting the previous insn into our reload register. */
5895
5896 else if (optimize && GET_CODE (old) == REG
5897 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5898 && dead_or_set_p (insn, old)
5899 /* This is unsafe if some other reload
5900 uses the same reg first. */
5901 && reload_reg_free_before_p (REGNO (reloadreg),
5902 reload_opnum[j],
5903 reload_when_needed[j]))
5904 {
5905 rtx temp = PREV_INSN (insn);
5906 while (temp && GET_CODE (temp) == NOTE)
5907 temp = PREV_INSN (temp);
5908 if (temp
5909 && GET_CODE (temp) == INSN
5910 && GET_CODE (PATTERN (temp)) == SET
5911 && SET_DEST (PATTERN (temp)) == old
5912 /* Make sure we can access insn_operand_constraint. */
5913 && asm_noperands (PATTERN (temp)) < 0
5914 /* This is unsafe if prev insn rejects our reload reg. */
5915 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5916 reloadreg)
5917 /* This is unsafe if operand occurs more than once in current
5918 insn. Perhaps some occurrences aren't reloaded. */
5919 && count_occurrences (PATTERN (insn), old) == 1
5920 /* Don't risk splitting a matching pair of operands. */
5921 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5922 {
5923 /* Store into the reload register instead of the pseudo. */
5924 SET_DEST (PATTERN (temp)) = reloadreg;
5925 /* If these are the only uses of the pseudo reg,
5926 pretend for GDB it lives in the reload reg we used. */
5927 if (reg_n_deaths[REGNO (old)] == 1
5928 && reg_n_sets[REGNO (old)] == 1)
5929 {
5930 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5931 alter_reg (REGNO (old), -1);
5932 }
5933 special = 1;
5934 }
5935 }
5936
5937 /* We can't do that, so output an insn to load RELOADREG. */
5938
5939 if (! special)
5940 {
5941 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5942 rtx second_reload_reg = 0;
5943 enum insn_code icode;
5944
5945 /* If we have a secondary reload, pick up the secondary register
5946 and icode, if any. If OLDEQUIV and OLD are different or
5947 if this is an in-out reload, recompute whether or not we
5948 still need a secondary register and what the icode should
5949 be. If we still need a secondary register and the class or
5950 icode is different, go back to reloading from OLD if using
5951 OLDEQUIV means that we got the wrong type of register. We
5952 cannot have different class or icode due to an in-out reload
5953 because we don't make such reloads when both the input and
5954 output need secondary reload registers. */
5955
5956 if (reload_secondary_in_reload[j] >= 0)
5957 {
5958 int secondary_reload = reload_secondary_in_reload[j];
5959 rtx real_oldequiv = oldequiv;
5960 rtx real_old = old;
5961
5962 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5963 and similarly for OLD.
5964 See comments in get_secondary_reload in reload.c. */
5965 if (GET_CODE (oldequiv) == REG
5966 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5967 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5968 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5969
5970 if (GET_CODE (old) == REG
5971 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5972 && reg_equiv_mem[REGNO (old)] != 0)
5973 real_old = reg_equiv_mem[REGNO (old)];
5974
5975 second_reload_reg = reload_reg_rtx[secondary_reload];
5976 icode = reload_secondary_in_icode[j];
5977
5978 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5979 || (reload_in[j] != 0 && reload_out[j] != 0))
5980 {
5981 enum reg_class new_class
5982 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5983 mode, real_oldequiv);
5984
5985 if (new_class == NO_REGS)
5986 second_reload_reg = 0;
5987 else
5988 {
5989 enum insn_code new_icode;
5990 enum machine_mode new_mode;
5991
5992 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5993 REGNO (second_reload_reg)))
5994 oldequiv = old, real_oldequiv = real_old;
5995 else
5996 {
5997 new_icode = reload_in_optab[(int) mode];
5998 if (new_icode != CODE_FOR_nothing
5999 && ((insn_operand_predicate[(int) new_icode][0]
6000 && ! ((*insn_operand_predicate[(int) new_icode][0])
6001 (reloadreg, mode)))
6002 || (insn_operand_predicate[(int) new_icode][1]
6003 && ! ((*insn_operand_predicate[(int) new_icode][1])
6004 (real_oldequiv, mode)))))
6005 new_icode = CODE_FOR_nothing;
6006
6007 if (new_icode == CODE_FOR_nothing)
6008 new_mode = mode;
6009 else
6010 new_mode = insn_operand_mode[(int) new_icode][2];
6011
6012 if (GET_MODE (second_reload_reg) != new_mode)
6013 {
6014 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6015 new_mode))
6016 oldequiv = old, real_oldequiv = real_old;
6017 else
6018 second_reload_reg
6019 = gen_rtx (REG, new_mode,
6020 REGNO (second_reload_reg));
6021 }
6022 }
6023 }
6024 }
6025
6026 /* If we still need a secondary reload register, check
6027 to see if it is being used as a scratch or intermediate
6028 register and generate code appropriately. If we need
6029 a scratch register, use REAL_OLDEQUIV since the form of
6030 the insn may depend on the actual address if it is
6031 a MEM. */
6032
6033 if (second_reload_reg)
6034 {
6035 if (icode != CODE_FOR_nothing)
6036 {
6037 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6038 second_reload_reg));
6039 special = 1;
6040 }
6041 else
6042 {
6043 /* See if we need a scratch register to load the
6044 intermediate register (a tertiary reload). */
6045 enum insn_code tertiary_icode
6046 = reload_secondary_in_icode[secondary_reload];
6047
6048 if (tertiary_icode != CODE_FOR_nothing)
6049 {
6050 rtx third_reload_reg
6051 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6052
6053 emit_insn ((GEN_FCN (tertiary_icode)
6054 (second_reload_reg, real_oldequiv,
6055 third_reload_reg)));
6056 }
6057 else
6058 gen_reload (second_reload_reg, oldequiv,
6059 reload_opnum[j],
6060 reload_when_needed[j]);
6061
6062 oldequiv = second_reload_reg;
6063 }
6064 }
6065 }
6066 #endif
6067
6068 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6069 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6070 reload_when_needed[j]);
6071
6072 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6073 /* We may have to make a REG_DEAD note for the secondary reload
6074 register in the insns we just made. Find the last insn that
6075 mentioned the register. */
6076 if (! special && second_reload_reg
6077 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6078 {
6079 rtx prev;
6080
6081 for (prev = get_last_insn (); prev;
6082 prev = PREV_INSN (prev))
6083 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
6084 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6085 PATTERN (prev)))
6086 {
6087 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6088 second_reload_reg,
6089 REG_NOTES (prev));
6090 break;
6091 }
6092 }
6093 #endif
6094 }
6095
6096 /* End this sequence. */
6097 *where = get_insns ();
6098 end_sequence ();
6099 }
6100
6101 /* Add a note saying the input reload reg
6102 dies in this insn, if anyone cares. */
6103 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6104 if (old != 0
6105 && reload_reg_rtx[j] != old
6106 && reload_reg_rtx[j] != 0
6107 && reload_out[j] == 0
6108 && ! reload_inherited[j]
6109 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6110 {
6111 register rtx reloadreg = reload_reg_rtx[j];
6112
6113 #if 0
6114 /* We can't abort here because we need to support this for sched.c.
6115 It's not terrible to miss a REG_DEAD note, but we should try
6116 to figure out how to do this correctly. */
6117 /* The code below is incorrect for address-only reloads. */
6118 if (reload_when_needed[j] != RELOAD_OTHER
6119 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6120 abort ();
6121 #endif
6122
6123 /* Add a death note to this insn, for an input reload. */
6124
6125 if ((reload_when_needed[j] == RELOAD_OTHER
6126 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6127 && ! dead_or_set_p (insn, reloadreg))
6128 REG_NOTES (insn)
6129 = gen_rtx (EXPR_LIST, REG_DEAD,
6130 reloadreg, REG_NOTES (insn));
6131 }
6132
6133 /* When we inherit a reload, the last marked death of the reload reg
6134 may no longer really be a death. */
6135 if (reload_reg_rtx[j] != 0
6136 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6137 && reload_inherited[j])
6138 {
6139 /* Handle inheriting an output reload.
6140 Remove the death note from the output reload insn. */
6141 if (reload_spill_index[j] >= 0
6142 && GET_CODE (reload_in[j]) == REG
6143 && spill_reg_store[reload_spill_index[j]] != 0
6144 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6145 REG_DEAD, REGNO (reload_reg_rtx[j])))
6146 remove_death (REGNO (reload_reg_rtx[j]),
6147 spill_reg_store[reload_spill_index[j]]);
6148 /* Likewise for input reloads that were inherited. */
6149 else if (reload_spill_index[j] >= 0
6150 && GET_CODE (reload_in[j]) == REG
6151 && spill_reg_store[reload_spill_index[j]] == 0
6152 && reload_inheritance_insn[j] != 0
6153 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6154 REGNO (reload_reg_rtx[j])))
6155 remove_death (REGNO (reload_reg_rtx[j]),
6156 reload_inheritance_insn[j]);
6157 else
6158 {
6159 rtx prev;
6160
6161 /* We got this register from find_equiv_reg.
6162 Search back for its last death note and get rid of it.
6163 But don't search back too far.
6164 Don't go past a place where this reg is set,
6165 since a death note before that remains valid. */
6166 for (prev = PREV_INSN (insn);
6167 prev && GET_CODE (prev) != CODE_LABEL;
6168 prev = PREV_INSN (prev))
6169 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6170 && dead_or_set_p (prev, reload_reg_rtx[j]))
6171 {
6172 if (find_regno_note (prev, REG_DEAD,
6173 REGNO (reload_reg_rtx[j])))
6174 remove_death (REGNO (reload_reg_rtx[j]), prev);
6175 break;
6176 }
6177 }
6178 }
6179
6180 /* We might have used find_equiv_reg above to choose an alternate
6181 place from which to reload. If so, and it died, we need to remove
6182 that death and move it to one of the insns we just made. */
6183
6184 if (oldequiv_reg != 0
6185 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6186 {
6187 rtx prev, prev1;
6188
6189 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6190 prev = PREV_INSN (prev))
6191 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6192 && dead_or_set_p (prev, oldequiv_reg))
6193 {
6194 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6195 {
6196 for (prev1 = this_reload_insn;
6197 prev1; prev1 = PREV_INSN (prev1))
6198 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
6199 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6200 PATTERN (prev1)))
6201 {
6202 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6203 oldequiv_reg,
6204 REG_NOTES (prev1));
6205 break;
6206 }
6207 remove_death (REGNO (oldequiv_reg), prev);
6208 }
6209 break;
6210 }
6211 }
6212 #endif
6213
6214 /* If we are reloading a register that was recently stored in with an
6215 output-reload, see if we can prove there was
6216 actually no need to store the old value in it. */
6217
6218 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6219 && reload_in[j] != 0
6220 && GET_CODE (reload_in[j]) == REG
6221 #if 0
6222 /* There doesn't seem to be any reason to restrict this to pseudos
6223 and doing so loses in the case where we are copying from a
6224 register of the wrong class. */
6225 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6226 #endif
6227 && spill_reg_store[reload_spill_index[j]] != 0
6228 /* This is unsafe if some other reload uses the same reg first. */
6229 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6230 reload_opnum[j], reload_when_needed[j])
6231 && dead_or_set_p (insn, reload_in[j])
6232 /* This is unsafe if operand occurs more than once in current
6233 insn. Perhaps some occurrences weren't reloaded. */
6234 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6235 delete_output_reload (insn, j,
6236 spill_reg_store[reload_spill_index[j]]);
6237
6238 /* Input-reloading is done. Now do output-reloading,
6239 storing the value from the reload-register after the main insn
6240 if reload_out[j] is nonzero.
6241
6242 ??? At some point we need to support handling output reloads of
6243 JUMP_INSNs or insns that set cc0. */
6244 old = reload_out[j];
6245 if (old != 0
6246 && reload_reg_rtx[j] != old
6247 && reload_reg_rtx[j] != 0)
6248 {
6249 register rtx reloadreg = reload_reg_rtx[j];
6250 register rtx second_reloadreg = 0;
6251 rtx note, p;
6252 enum machine_mode mode;
6253 int special = 0;
6254
6255 /* An output operand that dies right away does need a reload,
6256 but need not be copied from it. Show the new location in the
6257 REG_UNUSED note. */
6258 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6259 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6260 {
6261 XEXP (note, 0) = reload_reg_rtx[j];
6262 continue;
6263 }
6264 else if (GET_CODE (old) == SCRATCH)
6265 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6266 but we don't want to make an output reload. */
6267 continue;
6268
6269 #if 0
6270 /* Strip off of OLD any size-increasing SUBREGs such as
6271 (SUBREG:SI foo:QI 0). */
6272
6273 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6274 && (GET_MODE_SIZE (GET_MODE (old))
6275 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6276 old = SUBREG_REG (old);
6277 #endif
6278
6279 /* If is a JUMP_INSN, we can't support output reloads yet. */
6280 if (GET_CODE (insn) == JUMP_INSN)
6281 abort ();
6282
6283 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6284
6285 /* Determine the mode to reload in.
6286 See comments above (for input reloading). */
6287
6288 mode = GET_MODE (old);
6289 if (mode == VOIDmode)
6290 {
6291 /* VOIDmode should never happen for an output. */
6292 if (asm_noperands (PATTERN (insn)) < 0)
6293 /* It's the compiler's fault. */
6294 fatal_insn ("VOIDmode on an output", insn);
6295 error_for_asm (insn, "output operand is constant in `asm'");
6296 /* Prevent crash--use something we know is valid. */
6297 mode = word_mode;
6298 old = gen_rtx (REG, mode, REGNO (reloadreg));
6299 }
6300
6301 if (GET_MODE (reloadreg) != mode)
6302 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6303
6304 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6305
6306 /* If we need two reload regs, set RELOADREG to the intermediate
6307 one, since it will be stored into OLD. We might need a secondary
6308 register only for an input reload, so check again here. */
6309
6310 if (reload_secondary_out_reload[j] >= 0)
6311 {
6312 rtx real_old = old;
6313
6314 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6315 && reg_equiv_mem[REGNO (old)] != 0)
6316 real_old = reg_equiv_mem[REGNO (old)];
6317
6318 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6319 mode, real_old)
6320 != NO_REGS))
6321 {
6322 second_reloadreg = reloadreg;
6323 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6324
6325 /* See if RELOADREG is to be used as a scratch register
6326 or as an intermediate register. */
6327 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6328 {
6329 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6330 (real_old, second_reloadreg, reloadreg)));
6331 special = 1;
6332 }
6333 else
6334 {
6335 /* See if we need both a scratch and intermediate reload
6336 register. */
6337
6338 int secondary_reload = reload_secondary_out_reload[j];
6339 enum insn_code tertiary_icode
6340 = reload_secondary_out_icode[secondary_reload];
6341
6342 if (GET_MODE (reloadreg) != mode)
6343 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6344
6345 if (tertiary_icode != CODE_FOR_nothing)
6346 {
6347 rtx third_reloadreg
6348 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6349
6350 /* Copy primary reload reg to secondary reload reg.
6351 (Note that these have been swapped above, then
6352 secondary reload reg to OLD using our insn. */
6353
6354 gen_reload (reloadreg, second_reloadreg,
6355 reload_opnum[j], reload_when_needed[j]);
6356 emit_insn ((GEN_FCN (tertiary_icode)
6357 (real_old, reloadreg, third_reloadreg)));
6358 special = 1;
6359 }
6360
6361 else
6362 /* Copy between the reload regs here and then to
6363 OUT later. */
6364
6365 gen_reload (reloadreg, second_reloadreg,
6366 reload_opnum[j], reload_when_needed[j]);
6367 }
6368 }
6369 }
6370 #endif
6371
6372 /* Output the last reload insn. */
6373 if (! special)
6374 gen_reload (old, reloadreg, reload_opnum[j],
6375 reload_when_needed[j]);
6376
6377 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6378 /* If final will look at death notes for this reg,
6379 put one on the last output-reload insn to use it. Similarly
6380 for any secondary register. */
6381 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6382 for (p = get_last_insn (); p; p = PREV_INSN (p))
6383 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6384 && reg_overlap_mentioned_for_reload_p (reloadreg,
6385 PATTERN (p)))
6386 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6387 reloadreg, REG_NOTES (p));
6388
6389 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6390 if (! special
6391 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6392 for (p = get_last_insn (); p; p = PREV_INSN (p))
6393 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6394 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6395 PATTERN (p)))
6396 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6397 second_reloadreg, REG_NOTES (p));
6398 #endif
6399 #endif
6400 /* Look at all insns we emitted, just to be safe. */
6401 for (p = get_insns (); p; p = NEXT_INSN (p))
6402 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6403 {
6404 /* If this output reload doesn't come from a spill reg,
6405 clear any memory of reloaded copies of the pseudo reg.
6406 If this output reload comes from a spill reg,
6407 reg_has_output_reload will make this do nothing. */
6408 note_stores (PATTERN (p), forget_old_reloads_1);
6409
6410 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6411 && reload_spill_index[j] >= 0)
6412 new_spill_reg_store[reload_spill_index[j]] = p;
6413 }
6414
6415 output_reload_insns[reload_opnum[j]] = get_insns ();
6416 end_sequence ();
6417 }
6418 }
6419
6420 /* Now write all the insns we made for reloads in the order expected by
6421 the allocation functions. Prior to the insn being reloaded, we write
6422 the following reloads:
6423
6424 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6425
6426 RELOAD_OTHER reloads.
6427
6428 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6429 the RELOAD_FOR_INPUT reload for the operand.
6430
6431 RELOAD_FOR_OPADDR_ADDRS reloads.
6432
6433 RELOAD_FOR_OPERAND_ADDRESS reloads.
6434
6435 After the insn being reloaded, we write the following:
6436
6437 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6438 the RELOAD_FOR_OUTPUT reload for that operand. */
6439
6440 emit_insns_before (other_input_address_reload_insns, before_insn);
6441 emit_insns_before (other_input_reload_insns, before_insn);
6442
6443 for (j = 0; j < reload_n_operands; j++)
6444 {
6445 emit_insns_before (input_address_reload_insns[j], before_insn);
6446 emit_insns_before (input_reload_insns[j], before_insn);
6447 }
6448
6449 emit_insns_before (other_operand_reload_insns, before_insn);
6450 emit_insns_before (operand_reload_insns, before_insn);
6451
6452 for (j = 0; j < reload_n_operands; j++)
6453 {
6454 emit_insns_before (output_address_reload_insns[j], following_insn);
6455 emit_insns_before (output_reload_insns[j], following_insn);
6456 }
6457
6458 /* Move death notes from INSN
6459 to output-operand-address and output reload insns. */
6460 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6461 {
6462 rtx insn1;
6463 /* Loop over those insns, last ones first. */
6464 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6465 insn1 = PREV_INSN (insn1))
6466 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6467 {
6468 rtx source = SET_SRC (PATTERN (insn1));
6469 rtx dest = SET_DEST (PATTERN (insn1));
6470
6471 /* The note we will examine next. */
6472 rtx reg_notes = REG_NOTES (insn);
6473 /* The place that pointed to this note. */
6474 rtx *prev_reg_note = &REG_NOTES (insn);
6475
6476 /* If the note is for something used in the source of this
6477 reload insn, or in the output address, move the note. */
6478 while (reg_notes)
6479 {
6480 rtx next_reg_notes = XEXP (reg_notes, 1);
6481 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6482 && GET_CODE (XEXP (reg_notes, 0)) == REG
6483 && ((GET_CODE (dest) != REG
6484 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6485 dest))
6486 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6487 source)))
6488 {
6489 *prev_reg_note = next_reg_notes;
6490 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6491 REG_NOTES (insn1) = reg_notes;
6492 }
6493 else
6494 prev_reg_note = &XEXP (reg_notes, 1);
6495
6496 reg_notes = next_reg_notes;
6497 }
6498 }
6499 }
6500 #endif
6501
6502 /* For all the spill regs newly reloaded in this instruction,
6503 record what they were reloaded from, so subsequent instructions
6504 can inherit the reloads.
6505
6506 Update spill_reg_store for the reloads of this insn.
6507 Copy the elements that were updated in the loop above. */
6508
6509 for (j = 0; j < n_reloads; j++)
6510 {
6511 register int r = reload_order[j];
6512 register int i = reload_spill_index[r];
6513
6514 /* I is nonneg if this reload used one of the spill regs.
6515 If reload_reg_rtx[r] is 0, this is an optional reload
6516 that we opted to ignore.
6517
6518 Also ignore reloads that don't reach the end of the insn,
6519 since we will eventually see the one that does. */
6520
6521 if (i >= 0 && reload_reg_rtx[r] != 0
6522 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6523 reload_when_needed[r]))
6524 {
6525 /* First, clear out memory of what used to be in this spill reg.
6526 If consecutive registers are used, clear them all. */
6527 int nr
6528 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6529 int k;
6530
6531 for (k = 0; k < nr; k++)
6532 {
6533 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6534 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6535 }
6536
6537 /* Maybe the spill reg contains a copy of reload_out. */
6538 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6539 {
6540 register int nregno = REGNO (reload_out[r]);
6541 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6542 : HARD_REGNO_NREGS (nregno,
6543 GET_MODE (reload_reg_rtx[r])));
6544
6545 spill_reg_store[i] = new_spill_reg_store[i];
6546 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6547
6548 /* If NREGNO is a hard register, it may occupy more than
6549 one register. If it does, say what is in the
6550 rest of the registers assuming that both registers
6551 agree on how many words the object takes. If not,
6552 invalidate the subsequent registers. */
6553
6554 if (nregno < FIRST_PSEUDO_REGISTER)
6555 for (k = 1; k < nnr; k++)
6556 reg_last_reload_reg[nregno + k]
6557 = (nr == nnr ? gen_rtx (REG,
6558 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6559 REGNO (reload_reg_rtx[r]) + k)
6560 : 0);
6561
6562 /* Now do the inverse operation. */
6563 for (k = 0; k < nr; k++)
6564 {
6565 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6566 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6567 : nregno + k);
6568 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6569 }
6570 }
6571
6572 /* Maybe the spill reg contains a copy of reload_in. Only do
6573 something if there will not be an output reload for
6574 the register being reloaded. */
6575 else if (reload_out[r] == 0
6576 && reload_in[r] != 0
6577 && ((GET_CODE (reload_in[r]) == REG
6578 && ! reg_has_output_reload[REGNO (reload_in[r])]
6579 || (GET_CODE (reload_in_reg[r]) == REG
6580 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
6581 {
6582 register int nregno;
6583 int nnr;
6584
6585 if (GET_CODE (reload_in[r]) == REG)
6586 nregno = REGNO (reload_in[r]);
6587 else
6588 nregno = REGNO (reload_in_reg[r]);
6589
6590 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6591 : HARD_REGNO_NREGS (nregno,
6592 GET_MODE (reload_reg_rtx[r])));
6593
6594 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6595
6596 if (nregno < FIRST_PSEUDO_REGISTER)
6597 for (k = 1; k < nnr; k++)
6598 reg_last_reload_reg[nregno + k]
6599 = (nr == nnr ? gen_rtx (REG,
6600 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6601 REGNO (reload_reg_rtx[r]) + k)
6602 : 0);
6603
6604 /* Unless we inherited this reload, show we haven't
6605 recently done a store. */
6606 if (! reload_inherited[r])
6607 spill_reg_store[i] = 0;
6608
6609 for (k = 0; k < nr; k++)
6610 {
6611 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6612 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6613 : nregno + k);
6614 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6615 = insn;
6616 }
6617 }
6618 }
6619
6620 /* The following if-statement was #if 0'd in 1.34 (or before...).
6621 It's reenabled in 1.35 because supposedly nothing else
6622 deals with this problem. */
6623
6624 /* If a register gets output-reloaded from a non-spill register,
6625 that invalidates any previous reloaded copy of it.
6626 But forget_old_reloads_1 won't get to see it, because
6627 it thinks only about the original insn. So invalidate it here. */
6628 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6629 {
6630 register int nregno = REGNO (reload_out[r]);
6631 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
6632
6633 while (num_regs-- > 0)
6634 reg_last_reload_reg[nregno + num_regs] = 0;
6635 }
6636 }
6637 }
6638 \f
/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.

   Insns are emitted at the current emission point (normally inside a
   sequence started by the caller); on the PLUS path, invalid attempts
   are rolled back with delete_insns_since before trying the next
   strategy.

   Returns first insn emitted.  */

rtx
gen_reload (out, in, opnum, type)
     rtx out;
     rtx in;
     int opnum;
     enum reload_type type;
{
  /* Remember the last insn that existed before we emitted anything, so we
     can both roll back failed attempts and compute our return value.  */
  rtx last = get_last_insn ();
  rtx tem;

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (GET_CODE (in) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (in))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
    in = SUBREG_REG (in), out = tem;
  else if (GET_CODE (out) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (out))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
    out = SUBREG_REG (out), in = tem;

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (GET_CODE (XEXP (in, 0)) == REG
	  || GET_CODE (XEXP (in, 0)) == MEM)
      && (GET_CODE (XEXP (in, 1)) == REG
	  || CONSTANT_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == MEM))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `insn_extract' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      int code;

      /* Substitute any pending reload replacements into the addends
	 before judging validity.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      /* NOTE(review): this uses REGNO (out) directly — assumes OUT is a
	 REG at this point.  If OUT arrived as a SUBREG and the
	 gen_lowpart_common call above failed, this would be wrong; TODO
	 confirm callers never pass such an OUT with a PLUS IN.  */
      if (GET_CODE (XEXP (in, 1)) == REG
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx (PLUS, GET_MODE (in), op0, op1);

      insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      /* The three-operand add was not valid; remove it and fall back.  */
      delete_insns_since (last);

      /* If that failed, we must use a conservative two-insn sequence.
	 use move to copy constant, MEM, or pseudo register to the reload
	 register since "move" will be able to handle an arbitrary operand,
	 unlike add which can't, in general.  Then add the registers.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      /* Make OP0 be the hard-register addend (moved first) and OP1 the
	 constant/MEM/pseudo addend (added second).  */
      if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
	  || (GET_CODE (op1) == REG
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
	tem = op0, op0 = op1, op1 = tem;

      emit_insn (gen_move_insn (out, op0));

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn (gen_add2_insn (out, op1));

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      /* Final fallback: roll back and emit the addends in the opposite
	 order (move OP1, then add OP0).  This sequence is not verified;
	 it is the last resort.  */
      delete_insns_since (last);

      emit_insn (gen_move_insn (out, op1));
      emit_insn (gen_add2_insn (out, op0));
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
	   && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
				       REGNO_REG_CLASS (REGNO (out)),
				       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx (REG, GET_MODE (loc), REGNO (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx (REG, GET_MODE (loc), REGNO (in));

      /* Move IN through the secondary memory location into OUT.  */
      emit_insn (gen_move_insn (loc, in));
      emit_insn (gen_move_insn (out, loc));
    }
#endif

  /* If IN is a simple operand, use gen_move_insn.  */
  else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
    emit_insn (gen_move_insn (out, in));

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (out, in));
#endif

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx (SET, VOIDmode, out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
6838 \f
/* Delete a previously made output-reload
   whose result we now believe is not needed.
   First we double-check.

   INSN is the insn now being processed.
   OUTPUT_RELOAD_INSN is the insn of the output reload.
   J is the reload-number for this insn.

   This is conservative: if any check fails we simply return and the
   store stays.  */

static void
delete_output_reload (insn, j, output_reload_insn)
     rtx insn;
     int j;
     rtx output_reload_insn;
{
  register rtx i1;

  /* Get the raw pseudo-register referred to.  */

  rtx reg = reload_in[j];
  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and no jumps or labels intervene, then the value can get
     here through the reload reg alone.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
	return;
      if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
	  && reg_mentioned_p (reg, PATTERN (i1)))
	return;
    }

  /* Some pseudos have been marked as never eligible for store
     deletion (e.g. their stack slot aliases something); honor that.  */
  if (cannot_omit_stores[REGNO (reg)])
    return;

  /* If this insn will store in the pseudo again,
     the previous store can be removed.  */
  if (reload_out[j] == reload_in[j])
    delete_insn (output_reload_insn);

  /* See if the pseudo reg has been completely replaced
     with reload regs.  If so, delete the store insn
     and forget we had a stack slot for the pseudo.  */
  /* NOTE(review): reg_basic_block[..] >= 0 presumably means the pseudo
     is referenced in only one basic block — confirm against regclass/flow
     before relying on it.  */
  else if (reg_n_deaths[REGNO (reg)] == 1
	   && reg_basic_block[REGNO (reg)] >= 0
	   && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here
	 and the beginning of the current basic block.
	 (We also know that the last use before INSN was
	 the output reload we are thinking of deleting, but never mind that.)
	 Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	  if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    /* Some other ref remains;
	       we can't do anything.  */
	    return;
	}

      /* Delete the now-dead stores into this pseudo.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    delete_insn (i2);
	  /* Stop at the head of the basic block, as in the scan above.  */
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	}

      /* For the debugging info,
	 say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
      alter_reg (REGNO (reg), -1);
    }
}
6934 \f
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.

   Insns are added at the current emission point; on failure of the
   direct-increment attempt they are rolled back with delete_insns_since
   and a fallback sequence through RELOADREG is emitted instead.  */

static void
inc_for_reload (reloadreg, value, inc_amount)
     rtx reloadreg;
     rtx value;
     int inc_amount;
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were non-zero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (GET_CODE (incloc) == REG)
    reg_last_reload_reg[REGNO (incloc)] = 0;

  /* Decrements are handled as negative increments throughout.  */
  if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
    inc_amount = - inc_amount;

  inc = GEN_INT (inc_amount);

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post)
    emit_insn (gen_move_insn (reloadreg, incloc));

  /* See if we can directly increment INCLOC.  Use a method similar to that
     in gen_reload.  */

  last = get_last_insn ();
  add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
				 gen_rtx (PLUS, GET_MODE (incloc),
					  incloc, inc)));

  code = recog_memoized (add_insn);
  if (code >= 0)
    {
      insn_extract (add_insn);
      /* Strict constraint check: the insn must be valid as-is, since
	 reload will not process it again.  */
      if (constrain_operands (code, 1))
	{
	  /* If this is a pre-increment and we have incremented the value
	     where it lives, copy the incremented value to RELOADREG to
	     be used as an address.  */

	  if (! post)
	    emit_insn (gen_move_insn (reloadreg, incloc));

	  return;
	}
    }

  /* The direct increment was not valid; remove it.  */
  delete_insns_since (last);

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      emit_insn (gen_move_insn (reloadreg, incloc));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied INCLOC to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
    }

  return;
}
7030 \f
7031 /* Return 1 if we are certain that the constraint-string STRING allows
7032 the hard register REG. Return 0 if we can't be sure of this. */
7033
7034 static int
7035 constraint_accepts_reg_p (string, reg)
7036 char *string;
7037 rtx reg;
7038 {
7039 int value = 0;
7040 int regno = true_regnum (reg);
7041 int c;
7042
7043 /* Initialize for first alternative. */
7044 value = 0;
7045 /* Check that each alternative contains `g' or `r'. */
7046 while (1)
7047 switch (c = *string++)
7048 {
7049 case 0:
7050 /* If an alternative lacks `g' or `r', we lose. */
7051 return value;
7052 case ',':
7053 /* If an alternative lacks `g' or `r', we lose. */
7054 if (value == 0)
7055 return 0;
7056 /* Initialize for next alternative. */
7057 value = 0;
7058 break;
7059 case 'g':
7060 case 'r':
7061 /* Any general reg wins for this alternative. */
7062 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7063 value = 1;
7064 break;
7065 default:
7066 /* Any reg in specified class wins for this alternative. */
7067 {
7068 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7069
7070 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7071 value = 1;
7072 }
7073 }
7074 }
7075 \f
7076 /* Return the number of places FIND appears within X, but don't count
7077 an occurrence if some SET_DEST is FIND. */
7078
7079 static int
7080 count_occurrences (x, find)
7081 register rtx x, find;
7082 {
7083 register int i, j;
7084 register enum rtx_code code;
7085 register char *format_ptr;
7086 int count;
7087
7088 if (x == find)
7089 return 1;
7090 if (x == 0)
7091 return 0;
7092
7093 code = GET_CODE (x);
7094
7095 switch (code)
7096 {
7097 case REG:
7098 case QUEUED:
7099 case CONST_INT:
7100 case CONST_DOUBLE:
7101 case SYMBOL_REF:
7102 case CODE_LABEL:
7103 case PC:
7104 case CC0:
7105 return 0;
7106
7107 case SET:
7108 if (SET_DEST (x) == find)
7109 return count_occurrences (SET_SRC (x), find);
7110 break;
7111 }
7112
7113 format_ptr = GET_RTX_FORMAT (code);
7114 count = 0;
7115
7116 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7117 {
7118 switch (*format_ptr++)
7119 {
7120 case 'e':
7121 count += count_occurrences (XEXP (x, i), find);
7122 break;
7123
7124 case 'E':
7125 if (XVEC (x, i) != NULL)
7126 {
7127 for (j = 0; j < XVECLEN (x, i); j++)
7128 count += count_occurrences (XVECEXP (x, i, j), find);
7129 }
7130 break;
7131 }
7132 }
7133 return count;
7134 }