(reload): Can't eliminate if frame pointer needed and TO is stack pointer.
[gcc.git] / gcc / reload1.c
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include <stdio.h>
22 #include "config.h"
23 #include "rtl.h"
24 #include "obstack.h"
25 #include "insn-config.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
28 #include "flags.h"
29 #include "expr.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "reload.h"
33 #include "recog.h"
34 #include "basic-block.h"
35 #include "output.h"
36
37 /* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
69
70
71 #ifndef REGISTER_MOVE_COST
72 #define REGISTER_MOVE_COST(x, y) 2
73 #endif
74
75 #ifndef MEMORY_MOVE_COST
76 #define MEMORY_MOVE_COST(x) 4
77 #endif
78 \f
79 /* During reload_as_needed, element N contains a REG rtx for the hard reg
80 into which reg N has been reloaded (perhaps for a previous insn). */
81 static rtx *reg_last_reload_reg;
82
83 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
84 for an output reload that stores into reg N. */
85 static char *reg_has_output_reload;
86
87 /* Indicates which hard regs are reload-registers for an output reload
88 in the current insn. */
89 static HARD_REG_SET reg_is_output_reload;
90
91 /* Element N is the constant value to which pseudo reg N is equivalent,
92 or zero if pseudo reg N is not equivalent to a constant.
93 find_reloads looks at this in order to replace pseudo reg N
94 with the constant it stands for. */
95 rtx *reg_equiv_constant;
96
97 /* Element N is a memory location to which pseudo reg N is equivalent,
98 prior to any register elimination (such as frame pointer to stack
99 pointer). Depending on whether or not it is a valid address, this value
100 is transferred to either reg_equiv_address or reg_equiv_mem. */
101 rtx *reg_equiv_memory_loc;
102
103 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
104 This is used when the address is not valid as a memory address
105 (because its displacement is too big for the machine.) */
106 rtx *reg_equiv_address;
107
108 /* Element N is the memory slot to which pseudo reg N is equivalent,
109 or zero if pseudo reg N is not equivalent to a memory slot. */
110 rtx *reg_equiv_mem;
111
112 /* Widest width in which each pseudo reg is referred to (via subreg). */
113 static int *reg_max_ref_width;
114
115 /* Element N is the insn that initialized reg N from its equivalent
116 constant or memory slot. */
117 static rtx *reg_equiv_init;
118
119 /* During reload_as_needed, element N contains the last pseudo regno
120 reloaded into the Nth reload register. This vector is in parallel
121 with spill_regs. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
125
126 /* During reload_as_needed, element N contains the insn for which
127 the Nth reload register was last used. This vector is in parallel
128 with spill_regs, and its contents are significant only when
129 reg_reloaded_contents is significant. */
130 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
131
132 /* Number of spill-regs so far; number of valid elements of spill_regs. */
133 static int n_spills;
134
135 /* In parallel with spill_regs, contains REG rtx's for those regs.
136 Holds the last rtx used for any given reg, or 0 if it has never
137 been used for spilling yet. This rtx is reused, provided it has
138 the proper mode. */
139 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
140
141 /* In parallel with spill_regs, contains nonzero for a spill reg
142 that was stored after the last time it was used.
143 The precise value is the insn generated to do the store. */
144 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
145
146 /* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs. */
150 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
151
152 /* This reg set indicates registers that may not be used for retrying global
153 allocation. The registers that may not be used include all spill registers
154 and the frame pointer (if we are using one). */
155 HARD_REG_SET forbidden_regs;
156
157 /* This reg set indicates registers that are not good for spill registers.
158 They will not be used to complete groups of spill registers. This includes
159 all fixed registers, registers that may be eliminated, and, if
160 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
161
162 (spill_reg_order prevents these registers from being used to start a
163 group.) */
164 static HARD_REG_SET bad_spill_regs;
165
166 /* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `spills' is the number of
168 elements that are actually valid; new ones are added at the end. */
169 static short spill_regs[FIRST_PSEUDO_REGISTER];
170
171 /* Describes order of preference for putting regs into spill_regs.
172 Contains the numbers of all the hard regs, in order most preferred first.
173 This order is different for each function.
174 It is set up by order_regs_for_reload.
175 Empty elements at the end contain -1. */
176 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
177
178 /* 1 for a hard register that appears explicitly in the rtl
179 (for example, function value registers, special registers
180 used by insns, structure value pointer registers). */
181 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
182
183 /* Indicates if a register was counted against the need for
184 groups. 0 means it can count against max_nongroup instead. */
185 static HARD_REG_SET counted_for_groups;
186
187 /* Indicates if a register was counted against the need for
188 non-groups. 0 means it can become part of a new group.
189 During choose_reload_regs, 1 here means don't use this reg
190 as part of a group, even if it seems to be otherwise ok. */
191 static HARD_REG_SET counted_for_nongroups;
192
193 /* Indexed by pseudo reg number N,
194 says may not delete stores into the real (memory) home of pseudo N.
195 This is set if we already substituted a memory equivalent in some uses,
196 which happens when we have to eliminate the fp from it. */
197 static char *cannot_omit_stores;
198
199 /* Nonzero if indirect addressing is supported on the machine; this means
200 that spilling (REG n) does not require reloading it into a register in
201 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
202 value indicates the level of indirect addressing supported, e.g., two
203 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
204 a hard register. */
205
206 static char spill_indirect_levels;
207
208 /* Nonzero if indirect addressing is supported when the innermost MEM is
209 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
210 which these are valid is the same as spill_indirect_levels, above. */
211
212 char indirect_symref_ok;
213
214 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
215
216 char double_reg_address_ok;
217
218 /* Record the stack slot for each spilled hard register. */
219
220 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
221
222 /* Width allocated so far for that stack slot. */
223
224 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
225
226 /* Indexed by register class and basic block number, nonzero if there is
227 any need for a spill register of that class in that basic block.
228 The pointer is 0 if we did stupid allocation and don't know
229 the structure of basic blocks. */
230
231 char *basic_block_needs[N_REG_CLASSES];
232
233 /* First uid used by insns created by reload in this function.
234 Used in find_equiv_reg. */
235 int reload_first_uid;
236
237 /* Flag set by local-alloc or global-alloc if anything is live in
238 a call-clobbered reg across calls. */
239
240 int caller_save_needed;
241
242 /* Set to 1 while reload_as_needed is operating.
243 Required by some machines to handle any generated moves differently. */
244
245 int reload_in_progress = 0;
246
247 /* These arrays record the insn_code of insns that may be needed to
248 perform input and output reloads of special objects. They provide a
249 place to pass a scratch register. */
250
251 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
252 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
253
254 /* This obstack is used for allocation of rtl during register elimination.
255 The allocated storage can be freed once find_reloads has processed the
256 insn. */
257
258 struct obstack reload_obstack;
259 char *reload_firstobj;
260
261 #define obstack_chunk_alloc xmalloc
262 #define obstack_chunk_free free
263
264 /* List of labels that must never be deleted. */
265 extern rtx forced_labels;
266 \f
267 /* This structure is used to record information about register eliminations.
268 Each array entry describes one possible way of eliminating a register
269 in favor of another. If there is more than one way of eliminating a
270 particular register, the most preferred should be specified first. */
271
272 static struct elim_table
273 {
274 int from; /* Register number to be eliminated. */
275 int to; /* Register number used as replacement. */
276 int initial_offset; /* Initial difference between values. */
277 int can_eliminate; /* Non-zero if this elimination can be done. */
278 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
279 insns made by reload. */
280 int offset; /* Current offset between the two regs. */
281 int max_offset; /* Maximum offset between the two regs. */
282 int previous_offset; /* Offset at end of previous insn. */
283 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
284 rtx from_rtx; /* REG rtx for the register to be eliminated.
285 We cannot simply compare the number since
286 we might then spuriously replace a hard
287 register corresponding to a pseudo
288 assigned to the reg to be eliminated. */
289 rtx to_rtx; /* REG rtx for the replacement. */
290 } reg_eliminate[] =
291
292 /* If a set of eliminable registers was specified, define the table from it.
293 Otherwise, default to the normal case of the frame pointer being
294 replaced by the stack pointer. */
295
296 #ifdef ELIMINABLE_REGS
297 ELIMINABLE_REGS;
298 #else
299 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
300 #endif
301
302 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
303
304 /* Record the number of pending eliminations that have an offset not equal
305 to their initial offset. If non-zero, we use a new copy of each
306 replacement result in any insns encountered. */
307 static int num_not_at_initial_offset;
308
309 /* Count the number of registers that we may be able to eliminate. */
310 static int num_eliminable;
311
312 /* For each label, we record the offset of each elimination. If we reach
313 a label by more than one path and an offset differs, we cannot do the
314 elimination. This information is indexed by the number of the label.
315 The first table is an array of flags that records whether we have yet
316 encountered a label and the second table is an array of arrays, one
317 entry in the latter array for each elimination. */
318
319 static char *offsets_known_at;
320 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
321
322 /* Number of labels in the current function. */
323
324 static int num_labels;
325
326 struct hard_reg_n_uses { int regno; int uses; };
327 \f
328 static int possible_group_p PROTO((int, int *));
329 static void count_possible_groups PROTO((int *, enum machine_mode *,
330 int *));
331 static int modes_equiv_for_class_p PROTO((enum machine_mode,
332 enum machine_mode,
333 enum reg_class));
334 static void spill_failure PROTO((rtx));
335 static int new_spill_reg PROTO((int, int, int *, int *, int,
336 FILE *));
337 static void delete_dead_insn PROTO((rtx));
338 static void alter_reg PROTO((int, int));
339 static void mark_scratch_live PROTO((rtx));
340 static void set_label_offsets PROTO((rtx, rtx, int));
341 static int eliminate_regs_in_insn PROTO((rtx, int));
342 static void mark_not_eliminable PROTO((rtx, rtx));
343 static int spill_hard_reg PROTO((int, int, FILE *, int));
344 static void scan_paradoxical_subregs PROTO((rtx));
345 static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
346 struct hard_reg_n_uses *));
347 static void order_regs_for_reload PROTO((void));
348 static int compare_spill_regs PROTO((short *, short *));
349 static void reload_as_needed PROTO((rtx, int));
350 static void forget_old_reloads_1 PROTO((rtx, rtx));
351 static int reload_reg_class_lower PROTO((short *, short *));
352 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
353 enum machine_mode));
354 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
355 enum machine_mode));
356 static int reload_reg_free_p PROTO((int, int, enum reload_type));
357 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
358 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
359 static int reloads_conflict PROTO((int, int));
360 static int allocate_reload_reg PROTO((int, rtx, int, int));
361 static void choose_reload_regs PROTO((rtx, rtx));
362 static void merge_assigned_reloads PROTO((rtx));
363 static void emit_reload_insns PROTO((rtx));
364 static void delete_output_reload PROTO((rtx, int, rtx));
365 static void inc_for_reload PROTO((rtx, rtx, int));
366 static int constraint_accepts_reg_p PROTO((char *, rtx));
367 static int count_occurrences PROTO((rtx, rtx));
368 \f
369 /* Initialize the reload pass once per compilation. */
370
371 void
372 init_reload ()
373 {
374 register int i;
375
376 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
377 Set spill_indirect_levels to the number of levels such addressing is
378 permitted, zero if it is not permitted at all. */
379
380 register rtx tem
381 = gen_rtx (MEM, Pmode,
382 gen_rtx (PLUS, Pmode,
383 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
384 GEN_INT (4)));
385 spill_indirect_levels = 0;
386
387 while (memory_address_p (QImode, tem))
388 {
389 spill_indirect_levels++;
390 tem = gen_rtx (MEM, Pmode, tem);
391 }
392
393 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
394
395 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
396 indirect_symref_ok = memory_address_p (QImode, tem);
397
398 /* See if reg+reg is a valid (and offsettable) address. */
399
400 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
401 {
402 tem = gen_rtx (PLUS, Pmode,
403 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
404 gen_rtx (REG, Pmode, i));
405 /* This way, we make sure that reg+reg is an offsettable address. */
406 tem = plus_constant (tem, 4);
407
408 if (memory_address_p (QImode, tem))
409 {
410 double_reg_address_ok = 1;
411 break;
412 }
413 }
414
415 /* Initialize obstack for our rtl allocation. */
416 gcc_obstack_init (&reload_obstack);
417 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
418 }
419
420 /* Main entry point for the reload pass.
421
422 FIRST is the first insn of the function being compiled.
423
424 GLOBAL nonzero means we were called from global_alloc
425 and should attempt to reallocate any pseudoregs that we
426 displace from hard regs we will use for reloads.
427 If GLOBAL is zero, we do not have enough information to do that,
428 so any pseudo reg that is spilled must go to the stack.
429
430 DUMPFILE is the global-reg debugging dump file stream, or 0.
431 If it is nonzero, messages are written to it to describe
432 which registers are seized as reload regs, which pseudo regs
433 are spilled from them, and where the pseudo regs are reallocated to.
434
435 Return value is nonzero if reload failed
436 and we must not do any more for this function. */
437
438 int
439 reload (first, global, dumpfile)
440 rtx first;
441 int global;
442 FILE *dumpfile;
443 {
444 register int class;
445 register int i, j, k;
446 register rtx insn;
447 register struct elim_table *ep;
448
449 int something_changed;
450 int something_needs_reloads;
451 int something_needs_elimination;
452 int new_basic_block_needs;
453 enum reg_class caller_save_spill_class = NO_REGS;
454 int caller_save_group_size = 1;
455
456 /* Nonzero means we couldn't get enough spill regs. */
457 int failure = 0;
458
459 /* The basic block number currently being processed for INSN. */
460 int this_block;
461
462 /* Make sure even insns with volatile mem refs are recognizable. */
463 init_recog ();
464
465 /* Enable find_equiv_reg to distinguish insns made by reload. */
466 reload_first_uid = get_max_uid ();
467
468 for (i = 0; i < N_REG_CLASSES; i++)
469 basic_block_needs[i] = 0;
470
471 #ifdef SECONDARY_MEMORY_NEEDED
472 /* Initialize the secondary memory table. */
473 clear_secondary_mem ();
474 #endif
475
476 /* Remember which hard regs appear explicitly
477 before we merge into `regs_ever_live' the ones in which
478 pseudo regs have been allocated. */
479 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
480
481 /* We don't have a stack slot for any spill reg yet. */
482 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
483 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
484
485 /* Initialize the save area information for caller-save, in case some
486 are needed. */
487 init_save_areas ();
488
489 /* Compute which hard registers are now in use
490 as homes for pseudo registers.
491 This is done here rather than (eg) in global_alloc
492 because this point is reached even if not optimizing. */
493
494 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
495 mark_home_live (i);
496
497 for (i = 0; i < scratch_list_length; i++)
498 if (scratch_list[i])
499 mark_scratch_live (scratch_list[i]);
500
501 /* Make sure that the last insn in the chain
502 is not something that needs reloading. */
503 emit_note (NULL_PTR, NOTE_INSN_DELETED);
504
505 /* Find all the pseudo registers that didn't get hard regs
506 but do have known equivalent constants or memory slots.
507 These include parameters (known equivalent to parameter slots)
508 and cse'd or loop-moved constant memory addresses.
509
510 Record constant equivalents in reg_equiv_constant
511 so they will be substituted by find_reloads.
512 Record memory equivalents in reg_mem_equiv so they can
513 be substituted eventually by altering the REG-rtx's. */
514
515 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
516 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
517 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
518 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
519 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
520 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
521 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
522 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
523 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
524 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
525 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
526 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
527 cannot_omit_stores = (char *) alloca (max_regno);
528 bzero (cannot_omit_stores, max_regno);
529
530 #ifdef SMALL_REGISTER_CLASSES
531 CLEAR_HARD_REG_SET (forbidden_regs);
532 #endif
533
534 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
535 Also find all paradoxical subregs and find largest such for each pseudo.
536 On machines with small register classes, record hard registers that
537 are used for user variables. These can never be used for spills. */
538
539 for (insn = first; insn; insn = NEXT_INSN (insn))
540 {
541 rtx set = single_set (insn);
542
543 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
544 {
545 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
546 if (note
547 #ifdef LEGITIMATE_PIC_OPERAND_P
548 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
549 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
550 #endif
551 )
552 {
553 rtx x = XEXP (note, 0);
554 i = REGNO (SET_DEST (set));
555 if (i > LAST_VIRTUAL_REGISTER)
556 {
557 if (GET_CODE (x) == MEM)
558 reg_equiv_memory_loc[i] = x;
559 else if (CONSTANT_P (x))
560 {
561 if (LEGITIMATE_CONSTANT_P (x))
562 reg_equiv_constant[i] = x;
563 else
564 reg_equiv_memory_loc[i]
565 = force_const_mem (GET_MODE (SET_DEST (set)), x);
566 }
567 else
568 continue;
569
570 /* If this register is being made equivalent to a MEM
571 and the MEM is not SET_SRC, the equivalencing insn
572 is one with the MEM as a SET_DEST and it occurs later.
573 So don't mark this insn now. */
574 if (GET_CODE (x) != MEM
575 || rtx_equal_p (SET_SRC (set), x))
576 reg_equiv_init[i] = insn;
577 }
578 }
579 }
580
581 /* If this insn is setting a MEM from a register equivalent to it,
582 this is the equivalencing insn. */
583 else if (set && GET_CODE (SET_DEST (set)) == MEM
584 && GET_CODE (SET_SRC (set)) == REG
585 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
586 && rtx_equal_p (SET_DEST (set),
587 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
588 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
589
590 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
591 scan_paradoxical_subregs (PATTERN (insn));
592 }
593
594 /* Does this function require a frame pointer? */
595
596 frame_pointer_needed = (! flag_omit_frame_pointer
597 #ifdef EXIT_IGNORE_STACK
598 /* ?? If EXIT_IGNORE_STACK is set, we will not save
599 and restore sp for alloca. So we can't eliminate
600 the frame pointer in that case. At some point,
601 we should improve this by emitting the
602 sp-adjusting insns for this case. */
603 || (current_function_calls_alloca
604 && EXIT_IGNORE_STACK)
605 #endif
606 || FRAME_POINTER_REQUIRED);
607
608 num_eliminable = 0;
609
610 /* Initialize the table of registers to eliminate. The way we do this
611 depends on how the eliminable registers were defined. */
612 #ifdef ELIMINABLE_REGS
613 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
614 {
615 ep->can_eliminate = ep->can_eliminate_previous
616 = (CAN_ELIMINATE (ep->from, ep->to)
617 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
618 }
619 #else
620 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
621 = ! frame_pointer_needed;
622 #endif
623
624 /* Count the number of eliminable registers and build the FROM and TO
625 REG rtx's. Note that code in gen_rtx will cause, e.g.,
626 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
627 We depend on this. */
628 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
629 {
630 num_eliminable += ep->can_eliminate;
631 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
632 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
633 }
634
635 num_labels = max_label_num () - get_first_label_num ();
636
637 /* Allocate the tables used to store offset information at labels. */
638 offsets_known_at = (char *) alloca (num_labels);
639 offsets_at
640 = (int (*)[NUM_ELIMINABLE_REGS])
641 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
642
643 offsets_known_at -= get_first_label_num ();
644 offsets_at -= get_first_label_num ();
645
646 /* Alter each pseudo-reg rtx to contain its hard reg number.
647 Assign stack slots to the pseudos that lack hard regs or equivalents.
648 Do not touch virtual registers. */
649
650 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
651 alter_reg (i, -1);
652
653 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
654 because the stack size may be a part of the offset computation for
655 register elimination. */
656 assign_stack_local (BLKmode, 0, 0);
657
658 /* If we have some registers we think can be eliminated, scan all insns to
659 see if there is an insn that sets one of these registers to something
660 other than itself plus a constant. If so, the register cannot be
661 eliminated. Doing this scan here eliminates an extra pass through the
662 main reload loop in the most common case where register elimination
663 cannot be done. */
664 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
665 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
666 || GET_CODE (insn) == CALL_INSN)
667 note_stores (PATTERN (insn), mark_not_eliminable);
668
669 #ifndef REGISTER_CONSTRAINTS
670 /* If all the pseudo regs have hard regs,
671 except for those that are never referenced,
672 we know that no reloads are needed. */
673 /* But that is not true if there are register constraints, since
674 in that case some pseudos might be in the wrong kind of hard reg. */
675
676 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
677 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
678 break;
679
680 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
681 return;
682 #endif
683
684 /* Compute the order of preference for hard registers to spill.
685 Store them by decreasing preference in potential_reload_regs. */
686
687 order_regs_for_reload ();
688
689 /* So far, no hard regs have been spilled. */
690 n_spills = 0;
691 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
692 spill_reg_order[i] = -1;
693
694 /* On most machines, we can't use any register explicitly used in the
695 rtl as a spill register. But on some, we have to. Those will have
696 taken care to keep the life of hard regs as short as possible. */
697
698 #ifndef SMALL_REGISTER_CLASSES
699 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
700 #endif
701
702 /* Spill any hard regs that we know we can't eliminate. */
703 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
704 if (! ep->can_eliminate)
705 spill_hard_reg (ep->from, global, dumpfile, 1);
706
707 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
708 if (frame_pointer_needed)
709 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
710 #endif
711
712 if (global)
713 for (i = 0; i < N_REG_CLASSES; i++)
714 {
715 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
716 bzero (basic_block_needs[i], n_basic_blocks);
717 }
718
719 /* From now on, we need to emit any moves without making new pseudos. */
720 reload_in_progress = 1;
721
722 /* This loop scans the entire function each go-round
723 and repeats until one repetition spills no additional hard regs. */
724
725 /* This flag is set when a pseudo reg is spilled,
726 to require another pass. Note that getting an additional reload
727 reg does not necessarily imply any pseudo reg was spilled;
728 sometimes we find a reload reg that no pseudo reg was allocated in. */
729 something_changed = 1;
730 /* This flag is set if there are any insns that require reloading. */
731 something_needs_reloads = 0;
732 /* This flag is set if there are any insns that require register
733 eliminations. */
734 something_needs_elimination = 0;
735 while (something_changed)
736 {
737 rtx after_call = 0;
738
739 /* For each class, number of reload regs needed in that class.
740 This is the maximum over all insns of the needs in that class
741 of the individual insn. */
742 int max_needs[N_REG_CLASSES];
743 /* For each class, size of group of consecutive regs
744 that is needed for the reloads of this class. */
745 int group_size[N_REG_CLASSES];
746 /* For each class, max number of consecutive groups needed.
747 (Each group contains group_size[CLASS] consecutive registers.) */
748 int max_groups[N_REG_CLASSES];
749 /* For each class, max number needed of regs that don't belong
750 to any of the groups. */
751 int max_nongroups[N_REG_CLASSES];
752 /* For each class, the machine mode which requires consecutive
753 groups of regs of that class.
754 If two different modes ever require groups of one class,
755 they must be the same size and equally restrictive for that class,
756 otherwise we can't handle the complexity. */
757 enum machine_mode group_mode[N_REG_CLASSES];
758 /* Record the insn where each maximum need is first found. */
759 rtx max_needs_insn[N_REG_CLASSES];
760 rtx max_groups_insn[N_REG_CLASSES];
761 rtx max_nongroups_insn[N_REG_CLASSES];
762 rtx x;
763 int starting_frame_size = get_frame_size ();
764 int previous_frame_pointer_needed = frame_pointer_needed;
765 static char *reg_class_names[] = REG_CLASS_NAMES;
766
767 something_changed = 0;
768 bzero ((char *) max_needs, sizeof max_needs);
769 bzero ((char *) max_groups, sizeof max_groups);
770 bzero ((char *) max_nongroups, sizeof max_nongroups);
771 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
772 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
773 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
774 bzero ((char *) group_size, sizeof group_size);
775 for (i = 0; i < N_REG_CLASSES; i++)
776 group_mode[i] = VOIDmode;
777
778 /* Keep track of which basic blocks are needing the reloads. */
779 this_block = 0;
780
781 /* Remember whether any element of basic_block_needs
782 changes from 0 to 1 in this pass. */
783 new_basic_block_needs = 0;
784
785 /* Reset all offsets on eliminable registers to their initial values. */
786 #ifdef ELIMINABLE_REGS
787 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
788 {
789 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
790 ep->previous_offset = ep->offset
791 = ep->max_offset = ep->initial_offset;
792 }
793 #else
794 #ifdef INITIAL_FRAME_POINTER_OFFSET
795 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
796 #else
797 if (!FRAME_POINTER_REQUIRED)
798 abort ();
799 reg_eliminate[0].initial_offset = 0;
800 #endif
801 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
802 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
803 #endif
804
805 num_not_at_initial_offset = 0;
806
807 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
808
809 /* Set a known offset for each forced label to be at the initial offset
810 of each elimination. We do this because we assume that all
811 computed jumps occur from a location where each elimination is
812 at its initial offset. */
813
814 for (x = forced_labels; x; x = XEXP (x, 1))
815 if (XEXP (x, 0))
816 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
817
818 /* For each pseudo register that has an equivalent location defined,
819 try to eliminate any eliminable registers (such as the frame pointer)
820 assuming initial offsets for the replacement register, which
821 is the normal case.
822
823 If the resulting location is directly addressable, substitute
824 the MEM we just got directly for the old REG.
825
826 If it is not addressable but is a constant or the sum of a hard reg
827 and constant, it is probably not addressable because the constant is
828 out of range, in that case record the address; we will generate
829 hairy code to compute the address in a register each time it is
830 needed. Similarly if it is a hard register, but one that is not
831 valid as an address register.
832
833 If the location is not addressable, but does not have one of the
834 above forms, assign a stack slot. We have to do this to avoid the
835 potential of producing lots of reloads if, e.g., a location involves
836 a pseudo that didn't get a hard register and has an equivalent memory
837 location that also involves a pseudo that didn't get a hard register.
838
839 Perhaps at some point we will improve reload_when_needed handling
840 so this problem goes away. But that's very hairy. */
841
842 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
843 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
844 {
845 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
846
847 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
848 XEXP (x, 0)))
849 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
850 else if (CONSTANT_P (XEXP (x, 0))
851 || (GET_CODE (XEXP (x, 0)) == REG
852 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
853 || (GET_CODE (XEXP (x, 0)) == PLUS
854 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
855 && (REGNO (XEXP (XEXP (x, 0), 0))
856 < FIRST_PSEUDO_REGISTER)
857 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
858 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
859 else
860 {
861 /* Make a new stack slot. Then indicate that something
862 changed so we go back and recompute offsets for
863 eliminable registers because the allocation of memory
864 below might change some offset. reg_equiv_{mem,address}
865 will be set up for this pseudo on the next pass around
866 the loop. */
867 reg_equiv_memory_loc[i] = 0;
868 reg_equiv_init[i] = 0;
869 alter_reg (i, -1);
870 something_changed = 1;
871 }
872 }
873
874 /* If we allocated another pseudo to the stack, redo elimination
875 bookkeeping. */
876 if (something_changed)
877 continue;
878
879 /* If caller-saves needs a group, initialize the group to include
880 the size and mode required for caller-saves. */
881
882 if (caller_save_group_size > 1)
883 {
884 group_mode[(int) caller_save_spill_class] = Pmode;
885 group_size[(int) caller_save_spill_class] = caller_save_group_size;
886 }
887
888 /* Compute the most additional registers needed by any instruction.
889 Collect information separately for each class of regs. */
890
891 for (insn = first; insn; insn = NEXT_INSN (insn))
892 {
893 if (global && this_block + 1 < n_basic_blocks
894 && insn == basic_block_head[this_block+1])
895 ++this_block;
896
897 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
898 might include REG_LABEL), we need to see what effects this
899 has on the known offsets at labels. */
900
901 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
902 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
903 && REG_NOTES (insn) != 0))
904 set_label_offsets (insn, insn, 0);
905
906 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
907 {
908 /* Nonzero means don't use a reload reg that overlaps
909 the place where a function value can be returned. */
910 rtx avoid_return_reg = 0;
911
912 rtx old_body = PATTERN (insn);
913 int old_code = INSN_CODE (insn);
914 rtx old_notes = REG_NOTES (insn);
915 int did_elimination = 0;
916
917 /* To compute the number of reload registers of each class
918 needed for an insn, we must similate what choose_reload_regs
919 can do. We do this by splitting an insn into an "input" and
920 an "output" part. RELOAD_OTHER reloads are used in both.
921 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
922 which must be live over the entire input section of reloads,
923 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
924 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
925 inputs.
926
927 The registers needed for output are RELOAD_OTHER and
928 RELOAD_FOR_OUTPUT, which are live for the entire output
929 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
930 reloads for each operand.
931
932 The total number of registers needed is the maximum of the
933 inputs and outputs. */
934
935 struct needs
936 {
937 /* [0] is normal, [1] is nongroup. */
938 int regs[2][N_REG_CLASSES];
939 int groups[N_REG_CLASSES];
940 };
941
942 /* Each `struct needs' corresponds to one RELOAD_... type. */
943 struct {
944 struct needs other;
945 struct needs input;
946 struct needs output;
947 struct needs insn;
948 struct needs other_addr;
949 struct needs op_addr;
950 struct needs op_addr_reload;
951 struct needs in_addr[MAX_RECOG_OPERANDS];
952 struct needs out_addr[MAX_RECOG_OPERANDS];
953 } insn_needs;
954
955 /* If needed, eliminate any eliminable registers. */
956 if (num_eliminable)
957 did_elimination = eliminate_regs_in_insn (insn, 0);
958
959 #ifdef SMALL_REGISTER_CLASSES
960 /* Set avoid_return_reg if this is an insn
961 that might use the value of a function call. */
962 if (GET_CODE (insn) == CALL_INSN)
963 {
964 if (GET_CODE (PATTERN (insn)) == SET)
965 after_call = SET_DEST (PATTERN (insn));
966 else if (GET_CODE (PATTERN (insn)) == PARALLEL
967 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
968 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
969 else
970 after_call = 0;
971 }
972 else if (after_call != 0
973 && !(GET_CODE (PATTERN (insn)) == SET
974 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
975 {
976 if (reg_referenced_p (after_call, PATTERN (insn)))
977 avoid_return_reg = after_call;
978 after_call = 0;
979 }
980 #endif /* SMALL_REGISTER_CLASSES */
981
982 /* Analyze the instruction. */
983 find_reloads (insn, 0, spill_indirect_levels, global,
984 spill_reg_order);
985
986 /* Remember for later shortcuts which insns had any reloads or
987 register eliminations.
988
989 One might think that it would be worthwhile to mark insns
990 that need register replacements but not reloads, but this is
991 not safe because find_reloads may do some manipulation of
992 the insn (such as swapping commutative operands), which would
993 be lost when we restore the old pattern after register
994 replacement. So the actions of find_reloads must be redone in
995 subsequent passes or in reload_as_needed.
996
997 However, it is safe to mark insns that need reloads
998 but not register replacement. */
999
1000 PUT_MODE (insn, (did_elimination ? QImode
1001 : n_reloads ? HImode
1002 : GET_MODE (insn) == DImode ? DImode
1003 : VOIDmode));
1004
1005 /* Discard any register replacements done. */
1006 if (did_elimination)
1007 {
1008 obstack_free (&reload_obstack, reload_firstobj);
1009 PATTERN (insn) = old_body;
1010 INSN_CODE (insn) = old_code;
1011 REG_NOTES (insn) = old_notes;
1012 something_needs_elimination = 1;
1013 }
1014
1015 /* If this insn has no reloads, we need not do anything except
1016 in the case of a CALL_INSN when we have caller-saves and
1017 caller-save needs reloads. */
1018
1019 if (n_reloads == 0
1020 && ! (GET_CODE (insn) == CALL_INSN
1021 && caller_save_spill_class != NO_REGS))
1022 continue;
1023
1024 something_needs_reloads = 1;
1025 bzero ((char *) &insn_needs, sizeof insn_needs);
1026
1027 /* Count each reload once in every class
1028 containing the reload's own class. */
1029
1030 for (i = 0; i < n_reloads; i++)
1031 {
1032 register enum reg_class *p;
1033 enum reg_class class = reload_reg_class[i];
1034 int size;
1035 enum machine_mode mode;
1036 int nongroup_need;
1037 struct needs *this_needs;
1038
1039 /* Don't count the dummy reloads, for which one of the
1040 regs mentioned in the insn can be used for reloading.
1041 Don't count optional reloads.
1042 Don't count reloads that got combined with others. */
1043 if (reload_reg_rtx[i] != 0
1044 || reload_optional[i] != 0
1045 || (reload_out[i] == 0 && reload_in[i] == 0
1046 && ! reload_secondary_p[i]))
1047 continue;
1048
1049 /* Show that a reload register of this class is needed
1050 in this basic block. We do not use insn_needs and
1051 insn_groups because they are overly conservative for
1052 this purpose. */
1053 if (global && ! basic_block_needs[(int) class][this_block])
1054 {
1055 basic_block_needs[(int) class][this_block] = 1;
1056 new_basic_block_needs = 1;
1057 }
1058
1059
1060 mode = reload_inmode[i];
1061 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1062 mode = reload_outmode[i];
1063 size = CLASS_MAX_NREGS (class, mode);
1064
1065 /* If this class doesn't want a group, determine if we have
1066 a nongroup need or a regular need. We have a nongroup
1067 need if this reload conflicts with a group reload whose
1068 class intersects with this reload's class. */
1069
1070 nongroup_need = 0;
1071 if (size == 1)
1072 for (j = 0; j < n_reloads; j++)
1073 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1074 (GET_MODE_SIZE (reload_outmode[j])
1075 > GET_MODE_SIZE (reload_inmode[j]))
1076 ? reload_outmode[j]
1077 : reload_inmode[j])
1078 > 1)
1079 && (!reload_optional[j])
1080 && (reload_in[j] != 0 || reload_out[j] != 0
1081 || reload_secondary_p[j])
1082 && reloads_conflict (i, j)
1083 && reg_classes_intersect_p (class,
1084 reload_reg_class[j]))
1085 {
1086 nongroup_need = 1;
1087 break;
1088 }
1089
1090 /* Decide which time-of-use to count this reload for. */
1091 switch (reload_when_needed[i])
1092 {
1093 case RELOAD_OTHER:
1094 this_needs = &insn_needs.other;
1095 break;
1096 case RELOAD_FOR_INPUT:
1097 this_needs = &insn_needs.input;
1098 break;
1099 case RELOAD_FOR_OUTPUT:
1100 this_needs = &insn_needs.output;
1101 break;
1102 case RELOAD_FOR_INSN:
1103 this_needs = &insn_needs.insn;
1104 break;
1105 case RELOAD_FOR_OTHER_ADDRESS:
1106 this_needs = &insn_needs.other_addr;
1107 break;
1108 case RELOAD_FOR_INPUT_ADDRESS:
1109 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1110 break;
1111 case RELOAD_FOR_OUTPUT_ADDRESS:
1112 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1113 break;
1114 case RELOAD_FOR_OPERAND_ADDRESS:
1115 this_needs = &insn_needs.op_addr;
1116 break;
1117 case RELOAD_FOR_OPADDR_ADDR:
1118 this_needs = &insn_needs.op_addr_reload;
1119 break;
1120 }
1121
1122 if (size > 1)
1123 {
1124 enum machine_mode other_mode, allocate_mode;
1125
1126 /* Count number of groups needed separately from
1127 number of individual regs needed. */
1128 this_needs->groups[(int) class]++;
1129 p = reg_class_superclasses[(int) class];
1130 while (*p != LIM_REG_CLASSES)
1131 this_needs->groups[(int) *p++]++;
1132
1133 /* Record size and mode of a group of this class. */
1134 /* If more than one size group is needed,
1135 make all groups the largest needed size. */
1136 if (group_size[(int) class] < size)
1137 {
1138 other_mode = group_mode[(int) class];
1139 allocate_mode = mode;
1140
1141 group_size[(int) class] = size;
1142 group_mode[(int) class] = mode;
1143 }
1144 else
1145 {
1146 other_mode = mode;
1147 allocate_mode = group_mode[(int) class];
1148 }
1149
1150 /* Crash if two dissimilar machine modes both need
1151 groups of consecutive regs of the same class. */
1152
1153 if (other_mode != VOIDmode && other_mode != allocate_mode
1154 && ! modes_equiv_for_class_p (allocate_mode,
1155 other_mode, class))
1156 abort ();
1157 }
1158 else if (size == 1)
1159 {
1160 this_needs->regs[nongroup_need][(int) class] += 1;
1161 p = reg_class_superclasses[(int) class];
1162 while (*p != LIM_REG_CLASSES)
1163 this_needs->regs[nongroup_need][(int) *p++] += 1;
1164 }
1165 else
1166 abort ();
1167 }
1168
1169 /* All reloads have been counted for this insn;
1170 now merge the various times of use.
1171 This sets insn_needs, etc., to the maximum total number
1172 of registers needed at any point in this insn. */
1173
1174 for (i = 0; i < N_REG_CLASSES; i++)
1175 {
1176 int in_max, out_max;
1177
1178 /* Compute normal and nongroup needs. */
1179 for (j = 0; j <= 1; j++)
1180 {
1181 for (in_max = 0, out_max = 0, k = 0;
1182 k < reload_n_operands; k++)
1183 {
1184 in_max
1185 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1186 out_max
1187 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1188 }
1189
1190 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1191 and operand addresses but not things used to reload
1192 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1193 don't conflict with things needed to reload inputs or
1194 outputs. */
1195
1196 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1197 insn_needs.op_addr_reload.regs[j][i]),
1198 in_max);
1199
1200 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1201
1202 insn_needs.input.regs[j][i]
1203 = MAX (insn_needs.input.regs[j][i]
1204 + insn_needs.op_addr.regs[j][i]
1205 + insn_needs.insn.regs[j][i],
1206 in_max + insn_needs.input.regs[j][i]);
1207
1208 insn_needs.output.regs[j][i] += out_max;
1209 insn_needs.other.regs[j][i]
1210 += MAX (MAX (insn_needs.input.regs[j][i],
1211 insn_needs.output.regs[j][i]),
1212 insn_needs.other_addr.regs[j][i]);
1213
1214 }
1215
1216 /* Now compute group needs. */
1217 for (in_max = 0, out_max = 0, j = 0;
1218 j < reload_n_operands; j++)
1219 {
1220 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1221 out_max
1222 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1223 }
1224
1225 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1226 insn_needs.op_addr_reload.groups[i]),
1227 in_max);
1228 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1229
1230 insn_needs.input.groups[i]
1231 = MAX (insn_needs.input.groups[i]
1232 + insn_needs.op_addr.groups[i]
1233 + insn_needs.insn.groups[i],
1234 in_max + insn_needs.input.groups[i]);
1235
1236 insn_needs.output.groups[i] += out_max;
1237 insn_needs.other.groups[i]
1238 += MAX (MAX (insn_needs.input.groups[i],
1239 insn_needs.output.groups[i]),
1240 insn_needs.other_addr.groups[i]);
1241 }
1242
1243 /* If this is a CALL_INSN and caller-saves will need
1244 a spill register, act as if the spill register is
1245 needed for this insn. However, the spill register
1246 can be used by any reload of this insn, so we only
1247 need do something if no need for that class has
1248 been recorded.
1249
1250 The assumption that every CALL_INSN will trigger a
1251 caller-save is highly conservative, however, the number
1252 of cases where caller-saves will need a spill register but
1253 a block containing a CALL_INSN won't need a spill register
1254 of that class should be quite rare.
1255
1256 If a group is needed, the size and mode of the group will
1257 have been set up at the beginning of this loop. */
1258
1259 if (GET_CODE (insn) == CALL_INSN
1260 && caller_save_spill_class != NO_REGS)
1261 {
1262 /* See if this register would conflict with any reload
1263 that needs a group. */
1264 int nongroup_need = 0;
1265 int *caller_save_needs;
1266
1267 for (j = 0; j < n_reloads; j++)
1268 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1269 (GET_MODE_SIZE (reload_outmode[j])
1270 > GET_MODE_SIZE (reload_inmode[j]))
1271 ? reload_outmode[j]
1272 : reload_inmode[j])
1273 > 1)
1274 && reg_classes_intersect_p (caller_save_spill_class,
1275 reload_reg_class[j]))
1276 {
1277 nongroup_need = 1;
1278 break;
1279 }
1280
1281 caller_save_needs
1282 = (caller_save_group_size > 1
1283 ? insn_needs.other.groups
1284 : insn_needs.other.regs[nongroup_need]);
1285
1286 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1287 {
1288 register enum reg_class *p
1289 = reg_class_superclasses[(int) caller_save_spill_class];
1290
1291 caller_save_needs[(int) caller_save_spill_class]++;
1292
1293 while (*p != LIM_REG_CLASSES)
1294 caller_save_needs[(int) *p++] += 1;
1295 }
1296
1297 /* Show that this basic block will need a register of
1298 this class. */
1299
1300 if (global
1301 && ! (basic_block_needs[(int) caller_save_spill_class]
1302 [this_block]))
1303 {
1304 basic_block_needs[(int) caller_save_spill_class]
1305 [this_block] = 1;
1306 new_basic_block_needs = 1;
1307 }
1308 }
1309
1310 #ifdef SMALL_REGISTER_CLASSES
1311 /* If this insn stores the value of a function call,
1312 and that value is in a register that has been spilled,
1313 and if the insn needs a reload in a class
1314 that might use that register as the reload register,
1315 then add add an extra need in that class.
1316 This makes sure we have a register available that does
1317 not overlap the return value. */
1318
1319 if (avoid_return_reg)
1320 {
1321 int regno = REGNO (avoid_return_reg);
1322 int nregs
1323 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1324 int r;
1325 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1326
1327 /* First compute the "basic needs", which counts a
1328 need only in the smallest class in which it
1329 is required. */
1330
1331 bcopy (insn_needs.other.regs[0], basic_needs,
1332 sizeof basic_needs);
1333 bcopy (insn_needs.other.groups, basic_groups,
1334 sizeof basic_groups);
1335
1336 for (i = 0; i < N_REG_CLASSES; i++)
1337 {
1338 enum reg_class *p;
1339
1340 if (basic_needs[i] >= 0)
1341 for (p = reg_class_superclasses[i];
1342 *p != LIM_REG_CLASSES; p++)
1343 basic_needs[(int) *p] -= basic_needs[i];
1344
1345 if (basic_groups[i] >= 0)
1346 for (p = reg_class_superclasses[i];
1347 *p != LIM_REG_CLASSES; p++)
1348 basic_groups[(int) *p] -= basic_groups[i];
1349 }
1350
1351 /* Now count extra regs if there might be a conflict with
1352 the return value register.
1353
1354 ??? This is not quite correct because we don't properly
1355 handle the case of groups, but if we end up doing
1356 something wrong, it either will end up not mattering or
1357 we will abort elsewhere. */
1358
1359 for (r = regno; r < regno + nregs; r++)
1360 if (spill_reg_order[r] >= 0)
1361 for (i = 0; i < N_REG_CLASSES; i++)
1362 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1363 {
1364 if (basic_needs[i] > 0 || basic_groups[i] > 0)
1365 {
1366 enum reg_class *p;
1367
1368 insn_needs.other.regs[0][i]++;
1369 p = reg_class_superclasses[i];
1370 while (*p != LIM_REG_CLASSES)
1371 insn_needs.other.regs[0][(int) *p++]++;
1372 }
1373 }
1374 }
1375 #endif /* SMALL_REGISTER_CLASSES */
1376
1377 /* For each class, collect maximum need of any insn. */
1378
1379 for (i = 0; i < N_REG_CLASSES; i++)
1380 {
1381 if (max_needs[i] < insn_needs.other.regs[0][i])
1382 {
1383 max_needs[i] = insn_needs.other.regs[0][i];
1384 max_needs_insn[i] = insn;
1385 }
1386 if (max_groups[i] < insn_needs.other.groups[i])
1387 {
1388 max_groups[i] = insn_needs.other.groups[i];
1389 max_groups_insn[i] = insn;
1390 }
1391 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1392 {
1393 max_nongroups[i] = insn_needs.other.regs[1][i];
1394 max_nongroups_insn[i] = insn;
1395 }
1396 }
1397 }
1398 /* Note that there is a continue statement above. */
1399 }
1400
1401 /* If we allocated any new memory locations, make another pass
1402 since it might have changed elimination offsets. */
1403 if (starting_frame_size != get_frame_size ())
1404 something_changed = 1;
1405
1406 if (dumpfile)
1407 for (i = 0; i < N_REG_CLASSES; i++)
1408 {
1409 if (max_needs[i] > 0)
1410 fprintf (dumpfile,
1411 ";; Need %d reg%s of class %s (for insn %d).\n",
1412 max_needs[i], max_needs[i] == 1 ? "" : "s",
1413 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1414 if (max_nongroups[i] > 0)
1415 fprintf (dumpfile,
1416 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1417 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1418 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1419 if (max_groups[i] > 0)
1420 fprintf (dumpfile,
1421 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1422 max_groups[i], max_groups[i] == 1 ? "" : "s",
1423 mode_name[(int) group_mode[i]],
1424 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1425 }
1426
1427 /* If we have caller-saves, set up the save areas and see if caller-save
1428 will need a spill register. */
1429
1430 if (caller_save_needed
1431 && ! setup_save_areas (&something_changed)
1432 && caller_save_spill_class == NO_REGS)
1433 {
1434 /* The class we will need depends on whether the machine
1435 supports the sum of two registers for an address; see
1436 find_address_reloads for details. */
1437
1438 caller_save_spill_class
1439 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1440 caller_save_group_size
1441 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1442 something_changed = 1;
1443 }
1444
1445 /* See if anything that happened changes which eliminations are valid.
1446 For example, on the Sparc, whether or not the frame pointer can
1447 be eliminated can depend on what registers have been used. We need
1448 not check some conditions again (such as flag_omit_frame_pointer)
1449 since they can't have changed. */
1450
1451 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1452 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1453 #ifdef ELIMINABLE_REGS
1454 || ! CAN_ELIMINATE (ep->from, ep->to)
1455 #endif
1456 )
1457 ep->can_eliminate = 0;
1458
1459 /* Look for the case where we have discovered that we can't replace
1460 register A with register B and that means that we will now be
1461 trying to replace register A with register C. This means we can
1462 no longer replace register C with register B and we need to disable
1463 such an elimination, if it exists. This occurs often with A == ap,
1464 B == sp, and C == fp. */
1465
1466 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1467 {
1468 struct elim_table *op;
1469 register int new_to = -1;
1470
1471 if (! ep->can_eliminate && ep->can_eliminate_previous)
1472 {
1473 /* Find the current elimination for ep->from, if there is a
1474 new one. */
1475 for (op = reg_eliminate;
1476 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1477 if (op->from == ep->from && op->can_eliminate)
1478 {
1479 new_to = op->to;
1480 break;
1481 }
1482
1483 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1484 disable it. */
1485 for (op = reg_eliminate;
1486 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1487 if (op->from == new_to && op->to == ep->to)
1488 op->can_eliminate = 0;
1489 }
1490 }
1491
1492 /* See if any registers that we thought we could eliminate the previous
1493 time are no longer eliminable. If so, something has changed and we
1494 must spill the register. Also, recompute the number of eliminable
1495 registers and see if the frame pointer is needed; it is if there is
1496 no elimination of the frame pointer that we can perform. */
1497
1498 frame_pointer_needed = 1;
1499 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1500 {
1501 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1502 && ep->to != HARD_FRAME_POINTER_REGNUM)
1503 frame_pointer_needed = 0;
1504
1505 if (! ep->can_eliminate && ep->can_eliminate_previous)
1506 {
1507 ep->can_eliminate_previous = 0;
1508 spill_hard_reg (ep->from, global, dumpfile, 1);
1509 something_changed = 1;
1510 num_eliminable--;
1511 }
1512 }
1513
1514 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1515 /* If we didn't need a frame pointer last time, but we do now, spill
1516 the hard frame pointer. */
1517 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1518 {
1519 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1520 something_changed = 1;
1521 }
1522 #endif
1523
1524 /* If all needs are met, we win. */
1525
1526 for (i = 0; i < N_REG_CLASSES; i++)
1527 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1528 break;
1529 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1530 break;
1531
1532 /* Not all needs are met; must spill some hard regs. */
1533
1534 /* Put all registers spilled so far back in potential_reload_regs, but
1535 put them at the front, since we've already spilled most of the
1536 psuedos in them (we might have left some pseudos unspilled if they
1537 were in a block that didn't need any spill registers of a conflicting
1538 class. We used to try to mark off the need for those registers,
1539 but doing so properly is very complex and reallocating them is the
1540 simpler approach. First, "pack" potential_reload_regs by pushing
1541 any nonnegative entries towards the end. That will leave room
1542 for the registers we already spilled.
1543
1544 Also, undo the marking of the spill registers from the last time
1545 around in FORBIDDEN_REGS since we will be probably be allocating
1546 them again below.
1547
1548 ??? It is theoretically possible that we might end up not using one
1549 of our previously-spilled registers in this allocation, even though
1550 they are at the head of the list. It's not clear what to do about
1551 this, but it was no better before, when we marked off the needs met
1552 by the previously-spilled registers. With the current code, globals
1553 can be allocated into these registers, but locals cannot. */
1554
1555 if (n_spills)
1556 {
1557 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1558 if (potential_reload_regs[i] != -1)
1559 potential_reload_regs[j--] = potential_reload_regs[i];
1560
1561 for (i = 0; i < n_spills; i++)
1562 {
1563 potential_reload_regs[i] = spill_regs[i];
1564 spill_reg_order[spill_regs[i]] = -1;
1565 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1566 }
1567
1568 n_spills = 0;
1569 }
1570
1571 /* Now find more reload regs to satisfy the remaining need
1572 Do it by ascending class number, since otherwise a reg
1573 might be spilled for a big class and might fail to count
1574 for a smaller class even though it belongs to that class.
1575
1576 Count spilled regs in `spills', and add entries to
1577 `spill_regs' and `spill_reg_order'.
1578
1579 ??? Note there is a problem here.
1580 When there is a need for a group in a high-numbered class,
1581 and also need for non-group regs that come from a lower class,
1582 the non-group regs are chosen first. If there aren't many regs,
1583 they might leave no room for a group.
1584
1585 This was happening on the 386. To fix it, we added the code
1586 that calls possible_group_p, so that the lower class won't
1587 break up the last possible group.
1588
1589 Really fixing the problem would require changes above
1590 in counting the regs already spilled, and in choose_reload_regs.
1591 It might be hard to avoid introducing bugs there. */
1592
1593 CLEAR_HARD_REG_SET (counted_for_groups);
1594 CLEAR_HARD_REG_SET (counted_for_nongroups);
1595
1596 for (class = 0; class < N_REG_CLASSES; class++)
1597 {
1598 /* First get the groups of registers.
1599 If we got single registers first, we might fragment
1600 possible groups. */
1601 while (max_groups[class] > 0)
1602 {
1603 /* If any single spilled regs happen to form groups,
1604 count them now. Maybe we don't really need
1605 to spill another group. */
1606 count_possible_groups (group_size, group_mode, max_groups);
1607
1608 if (max_groups[class] <= 0)
1609 break;
1610
1611 /* Groups of size 2 (the only groups used on most machines)
1612 are treated specially. */
1613 if (group_size[class] == 2)
1614 {
1615 /* First, look for a register that will complete a group. */
1616 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1617 {
1618 int other;
1619
1620 j = potential_reload_regs[i];
1621 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1622 &&
1623 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1624 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1625 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1626 && HARD_REGNO_MODE_OK (other, group_mode[class])
1627 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1628 other)
1629 /* We don't want one part of another group.
1630 We could get "two groups" that overlap! */
1631 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1632 ||
1633 (j < FIRST_PSEUDO_REGISTER - 1
1634 && (other = j + 1, spill_reg_order[other] >= 0)
1635 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1636 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1637 && HARD_REGNO_MODE_OK (j, group_mode[class])
1638 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1639 other)
1640 && ! TEST_HARD_REG_BIT (counted_for_groups,
1641 other))))
1642 {
1643 register enum reg_class *p;
1644
1645 /* We have found one that will complete a group,
1646 so count off one group as provided. */
1647 max_groups[class]--;
1648 p = reg_class_superclasses[class];
1649 while (*p != LIM_REG_CLASSES)
1650 max_groups[(int) *p++]--;
1651
1652 /* Indicate both these regs are part of a group. */
1653 SET_HARD_REG_BIT (counted_for_groups, j);
1654 SET_HARD_REG_BIT (counted_for_groups, other);
1655 break;
1656 }
1657 }
1658 /* We can't complete a group, so start one. */
1659 #ifdef SMALL_REGISTER_CLASSES
1660 /* Look for a pair neither of which is explicitly used. */
1661 if (i == FIRST_PSEUDO_REGISTER)
1662 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1663 {
1664 int k;
1665 j = potential_reload_regs[i];
1666 /* Verify that J+1 is a potential reload reg. */
1667 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1668 if (potential_reload_regs[k] == j + 1)
1669 break;
1670 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1671 && k < FIRST_PSEUDO_REGISTER
1672 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1673 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1674 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1675 && HARD_REGNO_MODE_OK (j, group_mode[class])
1676 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1677 j + 1)
1678 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1679 /* Reject J at this stage
1680 if J+1 was explicitly used. */
1681 && ! regs_explicitly_used[j + 1])
1682 break;
1683 }
1684 #endif
1685 /* Now try any group at all
1686 whose registers are not in bad_spill_regs. */
1687 if (i == FIRST_PSEUDO_REGISTER)
1688 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1689 {
1690 int k;
1691 j = potential_reload_regs[i];
1692 /* Verify that J+1 is a potential reload reg. */
1693 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1694 if (potential_reload_regs[k] == j + 1)
1695 break;
1696 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1697 && k < FIRST_PSEUDO_REGISTER
1698 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1699 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1700 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1701 && HARD_REGNO_MODE_OK (j, group_mode[class])
1702 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1703 j + 1)
1704 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1705 break;
1706 }
1707
1708 /* I should be the index in potential_reload_regs
1709 of the new reload reg we have found. */
1710
1711 if (i >= FIRST_PSEUDO_REGISTER)
1712 {
1713 /* There are no groups left to spill. */
1714 spill_failure (max_groups_insn[class]);
1715 failure = 1;
1716 goto failed;
1717 }
1718 else
1719 something_changed
1720 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1721 global, dumpfile);
1722 }
1723 else
1724 {
1725 /* For groups of more than 2 registers,
1726 look for a sufficient sequence of unspilled registers,
1727 and spill them all at once. */
1728 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1729 {
1730 int k;
1731
1732 j = potential_reload_regs[i];
1733 if (j >= 0
1734 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1735 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1736 {
1737 /* Check each reg in the sequence. */
1738 for (k = 0; k < group_size[class]; k++)
1739 if (! (spill_reg_order[j + k] < 0
1740 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1741 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1742 break;
1743 /* We got a full sequence, so spill them all. */
1744 if (k == group_size[class])
1745 {
1746 register enum reg_class *p;
1747 for (k = 0; k < group_size[class]; k++)
1748 {
1749 int idx;
1750 SET_HARD_REG_BIT (counted_for_groups, j + k);
1751 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1752 if (potential_reload_regs[idx] == j + k)
1753 break;
1754 something_changed
1755 |= new_spill_reg (idx, class,
1756 max_needs, NULL_PTR,
1757 global, dumpfile);
1758 }
1759
1760 /* We have found one that will complete a group,
1761 so count off one group as provided. */
1762 max_groups[class]--;
1763 p = reg_class_superclasses[class];
1764 while (*p != LIM_REG_CLASSES)
1765 max_groups[(int) *p++]--;
1766
1767 break;
1768 }
1769 }
1770 }
1771 /* We couldn't find any registers for this reload.
1772 Avoid going into an infinite loop. */
1773 if (i >= FIRST_PSEUDO_REGISTER)
1774 {
1775 /* There are no groups left. */
1776 spill_failure (max_groups_insn[class]);
1777 failure = 1;
1778 goto failed;
1779 }
1780 }
1781 }
1782
1783 /* Now similarly satisfy all need for single registers. */
1784
1785 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1786 {
1787 #ifdef SMALL_REGISTER_CLASSES
1788 /* This should be right for all machines, but only the 386
1789 is known to need it, so this conditional plays safe.
1790 ??? For 2.5, try making this unconditional. */
1791 /* If we spilled enough regs, but they weren't counted
1792 against the non-group need, see if we can count them now.
1793 If so, we can avoid some actual spilling. */
1794 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1795 for (i = 0; i < n_spills; i++)
1796 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1797 spill_regs[i])
1798 && !TEST_HARD_REG_BIT (counted_for_groups,
1799 spill_regs[i])
1800 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1801 spill_regs[i])
1802 && max_nongroups[class] > 0)
1803 {
1804 register enum reg_class *p;
1805
1806 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1807 max_nongroups[class]--;
1808 p = reg_class_superclasses[class];
1809 while (*p != LIM_REG_CLASSES)
1810 max_nongroups[(int) *p++]--;
1811 }
1812 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1813 break;
1814 #endif
1815
1816 /* Consider the potential reload regs that aren't
1817 yet in use as reload regs, in order of preference.
1818 Find the most preferred one that's in this class. */
1819
1820 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1821 if (potential_reload_regs[i] >= 0
1822 && TEST_HARD_REG_BIT (reg_class_contents[class],
1823 potential_reload_regs[i])
1824 /* If this reg will not be available for groups,
1825 pick one that does not foreclose possible groups.
1826 This is a kludge, and not very general,
1827 but it should be sufficient to make the 386 work,
1828 and the problem should not occur on machines with
1829 more registers. */
1830 && (max_nongroups[class] == 0
1831 || possible_group_p (potential_reload_regs[i], max_groups)))
1832 break;
1833
1834 /* If we couldn't get a register, try to get one even if we
1835 might foreclose possible groups. This may cause problems
1836 later, but that's better than aborting now, since it is
1837 possible that we will, in fact, be able to form the needed
1838 group even with this allocation. */
1839
1840 if (i >= FIRST_PSEUDO_REGISTER
1841 && (asm_noperands (max_needs[class] > 0
1842 ? max_needs_insn[class]
1843 : max_nongroups_insn[class])
1844 < 0))
1845 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1846 if (potential_reload_regs[i] >= 0
1847 && TEST_HARD_REG_BIT (reg_class_contents[class],
1848 potential_reload_regs[i]))
1849 break;
1850
1851 /* I should be the index in potential_reload_regs
1852 of the new reload reg we have found. */
1853
1854 if (i >= FIRST_PSEUDO_REGISTER)
1855 {
1856 /* There are no possible registers left to spill. */
1857 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1858 : max_nongroups_insn[class]);
1859 failure = 1;
1860 goto failed;
1861 }
1862 else
1863 something_changed
1864 |= new_spill_reg (i, class, max_needs, max_nongroups,
1865 global, dumpfile);
1866 }
1867 }
1868 }
1869
1870 /* If global-alloc was run, notify it of any register eliminations we have
1871 done. */
1872 if (global)
1873 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1874 if (ep->can_eliminate)
1875 mark_elimination (ep->from, ep->to);
1876
1877 /* Insert code to save and restore call-clobbered hard regs
1878 around calls. Tell if what mode to use so that we will process
1879 those insns in reload_as_needed if we have to. */
1880
1881 if (caller_save_needed)
1882 save_call_clobbered_regs (num_eliminable ? QImode
1883 : caller_save_spill_class != NO_REGS ? HImode
1884 : VOIDmode);
1885
1886 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1887 If that insn didn't set the register (i.e., it copied the register to
1888 memory), just delete that insn instead of the equivalencing insn plus
1889 anything now dead. If we call delete_dead_insn on that insn, we may
1890 delete the insn that actually sets the register if the register die
1891 there and that is incorrect. */
1892
1893 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1894 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1895 && GET_CODE (reg_equiv_init[i]) != NOTE)
1896 {
1897 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1898 delete_dead_insn (reg_equiv_init[i]);
1899 else
1900 {
1901 PUT_CODE (reg_equiv_init[i], NOTE);
1902 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1903 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1904 }
1905 }
1906
1907 /* Use the reload registers where necessary
1908 by generating move instructions to move the must-be-register
1909 values into or out of the reload registers. */
1910
1911 if (something_needs_reloads || something_needs_elimination
1912 || (caller_save_needed && num_eliminable)
1913 || caller_save_spill_class != NO_REGS)
1914 reload_as_needed (first, global);
1915
1916 /* If we were able to eliminate the frame pointer, show that it is no
1917 longer live at the start of any basic block. If it ls live by
1918 virtue of being in a pseudo, that pseudo will be marked live
1919 and hence the frame pointer will be known to be live via that
1920 pseudo. */
1921
1922 if (! frame_pointer_needed)
1923 for (i = 0; i < n_basic_blocks; i++)
1924 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1925 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1926 % REGSET_ELT_BITS));
1927
1928 /* Come here (with failure set nonzero) if we can't get enough spill regs
1929 and we decide not to abort about it. */
1930 failed:
1931
1932 reload_in_progress = 0;
1933
1934 /* Now eliminate all pseudo regs by modifying them into
1935 their equivalent memory references.
1936 The REG-rtx's for the pseudos are modified in place,
1937 so all insns that used to refer to them now refer to memory.
1938
1939 For a reg that has a reg_equiv_address, all those insns
1940 were changed by reloading so that no insns refer to it any longer;
1941 but the DECL_RTL of a variable decl may refer to it,
1942 and if so this causes the debugging info to mention the variable. */
1943
1944 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1945 {
1946 rtx addr = 0;
1947 int in_struct = 0;
1948 if (reg_equiv_mem[i])
1949 {
1950 addr = XEXP (reg_equiv_mem[i], 0);
1951 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1952 }
1953 if (reg_equiv_address[i])
1954 addr = reg_equiv_address[i];
1955 if (addr)
1956 {
1957 if (reg_renumber[i] < 0)
1958 {
1959 rtx reg = regno_reg_rtx[i];
1960 XEXP (reg, 0) = addr;
1961 REG_USERVAR_P (reg) = 0;
1962 MEM_IN_STRUCT_P (reg) = in_struct;
1963 PUT_CODE (reg, MEM);
1964 }
1965 else if (reg_equiv_mem[i])
1966 XEXP (reg_equiv_mem[i], 0) = addr;
1967 }
1968 }
1969
1970 #ifdef PRESERVE_DEATH_INFO_REGNO_P
1971 /* Make a pass over all the insns and remove death notes for things that
1972 are no longer registers or no longer die in the insn (e.g., an input
1973 and output pseudo being tied). */
1974
1975 for (insn = first; insn; insn = NEXT_INSN (insn))
1976 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1977 {
1978 rtx note, next;
1979
1980 for (note = REG_NOTES (insn); note; note = next)
1981 {
1982 next = XEXP (note, 1);
1983 if (REG_NOTE_KIND (note) == REG_DEAD
1984 && (GET_CODE (XEXP (note, 0)) != REG
1985 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1986 remove_note (insn, note);
1987 }
1988 }
1989 #endif
1990
1991 /* Indicate that we no longer have known memory locations or constants. */
1992 reg_equiv_constant = 0;
1993 reg_equiv_memory_loc = 0;
1994
1995 if (scratch_list)
1996 free (scratch_list);
1997 scratch_list = 0;
1998 if (scratch_block)
1999 free (scratch_block);
2000 scratch_block = 0;
2001
2002 return failure;
2003 }
2004 \f
2005 /* Nonzero if, after spilling reg REGNO for non-groups,
2006 it will still be possible to find a group if we still need one. */
2007
2008 static int
2009 possible_group_p (regno, max_groups)
2010 int regno;
2011 int *max_groups;
2012 {
2013 int i;
2014 int class = (int) NO_REGS;
2015
2016 for (i = 0; i < (int) N_REG_CLASSES; i++)
2017 if (max_groups[i] > 0)
2018 {
2019 class = i;
2020 break;
2021 }
2022
2023 if (class == (int) NO_REGS)
2024 return 1;
2025
2026 /* Consider each pair of consecutive registers. */
2027 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2028 {
2029 /* Ignore pairs that include reg REGNO. */
2030 if (i == regno || i + 1 == regno)
2031 continue;
2032
2033 /* Ignore pairs that are outside the class that needs the group.
2034 ??? Here we fail to handle the case where two different classes
2035 independently need groups. But this never happens with our
2036 current machine descriptions. */
2037 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2038 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2039 continue;
2040
2041 /* A pair of consecutive regs we can still spill does the trick. */
2042 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2043 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2044 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2045 return 1;
2046
2047 /* A pair of one already spilled and one we can spill does it
2048 provided the one already spilled is not otherwise reserved. */
2049 if (spill_reg_order[i] < 0
2050 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2051 && spill_reg_order[i + 1] >= 0
2052 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2053 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2054 return 1;
2055 if (spill_reg_order[i + 1] < 0
2056 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2057 && spill_reg_order[i] >= 0
2058 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2059 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2060 return 1;
2061 }
2062
2063 return 0;
2064 }
2065 \f
2066 /* Count any groups that can be formed from the registers recently spilled.
2067 This is done class by class, in order of ascending class number. */
2068
2069 static void
2070 count_possible_groups (group_size, group_mode, max_groups)
2071 int *group_size;
2072 enum machine_mode *group_mode;
2073 int *max_groups;
2074 {
2075 int i;
2076 /* Now find all consecutive groups of spilled registers
2077 and mark each group off against the need for such groups.
2078 But don't count them against ordinary need, yet. */
2079
2080 for (i = 0; i < N_REG_CLASSES; i++)
2081 if (group_size[i] > 1)
2082 {
2083 HARD_REG_SET new;
2084 int j;
2085
2086 CLEAR_HARD_REG_SET (new);
2087
2088 /* Make a mask of all the regs that are spill regs in class I. */
2089 for (j = 0; j < n_spills; j++)
2090 if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
2091 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
2092 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
2093 spill_regs[j]))
2094 SET_HARD_REG_BIT (new, spill_regs[j]);
2095
2096 /* Find each consecutive group of them. */
2097 for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
2098 if (TEST_HARD_REG_BIT (new, j)
2099 && j + group_size[i] <= FIRST_PSEUDO_REGISTER
2100 /* Next line in case group-mode for this class
2101 demands an even-odd pair. */
2102 && HARD_REGNO_MODE_OK (j, group_mode[i]))
2103 {
2104 int k;
2105 for (k = 1; k < group_size[i]; k++)
2106 if (! TEST_HARD_REG_BIT (new, j + k))
2107 break;
2108 if (k == group_size[i])
2109 {
2110 /* We found a group. Mark it off against this class's
2111 need for groups, and against each superclass too. */
2112 register enum reg_class *p;
2113 max_groups[i]--;
2114 p = reg_class_superclasses[i];
2115 while (*p != LIM_REG_CLASSES)
2116 max_groups[(int) *p++]--;
2117 /* Don't count these registers again. */
2118 for (k = 0; k < group_size[i]; k++)
2119 SET_HARD_REG_BIT (counted_for_groups, j + k);
2120 }
2121 /* Skip to the last reg in this group. When j is incremented
2122 above, it will then point to the first reg of the next
2123 possible group. */
2124 j += k - 1;
2125 }
2126 }
2127
2128 }
2129 \f
2130 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2131 another mode that needs to be reloaded for the same register class CLASS.
2132 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2133 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2134
2135 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2136 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2137 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2138 causes unnecessary failures on machines requiring alignment of register
2139 groups when the two modes are different sizes, because the larger mode has
2140 more strict alignment rules than the smaller mode. */
2141
2142 static int
2143 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2144 enum machine_mode allocate_mode, other_mode;
2145 enum reg_class class;
2146 {
2147 register int regno;
2148 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2149 {
2150 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2151 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2152 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2153 return 0;
2154 }
2155 return 1;
2156 }
2157
2158 /* Handle the failure to find a register to spill.
2159 INSN should be one of the insns which needed this particular spill reg. */
2160
2161 static void
2162 spill_failure (insn)
2163 rtx insn;
2164 {
2165 if (asm_noperands (PATTERN (insn)) >= 0)
2166 error_for_asm (insn, "`asm' needs too many reloads");
2167 else
2168 abort ();
2169 }
2170
2171 /* Add a new register to the tables of available spill-registers
2172 (as well as spilling all pseudos allocated to the register).
2173 I is the index of this register in potential_reload_regs.
2174 CLASS is the regclass whose need is being satisfied.
2175 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2176 so that this register can count off against them.
2177 MAX_NONGROUPS is 0 if this register is part of a group.
2178 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2179
2180 static int
2181 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2182 int i;
2183 int class;
2184 int *max_needs;
2185 int *max_nongroups;
2186 int global;
2187 FILE *dumpfile;
2188 {
2189 register enum reg_class *p;
2190 int val;
2191 int regno = potential_reload_regs[i];
2192
2193 if (i >= FIRST_PSEUDO_REGISTER)
2194 abort (); /* Caller failed to find any register. */
2195
2196 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2197 fatal ("fixed or forbidden register was spilled.\n\
2198 This may be due to a compiler bug or to impossible asm\n\
2199 statements or clauses.");
2200
2201 /* Make reg REGNO an additional reload reg. */
2202
2203 potential_reload_regs[i] = -1;
2204 spill_regs[n_spills] = regno;
2205 spill_reg_order[regno] = n_spills;
2206 if (dumpfile)
2207 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2208
2209 /* Clear off the needs we just satisfied. */
2210
2211 max_needs[class]--;
2212 p = reg_class_superclasses[class];
2213 while (*p != LIM_REG_CLASSES)
2214 max_needs[(int) *p++]--;
2215
2216 if (max_nongroups && max_nongroups[class] > 0)
2217 {
2218 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2219 max_nongroups[class]--;
2220 p = reg_class_superclasses[class];
2221 while (*p != LIM_REG_CLASSES)
2222 max_nongroups[(int) *p++]--;
2223 }
2224
2225 /* Spill every pseudo reg that was allocated to this reg
2226 or to something that overlaps this reg. */
2227
2228 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2229
2230 /* If there are some registers still to eliminate and this register
2231 wasn't ever used before, additional stack space may have to be
2232 allocated to store this register. Thus, we may have changed the offset
2233 between the stack and frame pointers, so mark that something has changed.
2234 (If new pseudos were spilled, thus requiring more space, VAL would have
2235 been set non-zero by the call to spill_hard_reg above since additional
2236 reloads may be needed in that case.
2237
2238 One might think that we need only set VAL to 1 if this is a call-used
2239 register. However, the set of registers that must be saved by the
2240 prologue is not identical to the call-used set. For example, the
2241 register used by the call insn for the return PC is a call-used register,
2242 but must be saved by the prologue. */
2243 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2244 val = 1;
2245
2246 regs_ever_live[spill_regs[n_spills]] = 1;
2247 n_spills++;
2248
2249 return val;
2250 }
2251 \f
2252 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2253 data that is dead in INSN. */
2254
2255 static void
2256 delete_dead_insn (insn)
2257 rtx insn;
2258 {
2259 rtx prev = prev_real_insn (insn);
2260 rtx prev_dest;
2261
2262 /* If the previous insn sets a register that dies in our insn, delete it
2263 too. */
2264 if (prev && GET_CODE (PATTERN (prev)) == SET
2265 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2266 && reg_mentioned_p (prev_dest, PATTERN (insn))
2267 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2268 delete_dead_insn (prev);
2269
2270 PUT_CODE (insn, NOTE);
2271 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2272 NOTE_SOURCE_FILE (insn) = 0;
2273 }
2274
2275 /* Modify the home of pseudo-reg I.
2276 The new home is present in reg_renumber[I].
2277
2278 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2279 or it may be -1, meaning there is none or it is not relevant.
2280 This is used so that all pseudos spilled from a given hard reg
2281 can share one stack slot. */
2282
2283 static void
2284 alter_reg (i, from_reg)
2285 register int i;
2286 int from_reg;
2287 {
2288 /* When outputting an inline function, this can happen
2289 for a reg that isn't actually used. */
2290 if (regno_reg_rtx[i] == 0)
2291 return;
2292
2293 /* If the reg got changed to a MEM at rtl-generation time,
2294 ignore it. */
2295 if (GET_CODE (regno_reg_rtx[i]) != REG)
2296 return;
2297
2298 /* Modify the reg-rtx to contain the new hard reg
2299 number or else to contain its pseudo reg number. */
2300 REGNO (regno_reg_rtx[i])
2301 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2302
2303 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2304 allocate a stack slot for it. */
2305
2306 if (reg_renumber[i] < 0
2307 && reg_n_refs[i] > 0
2308 && reg_equiv_constant[i] == 0
2309 && reg_equiv_memory_loc[i] == 0)
2310 {
2311 register rtx x;
2312 int inherent_size = PSEUDO_REGNO_BYTES (i);
2313 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2314 int adjust = 0;
2315
2316 /* Each pseudo reg has an inherent size which comes from its own mode,
2317 and a total size which provides room for paradoxical subregs
2318 which refer to the pseudo reg in wider modes.
2319
2320 We can use a slot already allocated if it provides both
2321 enough inherent space and enough total space.
2322 Otherwise, we allocate a new slot, making sure that it has no less
2323 inherent space, and no less total space, then the previous slot. */
2324 if (from_reg == -1)
2325 {
2326 /* No known place to spill from => no slot to reuse. */
2327 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2328 #if BYTES_BIG_ENDIAN
2329 /* Cancel the big-endian correction done in assign_stack_local.
2330 Get the address of the beginning of the slot.
2331 This is so we can do a big-endian correction unconditionally
2332 below. */
2333 adjust = inherent_size - total_size;
2334 #endif
2335 }
2336 /* Reuse a stack slot if possible. */
2337 else if (spill_stack_slot[from_reg] != 0
2338 && spill_stack_slot_width[from_reg] >= total_size
2339 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2340 >= inherent_size))
2341 x = spill_stack_slot[from_reg];
2342 /* Allocate a bigger slot. */
2343 else
2344 {
2345 /* Compute maximum size needed, both for inherent size
2346 and for total size. */
2347 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2348 if (spill_stack_slot[from_reg])
2349 {
2350 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2351 > inherent_size)
2352 mode = GET_MODE (spill_stack_slot[from_reg]);
2353 if (spill_stack_slot_width[from_reg] > total_size)
2354 total_size = spill_stack_slot_width[from_reg];
2355 }
2356 /* Make a slot with that size. */
2357 x = assign_stack_local (mode, total_size, -1);
2358 #if BYTES_BIG_ENDIAN
2359 /* Cancel the big-endian correction done in assign_stack_local.
2360 Get the address of the beginning of the slot.
2361 This is so we can do a big-endian correction unconditionally
2362 below. */
2363 adjust = GET_MODE_SIZE (mode) - total_size;
2364 #endif
2365 spill_stack_slot[from_reg] = x;
2366 spill_stack_slot_width[from_reg] = total_size;
2367 }
2368
2369 #if BYTES_BIG_ENDIAN
2370 /* On a big endian machine, the "address" of the slot
2371 is the address of the low part that fits its inherent mode. */
2372 if (inherent_size < total_size)
2373 adjust += (total_size - inherent_size);
2374 #endif /* BYTES_BIG_ENDIAN */
2375
2376 /* If we have any adjustment to make, or if the stack slot is the
2377 wrong mode, make a new stack slot. */
2378 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2379 {
2380 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2381 plus_constant (XEXP (x, 0), adjust));
2382 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2383 }
2384
2385 /* Save the stack slot for later. */
2386 reg_equiv_memory_loc[i] = x;
2387 }
2388 }
2389
2390 /* Mark the slots in regs_ever_live for the hard regs
2391 used by pseudo-reg number REGNO. */
2392
2393 void
2394 mark_home_live (regno)
2395 int regno;
2396 {
2397 register int i, lim;
2398 i = reg_renumber[regno];
2399 if (i < 0)
2400 return;
2401 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2402 while (i < lim)
2403 regs_ever_live[i++] = 1;
2404 }
2405
2406 /* Mark the registers used in SCRATCH as being live. */
2407
2408 static void
2409 mark_scratch_live (scratch)
2410 rtx scratch;
2411 {
2412 register int i;
2413 int regno = REGNO (scratch);
2414 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2415
2416 for (i = regno; i < lim; i++)
2417 regs_ever_live[i] = 1;
2418 }
2419 \f
2420 /* This function handles the tracking of elimination offsets around branches.
2421
2422 X is a piece of RTL being scanned.
2423
2424 INSN is the insn that it came from, if any.
2425
2426 INITIAL_P is non-zero if we are to set the offset to be the initial
2427 offset and zero if we are setting the offset of the label to be the
2428 current offset. */
2429
2430 static void
2431 set_label_offsets (x, insn, initial_p)
2432 rtx x;
2433 rtx insn;
2434 int initial_p;
2435 {
2436 enum rtx_code code = GET_CODE (x);
2437 rtx tem;
2438 int i;
2439 struct elim_table *p;
2440
2441 switch (code)
2442 {
2443 case LABEL_REF:
2444 if (LABEL_REF_NONLOCAL_P (x))
2445 return;
2446
2447 x = XEXP (x, 0);
2448
2449 /* ... fall through ... */
2450
2451 case CODE_LABEL:
2452 /* If we know nothing about this label, set the desired offsets. Note
2453 that this sets the offset at a label to be the offset before a label
2454 if we don't know anything about the label. This is not correct for
2455 the label after a BARRIER, but is the best guess we can make. If
2456 we guessed wrong, we will suppress an elimination that might have
2457 been possible had we been able to guess correctly. */
2458
2459 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2460 {
2461 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2462 offsets_at[CODE_LABEL_NUMBER (x)][i]
2463 = (initial_p ? reg_eliminate[i].initial_offset
2464 : reg_eliminate[i].offset);
2465 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2466 }
2467
2468 /* Otherwise, if this is the definition of a label and it is
2469 preceded by a BARRIER, set our offsets to the known offset of
2470 that label. */
2471
2472 else if (x == insn
2473 && (tem = prev_nonnote_insn (insn)) != 0
2474 && GET_CODE (tem) == BARRIER)
2475 {
2476 num_not_at_initial_offset = 0;
2477 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2478 {
2479 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2480 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2481 if (reg_eliminate[i].can_eliminate
2482 && (reg_eliminate[i].offset
2483 != reg_eliminate[i].initial_offset))
2484 num_not_at_initial_offset++;
2485 }
2486 }
2487
2488 else
2489 /* If neither of the above cases is true, compare each offset
2490 with those previously recorded and suppress any eliminations
2491 where the offsets disagree. */
2492
2493 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2494 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2495 != (initial_p ? reg_eliminate[i].initial_offset
2496 : reg_eliminate[i].offset))
2497 reg_eliminate[i].can_eliminate = 0;
2498
2499 return;
2500
2501 case JUMP_INSN:
2502 set_label_offsets (PATTERN (insn), insn, initial_p);
2503
2504 /* ... fall through ... */
2505
2506 case INSN:
2507 case CALL_INSN:
2508 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2509 and hence must have all eliminations at their initial offsets. */
2510 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2511 if (REG_NOTE_KIND (tem) == REG_LABEL)
2512 set_label_offsets (XEXP (tem, 0), insn, 1);
2513 return;
2514
2515 case ADDR_VEC:
2516 case ADDR_DIFF_VEC:
2517 /* Each of the labels in the address vector must be at their initial
2518 offsets. We want the first first for ADDR_VEC and the second
2519 field for ADDR_DIFF_VEC. */
2520
2521 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2522 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2523 insn, initial_p);
2524 return;
2525
2526 case SET:
2527 /* We only care about setting PC. If the source is not RETURN,
2528 IF_THEN_ELSE, or a label, disable any eliminations not at
2529 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2530 isn't one of those possibilities. For branches to a label,
2531 call ourselves recursively.
2532
2533 Note that this can disable elimination unnecessarily when we have
2534 a non-local goto since it will look like a non-constant jump to
2535 someplace in the current function. This isn't a significant
2536 problem since such jumps will normally be when all elimination
2537 pairs are back to their initial offsets. */
2538
2539 if (SET_DEST (x) != pc_rtx)
2540 return;
2541
2542 switch (GET_CODE (SET_SRC (x)))
2543 {
2544 case PC:
2545 case RETURN:
2546 return;
2547
2548 case LABEL_REF:
2549 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2550 return;
2551
2552 case IF_THEN_ELSE:
2553 tem = XEXP (SET_SRC (x), 1);
2554 if (GET_CODE (tem) == LABEL_REF)
2555 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2556 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2557 break;
2558
2559 tem = XEXP (SET_SRC (x), 2);
2560 if (GET_CODE (tem) == LABEL_REF)
2561 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2562 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2563 break;
2564 return;
2565 }
2566
2567 /* If we reach here, all eliminations must be at their initial
2568 offset because we are doing a jump to a variable address. */
2569 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2570 if (p->offset != p->initial_offset)
2571 p->can_eliminate = 0;
2572 }
2573 }
2574 \f
2575 /* Used for communication between the next two function to properly share
2576 the vector for an ASM_OPERANDS. */
2577
2578 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2579
2580 /* Scan X and replace any eliminable registers (such as fp) with a
2581 replacement (such as sp), plus an offset.
2582
2583 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2584 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2585 MEM, we are allowed to replace a sum of a register and the constant zero
2586 with the register, which we cannot do outside a MEM. In addition, we need
2587 to record the fact that a register is referenced outside a MEM.
2588
2589 If INSN is an insn, it is the insn containing X. If we replace a REG
2590 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2591 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2592 that the REG is being modified.
2593
2594 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2595 That's used when we eliminate in expressions stored in notes.
2596 This means, do not set ref_outside_mem even if the reference
2597 is outside of MEMs.
2598
2599 If we see a modification to a register we know about, take the
2600 appropriate action (see case SET, below).
2601
2602 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2603 replacements done assuming all offsets are at their initial values. If
2604 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2605 encounter, return the actual location so that find_reloads will do
2606 the proper thing. */
2607
2608 rtx
2609 eliminate_regs (x, mem_mode, insn)
2610 rtx x;
2611 enum machine_mode mem_mode;
2612 rtx insn;
2613 {
2614 enum rtx_code code = GET_CODE (x);
2615 struct elim_table *ep;
2616 int regno;
2617 rtx new;
2618 int i, j;
2619 char *fmt;
2620 int copied = 0;
2621
2622 switch (code)
2623 {
2624 case CONST_INT:
2625 case CONST_DOUBLE:
2626 case CONST:
2627 case SYMBOL_REF:
2628 case CODE_LABEL:
2629 case PC:
2630 case CC0:
2631 case ASM_INPUT:
2632 case ADDR_VEC:
2633 case ADDR_DIFF_VEC:
2634 case RETURN:
2635 return x;
2636
2637 case REG:
2638 regno = REGNO (x);
2639
2640 /* First handle the case where we encounter a bare register that
2641 is eliminable. Replace it with a PLUS. */
2642 if (regno < FIRST_PSEUDO_REGISTER)
2643 {
2644 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2645 ep++)
2646 if (ep->from_rtx == x && ep->can_eliminate)
2647 {
2648 if (! mem_mode
2649 /* Refs inside notes don't count for this purpose. */
2650 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2651 || GET_CODE (insn) == INSN_LIST)))
2652 ep->ref_outside_mem = 1;
2653 return plus_constant (ep->to_rtx, ep->previous_offset);
2654 }
2655
2656 }
2657 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2658 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2659 {
2660 /* In this case, find_reloads would attempt to either use an
2661 incorrect address (if something is not at its initial offset)
2662 or substitute an replaced address into an insn (which loses
2663 if the offset is changed by some later action). So we simply
2664 return the replaced stack slot (assuming it is changed by
2665 elimination) and ignore the fact that this is actually a
2666 reference to the pseudo. Ensure we make a copy of the
2667 address in case it is shared. */
2668 new = eliminate_regs (reg_equiv_memory_loc[regno],
2669 mem_mode, insn);
2670 if (new != reg_equiv_memory_loc[regno])
2671 {
2672 cannot_omit_stores[regno] = 1;
2673 return copy_rtx (new);
2674 }
2675 }
2676 return x;
2677
2678 case PLUS:
2679 /* If this is the sum of an eliminable register and a constant, rework
2680 the sum. */
2681 if (GET_CODE (XEXP (x, 0)) == REG
2682 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2683 && CONSTANT_P (XEXP (x, 1)))
2684 {
2685 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2686 ep++)
2687 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2688 {
2689 if (! mem_mode
2690 /* Refs inside notes don't count for this purpose. */
2691 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2692 || GET_CODE (insn) == INSN_LIST)))
2693 ep->ref_outside_mem = 1;
2694
2695 /* The only time we want to replace a PLUS with a REG (this
2696 occurs when the constant operand of the PLUS is the negative
2697 of the offset) is when we are inside a MEM. We won't want
2698 to do so at other times because that would change the
2699 structure of the insn in a way that reload can't handle.
2700 We special-case the commonest situation in
2701 eliminate_regs_in_insn, so just replace a PLUS with a
2702 PLUS here, unless inside a MEM. */
2703 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2704 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2705 return ep->to_rtx;
2706 else
2707 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2708 plus_constant (XEXP (x, 1),
2709 ep->previous_offset));
2710 }
2711
2712 /* If the register is not eliminable, we are done since the other
2713 operand is a constant. */
2714 return x;
2715 }
2716
2717 /* If this is part of an address, we want to bring any constant to the
2718 outermost PLUS. We will do this by doing register replacement in
2719 our operands and seeing if a constant shows up in one of them.
2720
2721 We assume here this is part of an address (or a "load address" insn)
2722 since an eliminable register is not likely to appear in any other
2723 context.
2724
2725 If we have (plus (eliminable) (reg)), we want to produce
2726 (plus (plus (replacement) (reg) (const))). If this was part of a
2727 normal add insn, (plus (replacement) (reg)) will be pushed as a
2728 reload. This is the desired action. */
2729
2730 {
2731 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2732 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2733
2734 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2735 {
2736 /* If one side is a PLUS and the other side is a pseudo that
2737 didn't get a hard register but has a reg_equiv_constant,
2738 we must replace the constant here since it may no longer
2739 be in the position of any operand. */
2740 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2741 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2742 && reg_renumber[REGNO (new1)] < 0
2743 && reg_equiv_constant != 0
2744 && reg_equiv_constant[REGNO (new1)] != 0)
2745 new1 = reg_equiv_constant[REGNO (new1)];
2746 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2747 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2748 && reg_renumber[REGNO (new0)] < 0
2749 && reg_equiv_constant[REGNO (new0)] != 0)
2750 new0 = reg_equiv_constant[REGNO (new0)];
2751
2752 new = form_sum (new0, new1);
2753
2754 /* As above, if we are not inside a MEM we do not want to
2755 turn a PLUS into something else. We might try to do so here
2756 for an addition of 0 if we aren't optimizing. */
2757 if (! mem_mode && GET_CODE (new) != PLUS)
2758 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2759 else
2760 return new;
2761 }
2762 }
2763 return x;
2764
2765 case MULT:
2766 /* If this is the product of an eliminable register and a
2767 constant, apply the distribute law and move the constant out
2768 so that we have (plus (mult ..) ..). This is needed in order
2769 to keep load-address insns valid. This case is pathalogical.
2770 We ignore the possibility of overflow here. */
2771 if (GET_CODE (XEXP (x, 0)) == REG
2772 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2773 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2774 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2775 ep++)
2776 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2777 {
2778 if (! mem_mode
2779 /* Refs inside notes don't count for this purpose. */
2780 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2781 || GET_CODE (insn) == INSN_LIST)))
2782 ep->ref_outside_mem = 1;
2783
2784 return
2785 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2786 ep->previous_offset * INTVAL (XEXP (x, 1)));
2787 }
2788
2789 /* ... fall through ... */
2790
2791 case CALL:
2792 case COMPARE:
2793 case MINUS:
2794 case DIV: case UDIV:
2795 case MOD: case UMOD:
2796 case AND: case IOR: case XOR:
2797 case ROTATERT: case ROTATE:
2798 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2799 case NE: case EQ:
2800 case GE: case GT: case GEU: case GTU:
2801 case LE: case LT: case LEU: case LTU:
2802 {
2803 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2804 rtx new1
2805 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2806
2807 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2808 return gen_rtx (code, GET_MODE (x), new0, new1);
2809 }
2810 return x;
2811
2812 case EXPR_LIST:
2813 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2814 if (XEXP (x, 0))
2815 {
2816 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2817 if (new != XEXP (x, 0))
2818 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2819 }
2820
2821 /* ... fall through ... */
2822
2823 case INSN_LIST:
2824 /* Now do eliminations in the rest of the chain. If this was
2825 an EXPR_LIST, this might result in allocating more memory than is
2826 strictly needed, but it simplifies the code. */
2827 if (XEXP (x, 1))
2828 {
2829 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2830 if (new != XEXP (x, 1))
2831 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2832 }
2833 return x;
2834
2835 case PRE_INC:
2836 case POST_INC:
2837 case PRE_DEC:
2838 case POST_DEC:
2839 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2840 if (ep->to_rtx == XEXP (x, 0))
2841 {
2842 int size = GET_MODE_SIZE (mem_mode);
2843
2844 /* If more bytes than MEM_MODE are pushed, account for them. */
2845 #ifdef PUSH_ROUNDING
2846 if (ep->to_rtx == stack_pointer_rtx)
2847 size = PUSH_ROUNDING (size);
2848 #endif
2849 if (code == PRE_DEC || code == POST_DEC)
2850 ep->offset += size;
2851 else
2852 ep->offset -= size;
2853 }
2854
2855 /* Fall through to generic unary operation case. */
2856 case USE:
2857 case STRICT_LOW_PART:
2858 case NEG: case NOT:
2859 case SIGN_EXTEND: case ZERO_EXTEND:
2860 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2861 case FLOAT: case FIX:
2862 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2863 case ABS:
2864 case SQRT:
2865 case FFS:
2866 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2867 if (new != XEXP (x, 0))
2868 return gen_rtx (code, GET_MODE (x), new);
2869 return x;
2870
2871 case SUBREG:
2872 /* Similar to above processing, but preserve SUBREG_WORD.
2873 Convert (subreg (mem)) to (mem) if not paradoxical.
2874 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2875 pseudo didn't get a hard reg, we must replace this with the
2876 eliminated version of the memory location because push_reloads
2877 may do the replacement in certain circumstances. */
2878 if (GET_CODE (SUBREG_REG (x)) == REG
2879 && (GET_MODE_SIZE (GET_MODE (x))
2880 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2881 && reg_equiv_memory_loc != 0
2882 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2883 {
2884 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2885 mem_mode, insn);
2886
2887 /* If we didn't change anything, we must retain the pseudo. */
2888 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2889 new = XEXP (x, 0);
2890 else
2891 /* Otherwise, ensure NEW isn't shared in case we have to reload
2892 it. */
2893 new = copy_rtx (new);
2894 }
2895 else
2896 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2897
2898 if (new != XEXP (x, 0))
2899 {
2900 if (GET_CODE (new) == MEM
2901 && (GET_MODE_SIZE (GET_MODE (x))
2902 <= GET_MODE_SIZE (GET_MODE (new)))
2903 #ifdef LOAD_EXTEND_OP
2904 /* On these machines we will be reloading what is
2905 inside the SUBREG if it originally was a pseudo and
2906 the inner and outer modes are both a word or
2907 smaller. So leave the SUBREG then. */
2908 && ! (GET_CODE (SUBREG_REG (x)) == REG
2909 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2910 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD)
2911 #endif
2912 )
2913 {
2914 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2915 enum machine_mode mode = GET_MODE (x);
2916
2917 #if BYTES_BIG_ENDIAN
2918 offset += (MIN (UNITS_PER_WORD,
2919 GET_MODE_SIZE (GET_MODE (new)))
2920 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2921 #endif
2922
2923 PUT_MODE (new, mode);
2924 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2925 return new;
2926 }
2927 else
2928 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2929 }
2930
2931 return x;
2932
2933 case CLOBBER:
2934 /* If clobbering a register that is the replacement register for an
2935 elimination we still think can be performed, note that it cannot
2936 be performed. Otherwise, we need not be concerned about it. */
2937 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2938 if (ep->to_rtx == XEXP (x, 0))
2939 ep->can_eliminate = 0;
2940
2941 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2942 if (new != XEXP (x, 0))
2943 return gen_rtx (code, GET_MODE (x), new);
2944 return x;
2945
2946 case ASM_OPERANDS:
2947 {
2948 rtx *temp_vec;
2949 /* Properly handle sharing input and constraint vectors. */
2950 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2951 {
2952 /* When we come to a new vector not seen before,
2953 scan all its elements; keep the old vector if none
2954 of them changes; otherwise, make a copy. */
2955 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2956 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2957 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2958 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
2959 mem_mode, insn);
2960
2961 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2962 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2963 break;
2964
2965 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2966 new_asm_operands_vec = old_asm_operands_vec;
2967 else
2968 new_asm_operands_vec
2969 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2970 }
2971
2972 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2973 if (new_asm_operands_vec == old_asm_operands_vec)
2974 return x;
2975
2976 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2977 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2978 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2979 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2980 ASM_OPERANDS_SOURCE_FILE (x),
2981 ASM_OPERANDS_SOURCE_LINE (x));
2982 new->volatil = x->volatil;
2983 return new;
2984 }
2985
2986 case SET:
2987 /* Check for setting a register that we know about. */
2988 if (GET_CODE (SET_DEST (x)) == REG)
2989 {
2990 /* See if this is setting the replacement register for an
2991 elimination.
2992
2993 If DEST is the hard frame pointer, we do nothing because we
2994 assume that all assignments to the frame pointer are for
2995 non-local gotos and are being done at a time when they are valid
2996 and do not disturb anything else. Some machines want to
2997 eliminate a fake argument pointer (or even a fake frame pointer)
2998 with either the real frame or the stack pointer. Assignments to
2999 the hard frame pointer must not prevent this elimination. */
3000
3001 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3002 ep++)
3003 if (ep->to_rtx == SET_DEST (x)
3004 && SET_DEST (x) != hard_frame_pointer_rtx)
3005 {
3006 /* If it is being incremented, adjust the offset. Otherwise,
3007 this elimination can't be done. */
3008 rtx src = SET_SRC (x);
3009
3010 if (GET_CODE (src) == PLUS
3011 && XEXP (src, 0) == SET_DEST (x)
3012 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3013 ep->offset -= INTVAL (XEXP (src, 1));
3014 else
3015 ep->can_eliminate = 0;
3016 }
3017
3018 /* Now check to see we are assigning to a register that can be
3019 eliminated. If so, it must be as part of a PARALLEL, since we
3020 will not have been called if this is a single SET. So indicate
3021 that we can no longer eliminate this reg. */
3022 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3023 ep++)
3024 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3025 ep->can_eliminate = 0;
3026 }
3027
3028 /* Now avoid the loop below in this common case. */
3029 {
3030 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3031 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3032
3033 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3034 write a CLOBBER insn. */
3035 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3036 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3037 && GET_CODE (insn) != INSN_LIST)
3038 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3039
3040 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3041 return gen_rtx (SET, VOIDmode, new0, new1);
3042 }
3043
3044 return x;
3045
3046 case MEM:
3047 /* Our only special processing is to pass the mode of the MEM to our
3048 recursive call and copy the flags. While we are here, handle this
3049 case more efficiently. */
3050 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3051 if (new != XEXP (x, 0))
3052 {
3053 new = gen_rtx (MEM, GET_MODE (x), new);
3054 new->volatil = x->volatil;
3055 new->unchanging = x->unchanging;
3056 new->in_struct = x->in_struct;
3057 return new;
3058 }
3059 else
3060 return x;
3061 }
3062
3063 /* Process each of our operands recursively. If any have changed, make a
3064 copy of the rtx. */
3065 fmt = GET_RTX_FORMAT (code);
3066 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3067 {
3068 if (*fmt == 'e')
3069 {
3070 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3071 if (new != XEXP (x, i) && ! copied)
3072 {
3073 rtx new_x = rtx_alloc (code);
3074 bcopy ((char *) x, (char *) new_x,
3075 (sizeof (*new_x) - sizeof (new_x->fld)
3076 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3077 x = new_x;
3078 copied = 1;
3079 }
3080 XEXP (x, i) = new;
3081 }
3082 else if (*fmt == 'E')
3083 {
3084 int copied_vec = 0;
3085 for (j = 0; j < XVECLEN (x, i); j++)
3086 {
3087 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3088 if (new != XVECEXP (x, i, j) && ! copied_vec)
3089 {
3090 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3091 &XVECEXP (x, i, 0));
3092 if (! copied)
3093 {
3094 rtx new_x = rtx_alloc (code);
3095 bcopy ((char *) x, (char *) new_x,
3096 (sizeof (*new_x) - sizeof (new_x->fld)
3097 + (sizeof (new_x->fld[0])
3098 * GET_RTX_LENGTH (code))));
3099 x = new_x;
3100 copied = 1;
3101 }
3102 XVEC (x, i) = new_v;
3103 copied_vec = 1;
3104 }
3105 XVECEXP (x, i, j) = new;
3106 }
3107 }
3108 }
3109
3110 return x;
3111 }
3112 \f
3113 /* Scan INSN and eliminate all eliminable registers in it.
3114
3115 If REPLACE is nonzero, do the replacement destructively. Also
3116 delete the insn as dead it if it is setting an eliminable register.
3117
3118 If REPLACE is zero, do all our allocations in reload_obstack.
3119
3120 If no eliminations were done and this insn doesn't require any elimination
3121 processing (these are not identical conditions: it might be updating sp,
3122 but not referencing fp; this needs to be seen during reload_as_needed so
3123 that the offset between fp and sp can be taken into consideration), zero
3124 is returned. Otherwise, 1 is returned. */
3125
3126 static int
3127 eliminate_regs_in_insn (insn, replace)
3128 rtx insn;
3129 int replace;
3130 {
3131 rtx old_body = PATTERN (insn);
3132 rtx old_set = single_set (insn);
3133 rtx new_body;
3134 int val = 0;
3135 struct elim_table *ep;
3136
3137 if (! replace)
3138 push_obstacks (&reload_obstack, &reload_obstack);
3139
3140 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3141 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3142 {
3143 /* Check for setting an eliminable register. */
3144 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3145 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3146 {
3147 /* In this case this insn isn't serving a useful purpose. We
3148 will delete it in reload_as_needed once we know that this
3149 elimination is, in fact, being done.
3150
3151 If REPLACE isn't set, we can't delete this insn, but neededn't
3152 process it since it won't be used unless something changes. */
3153 if (replace)
3154 delete_dead_insn (insn);
3155 val = 1;
3156 goto done;
3157 }
3158
3159 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3160 in the insn is the negative of the offset in FROM. Substitute
3161 (set (reg) (reg to)) for the insn and change its code.
3162
3163 We have to do this here, rather than in eliminate_regs, do that we can
3164 change the insn code. */
3165
3166 if (GET_CODE (SET_SRC (old_set)) == PLUS
3167 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3168 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3169 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3170 ep++)
3171 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3172 && ep->can_eliminate)
3173 {
3174 /* We must stop at the first elimination that will be used.
3175 If this one would replace the PLUS with a REG, do it
3176 now. Otherwise, quit the loop and let eliminate_regs
3177 do its normal replacement. */
3178 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3179 {
3180 /* We assume here that we don't need a PARALLEL of
3181 any CLOBBERs for this assignment. There's not
3182 much we can do if we do need it. */
3183 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3184 SET_DEST (old_set), ep->to_rtx);
3185 INSN_CODE (insn) = -1;
3186 val = 1;
3187 goto done;
3188 }
3189
3190 break;
3191 }
3192 }
3193
3194 old_asm_operands_vec = 0;
3195
3196 /* Replace the body of this insn with a substituted form. If we changed
3197 something, return non-zero.
3198
3199 If we are replacing a body that was a (set X (plus Y Z)), try to
3200 re-recognize the insn. We do this in case we had a simple addition
3201 but now can do this as a load-address. This saves an insn in this
3202 common case. */
3203
3204 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3205 if (new_body != old_body)
3206 {
3207 /* If we aren't replacing things permanently and we changed something,
3208 make another copy to ensure that all the RTL is new. Otherwise
3209 things can go wrong if find_reload swaps commutative operands
3210 and one is inside RTL that has been copied while the other is not. */
3211
3212 /* Don't copy an asm_operands because (1) there's no need and (2)
3213 copy_rtx can't do it properly when there are multiple outputs. */
3214 if (! replace && asm_noperands (old_body) < 0)
3215 new_body = copy_rtx (new_body);
3216
3217 /* If we had a move insn but now we don't, rerecognize it. This will
3218 cause spurious re-recognition if the old move had a PARALLEL since
3219 the new one still will, but we can't call single_set without
3220 having put NEW_BODY into the insn and the re-recognition won't
3221 hurt in this rare case. */
3222 if (old_set != 0
3223 && ((GET_CODE (SET_SRC (old_set)) == REG
3224 && (GET_CODE (new_body) != SET
3225 || GET_CODE (SET_SRC (new_body)) != REG))
3226 /* If this was a load from or store to memory, compare
3227 the MEM in recog_operand to the one in the insn. If they
3228 are not equal, then rerecognize the insn. */
3229 || (old_set != 0
3230 && ((GET_CODE (SET_SRC (old_set)) == MEM
3231 && SET_SRC (old_set) != recog_operand[1])
3232 || (GET_CODE (SET_DEST (old_set)) == MEM
3233 && SET_DEST (old_set) != recog_operand[0])))
3234 /* If this was an add insn before, rerecognize. */
3235 || GET_CODE (SET_SRC (old_set)) == PLUS))
3236 {
3237 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3238 /* If recognition fails, store the new body anyway.
3239 It's normal to have recognition failures here
3240 due to bizarre memory addresses; reloading will fix them. */
3241 PATTERN (insn) = new_body;
3242 }
3243 else
3244 PATTERN (insn) = new_body;
3245
3246 val = 1;
3247 }
3248
3249 /* Loop through all elimination pairs. See if any have changed and
3250 recalculate the number not at initial offset.
3251
3252 Compute the maximum offset (minimum offset if the stack does not
3253 grow downward) for each elimination pair.
3254
3255 We also detect a cases where register elimination cannot be done,
3256 namely, if a register would be both changed and referenced outside a MEM
3257 in the resulting insn since such an insn is often undefined and, even if
3258 not, we cannot know what meaning will be given to it. Note that it is
3259 valid to have a register used in an address in an insn that changes it
3260 (presumably with a pre- or post-increment or decrement).
3261
3262 If anything changes, return nonzero. */
3263
3264 num_not_at_initial_offset = 0;
3265 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3266 {
3267 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3268 ep->can_eliminate = 0;
3269
3270 ep->ref_outside_mem = 0;
3271
3272 if (ep->previous_offset != ep->offset)
3273 val = 1;
3274
3275 ep->previous_offset = ep->offset;
3276 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3277 num_not_at_initial_offset++;
3278
3279 #ifdef STACK_GROWS_DOWNWARD
3280 ep->max_offset = MAX (ep->max_offset, ep->offset);
3281 #else
3282 ep->max_offset = MIN (ep->max_offset, ep->offset);
3283 #endif
3284 }
3285
3286 done:
3287 /* If we changed something, perform elmination in REG_NOTES. This is
3288 needed even when REPLACE is zero because a REG_DEAD note might refer
3289 to a register that we eliminate and could cause a different number
3290 of spill registers to be needed in the final reload pass than in
3291 the pre-passes. */
3292 if (val && REG_NOTES (insn) != 0)
3293 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3294
3295 if (! replace)
3296 pop_obstacks ();
3297
3298 return val;
3299 }
3300
3301 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3302 replacement we currently believe is valid, mark it as not eliminable if X
3303 modifies DEST in any way other than by adding a constant integer to it.
3304
3305 If DEST is the frame pointer, we do nothing because we assume that
3306 all assignments to the hard frame pointer are nonlocal gotos and are being
3307 done at a time when they are valid and do not disturb anything else.
3308 Some machines want to eliminate a fake argument pointer with either the
3309 frame or stack pointer. Assignments to the hard frame pointer must not
3310 prevent this elimination.
3311
3312 Called via note_stores from reload before starting its passes to scan
3313 the insns of the function. */
3314
3315 static void
3316 mark_not_eliminable (dest, x)
3317 rtx dest;
3318 rtx x;
3319 {
3320 register int i;
3321
3322 /* A SUBREG of a hard register here is just changing its mode. We should
3323 not see a SUBREG of an eliminable hard register, but check just in
3324 case. */
3325 if (GET_CODE (dest) == SUBREG)
3326 dest = SUBREG_REG (dest);
3327
3328 if (dest == hard_frame_pointer_rtx)
3329 return;
3330
3331 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3332 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3333 && (GET_CODE (x) != SET
3334 || GET_CODE (SET_SRC (x)) != PLUS
3335 || XEXP (SET_SRC (x), 0) != dest
3336 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3337 {
3338 reg_eliminate[i].can_eliminate_previous
3339 = reg_eliminate[i].can_eliminate = 0;
3340 num_eliminable--;
3341 }
3342 }
3343 \f
3344 /* Kick all pseudos out of hard register REGNO.
3345 If GLOBAL is nonzero, try to find someplace else to put them.
3346 If DUMPFILE is nonzero, log actions taken on that file.
3347
3348 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3349 because we found we can't eliminate some register. In the case, no pseudos
3350 are allowed to be in the register, even if they are only in a block that
3351 doesn't require spill registers, unlike the case when we are spilling this
3352 hard reg to produce another spill register.
3353
3354 Return nonzero if any pseudos needed to be kicked out. */
3355
3356 static int
3357 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3358 register int regno;
3359 int global;
3360 FILE *dumpfile;
3361 int cant_eliminate;
3362 {
3363 enum reg_class class = REGNO_REG_CLASS (regno);
3364 int something_changed = 0;
3365 register int i;
3366
3367 SET_HARD_REG_BIT (forbidden_regs, regno);
3368
3369 if (cant_eliminate)
3370 regs_ever_live[regno] = 1;
3371
3372 /* Spill every pseudo reg that was allocated to this reg
3373 or to something that overlaps this reg. */
3374
3375 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3376 if (reg_renumber[i] >= 0
3377 && reg_renumber[i] <= regno
3378 && (reg_renumber[i]
3379 + HARD_REGNO_NREGS (reg_renumber[i],
3380 PSEUDO_REGNO_MODE (i))
3381 > regno))
3382 {
3383 /* If this register belongs solely to a basic block which needed no
3384 spilling of any class that this register is contained in,
3385 leave it be, unless we are spilling this register because
3386 it was a hard register that can't be eliminated. */
3387
3388 if (! cant_eliminate
3389 && basic_block_needs[0]
3390 && reg_basic_block[i] >= 0
3391 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3392 {
3393 enum reg_class *p;
3394
3395 for (p = reg_class_superclasses[(int) class];
3396 *p != LIM_REG_CLASSES; p++)
3397 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3398 break;
3399
3400 if (*p == LIM_REG_CLASSES)
3401 continue;
3402 }
3403
3404 /* Mark it as no longer having a hard register home. */
3405 reg_renumber[i] = -1;
3406 /* We will need to scan everything again. */
3407 something_changed = 1;
3408 if (global)
3409 retry_global_alloc (i, forbidden_regs);
3410
3411 alter_reg (i, regno);
3412 if (dumpfile)
3413 {
3414 if (reg_renumber[i] == -1)
3415 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3416 else
3417 fprintf (dumpfile, " Register %d now in %d.\n\n",
3418 i, reg_renumber[i]);
3419 }
3420 }
3421 for (i = 0; i < scratch_list_length; i++)
3422 {
3423 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3424 {
3425 if (! cant_eliminate && basic_block_needs[0]
3426 && ! basic_block_needs[(int) class][scratch_block[i]])
3427 {
3428 enum reg_class *p;
3429
3430 for (p = reg_class_superclasses[(int) class];
3431 *p != LIM_REG_CLASSES; p++)
3432 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3433 break;
3434
3435 if (*p == LIM_REG_CLASSES)
3436 continue;
3437 }
3438 PUT_CODE (scratch_list[i], SCRATCH);
3439 scratch_list[i] = 0;
3440 something_changed = 1;
3441 continue;
3442 }
3443 }
3444
3445 return something_changed;
3446 }
3447 \f
3448 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3449 Also mark any hard registers used to store user variables as
3450 forbidden from being used for spill registers. */
3451
3452 static void
3453 scan_paradoxical_subregs (x)
3454 register rtx x;
3455 {
3456 register int i;
3457 register char *fmt;
3458 register enum rtx_code code = GET_CODE (x);
3459
3460 switch (code)
3461 {
3462 case REG:
3463 #ifdef SMALL_REGISTER_CLASSES
3464 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3465 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3466 #endif
3467 return;
3468
3469 case CONST_INT:
3470 case CONST:
3471 case SYMBOL_REF:
3472 case LABEL_REF:
3473 case CONST_DOUBLE:
3474 case CC0:
3475 case PC:
3476 case USE:
3477 case CLOBBER:
3478 return;
3479
3480 case SUBREG:
3481 if (GET_CODE (SUBREG_REG (x)) == REG
3482 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3483 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3484 = GET_MODE_SIZE (GET_MODE (x));
3485 return;
3486 }
3487
3488 fmt = GET_RTX_FORMAT (code);
3489 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3490 {
3491 if (fmt[i] == 'e')
3492 scan_paradoxical_subregs (XEXP (x, i));
3493 else if (fmt[i] == 'E')
3494 {
3495 register int j;
3496 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3497 scan_paradoxical_subregs (XVECEXP (x, i, j));
3498 }
3499 }
3500 }
3501 \f
3502 static int
3503 hard_reg_use_compare (p1, p2)
3504 struct hard_reg_n_uses *p1, *p2;
3505 {
3506 int tem = p1->uses - p2->uses;
3507 if (tem != 0) return tem;
3508 /* If regs are equally good, sort by regno,
3509 so that the results of qsort leave nothing to chance. */
3510 return p1->regno - p2->regno;
3511 }
3512
3513 /* Choose the order to consider regs for use as reload registers
3514 based on how much trouble would be caused by spilling one.
3515 Store them in order of decreasing preference in potential_reload_regs. */
3516
3517 static void
3518 order_regs_for_reload ()
3519 {
3520 register int i;
3521 register int o = 0;
3522 int large = 0;
3523
3524 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3525
3526 CLEAR_HARD_REG_SET (bad_spill_regs);
3527
3528 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3529 potential_reload_regs[i] = -1;
3530
3531 /* Count number of uses of each hard reg by pseudo regs allocated to it
3532 and then order them by decreasing use. */
3533
3534 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3535 {
3536 hard_reg_n_uses[i].uses = 0;
3537 hard_reg_n_uses[i].regno = i;
3538 }
3539
3540 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3541 {
3542 int regno = reg_renumber[i];
3543 if (regno >= 0)
3544 {
3545 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3546 while (regno < lim)
3547 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3548 }
3549 large += reg_n_refs[i];
3550 }
3551
3552 /* Now fixed registers (which cannot safely be used for reloading)
3553 get a very high use count so they will be considered least desirable.
3554 Registers used explicitly in the rtl code are almost as bad. */
3555
3556 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3557 {
3558 if (fixed_regs[i])
3559 {
3560 hard_reg_n_uses[i].uses += 2 * large + 2;
3561 SET_HARD_REG_BIT (bad_spill_regs, i);
3562 }
3563 else if (regs_explicitly_used[i])
3564 {
3565 hard_reg_n_uses[i].uses += large + 1;
3566 #ifndef SMALL_REGISTER_CLASSES
3567 /* ??? We are doing this here because of the potential that
3568 bad code may be generated if a register explicitly used in
3569 an insn was used as a spill register for that insn. But
3570 not using these are spill registers may lose on some machine.
3571 We'll have to see how this works out. */
3572 SET_HARD_REG_BIT (bad_spill_regs, i);
3573 #endif
3574 }
3575 }
3576 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3577 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3578
3579 #ifdef ELIMINABLE_REGS
3580 /* If registers other than the frame pointer are eliminable, mark them as
3581 poor choices. */
3582 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3583 {
3584 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3585 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3586 }
3587 #endif
3588
3589 /* Prefer registers not so far used, for use in temporary loading.
3590 Among them, if REG_ALLOC_ORDER is defined, use that order.
3591 Otherwise, prefer registers not preserved by calls. */
3592
3593 #ifdef REG_ALLOC_ORDER
3594 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3595 {
3596 int regno = reg_alloc_order[i];
3597
3598 if (hard_reg_n_uses[regno].uses == 0)
3599 potential_reload_regs[o++] = regno;
3600 }
3601 #else
3602 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3603 {
3604 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3605 potential_reload_regs[o++] = i;
3606 }
3607 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3608 {
3609 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3610 potential_reload_regs[o++] = i;
3611 }
3612 #endif
3613
3614 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3615 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3616
3617 /* Now add the regs that are already used,
3618 preferring those used less often. The fixed and otherwise forbidden
3619 registers will be at the end of this list. */
3620
3621 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3622 if (hard_reg_n_uses[i].uses != 0)
3623 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3624 }
3625 \f
3626 /* Used in reload_as_needed to sort the spilled regs. */
3627 static int
3628 compare_spill_regs (r1, r2)
3629 short *r1, *r2;
3630 {
3631 return *r1 < *r2 ? -1: 1;
3632 }
3633
3634 /* Reload pseudo-registers into hard regs around each insn as needed.
3635 Additional register load insns are output before the insn that needs it
3636 and perhaps store insns after insns that modify the reloaded pseudo reg.
3637
3638 reg_last_reload_reg and reg_reloaded_contents keep track of
3639 which registers are already available in reload registers.
3640 We update these for the reloads that we perform,
3641 as the insns are scanned. */
3642
3643 static void
3644 reload_as_needed (first, live_known)
3645 rtx first;
3646 int live_known;
3647 {
3648 register rtx insn;
3649 register int i;
3650 int this_block = 0;
3651 rtx x;
3652 rtx after_call = 0;
3653
3654 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3655 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
3656 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3657 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
3658 reg_has_output_reload = (char *) alloca (max_regno);
3659 for (i = 0; i < n_spills; i++)
3660 {
3661 reg_reloaded_contents[i] = -1;
3662 reg_reloaded_insn[i] = 0;
3663 }
3664
3665 /* Reset all offsets on eliminable registers to their initial values. */
3666 #ifdef ELIMINABLE_REGS
3667 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3668 {
3669 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3670 reg_eliminate[i].initial_offset);
3671 reg_eliminate[i].previous_offset
3672 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3673 }
3674 #else
3675 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3676 reg_eliminate[0].previous_offset
3677 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3678 #endif
3679
3680 num_not_at_initial_offset = 0;
3681
3682 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
3683 pack registers with group needs. */
3684 if (n_spills > 1)
3685 {
3686 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
3687 for (i = 0; i < n_spills; i++)
3688 spill_reg_order[spill_regs[i]] = i;
3689 }
3690
3691 for (insn = first; insn;)
3692 {
3693 register rtx next = NEXT_INSN (insn);
3694
3695 /* Notice when we move to a new basic block. */
3696 if (live_known && this_block + 1 < n_basic_blocks
3697 && insn == basic_block_head[this_block+1])
3698 ++this_block;
3699
3700 /* If we pass a label, copy the offsets from the label information
3701 into the current offsets of each elimination. */
3702 if (GET_CODE (insn) == CODE_LABEL)
3703 {
3704 num_not_at_initial_offset = 0;
3705 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3706 {
3707 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3708 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3709 if (reg_eliminate[i].can_eliminate
3710 && (reg_eliminate[i].offset
3711 != reg_eliminate[i].initial_offset))
3712 num_not_at_initial_offset++;
3713 }
3714 }
3715
3716 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3717 {
3718 rtx avoid_return_reg = 0;
3719
3720 #ifdef SMALL_REGISTER_CLASSES
3721 /* Set avoid_return_reg if this is an insn
3722 that might use the value of a function call. */
3723 if (GET_CODE (insn) == CALL_INSN)
3724 {
3725 if (GET_CODE (PATTERN (insn)) == SET)
3726 after_call = SET_DEST (PATTERN (insn));
3727 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3728 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3729 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3730 else
3731 after_call = 0;
3732 }
3733 else if (after_call != 0
3734 && !(GET_CODE (PATTERN (insn)) == SET
3735 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3736 {
3737 if (reg_referenced_p (after_call, PATTERN (insn)))
3738 avoid_return_reg = after_call;
3739 after_call = 0;
3740 }
3741 #endif /* SMALL_REGISTER_CLASSES */
3742
3743 /* If this is a USE and CLOBBER of a MEM, ensure that any
3744 references to eliminable registers have been removed. */
3745
3746 if ((GET_CODE (PATTERN (insn)) == USE
3747 || GET_CODE (PATTERN (insn)) == CLOBBER)
3748 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3749 XEXP (XEXP (PATTERN (insn), 0), 0)
3750 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3751 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
3752
3753 /* If we need to do register elimination processing, do so.
3754 This might delete the insn, in which case we are done. */
3755 if (num_eliminable && GET_MODE (insn) == QImode)
3756 {
3757 eliminate_regs_in_insn (insn, 1);
3758 if (GET_CODE (insn) == NOTE)
3759 {
3760 insn = next;
3761 continue;
3762 }
3763 }
3764
3765 if (GET_MODE (insn) == VOIDmode)
3766 n_reloads = 0;
3767 /* First find the pseudo regs that must be reloaded for this insn.
3768 This info is returned in the tables reload_... (see reload.h).
3769 Also modify the body of INSN by substituting RELOAD
3770 rtx's for those pseudo regs. */
3771 else
3772 {
3773 bzero (reg_has_output_reload, max_regno);
3774 CLEAR_HARD_REG_SET (reg_is_output_reload);
3775
3776 find_reloads (insn, 1, spill_indirect_levels, live_known,
3777 spill_reg_order);
3778 }
3779
3780 if (n_reloads > 0)
3781 {
3782 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3783 rtx p;
3784 int class;
3785
3786 /* If this block has not had spilling done for a
3787 particular clas and we have any non-optionals that need a
3788 spill reg in that class, abort. */
3789
3790 for (class = 0; class < N_REG_CLASSES; class++)
3791 if (basic_block_needs[class] != 0
3792 && basic_block_needs[class][this_block] == 0)
3793 for (i = 0; i < n_reloads; i++)
3794 if (class == (int) reload_reg_class[i]
3795 && reload_reg_rtx[i] == 0
3796 && ! reload_optional[i]
3797 && (reload_in[i] != 0 || reload_out[i] != 0
3798 || reload_secondary_p[i] != 0))
3799 abort ();
3800
3801 /* Now compute which reload regs to reload them into. Perhaps
3802 reusing reload regs from previous insns, or else output
3803 load insns to reload them. Maybe output store insns too.
3804 Record the choices of reload reg in reload_reg_rtx. */
3805 choose_reload_regs (insn, avoid_return_reg);
3806
3807 #ifdef SMALL_REGISTER_CLASSES
3808 /* Merge any reloads that we didn't combine for fear of
3809 increasing the number of spill registers needed but now
3810 discover can be safely merged. */
3811 merge_assigned_reloads (insn);
3812 #endif
3813
3814 /* Generate the insns to reload operands into or out of
3815 their reload regs. */
3816 emit_reload_insns (insn);
3817
3818 /* Substitute the chosen reload regs from reload_reg_rtx
3819 into the insn's body (or perhaps into the bodies of other
3820 load and store insn that we just made for reloading
3821 and that we moved the structure into). */
3822 subst_reloads ();
3823
3824 /* If this was an ASM, make sure that all the reload insns
3825 we have generated are valid. If not, give an error
3826 and delete them. */
3827
3828 if (asm_noperands (PATTERN (insn)) >= 0)
3829 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3830 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3831 && (recog_memoized (p) < 0
3832 || (insn_extract (p),
3833 ! constrain_operands (INSN_CODE (p), 1))))
3834 {
3835 error_for_asm (insn,
3836 "`asm' operand requires impossible reload");
3837 PUT_CODE (p, NOTE);
3838 NOTE_SOURCE_FILE (p) = 0;
3839 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3840 }
3841 }
3842 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3843 is no longer validly lying around to save a future reload.
3844 Note that this does not detect pseudos that were reloaded
3845 for this insn in order to be stored in
3846 (obeying register constraints). That is correct; such reload
3847 registers ARE still valid. */
3848 note_stores (PATTERN (insn), forget_old_reloads_1);
3849
3850 /* There may have been CLOBBER insns placed after INSN. So scan
3851 between INSN and NEXT and use them to forget old reloads. */
3852 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3853 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3854 note_stores (PATTERN (x), forget_old_reloads_1);
3855
3856 #ifdef AUTO_INC_DEC
3857 /* Likewise for regs altered by auto-increment in this insn.
3858 But note that the reg-notes are not changed by reloading:
3859 they still contain the pseudo-regs, not the spill regs. */
3860 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3861 if (REG_NOTE_KIND (x) == REG_INC)
3862 {
3863 /* See if this pseudo reg was reloaded in this insn.
3864 If so, its last-reload info is still valid
3865 because it is based on this insn's reload. */
3866 for (i = 0; i < n_reloads; i++)
3867 if (reload_out[i] == XEXP (x, 0))
3868 break;
3869
3870 if (i == n_reloads)
3871 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
3872 }
3873 #endif
3874 }
3875 /* A reload reg's contents are unknown after a label. */
3876 if (GET_CODE (insn) == CODE_LABEL)
3877 for (i = 0; i < n_spills; i++)
3878 {
3879 reg_reloaded_contents[i] = -1;
3880 reg_reloaded_insn[i] = 0;
3881 }
3882
3883 /* Don't assume a reload reg is still good after a call insn
3884 if it is a call-used reg. */
3885 else if (GET_CODE (insn) == CALL_INSN)
3886 for (i = 0; i < n_spills; i++)
3887 if (call_used_regs[spill_regs[i]])
3888 {
3889 reg_reloaded_contents[i] = -1;
3890 reg_reloaded_insn[i] = 0;
3891 }
3892
3893 /* In case registers overlap, allow certain insns to invalidate
3894 particular hard registers. */
3895
3896 #ifdef INSN_CLOBBERS_REGNO_P
3897 for (i = 0 ; i < n_spills ; i++)
3898 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3899 {
3900 reg_reloaded_contents[i] = -1;
3901 reg_reloaded_insn[i] = 0;
3902 }
3903 #endif
3904
3905 insn = next;
3906
3907 #ifdef USE_C_ALLOCA
3908 alloca (0);
3909 #endif
3910 }
3911 }
3912
3913 /* Discard all record of any value reloaded from X,
3914 or reloaded in X from someplace else;
3915 unless X is an output reload reg of the current insn.
3916
3917 X may be a hard reg (the reload reg)
3918 or it may be a pseudo reg that was reloaded from. */
3919
3920 static void
3921 forget_old_reloads_1 (x, ignored)
3922 rtx x;
3923 rtx ignored;
3924 {
3925 register int regno;
3926 int nr;
3927 int offset = 0;
3928
3929 /* note_stores does give us subregs of hard regs. */
3930 while (GET_CODE (x) == SUBREG)
3931 {
3932 offset += SUBREG_WORD (x);
3933 x = SUBREG_REG (x);
3934 }
3935
3936 if (GET_CODE (x) != REG)
3937 return;
3938
3939 regno = REGNO (x) + offset;
3940
3941 if (regno >= FIRST_PSEUDO_REGISTER)
3942 nr = 1;
3943 else
3944 {
3945 int i;
3946 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3947 /* Storing into a spilled-reg invalidates its contents.
3948 This can happen if a block-local pseudo is allocated to that reg
3949 and it wasn't spilled because this block's total need is 0.
3950 Then some insn might have an optional reload and use this reg. */
3951 for (i = 0; i < nr; i++)
3952 if (spill_reg_order[regno + i] >= 0
3953 /* But don't do this if the reg actually serves as an output
3954 reload reg in the current instruction. */
3955 && (n_reloads == 0
3956 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3957 {
3958 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3959 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3960 }
3961 }
3962
3963 /* Since value of X has changed,
3964 forget any value previously copied from it. */
3965
3966 while (nr-- > 0)
3967 /* But don't forget a copy if this is the output reload
3968 that establishes the copy's validity. */
3969 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3970 reg_last_reload_reg[regno + nr] = 0;
3971 }
3972 \f
3973 /* For each reload, the mode of the reload register. */
3974 static enum machine_mode reload_mode[MAX_RELOADS];
3975
3976 /* For each reload, the largest number of registers it will require. */
3977 static int reload_nregs[MAX_RELOADS];
3978
3979 /* Comparison function for qsort to decide which of two reloads
3980 should be handled first. *P1 and *P2 are the reload numbers. */
3981
3982 static int
3983 reload_reg_class_lower (p1, p2)
3984 short *p1, *p2;
3985 {
3986 register int r1 = *p1, r2 = *p2;
3987 register int t;
3988
3989 /* Consider required reloads before optional ones. */
3990 t = reload_optional[r1] - reload_optional[r2];
3991 if (t != 0)
3992 return t;
3993
3994 /* Count all solitary classes before non-solitary ones. */
3995 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3996 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3997 if (t != 0)
3998 return t;
3999
4000 /* Aside from solitaires, consider all multi-reg groups first. */
4001 t = reload_nregs[r2] - reload_nregs[r1];
4002 if (t != 0)
4003 return t;
4004
4005 /* Consider reloads in order of increasing reg-class number. */
4006 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4007 if (t != 0)
4008 return t;
4009
4010 /* If reloads are equally urgent, sort by reload number,
4011 so that the results of qsort leave nothing to chance. */
4012 return r1 - r2;
4013 }
4014 \f
4015 /* The following HARD_REG_SETs indicate when each hard register is
4016 used for a reload of various parts of the current insn. */
4017
4018 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4019 static HARD_REG_SET reload_reg_used;
4020 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4021 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4022 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4023 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4024 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4025 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4026 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4027 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4028 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4029 static HARD_REG_SET reload_reg_used_in_op_addr;
4030 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4031 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4032 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4033 static HARD_REG_SET reload_reg_used_in_insn;
4034 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4035 static HARD_REG_SET reload_reg_used_in_other_addr;
4036
4037 /* If reg is in use as a reload reg for any sort of reload. */
4038 static HARD_REG_SET reload_reg_used_at_all;
4039
4040 /* If reg is use as an inherited reload. We just mark the first register
4041 in the group. */
4042 static HARD_REG_SET reload_reg_used_for_inherit;
4043
4044 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4045 TYPE. MODE is used to indicate how many consecutive regs are
4046 actually used. */
4047
4048 static void
4049 mark_reload_reg_in_use (regno, opnum, type, mode)
4050 int regno;
4051 int opnum;
4052 enum reload_type type;
4053 enum machine_mode mode;
4054 {
4055 int nregs = HARD_REGNO_NREGS (regno, mode);
4056 int i;
4057
4058 for (i = regno; i < nregs + regno; i++)
4059 {
4060 switch (type)
4061 {
4062 case RELOAD_OTHER:
4063 SET_HARD_REG_BIT (reload_reg_used, i);
4064 break;
4065
4066 case RELOAD_FOR_INPUT_ADDRESS:
4067 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4068 break;
4069
4070 case RELOAD_FOR_OUTPUT_ADDRESS:
4071 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4072 break;
4073
4074 case RELOAD_FOR_OPERAND_ADDRESS:
4075 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4076 break;
4077
4078 case RELOAD_FOR_OPADDR_ADDR:
4079 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4080 break;
4081
4082 case RELOAD_FOR_OTHER_ADDRESS:
4083 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4084 break;
4085
4086 case RELOAD_FOR_INPUT:
4087 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4088 break;
4089
4090 case RELOAD_FOR_OUTPUT:
4091 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4092 break;
4093
4094 case RELOAD_FOR_INSN:
4095 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4096 break;
4097 }
4098
4099 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4100 }
4101 }
4102
4103 /* Similarly, but show REGNO is no longer in use for a reload. */
4104
4105 static void
4106 clear_reload_reg_in_use (regno, opnum, type, mode)
4107 int regno;
4108 int opnum;
4109 enum reload_type type;
4110 enum machine_mode mode;
4111 {
4112 int nregs = HARD_REGNO_NREGS (regno, mode);
4113 int i;
4114
4115 for (i = regno; i < nregs + regno; i++)
4116 {
4117 switch (type)
4118 {
4119 case RELOAD_OTHER:
4120 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4121 break;
4122
4123 case RELOAD_FOR_INPUT_ADDRESS:
4124 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4125 break;
4126
4127 case RELOAD_FOR_OUTPUT_ADDRESS:
4128 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4129 break;
4130
4131 case RELOAD_FOR_OPERAND_ADDRESS:
4132 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4133 break;
4134
4135 case RELOAD_FOR_OPADDR_ADDR:
4136 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4137 break;
4138
4139 case RELOAD_FOR_OTHER_ADDRESS:
4140 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4141 break;
4142
4143 case RELOAD_FOR_INPUT:
4144 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4145 break;
4146
4147 case RELOAD_FOR_OUTPUT:
4148 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4149 break;
4150
4151 case RELOAD_FOR_INSN:
4152 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4153 break;
4154 }
4155 }
4156 }
4157
4158 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4159 specified by OPNUM and TYPE. */
4160
4161 static int
4162 reload_reg_free_p (regno, opnum, type)
4163 int regno;
4164 int opnum;
4165 enum reload_type type;
4166 {
4167 int i;
4168
4169 /* In use for a RELOAD_OTHER means it's not available for anything except
4170 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4171 to be used only for inputs. */
4172
4173 if (type != RELOAD_FOR_OTHER_ADDRESS
4174 && TEST_HARD_REG_BIT (reload_reg_used, regno))
4175 return 0;
4176
4177 switch (type)
4178 {
4179 case RELOAD_OTHER:
4180 /* In use for anything except RELOAD_FOR_OTHER_ADDRESS means
4181 we can't use it for RELOAD_OTHER. */
4182 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4183 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4184 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4185 return 0;
4186
4187 for (i = 0; i < reload_n_operands; i++)
4188 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4189 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4190 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4191 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4192 return 0;
4193
4194 return 1;
4195
4196 case RELOAD_FOR_INPUT:
4197 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4198 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4199 return 0;
4200
4201 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4202 return 0;
4203
4204 /* If it is used for some other input, can't use it. */
4205 for (i = 0; i < reload_n_operands; i++)
4206 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4207 return 0;
4208
4209 /* If it is used in a later operand's address, can't use it. */
4210 for (i = opnum + 1; i < reload_n_operands; i++)
4211 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4212 return 0;
4213
4214 return 1;
4215
4216 case RELOAD_FOR_INPUT_ADDRESS:
4217 /* Can't use a register if it is used for an input address for this
4218 operand or used as an input in an earlier one. */
4219 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4220 return 0;
4221
4222 for (i = 0; i < opnum; i++)
4223 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4224 return 0;
4225
4226 return 1;
4227
4228 case RELOAD_FOR_OUTPUT_ADDRESS:
4229 /* Can't use a register if it is used for an output address for this
4230 operand or used as an output in this or a later operand. */
4231 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4232 return 0;
4233
4234 for (i = opnum; i < reload_n_operands; i++)
4235 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4236 return 0;
4237
4238 return 1;
4239
4240 case RELOAD_FOR_OPERAND_ADDRESS:
4241 for (i = 0; i < reload_n_operands; i++)
4242 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4243 return 0;
4244
4245 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4246 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4247
4248 case RELOAD_FOR_OPADDR_ADDR:
4249 for (i = 0; i < reload_n_operands; i++)
4250 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4251 return 0;
4252
4253 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4254
4255 case RELOAD_FOR_OUTPUT:
4256 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4257 outputs, or an operand address for this or an earlier output. */
4258 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4259 return 0;
4260
4261 for (i = 0; i < reload_n_operands; i++)
4262 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4263 return 0;
4264
4265 for (i = 0; i <= opnum; i++)
4266 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4267 return 0;
4268
4269 return 1;
4270
4271 case RELOAD_FOR_INSN:
4272 for (i = 0; i < reload_n_operands; i++)
4273 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4274 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4275 return 0;
4276
4277 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4278 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4279
4280 case RELOAD_FOR_OTHER_ADDRESS:
4281 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4282 }
4283 abort ();
4284 }
4285
4286 /* Return 1 if the value in reload reg REGNO, as used by a reload
4287 needed for the part of the insn specified by OPNUM and TYPE,
4288 is not in use for a reload in any prior part of the insn.
4289
4290 We can assume that the reload reg was already tested for availability
4291 at the time it is needed, and we should not check this again,
4292 in case the reg has already been marked in use. */
4293
4294 static int
4295 reload_reg_free_before_p (regno, opnum, type)
4296 int regno;
4297 int opnum;
4298 enum reload_type type;
4299 {
4300 int i;
4301
4302 switch (type)
4303 {
4304 case RELOAD_FOR_OTHER_ADDRESS:
4305 /* These always come first. */
4306 return 1;
4307
4308 case RELOAD_OTHER:
4309 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4310
4311 /* If this use is for part of the insn,
4312 check the reg is not in use for any prior part. It is tempting
4313 to try to do this by falling through from objecs that occur
4314 later in the insn to ones that occur earlier, but that will not
4315 correctly take into account the fact that here we MUST ignore
4316 things that would prevent the register from being allocated in
4317 the first place, since we know that it was allocated. */
4318
4319 case RELOAD_FOR_OUTPUT_ADDRESS:
4320 /* Earlier reloads are for earlier outputs or their addresses,
4321 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4322 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4323 RELOAD_OTHER).. */
4324 for (i = 0; i < opnum; i++)
4325 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4326 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4327 return 0;
4328
4329 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4330 return 0;
4331
4332 for (i = 0; i < reload_n_operands; i++)
4333 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4334 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4335 return 0;
4336
4337 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4338 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4339 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4340
4341 case RELOAD_FOR_OUTPUT:
4342 /* This can't be used in the output address for this operand and
4343 anything that can't be used for it, except that we've already
4344 tested for RELOAD_FOR_INSN objects. */
4345
4346 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4347 return 0;
4348
4349 for (i = 0; i < opnum; i++)
4350 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4351 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4352 return 0;
4353
4354 for (i = 0; i < reload_n_operands; i++)
4355 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4356 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4357 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4358 return 0;
4359
4360 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4361
4362 case RELOAD_FOR_OPERAND_ADDRESS:
4363 case RELOAD_FOR_OPADDR_ADDR:
4364 case RELOAD_FOR_INSN:
4365 /* These can't conflict with inputs, or each other, so all we have to
4366 test is input addresses and the addresses of OTHER items. */
4367
4368 for (i = 0; i < reload_n_operands; i++)
4369 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4370 return 0;
4371
4372 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4373
4374 case RELOAD_FOR_INPUT:
4375 /* The only things earlier are the address for this and
4376 earlier inputs, other inputs (which we know we don't conflict
4377 with), and addresses of RELOAD_OTHER objects. */
4378
4379 for (i = 0; i <= opnum; i++)
4380 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4381 return 0;
4382
4383 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4384
4385 case RELOAD_FOR_INPUT_ADDRESS:
4386 /* Similarly, all we have to check is for use in earlier inputs'
4387 addresses. */
4388 for (i = 0; i < opnum; i++)
4389 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4390 return 0;
4391
4392 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4393 }
4394 abort ();
4395 }
4396
4397 /* Return 1 if the value in reload reg REGNO, as used by a reload
4398 needed for the part of the insn specified by OPNUM and TYPE,
4399 is still available in REGNO at the end of the insn.
4400
4401 We can assume that the reload reg was already tested for availability
4402 at the time it is needed, and we should not check this again,
4403 in case the reg has already been marked in use. */
4404
4405 static int
4406 reload_reg_reaches_end_p (regno, opnum, type)
4407 int regno;
4408 int opnum;
4409 enum reload_type type;
4410 {
4411 int i;
4412
4413 switch (type)
4414 {
4415 case RELOAD_OTHER:
4416 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4417 its value must reach the end. */
4418 return 1;
4419
4420 /* If this use is for part of the insn,
4421 its value reaches if no subsequent part uses the same register.
4422 Just like the above function, don't try to do this with lots
4423 of fallthroughs. */
4424
4425 case RELOAD_FOR_OTHER_ADDRESS:
4426 /* Here we check for everything else, since these don't conflict
4427 with anything else and everything comes later. */
4428
4429 for (i = 0; i < reload_n_operands; i++)
4430 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4431 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4432 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4433 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4434 return 0;
4435
4436 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4437 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4438 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4439
4440 case RELOAD_FOR_INPUT_ADDRESS:
4441 /* Similar, except that we check only for this and subsequent inputs
4442 and the address of only subsequent inputs and we do not need
4443 to check for RELOAD_OTHER objects since they are known not to
4444 conflict. */
4445
4446 for (i = opnum; i < reload_n_operands; i++)
4447 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4448 return 0;
4449
4450 for (i = opnum + 1; i < reload_n_operands; i++)
4451 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4452 return 0;
4453
4454 for (i = 0; i < reload_n_operands; i++)
4455 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4456 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4457 return 0;
4458
4459 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4460 return 0;
4461
4462 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4463 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4464
4465 case RELOAD_FOR_INPUT:
4466 /* Similar to input address, except we start at the next operand for
4467 both input and input address and we do not check for
4468 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4469 would conflict. */
4470
4471 for (i = opnum + 1; i < reload_n_operands; i++)
4472 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4473 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4474 return 0;
4475
4476 /* ... fall through ... */
4477
4478 case RELOAD_FOR_OPERAND_ADDRESS:
4479 /* Check outputs and their addresses. */
4480
4481 for (i = 0; i < reload_n_operands; i++)
4482 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4483 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4484 return 0;
4485
4486 return 1;
4487
4488 case RELOAD_FOR_OPADDR_ADDR:
4489 for (i = 0; i < reload_n_operands; i++)
4490 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4491 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4492 return 0;
4493
4494 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4495 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4496
4497 case RELOAD_FOR_INSN:
4498 /* These conflict with other outputs with RELOAD_OTHER. So
4499 we need only check for output addresses. */
4500
4501 opnum = -1;
4502
4503 /* ... fall through ... */
4504
4505 case RELOAD_FOR_OUTPUT:
4506 case RELOAD_FOR_OUTPUT_ADDRESS:
4507 /* We already know these can't conflict with a later output. So the
4508 only thing to check are later output addresses. */
4509 for (i = opnum + 1; i < reload_n_operands; i++)
4510 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4511 return 0;
4512
4513 return 1;
4514 }
4515
4516 abort ();
4517 }
4518 \f
4519 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4520 Return 0 otherwise.
4521
4522 This function uses the same algorithm as reload_reg_free_p above. */
4523
4524 static int
4525 reloads_conflict (r1, r2)
4526 int r1, r2;
4527 {
4528 enum reload_type r1_type = reload_when_needed[r1];
4529 enum reload_type r2_type = reload_when_needed[r2];
4530 int r1_opnum = reload_opnum[r1];
4531 int r2_opnum = reload_opnum[r2];
4532
4533 /* RELOAD_OTHER conflicts with everything except RELOAD_FOR_OTHER_ADDRESS. */
4534
4535 if (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS)
4536 return 1;
4537
4538 /* Otherwise, check conflicts differently for each type. */
4539
4540 switch (r1_type)
4541 {
4542 case RELOAD_FOR_INPUT:
4543 return (r2_type == RELOAD_FOR_INSN
4544 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4545 || r2_type == RELOAD_FOR_OPADDR_ADDR
4546 || r2_type == RELOAD_FOR_INPUT
4547 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4548
4549 case RELOAD_FOR_INPUT_ADDRESS:
4550 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4551 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4552
4553 case RELOAD_FOR_OUTPUT_ADDRESS:
4554 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4555 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4556
4557 case RELOAD_FOR_OPERAND_ADDRESS:
4558 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4559 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4560
4561 case RELOAD_FOR_OPADDR_ADDR:
4562 return (r2_type == RELOAD_FOR_INPUT
4563 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4564
4565 case RELOAD_FOR_OUTPUT:
4566 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4567 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4568 && r2_opnum >= r1_opnum));
4569
4570 case RELOAD_FOR_INSN:
4571 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4572 || r2_type == RELOAD_FOR_INSN
4573 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4574
4575 case RELOAD_FOR_OTHER_ADDRESS:
4576 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4577
4578 case RELOAD_OTHER:
4579 return r2_type != RELOAD_FOR_OTHER_ADDRESS;
4580
4581 default:
4582 abort ();
4583 }
4584 }
4585 \f
4586 /* Vector of reload-numbers showing the order in which the reloads should
4587 be processed. */
4588 short reload_order[MAX_RELOADS];
4589
4590 /* Indexed by reload number, 1 if incoming value
4591 inherited from previous insns. */
4592 char reload_inherited[MAX_RELOADS];
4593
4594 /* For an inherited reload, this is the insn the reload was inherited from,
4595 if we know it. Otherwise, this is 0. */
4596 rtx reload_inheritance_insn[MAX_RELOADS];
4597
4598 /* If non-zero, this is a place to get the value of the reload,
4599 rather than using reload_in. */
4600 rtx reload_override_in[MAX_RELOADS];
4601
4602 /* For each reload, the index in spill_regs of the spill register used,
4603 or -1 if we did not need one of the spill registers for this reload. */
4604 int reload_spill_index[MAX_RELOADS];
4605
4606 /* Index of last register assigned as a spill register. We allocate in
4607 a round-robin fashio. */
4608
4609 static int last_spill_reg = 0;
4610
4611 /* Find a spill register to use as a reload register for reload R.
4612 LAST_RELOAD is non-zero if this is the last reload for the insn being
4613 processed.
4614
4615 Set reload_reg_rtx[R] to the register allocated.
4616
4617 If NOERROR is nonzero, we return 1 if successful,
4618 or 0 if we couldn't find a spill reg and we didn't change anything. */
4619
4620 static int
4621 allocate_reload_reg (r, insn, last_reload, noerror)
4622 int r;
4623 rtx insn;
4624 int last_reload;
4625 int noerror;
4626 {
4627 int i;
4628 int pass;
4629 int count;
4630 rtx new;
4631 int regno;
4632
4633 /* If we put this reload ahead, thinking it is a group,
4634 then insist on finding a group. Otherwise we can grab a
4635 reg that some other reload needs.
4636 (That can happen when we have a 68000 DATA_OR_FP_REG
4637 which is a group of data regs or one fp reg.)
4638 We need not be so restrictive if there are no more reloads
4639 for this insn.
4640
4641 ??? Really it would be nicer to have smarter handling
4642 for that kind of reg class, where a problem like this is normal.
4643 Perhaps those classes should be avoided for reloading
4644 by use of more alternatives. */
4645
4646 int force_group = reload_nregs[r] > 1 && ! last_reload;
4647
4648 /* If we want a single register and haven't yet found one,
4649 take any reg in the right class and not in use.
4650 If we want a consecutive group, here is where we look for it.
4651
4652 We use two passes so we can first look for reload regs to
4653 reuse, which are already in use for other reloads in this insn,
4654 and only then use additional registers.
4655 I think that maximizing reuse is needed to make sure we don't
4656 run out of reload regs. Suppose we have three reloads, and
4657 reloads A and B can share regs. These need two regs.
4658 Suppose A and B are given different regs.
4659 That leaves none for C. */
4660 for (pass = 0; pass < 2; pass++)
4661 {
4662 /* I is the index in spill_regs.
4663 We advance it round-robin between insns to use all spill regs
4664 equally, so that inherited reloads have a chance
4665 of leapfrogging each other. Don't do this, however, when we have
4666 group needs and failure would be fatal; if we only have a relatively
4667 small number of spill registers, and more than one of them has
4668 group needs, then by starting in the middle, we may end up
4669 allocating the first one in such a way that we are not left with
4670 sufficient groups to handle the rest. */
4671
4672 if (noerror || ! force_group)
4673 i = last_spill_reg;
4674 else
4675 i = -1;
4676
4677 for (count = 0; count < n_spills; count++)
4678 {
4679 int class = (int) reload_reg_class[r];
4680
4681 i = (i + 1) % n_spills;
4682
4683 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4684 reload_when_needed[r])
4685 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4686 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4687 /* Look first for regs to share, then for unshared. But
4688 don't share regs used for inherited reloads; they are
4689 the ones we want to preserve. */
4690 && (pass
4691 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4692 spill_regs[i])
4693 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4694 spill_regs[i]))))
4695 {
4696 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4697 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4698 (on 68000) got us two FP regs. If NR is 1,
4699 we would reject both of them. */
4700 if (force_group)
4701 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4702 /* If we need only one reg, we have already won. */
4703 if (nr == 1)
4704 {
4705 /* But reject a single reg if we demand a group. */
4706 if (force_group)
4707 continue;
4708 break;
4709 }
4710 /* Otherwise check that as many consecutive regs as we need
4711 are available here.
4712 Also, don't use for a group registers that are
4713 needed for nongroups. */
4714 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4715 while (nr > 1)
4716 {
4717 regno = spill_regs[i] + nr - 1;
4718 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4719 && spill_reg_order[regno] >= 0
4720 && reload_reg_free_p (regno, reload_opnum[r],
4721 reload_when_needed[r])
4722 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4723 regno)))
4724 break;
4725 nr--;
4726 }
4727 if (nr == 1)
4728 break;
4729 }
4730 }
4731
4732 /* If we found something on pass 1, omit pass 2. */
4733 if (count < n_spills)
4734 break;
4735 }
4736
4737 /* We should have found a spill register by now. */
4738 if (count == n_spills)
4739 {
4740 if (noerror)
4741 return 0;
4742 goto failure;
4743 }
4744
4745 /* I is the index in SPILL_REG_RTX of the reload register we are to
4746 allocate. Get an rtx for it and find its register number. */
4747
4748 new = spill_reg_rtx[i];
4749
4750 if (new == 0 || GET_MODE (new) != reload_mode[r])
4751 spill_reg_rtx[i] = new
4752 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4753
4754 regno = true_regnum (new);
4755
4756 /* Detect when the reload reg can't hold the reload mode.
4757 This used to be one `if', but Sequent compiler can't handle that. */
4758 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4759 {
4760 enum machine_mode test_mode = VOIDmode;
4761 if (reload_in[r])
4762 test_mode = GET_MODE (reload_in[r]);
4763 /* If reload_in[r] has VOIDmode, it means we will load it
4764 in whatever mode the reload reg has: to wit, reload_mode[r].
4765 We have already tested that for validity. */
4766 /* Aside from that, we need to test that the expressions
4767 to reload from or into have modes which are valid for this
4768 reload register. Otherwise the reload insns would be invalid. */
4769 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4770 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4771 if (! (reload_out[r] != 0
4772 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
4773 {
4774 /* The reg is OK. */
4775 last_spill_reg = i;
4776
4777 /* Mark as in use for this insn the reload regs we use
4778 for this. */
4779 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4780 reload_when_needed[r], reload_mode[r]);
4781
4782 reload_reg_rtx[r] = new;
4783 reload_spill_index[r] = i;
4784 return 1;
4785 }
4786 }
4787
4788 /* The reg is not OK. */
4789 if (noerror)
4790 return 0;
4791
4792 failure:
4793 if (asm_noperands (PATTERN (insn)) < 0)
4794 /* It's the compiler's fault. */
4795 abort ();
4796
4797 /* It's the user's fault; the operand's mode and constraint
4798 don't match. Disable this reload so we don't crash in final. */
4799 error_for_asm (insn,
4800 "`asm' operand constraint incompatible with operand size");
4801 reload_in[r] = 0;
4802 reload_out[r] = 0;
4803 reload_reg_rtx[r] = 0;
4804 reload_optional[r] = 1;
4805 reload_secondary_p[r] = 1;
4806
4807 return 1;
4808 }
4809 \f
4810 /* Assign hard reg targets for the pseudo-registers we must reload
4811 into hard regs for this insn.
4812 Also output the instructions to copy them in and out of the hard regs.
4813
4814 For machines with register classes, we are responsible for
4815 finding a reload reg in the proper class. */
4816
4817 static void
4818 choose_reload_regs (insn, avoid_return_reg)
4819 rtx insn;
4820 rtx avoid_return_reg;
4821 {
4822 register int i, j;
4823 int max_group_size = 1;
4824 enum reg_class group_class = NO_REGS;
4825 int inheritance;
4826
4827 rtx save_reload_reg_rtx[MAX_RELOADS];
4828 char save_reload_inherited[MAX_RELOADS];
4829 rtx save_reload_inheritance_insn[MAX_RELOADS];
4830 rtx save_reload_override_in[MAX_RELOADS];
4831 int save_reload_spill_index[MAX_RELOADS];
4832 HARD_REG_SET save_reload_reg_used;
4833 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4834 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4835 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4836 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4837 HARD_REG_SET save_reload_reg_used_in_op_addr;
4838 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
4839 HARD_REG_SET save_reload_reg_used_in_insn;
4840 HARD_REG_SET save_reload_reg_used_in_other_addr;
4841 HARD_REG_SET save_reload_reg_used_at_all;
4842
4843 bzero (reload_inherited, MAX_RELOADS);
4844 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4845 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
4846
4847 CLEAR_HARD_REG_SET (reload_reg_used);
4848 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4849 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4850 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
4851 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4852 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
4853
4854 for (i = 0; i < reload_n_operands; i++)
4855 {
4856 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4857 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4858 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4859 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4860 }
4861
4862 #ifdef SMALL_REGISTER_CLASSES
4863 /* Don't bother with avoiding the return reg
4864 if we have no mandatory reload that could use it. */
4865 if (avoid_return_reg)
4866 {
4867 int do_avoid = 0;
4868 int regno = REGNO (avoid_return_reg);
4869 int nregs
4870 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4871 int r;
4872
4873 for (r = regno; r < regno + nregs; r++)
4874 if (spill_reg_order[r] >= 0)
4875 for (j = 0; j < n_reloads; j++)
4876 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4877 && (reload_in[j] != 0 || reload_out[j] != 0
4878 || reload_secondary_p[j])
4879 &&
4880 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4881 do_avoid = 1;
4882 if (!do_avoid)
4883 avoid_return_reg = 0;
4884 }
4885 #endif /* SMALL_REGISTER_CLASSES */
4886
4887 #if 0 /* Not needed, now that we can always retry without inheritance. */
4888 /* See if we have more mandatory reloads than spill regs.
4889 If so, then we cannot risk optimizations that could prevent
4890 reloads from sharing one spill register.
4891
4892 Since we will try finding a better register than reload_reg_rtx
4893 unless it is equal to reload_in or reload_out, count such reloads. */
4894
4895 {
4896 int tem = 0;
4897 #ifdef SMALL_REGISTER_CLASSES
4898 int tem = (avoid_return_reg != 0);
4899 #endif
4900 for (j = 0; j < n_reloads; j++)
4901 if (! reload_optional[j]
4902 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4903 && (reload_reg_rtx[j] == 0
4904 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4905 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4906 tem++;
4907 if (tem > n_spills)
4908 must_reuse = 1;
4909 }
4910 #endif
4911
4912 #ifdef SMALL_REGISTER_CLASSES
4913 /* Don't use the subroutine call return reg for a reload
4914 if we are supposed to avoid it. */
4915 if (avoid_return_reg)
4916 {
4917 int regno = REGNO (avoid_return_reg);
4918 int nregs
4919 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4920 int r;
4921
4922 for (r = regno; r < regno + nregs; r++)
4923 if (spill_reg_order[r] >= 0)
4924 SET_HARD_REG_BIT (reload_reg_used, r);
4925 }
4926 #endif /* SMALL_REGISTER_CLASSES */
4927
4928 /* In order to be certain of getting the registers we need,
4929 we must sort the reloads into order of increasing register class.
4930 Then our grabbing of reload registers will parallel the process
4931 that provided the reload registers.
4932
4933 Also note whether any of the reloads wants a consecutive group of regs.
4934 If so, record the maximum size of the group desired and what
4935 register class contains all the groups needed by this insn. */
4936
4937 for (j = 0; j < n_reloads; j++)
4938 {
4939 reload_order[j] = j;
4940 reload_spill_index[j] = -1;
4941
4942 reload_mode[j]
4943 = (reload_inmode[j] == VOIDmode
4944 || (GET_MODE_SIZE (reload_outmode[j])
4945 > GET_MODE_SIZE (reload_inmode[j])))
4946 ? reload_outmode[j] : reload_inmode[j];
4947
4948 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
4949
4950 if (reload_nregs[j] > 1)
4951 {
4952 max_group_size = MAX (reload_nregs[j], max_group_size);
4953 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
4954 }
4955
4956 /* If we have already decided to use a certain register,
4957 don't use it in another way. */
4958 if (reload_reg_rtx[j])
4959 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
4960 reload_when_needed[j], reload_mode[j]);
4961 }
4962
4963 if (n_reloads > 1)
4964 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
4965
4966 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
4967 sizeof reload_reg_rtx);
4968 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4969 bcopy ((char *) reload_inheritance_insn,
4970 (char *) save_reload_inheritance_insn,
4971 sizeof reload_inheritance_insn);
4972 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
4973 sizeof reload_override_in);
4974 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
4975 sizeof reload_spill_index);
4976 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
4977 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
4978 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
4979 reload_reg_used_in_op_addr);
4980
4981 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
4982 reload_reg_used_in_op_addr_reload);
4983
4984 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
4985 reload_reg_used_in_insn);
4986 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
4987 reload_reg_used_in_other_addr);
4988
4989 for (i = 0; i < reload_n_operands; i++)
4990 {
4991 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
4992 reload_reg_used_in_output[i]);
4993 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
4994 reload_reg_used_in_input[i]);
4995 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
4996 reload_reg_used_in_input_addr[i]);
4997 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
4998 reload_reg_used_in_output_addr[i]);
4999 }
5000
5001 /* If -O, try first with inheritance, then turning it off.
5002 If not -O, don't do inheritance.
5003 Using inheritance when not optimizing leads to paradoxes
5004 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5005 because one side of the comparison might be inherited. */
5006
5007 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5008 {
5009 /* Process the reloads in order of preference just found.
5010 Beyond this point, subregs can be found in reload_reg_rtx.
5011
5012 This used to look for an existing reloaded home for all
5013 of the reloads, and only then perform any new reloads.
5014 But that could lose if the reloads were done out of reg-class order
5015 because a later reload with a looser constraint might have an old
5016 home in a register needed by an earlier reload with a tighter constraint.
5017
5018 To solve this, we make two passes over the reloads, in the order
5019 described above. In the first pass we try to inherit a reload
5020 from a previous insn. If there is a later reload that needs a
5021 class that is a proper subset of the class being processed, we must
5022 also allocate a spill register during the first pass.
5023
5024 Then make a second pass over the reloads to allocate any reloads
5025 that haven't been given registers yet. */
5026
5027 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5028
5029 for (j = 0; j < n_reloads; j++)
5030 {
5031 register int r = reload_order[j];
5032
5033 /* Ignore reloads that got marked inoperative. */
5034 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5035 continue;
5036
5037 /* If find_reloads chose a to use reload_in or reload_out as a reload
5038 register, we don't need to chose one. Otherwise, try even if it found
5039 one since we might save an insn if we find the value lying around. */
5040 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5041 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5042 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5043 continue;
5044
5045 #if 0 /* No longer needed for correct operation.
5046 It might give better code, or might not; worth an experiment? */
5047 /* If this is an optional reload, we can't inherit from earlier insns
5048 until we are sure that any non-optional reloads have been allocated.
5049 The following code takes advantage of the fact that optional reloads
5050 are at the end of reload_order. */
5051 if (reload_optional[r] != 0)
5052 for (i = 0; i < j; i++)
5053 if ((reload_out[reload_order[i]] != 0
5054 || reload_in[reload_order[i]] != 0
5055 || reload_secondary_p[reload_order[i]])
5056 && ! reload_optional[reload_order[i]]
5057 && reload_reg_rtx[reload_order[i]] == 0)
5058 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5059 #endif
5060
5061 /* First see if this pseudo is already available as reloaded
5062 for a previous insn. We cannot try to inherit for reloads
5063 that are smaller than the maximum number of registers needed
5064 for groups unless the register we would allocate cannot be used
5065 for the groups.
5066
5067 We could check here to see if this is a secondary reload for
5068 an object that is already in a register of the desired class.
5069 This would avoid the need for the secondary reload register.
5070 But this is complex because we can't easily determine what
5071 objects might want to be loaded via this reload. So let a register
5072 be allocated here. In `emit_reload_insns' we suppress one of the
5073 loads in the case described above. */
5074
5075 if (inheritance)
5076 {
5077 register int regno = -1;
5078 enum machine_mode mode;
5079
5080 if (reload_in[r] == 0)
5081 ;
5082 else if (GET_CODE (reload_in[r]) == REG)
5083 {
5084 regno = REGNO (reload_in[r]);
5085 mode = GET_MODE (reload_in[r]);
5086 }
5087 else if (GET_CODE (reload_in_reg[r]) == REG)
5088 {
5089 regno = REGNO (reload_in_reg[r]);
5090 mode = GET_MODE (reload_in_reg[r]);
5091 }
5092 #if 0
5093 /* This won't work, since REGNO can be a pseudo reg number.
5094 Also, it takes much more hair to keep track of all the things
5095 that can invalidate an inherited reload of part of a pseudoreg. */
5096 else if (GET_CODE (reload_in[r]) == SUBREG
5097 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5098 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5099 #endif
5100
5101 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5102 {
5103 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5104
5105 if (reg_reloaded_contents[i] == regno
5106 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5107 >= GET_MODE_SIZE (mode))
5108 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5109 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5110 spill_regs[i])
5111 && (reload_nregs[r] == max_group_size
5112 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5113 spill_regs[i]))
5114 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5115 reload_when_needed[r])
5116 && reload_reg_free_before_p (spill_regs[i],
5117 reload_opnum[r],
5118 reload_when_needed[r]))
5119 {
5120 /* If a group is needed, verify that all the subsequent
5121 registers still have their values intact. */
5122 int nr
5123 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5124 int k;
5125
5126 for (k = 1; k < nr; k++)
5127 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5128 != regno)
5129 break;
5130
5131 if (k == nr)
5132 {
5133 int i1;
5134
5135 /* We found a register that contains the
5136 value we need. If this register is the
5137 same as an `earlyclobber' operand of the
5138 current insn, just mark it as a place to
5139 reload from since we can't use it as the
5140 reload register itself. */
5141
5142 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5143 if (reg_overlap_mentioned_for_reload_p
5144 (reg_last_reload_reg[regno],
5145 reload_earlyclobbers[i1]))
5146 break;
5147
5148 if (i1 != n_earlyclobbers
5149 /* Don't really use the inherited spill reg
5150 if we need it wider than we've got it. */
5151 || (GET_MODE_SIZE (reload_mode[r])
5152 > GET_MODE_SIZE (mode)))
5153 reload_override_in[r] = reg_last_reload_reg[regno];
5154 else
5155 {
5156 int k;
5157 /* We can use this as a reload reg. */
5158 /* Mark the register as in use for this part of
5159 the insn. */
5160 mark_reload_reg_in_use (spill_regs[i],
5161 reload_opnum[r],
5162 reload_when_needed[r],
5163 reload_mode[r]);
5164 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5165 reload_inherited[r] = 1;
5166 reload_inheritance_insn[r]
5167 = reg_reloaded_insn[i];
5168 reload_spill_index[r] = i;
5169 for (k = 0; k < nr; k++)
5170 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5171 spill_regs[i + k]);
5172 }
5173 }
5174 }
5175 }
5176 }
5177
5178 /* Here's another way to see if the value is already lying around. */
5179 if (inheritance
5180 && reload_in[r] != 0
5181 && ! reload_inherited[r]
5182 && reload_out[r] == 0
5183 && (CONSTANT_P (reload_in[r])
5184 || GET_CODE (reload_in[r]) == PLUS
5185 || GET_CODE (reload_in[r]) == REG
5186 || GET_CODE (reload_in[r]) == MEM)
5187 && (reload_nregs[r] == max_group_size
5188 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5189 {
5190 register rtx equiv
5191 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5192 -1, NULL_PTR, 0, reload_mode[r]);
5193 int regno;
5194
5195 if (equiv != 0)
5196 {
5197 if (GET_CODE (equiv) == REG)
5198 regno = REGNO (equiv);
5199 else if (GET_CODE (equiv) == SUBREG)
5200 {
5201 /* This must be a SUBREG of a hard register.
5202 Make a new REG since this might be used in an
5203 address and not all machines support SUBREGs
5204 there. */
5205 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5206 equiv = gen_rtx (REG, reload_mode[r], regno);
5207 }
5208 else
5209 abort ();
5210 }
5211
5212 /* If we found a spill reg, reject it unless it is free
5213 and of the desired class. */
5214 if (equiv != 0
5215 && ((spill_reg_order[regno] >= 0
5216 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5217 reload_when_needed[r]))
5218 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5219 regno)))
5220 equiv = 0;
5221
5222 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5223 equiv = 0;
5224
5225 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5226 equiv = 0;
5227
5228 /* We found a register that contains the value we need.
5229 If this register is the same as an `earlyclobber' operand
5230 of the current insn, just mark it as a place to reload from
5231 since we can't use it as the reload register itself. */
5232
5233 if (equiv != 0)
5234 for (i = 0; i < n_earlyclobbers; i++)
5235 if (reg_overlap_mentioned_for_reload_p (equiv,
5236 reload_earlyclobbers[i]))
5237 {
5238 reload_override_in[r] = equiv;
5239 equiv = 0;
5240 break;
5241 }
5242
5243 /* JRV: If the equiv register we have found is explicitly
5244 clobbered in the current insn, mark but don't use, as above. */
5245
5246 if (equiv != 0 && regno_clobbered_p (regno, insn))
5247 {
5248 reload_override_in[r] = equiv;
5249 equiv = 0;
5250 }
5251
5252 /* If we found an equivalent reg, say no code need be generated
5253 to load it, and use it as our reload reg. */
5254 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5255 {
5256 reload_reg_rtx[r] = equiv;
5257 reload_inherited[r] = 1;
5258 /* If it is a spill reg,
5259 mark the spill reg as in use for this insn. */
5260 i = spill_reg_order[regno];
5261 if (i >= 0)
5262 {
5263 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5264 int k;
5265 mark_reload_reg_in_use (regno, reload_opnum[r],
5266 reload_when_needed[r],
5267 reload_mode[r]);
5268 for (k = 0; k < nr; k++)
5269 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
5270 }
5271 }
5272 }
5273
5274 /* If we found a register to use already, or if this is an optional
5275 reload, we are done. */
5276 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5277 continue;
5278
5279 #if 0 /* No longer needed for correct operation. Might or might not
5280 give better code on the average. Want to experiment? */
5281
5282 /* See if there is a later reload that has a class different from our
5283 class that intersects our class or that requires less register
5284 than our reload. If so, we must allocate a register to this
5285 reload now, since that reload might inherit a previous reload
5286 and take the only available register in our class. Don't do this
5287 for optional reloads since they will force all previous reloads
5288 to be allocated. Also don't do this for reloads that have been
5289 turned off. */
5290
5291 for (i = j + 1; i < n_reloads; i++)
5292 {
5293 int s = reload_order[i];
5294
5295 if ((reload_in[s] == 0 && reload_out[s] == 0
5296 && ! reload_secondary_p[s])
5297 || reload_optional[s])
5298 continue;
5299
5300 if ((reload_reg_class[s] != reload_reg_class[r]
5301 && reg_classes_intersect_p (reload_reg_class[r],
5302 reload_reg_class[s]))
5303 || reload_nregs[s] < reload_nregs[r])
5304 break;
5305 }
5306
5307 if (i == n_reloads)
5308 continue;
5309
5310 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5311 #endif
5312 }
5313
5314 /* Now allocate reload registers for anything non-optional that
5315 didn't get one yet. */
5316 for (j = 0; j < n_reloads; j++)
5317 {
5318 register int r = reload_order[j];
5319
5320 /* Ignore reloads that got marked inoperative. */
5321 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5322 continue;
5323
5324 /* Skip reloads that already have a register allocated or are
5325 optional. */
5326 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5327 continue;
5328
5329 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5330 break;
5331 }
5332
5333 /* If that loop got all the way, we have won. */
5334 if (j == n_reloads)
5335 break;
5336
5337 fail:
5338 /* Loop around and try without any inheritance. */
5339 /* First undo everything done by the failed attempt
5340 to allocate with inheritance. */
5341 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5342 sizeof reload_reg_rtx);
5343 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5344 sizeof reload_inherited);
5345 bcopy ((char *) save_reload_inheritance_insn,
5346 (char *) reload_inheritance_insn,
5347 sizeof reload_inheritance_insn);
5348 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5349 sizeof reload_override_in);
5350 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5351 sizeof reload_spill_index);
5352 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5353 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5354 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5355 save_reload_reg_used_in_op_addr);
5356 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5357 save_reload_reg_used_in_op_addr_reload);
5358 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5359 save_reload_reg_used_in_insn);
5360 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5361 save_reload_reg_used_in_other_addr);
5362
5363 for (i = 0; i < reload_n_operands; i++)
5364 {
5365 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5366 save_reload_reg_used_in_input[i]);
5367 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5368 save_reload_reg_used_in_output[i]);
5369 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5370 save_reload_reg_used_in_input_addr[i]);
5371 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5372 save_reload_reg_used_in_output_addr[i]);
5373 }
5374 }
5375
5376 /* If we thought we could inherit a reload, because it seemed that
5377 nothing else wanted the same reload register earlier in the insn,
5378 verify that assumption, now that all reloads have been assigned. */
5379
5380 for (j = 0; j < n_reloads; j++)
5381 {
5382 register int r = reload_order[j];
5383
5384 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5385 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5386 reload_opnum[r],
5387 reload_when_needed[r]))
5388 reload_inherited[r] = 0;
5389
5390 /* If we found a better place to reload from,
5391 validate it in the same fashion, if it is a reload reg. */
5392 if (reload_override_in[r]
5393 && (GET_CODE (reload_override_in[r]) == REG
5394 || GET_CODE (reload_override_in[r]) == SUBREG))
5395 {
5396 int regno = true_regnum (reload_override_in[r]);
5397 if (spill_reg_order[regno] >= 0
5398 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5399 reload_when_needed[r]))
5400 reload_override_in[r] = 0;
5401 }
5402 }
5403
5404 /* Now that reload_override_in is known valid,
5405 actually override reload_in. */
5406 for (j = 0; j < n_reloads; j++)
5407 if (reload_override_in[j])
5408 reload_in[j] = reload_override_in[j];
5409
5410 /* If this reload won't be done because it has been cancelled or is
5411 optional and not inherited, clear reload_reg_rtx so other
5412 routines (such as subst_reloads) don't get confused. */
5413 for (j = 0; j < n_reloads; j++)
5414 if (reload_reg_rtx[j] != 0
5415 && ((reload_optional[j] && ! reload_inherited[j])
5416 || (reload_in[j] == 0 && reload_out[j] == 0
5417 && ! reload_secondary_p[j])))
5418 {
5419 int regno = true_regnum (reload_reg_rtx[j]);
5420
5421 if (spill_reg_order[regno] >= 0)
5422 clear_reload_reg_in_use (regno, reload_opnum[j],
5423 reload_when_needed[j], reload_mode[j]);
5424 reload_reg_rtx[j] = 0;
5425 }
5426
5427 /* Record which pseudos and which spill regs have output reloads. */
5428 for (j = 0; j < n_reloads; j++)
5429 {
5430 register int r = reload_order[j];
5431
5432 i = reload_spill_index[r];
5433
5434 /* I is nonneg if this reload used one of the spill regs.
5435 If reload_reg_rtx[r] is 0, this is an optional reload
5436 that we opted to ignore. */
5437 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5438 && reload_reg_rtx[r] != 0)
5439 {
5440 register int nregno = REGNO (reload_out[r]);
5441 int nr = 1;
5442
5443 if (nregno < FIRST_PSEUDO_REGISTER)
5444 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5445
5446 while (--nr >= 0)
5447 reg_has_output_reload[nregno + nr] = 1;
5448
5449 if (i >= 0)
5450 {
5451 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5452 while (--nr >= 0)
5453 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5454 }
5455
5456 if (reload_when_needed[r] != RELOAD_OTHER
5457 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5458 && reload_when_needed[r] != RELOAD_FOR_INSN)
5459 abort ();
5460 }
5461 }
5462 }
5463 \f
5464 /* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5465 reloads of the same item for fear that we might not have enough reload
5466 registers. However, normally they will get the same reload register
5467 and hence actually need not be loaded twice.
5468
5469 Here we check for the most common case of this phenomenon: when we have
5470 a number of reloads for the same object, each of which were allocated
5471 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5472 reload, and is not modified in the insn itself. If we find such,
5473 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5474 This will not increase the number of spill registers needed and will
5475 prevent redundant code. */
5476
5477 #ifdef SMALL_REGISTER_CLASSES
5478
5479 static void
5480 merge_assigned_reloads (insn)
5481 rtx insn;
5482 {
5483 int i, j;
5484
5485 /* Scan all the reloads looking for ones that only load values and
5486 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5487 assigned and not modified by INSN. */
5488
5489 for (i = 0; i < n_reloads; i++)
5490 {
5491 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5492 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5493 || reg_set_p (reload_reg_rtx[i], insn))
5494 continue;
5495
5496 /* Look at all other reloads. Ensure that the only use of this
5497 reload_reg_rtx is in a reload that just loads the same value
5498 as we do. Note that any secondary reloads must be of the identical
5499 class since the values, modes, and result registers are the
5500 same, so we need not do anything with any secondary reloads. */
5501
5502 for (j = 0; j < n_reloads; j++)
5503 {
5504 if (i == j || reload_reg_rtx[j] == 0
5505 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5506 reload_reg_rtx[i]))
5507 continue;
5508
5509 /* If the reload regs aren't exactly the same (e.g, different modes)
5510 or if the values are different, we can't merge anything with this
5511 reload register. */
5512
5513 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5514 || reload_out[j] != 0 || reload_in[j] == 0
5515 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5516 break;
5517 }
5518
5519 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5520 we, in fact, found any matching reloads. */
5521
5522 if (j == n_reloads)
5523 {
5524 for (j = 0; j < n_reloads; j++)
5525 if (i != j && reload_reg_rtx[j] != 0
5526 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5527 {
5528 reload_when_needed[i] = RELOAD_OTHER;
5529 reload_in[j] = 0;
5530 transfer_replacements (i, j);
5531 }
5532
5533 /* If this is now RELOAD_OTHER, look for any reloads that load
5534 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5535 if they were for inputs, RELOAD_OTHER for outputs. Note that
5536 this test is equivalent to looking for reloads for this operand
5537 number. */
5538
5539 if (reload_when_needed[i] == RELOAD_OTHER)
5540 for (j = 0; j < n_reloads; j++)
5541 if (reload_in[j] != 0
5542 && reload_when_needed[i] != RELOAD_OTHER
5543 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5544 reload_in[i]))
5545 reload_when_needed[j]
5546 = reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5547 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5548 }
5549 }
5550 }
5551 #endif /* SMALL_RELOAD_CLASSES */
5552 \f
5553 /* Output insns to reload values in and out of the chosen reload regs. */
5554
5555 static void
5556 emit_reload_insns (insn)
5557 rtx insn;
5558 {
5559 register int j;
5560 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5561 rtx other_input_address_reload_insns = 0;
5562 rtx other_input_reload_insns = 0;
5563 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5564 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5565 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5566 rtx operand_reload_insns = 0;
5567 rtx other_operand_reload_insns = 0;
5568 rtx following_insn = NEXT_INSN (insn);
5569 rtx before_insn = insn;
5570 int special;
5571 /* Values to be put in spill_reg_store are put here first. */
5572 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5573
5574 for (j = 0; j < reload_n_operands; j++)
5575 input_reload_insns[j] = input_address_reload_insns[j]
5576 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5577
5578 /* Now output the instructions to copy the data into and out of the
5579 reload registers. Do these in the order that the reloads were reported,
5580 since reloads of base and index registers precede reloads of operands
5581 and the operands may need the base and index registers reloaded. */
5582
5583 for (j = 0; j < n_reloads; j++)
5584 {
5585 register rtx old;
5586 rtx oldequiv_reg = 0;
5587 rtx store_insn = 0;
5588
5589 old = reload_in[j];
5590 if (old != 0 && ! reload_inherited[j]
5591 && ! rtx_equal_p (reload_reg_rtx[j], old)
5592 && reload_reg_rtx[j] != 0)
5593 {
5594 register rtx reloadreg = reload_reg_rtx[j];
5595 rtx oldequiv = 0;
5596 enum machine_mode mode;
5597 rtx *where;
5598
5599 /* Determine the mode to reload in.
5600 This is very tricky because we have three to choose from.
5601 There is the mode the insn operand wants (reload_inmode[J]).
5602 There is the mode of the reload register RELOADREG.
5603 There is the intrinsic mode of the operand, which we could find
5604 by stripping some SUBREGs.
5605 It turns out that RELOADREG's mode is irrelevant:
5606 we can change that arbitrarily.
5607
5608 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5609 then the reload reg may not support QImode moves, so use SImode.
5610 If foo is in memory due to spilling a pseudo reg, this is safe,
5611 because the QImode value is in the least significant part of a
5612 slot big enough for a SImode. If foo is some other sort of
5613 memory reference, then it is impossible to reload this case,
5614 so previous passes had better make sure this never happens.
5615
5616 Then consider a one-word union which has SImode and one of its
5617 members is a float, being fetched as (SUBREG:SF union:SI).
5618 We must fetch that as SFmode because we could be loading into
5619 a float-only register. In this case OLD's mode is correct.
5620
5621 Consider an immediate integer: it has VOIDmode. Here we need
5622 to get a mode from something else.
5623
5624 In some cases, there is a fourth mode, the operand's
5625 containing mode. If the insn specifies a containing mode for
5626 this operand, it overrides all others.
5627
5628 I am not sure whether the algorithm here is always right,
5629 but it does the right things in those cases. */
5630
5631 mode = GET_MODE (old);
5632 if (mode == VOIDmode)
5633 mode = reload_inmode[j];
5634
5635 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5636 /* If we need a secondary register for this operation, see if
5637 the value is already in a register in that class. Don't
5638 do this if the secondary register will be used as a scratch
5639 register. */
5640
5641 if (reload_secondary_in_reload[j] >= 0
5642 && reload_secondary_in_icode[j] == CODE_FOR_nothing
5643 && optimize)
5644 oldequiv
5645 = find_equiv_reg (old, insn,
5646 reload_reg_class[reload_secondary_in_reload[j]],
5647 -1, NULL_PTR, 0, mode);
5648 #endif
5649
5650 /* If reloading from memory, see if there is a register
5651 that already holds the same value. If so, reload from there.
5652 We can pass 0 as the reload_reg_p argument because
5653 any other reload has either already been emitted,
5654 in which case find_equiv_reg will see the reload-insn,
5655 or has yet to be emitted, in which case it doesn't matter
5656 because we will use this equiv reg right away. */
5657
5658 if (oldequiv == 0 && optimize
5659 && (GET_CODE (old) == MEM
5660 || (GET_CODE (old) == REG
5661 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5662 && reg_renumber[REGNO (old)] < 0)))
5663 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5664 -1, NULL_PTR, 0, mode);
5665
5666 if (oldequiv)
5667 {
5668 int regno = true_regnum (oldequiv);
5669
5670 /* If OLDEQUIV is a spill register, don't use it for this
5671 if any other reload needs it at an earlier stage of this insn
5672 or at this stage. */
5673 if (spill_reg_order[regno] >= 0
5674 && (! reload_reg_free_p (regno, reload_opnum[j],
5675 reload_when_needed[j])
5676 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5677 reload_when_needed[j])))
5678 oldequiv = 0;
5679
5680 /* If OLDEQUIV is not a spill register,
5681 don't use it if any other reload wants it. */
5682 if (spill_reg_order[regno] < 0)
5683 {
5684 int k;
5685 for (k = 0; k < n_reloads; k++)
5686 if (reload_reg_rtx[k] != 0 && k != j
5687 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5688 oldequiv))
5689 {
5690 oldequiv = 0;
5691 break;
5692 }
5693 }
5694
5695 /* If it is no cheaper to copy from OLDEQUIV into the
5696 reload register than it would be to move from memory,
5697 don't use it. Likewise, if we need a secondary register
5698 or memory. */
5699
5700 if (oldequiv != 0
5701 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5702 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5703 reload_reg_class[j])
5704 >= MEMORY_MOVE_COST (mode)))
5705 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5706 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5707 mode, oldequiv)
5708 != NO_REGS)
5709 #endif
5710 #ifdef SECONDARY_MEMORY_NEEDED
5711 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5712 REGNO_REG_CLASS (regno),
5713 mode)
5714 #endif
5715 ))
5716 oldequiv = 0;
5717 }
5718
5719 if (oldequiv == 0)
5720 oldequiv = old;
5721 else if (GET_CODE (oldequiv) == REG)
5722 oldequiv_reg = oldequiv;
5723 else if (GET_CODE (oldequiv) == SUBREG)
5724 oldequiv_reg = SUBREG_REG (oldequiv);
5725
5726 /* If we are reloading from a register that was recently stored in
5727 with an output-reload, see if we can prove there was
5728 actually no need to store the old value in it. */
5729
5730 if (optimize && GET_CODE (oldequiv) == REG
5731 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5732 && spill_reg_order[REGNO (oldequiv)] >= 0
5733 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
5734 && find_reg_note (insn, REG_DEAD, reload_in[j])
5735 /* This is unsafe if operand occurs more than once in current
5736 insn. Perhaps some occurrences weren't reloaded. */
5737 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5738 delete_output_reload
5739 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5740
5741 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5742 then load RELOADREG from OLDEQUIV. Note that we cannot use
5743 gen_lowpart_common since it can do the wrong thing when
5744 RELOADREG has a multi-word mode. Note that RELOADREG
5745 must always be a REG here. */
5746
5747 if (GET_MODE (reloadreg) != mode)
5748 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5749 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5750 oldequiv = SUBREG_REG (oldequiv);
5751 if (GET_MODE (oldequiv) != VOIDmode
5752 && mode != GET_MODE (oldequiv))
5753 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5754
5755 /* Switch to the right place to emit the reload insns. */
5756 switch (reload_when_needed[j])
5757 {
5758 case RELOAD_OTHER:
5759 where = &other_input_reload_insns;
5760 break;
5761 case RELOAD_FOR_INPUT:
5762 where = &input_reload_insns[reload_opnum[j]];
5763 break;
5764 case RELOAD_FOR_INPUT_ADDRESS:
5765 where = &input_address_reload_insns[reload_opnum[j]];
5766 break;
5767 case RELOAD_FOR_OUTPUT_ADDRESS:
5768 where = &output_address_reload_insns[reload_opnum[j]];
5769 break;
5770 case RELOAD_FOR_OPERAND_ADDRESS:
5771 where = &operand_reload_insns;
5772 break;
5773 case RELOAD_FOR_OPADDR_ADDR:
5774 where = &other_operand_reload_insns;
5775 break;
5776 case RELOAD_FOR_OTHER_ADDRESS:
5777 where = &other_input_address_reload_insns;
5778 break;
5779 default:
5780 abort ();
5781 }
5782
5783 push_to_sequence (*where);
5784 special = 0;
5785
5786 /* Auto-increment addresses must be reloaded in a special way. */
5787 if (GET_CODE (oldequiv) == POST_INC
5788 || GET_CODE (oldequiv) == POST_DEC
5789 || GET_CODE (oldequiv) == PRE_INC
5790 || GET_CODE (oldequiv) == PRE_DEC)
5791 {
5792 /* We are not going to bother supporting the case where a
5793 incremented register can't be copied directly from
5794 OLDEQUIV since this seems highly unlikely. */
5795 if (reload_secondary_in_reload[j] >= 0)
5796 abort ();
5797 /* Prevent normal processing of this reload. */
5798 special = 1;
5799 /* Output a special code sequence for this case. */
5800 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5801 }
5802
5803 /* If we are reloading a pseudo-register that was set by the previous
5804 insn, see if we can get rid of that pseudo-register entirely
5805 by redirecting the previous insn into our reload register. */
5806
5807 else if (optimize && GET_CODE (old) == REG
5808 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5809 && dead_or_set_p (insn, old)
5810 /* This is unsafe if some other reload
5811 uses the same reg first. */
5812 && reload_reg_free_before_p (REGNO (reloadreg),
5813 reload_opnum[j],
5814 reload_when_needed[j]))
5815 {
5816 rtx temp = PREV_INSN (insn);
5817 while (temp && GET_CODE (temp) == NOTE)
5818 temp = PREV_INSN (temp);
5819 if (temp
5820 && GET_CODE (temp) == INSN
5821 && GET_CODE (PATTERN (temp)) == SET
5822 && SET_DEST (PATTERN (temp)) == old
5823 /* Make sure we can access insn_operand_constraint. */
5824 && asm_noperands (PATTERN (temp)) < 0
5825 /* This is unsafe if prev insn rejects our reload reg. */
5826 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5827 reloadreg)
5828 /* This is unsafe if operand occurs more than once in current
5829 insn. Perhaps some occurrences aren't reloaded. */
5830 && count_occurrences (PATTERN (insn), old) == 1
5831 /* Don't risk splitting a matching pair of operands. */
5832 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5833 {
5834 /* Store into the reload register instead of the pseudo. */
5835 SET_DEST (PATTERN (temp)) = reloadreg;
5836 /* If these are the only uses of the pseudo reg,
5837 pretend for GDB it lives in the reload reg we used. */
5838 if (reg_n_deaths[REGNO (old)] == 1
5839 && reg_n_sets[REGNO (old)] == 1)
5840 {
5841 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5842 alter_reg (REGNO (old), -1);
5843 }
5844 special = 1;
5845 }
5846 }
5847
5848 /* We can't do that, so output an insn to load RELOADREG. */
5849
5850 if (! special)
5851 {
5852 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5853 rtx second_reload_reg = 0;
5854 enum insn_code icode;
5855
5856 /* If we have a secondary reload, pick up the secondary register
5857 and icode, if any. If OLDEQUIV and OLD are different or
5858 if this is an in-out reload, recompute whether or not we
5859 still need a secondary register and what the icode should
5860 be. If we still need a secondary register and the class or
5861 icode is different, go back to reloading from OLD if using
5862 OLDEQUIV means that we got the wrong type of register. We
5863 cannot have different class or icode due to an in-out reload
5864 because we don't make such reloads when both the input and
5865 output need secondary reload registers. */
5866
5867 if (reload_secondary_in_reload[j] >= 0)
5868 {
5869 int secondary_reload = reload_secondary_in_reload[j];
5870 rtx real_oldequiv = oldequiv;
5871 rtx real_old = old;
5872
5873 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5874 and similarly for OLD.
5875 See comments in get_secondary_reload in reload.c. */
5876 if (GET_CODE (oldequiv) == REG
5877 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5878 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5879 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5880
5881 if (GET_CODE (old) == REG
5882 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5883 && reg_equiv_mem[REGNO (old)] != 0)
5884 real_old = reg_equiv_mem[REGNO (old)];
5885
5886 second_reload_reg = reload_reg_rtx[secondary_reload];
5887 icode = reload_secondary_in_icode[j];
5888
5889 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5890 || (reload_in[j] != 0 && reload_out[j] != 0))
5891 {
5892 enum reg_class new_class
5893 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5894 mode, real_oldequiv);
5895
5896 if (new_class == NO_REGS)
5897 second_reload_reg = 0;
5898 else
5899 {
5900 enum insn_code new_icode;
5901 enum machine_mode new_mode;
5902
5903 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5904 REGNO (second_reload_reg)))
5905 oldequiv = old, real_oldequiv = real_old;
5906 else
5907 {
5908 new_icode = reload_in_optab[(int) mode];
5909 if (new_icode != CODE_FOR_nothing
5910 && ((insn_operand_predicate[(int) new_icode][0]
5911 && ! ((*insn_operand_predicate[(int) new_icode][0])
5912 (reloadreg, mode)))
5913 || (insn_operand_predicate[(int) new_icode][1]
5914 && ! ((*insn_operand_predicate[(int) new_icode][1])
5915 (real_oldequiv, mode)))))
5916 new_icode = CODE_FOR_nothing;
5917
5918 if (new_icode == CODE_FOR_nothing)
5919 new_mode = mode;
5920 else
5921 new_mode = insn_operand_mode[(int) new_icode][2];
5922
5923 if (GET_MODE (second_reload_reg) != new_mode)
5924 {
5925 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5926 new_mode))
5927 oldequiv = old, real_oldequiv = real_old;
5928 else
5929 second_reload_reg
5930 = gen_rtx (REG, new_mode,
5931 REGNO (second_reload_reg));
5932 }
5933 }
5934 }
5935 }
5936
5937 /* If we still need a secondary reload register, check
5938 to see if it is being used as a scratch or intermediate
5939 register and generate code appropriately. If we need
5940 a scratch register, use REAL_OLDEQUIV since the form of
5941 the insn may depend on the actual address if it is
5942 a MEM. */
5943
5944 if (second_reload_reg)
5945 {
5946 if (icode != CODE_FOR_nothing)
5947 {
5948 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
5949 second_reload_reg));
5950 special = 1;
5951 }
5952 else
5953 {
5954 /* See if we need a scratch register to load the
5955 intermediate register (a tertiary reload). */
5956 enum insn_code tertiary_icode
5957 = reload_secondary_in_icode[secondary_reload];
5958
5959 if (tertiary_icode != CODE_FOR_nothing)
5960 {
5961 rtx third_reload_reg
5962 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
5963
5964 emit_insn ((GEN_FCN (tertiary_icode)
5965 (second_reload_reg, real_oldequiv,
5966 third_reload_reg)));
5967 }
5968 else
5969 gen_input_reload (second_reload_reg, oldequiv,
5970 reload_opnum[j],
5971 reload_when_needed[j]);
5972
5973 oldequiv = second_reload_reg;
5974 }
5975 }
5976 }
5977 #endif
5978
5979 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
5980 gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
5981 reload_when_needed[j]);
5982
5983 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5984 /* We may have to make a REG_DEAD note for the secondary reload
5985 register in the insns we just made. Find the last insn that
5986 mentioned the register. */
5987 if (! special && second_reload_reg
5988 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5989 {
5990 rtx prev;
5991
5992 for (prev = get_last_insn (); prev;
5993 prev = PREV_INSN (prev))
5994 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
5995 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
5996 PATTERN (prev)))
5997 {
5998 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5999 second_reload_reg,
6000 REG_NOTES (prev));
6001 break;
6002 }
6003 }
6004 #endif
6005 }
6006
6007 /* End this sequence. */
6008 *where = get_insns ();
6009 end_sequence ();
6010 }
6011
6012 /* Add a note saying the input reload reg
6013 dies in this insn, if anyone cares. */
6014 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6015 if (old != 0
6016 && reload_reg_rtx[j] != old
6017 && reload_reg_rtx[j] != 0
6018 && reload_out[j] == 0
6019 && ! reload_inherited[j]
6020 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6021 {
6022 register rtx reloadreg = reload_reg_rtx[j];
6023
6024 #if 0
6025 /* We can't abort here because we need to support this for sched.c.
6026 It's not terrible to miss a REG_DEAD note, but we should try
6027 to figure out how to do this correctly. */
6028 /* The code below is incorrect for address-only reloads. */
6029 if (reload_when_needed[j] != RELOAD_OTHER
6030 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6031 abort ();
6032 #endif
6033
6034 /* Add a death note to this insn, for an input reload. */
6035
6036 if ((reload_when_needed[j] == RELOAD_OTHER
6037 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6038 && ! dead_or_set_p (insn, reloadreg))
6039 REG_NOTES (insn)
6040 = gen_rtx (EXPR_LIST, REG_DEAD,
6041 reloadreg, REG_NOTES (insn));
6042 }
6043
6044 /* When we inherit a reload, the last marked death of the reload reg
6045 may no longer really be a death. */
6046 if (reload_reg_rtx[j] != 0
6047 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6048 && reload_inherited[j])
6049 {
6050 /* Handle inheriting an output reload.
6051 Remove the death note from the output reload insn. */
6052 if (reload_spill_index[j] >= 0
6053 && GET_CODE (reload_in[j]) == REG
6054 && spill_reg_store[reload_spill_index[j]] != 0
6055 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6056 REG_DEAD, REGNO (reload_reg_rtx[j])))
6057 remove_death (REGNO (reload_reg_rtx[j]),
6058 spill_reg_store[reload_spill_index[j]]);
6059 /* Likewise for input reloads that were inherited. */
6060 else if (reload_spill_index[j] >= 0
6061 && GET_CODE (reload_in[j]) == REG
6062 && spill_reg_store[reload_spill_index[j]] == 0
6063 && reload_inheritance_insn[j] != 0
6064 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6065 REGNO (reload_reg_rtx[j])))
6066 remove_death (REGNO (reload_reg_rtx[j]),
6067 reload_inheritance_insn[j]);
6068 else
6069 {
6070 rtx prev;
6071
6072 /* We got this register from find_equiv_reg.
6073 Search back for its last death note and get rid of it.
6074 But don't search back too far.
6075 Don't go past a place where this reg is set,
6076 since a death note before that remains valid. */
6077 for (prev = PREV_INSN (insn);
6078 prev && GET_CODE (prev) != CODE_LABEL;
6079 prev = PREV_INSN (prev))
6080 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6081 && dead_or_set_p (prev, reload_reg_rtx[j]))
6082 {
6083 if (find_regno_note (prev, REG_DEAD,
6084 REGNO (reload_reg_rtx[j])))
6085 remove_death (REGNO (reload_reg_rtx[j]), prev);
6086 break;
6087 }
6088 }
6089 }
6090
6091 /* We might have used find_equiv_reg above to choose an alternate
6092 place from which to reload. If so, and it died, we need to remove
6093 that death and move it to one of the insns we just made. */
6094
6095 if (oldequiv_reg != 0
6096 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6097 {
6098 rtx prev, prev1;
6099
6100 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6101 prev = PREV_INSN (prev))
6102 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6103 && dead_or_set_p (prev, oldequiv_reg))
6104 {
6105 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6106 {
6107 for (prev1 = this_reload_insn;
6108 prev1; prev1 = PREV_INSN (prev1))
6109 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
6110 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6111 PATTERN (prev1)))
6112 {
6113 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6114 oldequiv_reg,
6115 REG_NOTES (prev1));
6116 break;
6117 }
6118 remove_death (REGNO (oldequiv_reg), prev);
6119 }
6120 break;
6121 }
6122 }
6123 #endif
6124
6125 /* If we are reloading a register that was recently stored in with an
6126 output-reload, see if we can prove there was
6127 actually no need to store the old value in it. */
6128
6129 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6130 && reload_in[j] != 0
6131 && GET_CODE (reload_in[j]) == REG
6132 #if 0
6133 /* There doesn't seem to be any reason to restrict this to pseudos
6134 and doing so loses in the case where we are copying from a
6135 register of the wrong class. */
6136 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6137 #endif
6138 && spill_reg_store[reload_spill_index[j]] != 0
6139 /* This is unsafe if some other reload uses the same reg first. */
6140 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6141 reload_opnum[j], reload_when_needed[j])
6142 && dead_or_set_p (insn, reload_in[j])
6143 /* This is unsafe if operand occurs more than once in current
6144 insn. Perhaps some occurrences weren't reloaded. */
6145 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6146 delete_output_reload (insn, j,
6147 spill_reg_store[reload_spill_index[j]]);
6148
6149 /* Input-reloading is done. Now do output-reloading,
6150 storing the value from the reload-register after the main insn
6151 if reload_out[j] is nonzero.
6152
6153 ??? At some point we need to support handling output reloads of
6154 JUMP_INSNs or insns that set cc0. */
6155 old = reload_out[j];
6156 if (old != 0
6157 && reload_reg_rtx[j] != old
6158 && reload_reg_rtx[j] != 0)
6159 {
6160 register rtx reloadreg = reload_reg_rtx[j];
6161 register rtx second_reloadreg = 0;
6162 rtx note, p;
6163 enum machine_mode mode;
6164 int special = 0;
6165
6166 /* An output operand that dies right away does need a reload,
6167 but need not be copied from it. Show the new location in the
6168 REG_UNUSED note. */
6169 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6170 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6171 {
6172 XEXP (note, 0) = reload_reg_rtx[j];
6173 continue;
6174 }
6175 else if (GET_CODE (old) == SCRATCH)
6176 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6177 but we don't want to make an output reload. */
6178 continue;
6179
6180 #if 0
6181 /* Strip off of OLD any size-increasing SUBREGs such as
6182 (SUBREG:SI foo:QI 0). */
6183
6184 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6185 && (GET_MODE_SIZE (GET_MODE (old))
6186 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6187 old = SUBREG_REG (old);
6188 #endif
6189
6190 /* If is a JUMP_INSN, we can't support output reloads yet. */
6191 if (GET_CODE (insn) == JUMP_INSN)
6192 abort ();
6193
6194 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6195
6196 /* Determine the mode to reload in.
6197 See comments above (for input reloading). */
6198
6199 mode = GET_MODE (old);
6200 if (mode == VOIDmode)
6201 {
6202 /* VOIDmode should never happen for an output. */
6203 if (asm_noperands (PATTERN (insn)) < 0)
6204 /* It's the compiler's fault. */
6205 abort ();
6206 error_for_asm (insn, "output operand is constant in `asm'");
6207 /* Prevent crash--use something we know is valid. */
6208 mode = word_mode;
6209 old = gen_rtx (REG, mode, REGNO (reloadreg));
6210 }
6211
6212 if (GET_MODE (reloadreg) != mode)
6213 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6214
6215 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6216
6217 /* If we need two reload regs, set RELOADREG to the intermediate
6218 one, since it will be stored into OUT. We might need a secondary
6219 register only for an input reload, so check again here. */
6220
6221 if (reload_secondary_out_reload[j] >= 0)
6222 {
6223 rtx real_old = old;
6224
6225 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6226 && reg_equiv_mem[REGNO (old)] != 0)
6227 real_old = reg_equiv_mem[REGNO (old)];
6228
6229 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6230 mode, real_old)
6231 != NO_REGS))
6232 {
6233 second_reloadreg = reloadreg;
6234 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6235
6236 /* See if RELOADREG is to be used as a scratch register
6237 or as an intermediate register. */
6238 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6239 {
6240 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6241 (real_old, second_reloadreg, reloadreg)));
6242 special = 1;
6243 }
6244 else
6245 {
6246 /* See if we need both a scratch and intermediate reload
6247 register. */
6248 int secondary_reload = reload_secondary_out_reload[j];
6249 enum insn_code tertiary_icode
6250 = reload_secondary_out_icode[secondary_reload];
6251 rtx pat;
6252
6253 if (GET_MODE (reloadreg) != mode)
6254 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6255
6256 if (tertiary_icode != CODE_FOR_nothing)
6257 {
6258 rtx third_reloadreg
6259 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6260 pat = (GEN_FCN (tertiary_icode)
6261 (reloadreg, second_reloadreg, third_reloadreg));
6262 }
6263 #ifdef SECONDARY_MEMORY_NEEDED
6264 /* If we need a memory location to do the move, do it that way. */
6265 else if (GET_CODE (reloadreg) == REG
6266 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
6267 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
6268 REGNO_REG_CLASS (REGNO (second_reloadreg)),
6269 GET_MODE (second_reloadreg)))
6270 {
6271 /* Get the memory to use and rewrite both registers
6272 to its mode. */
6273 rtx loc
6274 = get_secondary_mem (reloadreg,
6275 GET_MODE (second_reloadreg),
6276 reload_opnum[j],
6277 reload_when_needed[j]);
6278 rtx tmp_reloadreg;
6279
6280 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
6281 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
6282 REGNO (second_reloadreg));
6283
6284 if (GET_MODE (loc) != GET_MODE (reloadreg))
6285 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6286 REGNO (reloadreg));
6287 else
6288 tmp_reloadreg = reloadreg;
6289
6290 emit_move_insn (loc, second_reloadreg);
6291 pat = gen_move_insn (tmp_reloadreg, loc);
6292 }
6293 #endif
6294 else
6295 pat = gen_move_insn (reloadreg, second_reloadreg);
6296
6297 emit_insn (pat);
6298 }
6299 }
6300 }
6301 #endif
6302
6303 /* Output the last reload insn. */
6304 if (! special)
6305 {
6306 #ifdef SECONDARY_MEMORY_NEEDED
6307 /* If we need a memory location to do the move, do it that way. */
6308 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
6309 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
6310 REGNO_REG_CLASS (REGNO (reloadreg)),
6311 GET_MODE (reloadreg)))
6312 {
6313 /* Get the memory to use and rewrite both registers to
6314 its mode. */
6315 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
6316 reload_opnum[j],
6317 reload_when_needed[j]);
6318
6319 if (GET_MODE (loc) != GET_MODE (reloadreg))
6320 reloadreg = gen_rtx (REG, GET_MODE (loc),
6321 REGNO (reloadreg));
6322
6323 if (GET_MODE (loc) != GET_MODE (old))
6324 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
6325
6326 emit_insn (gen_move_insn (loc, reloadreg));
6327 emit_insn (gen_move_insn (old, loc));
6328 }
6329 else
6330 #endif
6331 emit_insn (gen_move_insn (old, reloadreg));
6332 }
6333
6334 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6335 /* If final will look at death notes for this reg,
6336 put one on the last output-reload insn to use it. Similarly
6337 for any secondary register. */
6338 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6339 for (p = get_last_insn (); p; p = PREV_INSN (p))
6340 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6341 && reg_overlap_mentioned_for_reload_p (reloadreg,
6342 PATTERN (p)))
6343 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6344 reloadreg, REG_NOTES (p));
6345
6346 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6347 if (! special
6348 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6349 for (p = get_last_insn (); p; p = PREV_INSN (p))
6350 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6351 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6352 PATTERN (p)))
6353 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6354 second_reloadreg, REG_NOTES (p));
6355 #endif
6356 #endif
6357 /* Look at all insns we emitted, just to be safe. */
6358 for (p = get_insns (); p; p = NEXT_INSN (p))
6359 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6360 {
6361 /* If this output reload doesn't come from a spill reg,
6362 clear any memory of reloaded copies of the pseudo reg.
6363 If this output reload comes from a spill reg,
6364 reg_has_output_reload will make this do nothing. */
6365 note_stores (PATTERN (p), forget_old_reloads_1);
6366
6367 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
6368 store_insn = p;
6369 }
6370
6371 output_reload_insns[reload_opnum[j]] = get_insns ();
6372 end_sequence ();
6373
6374 }
6375
6376 if (reload_spill_index[j] >= 0)
6377 new_spill_reg_store[reload_spill_index[j]] = store_insn;
6378 }
6379
6380 /* Now write all the insns we made for reloads in the order expected by
6381 the allocation functions. Prior to the insn being reloaded, we write
6382 the following reloads:
6383
6384 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6385
6386 RELOAD_OTHER reloads.
6387
6388 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6389 the RELOAD_FOR_INPUT reload for the operand.
6390
6391 RELOAD_FOR_OPADDR_ADDRS reloads.
6392
6393 RELOAD_FOR_OPERAND_ADDRESS reloads.
6394
6395 After the insn being reloaded, we write the following:
6396
6397 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6398 the RELOAD_FOR_OUTPUT reload for that operand. */
6399
6400 emit_insns_before (other_input_address_reload_insns, before_insn);
6401 emit_insns_before (other_input_reload_insns, before_insn);
6402
6403 for (j = 0; j < reload_n_operands; j++)
6404 {
6405 emit_insns_before (input_address_reload_insns[j], before_insn);
6406 emit_insns_before (input_reload_insns[j], before_insn);
6407 }
6408
6409 emit_insns_before (other_operand_reload_insns, before_insn);
6410 emit_insns_before (operand_reload_insns, before_insn);
6411
6412 for (j = 0; j < reload_n_operands; j++)
6413 {
6414 emit_insns_before (output_address_reload_insns[j], following_insn);
6415 emit_insns_before (output_reload_insns[j], following_insn);
6416 }
6417
6418 /* Move death notes from INSN
6419 to output-operand-address and output reload insns. */
6420 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6421 {
6422 rtx insn1;
6423 /* Loop over those insns, last ones first. */
6424 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6425 insn1 = PREV_INSN (insn1))
6426 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6427 {
6428 rtx source = SET_SRC (PATTERN (insn1));
6429 rtx dest = SET_DEST (PATTERN (insn1));
6430
6431 /* The note we will examine next. */
6432 rtx reg_notes = REG_NOTES (insn);
6433 /* The place that pointed to this note. */
6434 rtx *prev_reg_note = &REG_NOTES (insn);
6435
6436 /* If the note is for something used in the source of this
6437 reload insn, or in the output address, move the note. */
6438 while (reg_notes)
6439 {
6440 rtx next_reg_notes = XEXP (reg_notes, 1);
6441 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6442 && GET_CODE (XEXP (reg_notes, 0)) == REG
6443 && ((GET_CODE (dest) != REG
6444 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6445 dest))
6446 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6447 source)))
6448 {
6449 *prev_reg_note = next_reg_notes;
6450 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6451 REG_NOTES (insn1) = reg_notes;
6452 }
6453 else
6454 prev_reg_note = &XEXP (reg_notes, 1);
6455
6456 reg_notes = next_reg_notes;
6457 }
6458 }
6459 }
6460 #endif
6461
6462 /* For all the spill regs newly reloaded in this instruction,
6463 record what they were reloaded from, so subsequent instructions
6464 can inherit the reloads.
6465
6466 Update spill_reg_store for the reloads of this insn.
6467 Copy the elements that were updated in the loop above. */
6468
6469 for (j = 0; j < n_reloads; j++)
6470 {
6471 register int r = reload_order[j];
6472 register int i = reload_spill_index[r];
6473
6474 /* I is nonneg if this reload used one of the spill regs.
6475 If reload_reg_rtx[r] is 0, this is an optional reload
6476 that we opted to ignore.
6477
6478 Also ignore reloads that don't reach the end of the insn,
6479 since we will eventually see the one that does. */
6480
6481 if (i >= 0 && reload_reg_rtx[r] != 0
6482 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6483 reload_when_needed[r]))
6484 {
6485 /* First, clear out memory of what used to be in this spill reg.
6486 If consecutive registers are used, clear them all. */
6487 int nr
6488 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6489 int k;
6490
6491 for (k = 0; k < nr; k++)
6492 {
6493 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6494 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6495 }
6496
6497 /* Maybe the spill reg contains a copy of reload_out. */
6498 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6499 {
6500 register int nregno = REGNO (reload_out[r]);
6501 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6502 : HARD_REGNO_NREGS (nregno,
6503 GET_MODE (reload_reg_rtx[r])));
6504
6505 spill_reg_store[i] = new_spill_reg_store[i];
6506 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6507
6508 /* If NREGNO is a hard register, it may occupy more than
6509 one register. If it does, say what is in the
6510 rest of the registers assuming that both registers
6511 agree on how many words the object takes. If not,
6512 invalidate the subsequent registers. */
6513
6514 if (nregno < FIRST_PSEUDO_REGISTER)
6515 for (k = 1; k < nnr; k++)
6516 reg_last_reload_reg[nregno + k]
6517 = (nr == nnr ? gen_rtx (REG,
6518 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6519 REGNO (reload_reg_rtx[r]) + k)
6520 : 0);
6521
6522 /* Now do the inverse operation. */
6523 for (k = 0; k < nr; k++)
6524 {
6525 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6526 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6527 : nregno + k);
6528 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6529 }
6530 }
6531
6532 /* Maybe the spill reg contains a copy of reload_in. Only do
6533 something if there will not be an output reload for
6534 the register being reloaded. */
6535 else if (reload_out[r] == 0
6536 && reload_in[r] != 0
6537 && ((GET_CODE (reload_in[r]) == REG
6538 && ! reg_has_output_reload[REGNO (reload_in[r])]
6539 || (GET_CODE (reload_in_reg[r]) == REG
6540 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
6541 {
6542 register int nregno;
6543 int nnr;
6544
6545 if (GET_CODE (reload_in[r]) == REG)
6546 nregno = REGNO (reload_in[r]);
6547 else
6548 nregno = REGNO (reload_in_reg[r]);
6549
6550 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6551 : HARD_REGNO_NREGS (nregno,
6552 GET_MODE (reload_reg_rtx[r])));
6553
6554 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6555
6556 if (nregno < FIRST_PSEUDO_REGISTER)
6557 for (k = 1; k < nnr; k++)
6558 reg_last_reload_reg[nregno + k]
6559 = (nr == nnr ? gen_rtx (REG,
6560 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6561 REGNO (reload_reg_rtx[r]) + k)
6562 : 0);
6563
6564 /* Unless we inherited this reload, show we haven't
6565 recently done a store. */
6566 if (! reload_inherited[r])
6567 spill_reg_store[i] = 0;
6568
6569 for (k = 0; k < nr; k++)
6570 {
6571 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6572 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6573 : nregno + k);
6574 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6575 = insn;
6576 }
6577 }
6578 }
6579
6580 /* The following if-statement was #if 0'd in 1.34 (or before...).
6581 It's reenabled in 1.35 because supposedly nothing else
6582 deals with this problem. */
6583
6584 /* If a register gets output-reloaded from a non-spill register,
6585 that invalidates any previous reloaded copy of it.
6586 But forget_old_reloads_1 won't get to see it, because
6587 it thinks only about the original insn. So invalidate it here. */
6588 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6589 {
6590 register int nregno = REGNO (reload_out[r]);
6591 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
6592
6593 while (num_regs-- > 0)
6594 reg_last_reload_reg[nregno + num_regs] = 0;
6595 }
6596 }
6597 }
6598 \f
6599 /* Emit code to perform an input reload of IN to RELOADREG. IN is from
6600 operand OPNUM with reload type TYPE.
6601
6602 Returns first insn emitted. */
6603
6604 rtx
6605 gen_input_reload (reloadreg, in, opnum, type)
6606 rtx reloadreg;
6607 rtx in;
6608 int opnum;
6609 enum reload_type type;
6610 {
6611 rtx last = get_last_insn ();
6612
6613 /* How to do this reload can get quite tricky. Normally, we are being
6614 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6615 register that didn't get a hard register. In that case we can just
6616 call emit_move_insn.
6617
6618 We can also be asked to reload a PLUS that adds a register or a MEM to
6619 another register, constant or MEM. This can occur during frame pointer
6620 elimination and while reloading addresses. This case is handled by
6621 trying to emit a single insn to perform the add. If it is not valid,
6622 we use a two insn sequence.
6623
6624 Finally, we could be called to handle an 'o' constraint by putting
6625 an address into a register. In that case, we first try to do this
6626 with a named pattern of "reload_load_address". If no such pattern
6627 exists, we just emit a SET insn and hope for the best (it will normally
6628 be valid on machines that use 'o').
6629
6630 This entire process is made complex because reload will never
6631 process the insns we generate here and so we must ensure that
6632 they will fit their constraints and also by the fact that parts of
6633 IN might be being reloaded separately and replaced with spill registers.
6634 Because of this, we are, in some sense, just guessing the right approach
6635 here. The one listed above seems to work.
6636
6637 ??? At some point, this whole thing needs to be rethought. */
6638
6639 if (GET_CODE (in) == PLUS
6640 && (GET_CODE (XEXP (in, 0)) == REG
6641 || GET_CODE (XEXP (in, 0)) == MEM)
6642 && (GET_CODE (XEXP (in, 1)) == REG
6643 || CONSTANT_P (XEXP (in, 1))
6644 || GET_CODE (XEXP (in, 1)) == MEM))
6645 {
6646 /* We need to compute the sum of a register or a MEM and another
6647 register, constant, or MEM, and put it into the reload
6648 register. The best possible way of doing this is if the machine
6649 has a three-operand ADD insn that accepts the required operands.
6650
6651 The simplest approach is to try to generate such an insn and see if it
6652 is recognized and matches its constraints. If so, it can be used.
6653
6654 It might be better not to actually emit the insn unless it is valid,
6655 but we need to pass the insn as an operand to `recog' and
6656 `insn_extract' and it is simpler to emit and then delete the insn if
6657 not valid than to dummy things up. */
6658
6659 rtx op0, op1, tem, insn;
6660 int code;
6661
6662 op0 = find_replacement (&XEXP (in, 0));
6663 op1 = find_replacement (&XEXP (in, 1));
6664
6665 /* Since constraint checking is strict, commutativity won't be
6666 checked, so we need to do that here to avoid spurious failure
6667 if the add instruction is two-address and the second operand
6668 of the add is the same as the reload reg, which is frequently
6669 the case. If the insn would be A = B + A, rearrange it so
6670 it will be A = A + B as constrain_operands expects. */
6671
6672 if (GET_CODE (XEXP (in, 1)) == REG
6673 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
6674 tem = op0, op0 = op1, op1 = tem;
6675
6676 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6677 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
6678
6679 insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6680 code = recog_memoized (insn);
6681
6682 if (code >= 0)
6683 {
6684 insn_extract (insn);
6685 /* We want constrain operands to treat this insn strictly in
6686 its validity determination, i.e., the way it would after reload
6687 has completed. */
6688 if (constrain_operands (code, 1))
6689 return insn;
6690 }
6691
6692 delete_insns_since (last);
6693
6694 /* If that failed, we must use a conservative two-insn sequence.
6695 use move to copy constant, MEM, or pseudo register to the reload
6696 register since "move" will be able to handle an arbitrary operand,
6697 unlike add which can't, in general. Then add the registers.
6698
6699 If there is another way to do this for a specific machine, a
6700 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6701 we emit below. */
6702
6703 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6704 || (GET_CODE (op1) == REG
6705 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6706 tem = op0, op0 = op1, op1 = tem;
6707
6708 emit_insn (gen_move_insn (reloadreg, op0));
6709
6710 /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
6711 This fixes a problem on the 32K where the stack pointer cannot
6712 be used as an operand of an add insn. */
6713
6714 if (rtx_equal_p (op0, op1))
6715 op1 = reloadreg;
6716
6717 insn = emit_insn (gen_add2_insn (reloadreg, op1));
6718
6719 /* If that failed, copy the address register to the reload register.
6720 Then add the constant to the reload register. */
6721
6722 code = recog_memoized (insn);
6723
6724 if (code >= 0)
6725 {
6726 insn_extract (insn);
6727 /* We want constrain operands to treat this insn strictly in
6728 its validity determination, i.e., the way it would after reload
6729 has completed. */
6730 if (constrain_operands (code, 1))
6731 return insn;
6732 }
6733
6734 delete_insns_since (last);
6735
6736 emit_insn (gen_move_insn (reloadreg, op1));
6737 emit_insn (gen_add2_insn (reloadreg, op0));
6738 }
6739
6740 #ifdef SECONDARY_MEMORY_NEEDED
6741 /* If we need a memory location to do the move, do it that way. */
6742 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6743 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6744 REGNO_REG_CLASS (REGNO (reloadreg)),
6745 GET_MODE (reloadreg)))
6746 {
6747 /* Get the memory to use and rewrite both registers to its mode. */
6748 rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);
6749
6750 if (GET_MODE (loc) != GET_MODE (reloadreg))
6751 reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
6752
6753 if (GET_MODE (loc) != GET_MODE (in))
6754 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6755
6756 emit_insn (gen_move_insn (loc, in));
6757 emit_insn (gen_move_insn (reloadreg, loc));
6758 }
6759 #endif
6760
6761 /* If IN is a simple operand, use gen_move_insn. */
6762 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
6763 emit_insn (gen_move_insn (reloadreg, in));
6764
6765 #ifdef HAVE_reload_load_address
6766 else if (HAVE_reload_load_address)
6767 emit_insn (gen_reload_load_address (reloadreg, in));
6768 #endif
6769
6770 /* Otherwise, just write (set REGLOADREG IN) and hope for the best. */
6771 else
6772 emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6773
6774 /* Return the first insn emitted.
6775 We can not just return get_last_insn, because there may have
6776 been multiple instructions emitted. Also note that gen_move_insn may
6777 emit more than one insn itself, so we can not assume that there is one
6778 insn emitted per emit_insn_before call. */
6779
6780 return last ? NEXT_INSN (last) : get_insns ();
6781 }
6782 \f
6783 /* Delete a previously made output-reload
6784 whose result we now believe is not needed.
6785 First we double-check.
6786
6787 INSN is the insn now being processed.
6788 OUTPUT_RELOAD_INSN is the insn of the output reload.
6789 J is the reload-number for this insn. */
6790
6791 static void
6792 delete_output_reload (insn, j, output_reload_insn)
6793 rtx insn;
6794 int j;
6795 rtx output_reload_insn;
6796 {
6797 register rtx i1;
6798
6799 /* Get the raw pseudo-register referred to. */
6800
6801 rtx reg = reload_in[j];
6802 while (GET_CODE (reg) == SUBREG)
6803 reg = SUBREG_REG (reg);
6804
6805 /* If the pseudo-reg we are reloading is no longer referenced
6806 anywhere between the store into it and here,
6807 and no jumps or labels intervene, then the value can get
6808 here through the reload reg alone.
6809 Otherwise, give up--return. */
6810 for (i1 = NEXT_INSN (output_reload_insn);
6811 i1 != insn; i1 = NEXT_INSN (i1))
6812 {
6813 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6814 return;
6815 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6816 && reg_mentioned_p (reg, PATTERN (i1)))
6817 return;
6818 }
6819
6820 if (cannot_omit_stores[REGNO (reg)])
6821 return;
6822
6823 /* If this insn will store in the pseudo again,
6824 the previous store can be removed. */
6825 if (reload_out[j] == reload_in[j])
6826 delete_insn (output_reload_insn);
6827
6828 /* See if the pseudo reg has been completely replaced
6829 with reload regs. If so, delete the store insn
6830 and forget we had a stack slot for the pseudo. */
6831 else if (reg_n_deaths[REGNO (reg)] == 1
6832 && reg_basic_block[REGNO (reg)] >= 0
6833 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
6834 {
6835 rtx i2;
6836
6837 /* We know that it was used only between here
6838 and the beginning of the current basic block.
6839 (We also know that the last use before INSN was
6840 the output reload we are thinking of deleting, but never mind that.)
6841 Search that range; see if any ref remains. */
6842 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6843 {
6844 rtx set = single_set (i2);
6845
6846 /* Uses which just store in the pseudo don't count,
6847 since if they are the only uses, they are dead. */
6848 if (set != 0 && SET_DEST (set) == reg)
6849 continue;
6850 if (GET_CODE (i2) == CODE_LABEL
6851 || GET_CODE (i2) == JUMP_INSN)
6852 break;
6853 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
6854 && reg_mentioned_p (reg, PATTERN (i2)))
6855 /* Some other ref remains;
6856 we can't do anything. */
6857 return;
6858 }
6859
6860 /* Delete the now-dead stores into this pseudo. */
6861 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6862 {
6863 rtx set = single_set (i2);
6864
6865 if (set != 0 && SET_DEST (set) == reg)
6866 delete_insn (i2);
6867 if (GET_CODE (i2) == CODE_LABEL
6868 || GET_CODE (i2) == JUMP_INSN)
6869 break;
6870 }
6871
6872 /* For the debugging info,
6873 say the pseudo lives in this reload reg. */
6874 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
6875 alter_reg (REGNO (reg), -1);
6876 }
6877 }
6878 \f
6879 /* Output reload-insns to reload VALUE into RELOADREG.
6880 VALUE is an autoincrement or autodecrement RTX whose operand
6881 is a register or memory location;
6882 so reloading involves incrementing that location.
6883
6884 INC_AMOUNT is the number to increment or decrement by (always positive).
6885 This cannot be deduced from VALUE. */
6886
6887 static void
6888 inc_for_reload (reloadreg, value, inc_amount)
6889 rtx reloadreg;
6890 rtx value;
6891 int inc_amount;
6892 {
6893 /* REG or MEM to be copied and incremented. */
6894 rtx incloc = XEXP (value, 0);
6895 /* Nonzero if increment after copying. */
6896 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
6897 rtx last;
6898 rtx inc;
6899 rtx add_insn;
6900 int code;
6901
6902 /* No hard register is equivalent to this register after
6903 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
6904 we could inc/dec that register as well (maybe even using it for
6905 the source), but I'm not sure it's worth worrying about. */
6906 if (GET_CODE (incloc) == REG)
6907 reg_last_reload_reg[REGNO (incloc)] = 0;
6908
6909 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
6910 inc_amount = - inc_amount;
6911
6912 inc = GEN_INT (inc_amount);
6913
6914 /* If this is post-increment, first copy the location to the reload reg. */
6915 if (post)
6916 emit_insn (gen_move_insn (reloadreg, incloc));
6917
6918 /* See if we can directly increment INCLOC. Use a method similar to that
6919 in gen_input_reload. */
6920
6921 last = get_last_insn ();
6922 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
6923 gen_rtx (PLUS, GET_MODE (incloc),
6924 incloc, inc)));
6925
6926 code = recog_memoized (add_insn);
6927 if (code >= 0)
6928 {
6929 insn_extract (add_insn);
6930 if (constrain_operands (code, 1))
6931 {
6932 /* If this is a pre-increment and we have incremented the value
6933 where it lives, copy the incremented value to RELOADREG to
6934 be used as an address. */
6935
6936 if (! post)
6937 emit_insn (gen_move_insn (reloadreg, incloc));
6938
6939 return;
6940 }
6941 }
6942
6943 delete_insns_since (last);
6944
6945 /* If couldn't do the increment directly, must increment in RELOADREG.
6946 The way we do this depends on whether this is pre- or post-increment.
6947 For pre-increment, copy INCLOC to the reload register, increment it
6948 there, then save back. */
6949
6950 if (! post)
6951 {
6952 emit_insn (gen_move_insn (reloadreg, incloc));
6953 emit_insn (gen_add2_insn (reloadreg, inc));
6954 emit_insn (gen_move_insn (incloc, reloadreg));
6955 }
6956 else
6957 {
6958 /* Postincrement.
6959 Because this might be a jump insn or a compare, and because RELOADREG
6960 may not be available after the insn in an input reload, we must do
6961 the incrementation before the insn being reloaded for.
6962
6963 We have already copied INCLOC to RELOADREG. Increment the copy in
6964 RELOADREG, save that back, then decrement RELOADREG so it has
6965 the original value. */
6966
6967 emit_insn (gen_add2_insn (reloadreg, inc));
6968 emit_insn (gen_move_insn (incloc, reloadreg));
6969 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
6970 }
6971
6972 return;
6973 }
6974 \f
6975 /* Return 1 if we are certain that the constraint-string STRING allows
6976 the hard register REG. Return 0 if we can't be sure of this. */
6977
6978 static int
6979 constraint_accepts_reg_p (string, reg)
6980 char *string;
6981 rtx reg;
6982 {
6983 int value = 0;
6984 int regno = true_regnum (reg);
6985 int c;
6986
6987 /* Initialize for first alternative. */
6988 value = 0;
6989 /* Check that each alternative contains `g' or `r'. */
6990 while (1)
6991 switch (c = *string++)
6992 {
6993 case 0:
6994 /* If an alternative lacks `g' or `r', we lose. */
6995 return value;
6996 case ',':
6997 /* If an alternative lacks `g' or `r', we lose. */
6998 if (value == 0)
6999 return 0;
7000 /* Initialize for next alternative. */
7001 value = 0;
7002 break;
7003 case 'g':
7004 case 'r':
7005 /* Any general reg wins for this alternative. */
7006 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7007 value = 1;
7008 break;
7009 default:
7010 /* Any reg in specified class wins for this alternative. */
7011 {
7012 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7013
7014 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7015 value = 1;
7016 }
7017 }
7018 }
7019 \f
7020 /* Return the number of places FIND appears within X, but don't count
7021 an occurrence if some SET_DEST is FIND. */
7022
7023 static int
7024 count_occurrences (x, find)
7025 register rtx x, find;
7026 {
7027 register int i, j;
7028 register enum rtx_code code;
7029 register char *format_ptr;
7030 int count;
7031
7032 if (x == find)
7033 return 1;
7034 if (x == 0)
7035 return 0;
7036
7037 code = GET_CODE (x);
7038
7039 switch (code)
7040 {
7041 case REG:
7042 case QUEUED:
7043 case CONST_INT:
7044 case CONST_DOUBLE:
7045 case SYMBOL_REF:
7046 case CODE_LABEL:
7047 case PC:
7048 case CC0:
7049 return 0;
7050
7051 case SET:
7052 if (SET_DEST (x) == find)
7053 return count_occurrences (SET_SRC (x), find);
7054 break;
7055 }
7056
7057 format_ptr = GET_RTX_FORMAT (code);
7058 count = 0;
7059
7060 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7061 {
7062 switch (*format_ptr++)
7063 {
7064 case 'e':
7065 count += count_occurrences (XEXP (x, i), find);
7066 break;
7067
7068 case 'E':
7069 if (XVEC (x, i) != NULL)
7070 {
7071 for (j = 0; j < XVECLEN (x, i); j++)
7072 count += count_occurrences (XVECEXP (x, i, j), find);
7073 }
7074 break;
7075 }
7076 }
7077 return count;
7078 }