1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include <stdio.h>
22 #include "config.h"
23 #include "rtl.h"
24 #include "obstack.h"
25 #include "insn-config.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
28 #include "flags.h"
29 #include "expr.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "reload.h"
33 #include "recog.h"
34 #include "basic-block.h"
35 #include "output.h"
36
37 /* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional reloads, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
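
/* The iteration described above can be summarized as the sketch below.
   This is for exposition only and is not compiled; the helper names
   collect_needs, needs_are_met and spill_more_regs are invented for the
   sketch and do not exist in this file.  */
#if 0
  while (something_changed)
    {
      something_changed = 0;

      /* Scan every insn with find_reloads and record, for each register
	 class, the largest number of reload regs any single insn needs.  */
      collect_needs ();

      /* If the hard regs spilled so far do not cover those needs, spill
	 more hard regs (and the pseudos living in them), which may create
	 new needs, so go around again.  */
      if (! needs_are_met ())
	{
	  spill_more_regs ();
	  something_changed = 1;
	}
    }
#endif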
69
70
71 #ifndef REGISTER_MOVE_COST
72 #define REGISTER_MOVE_COST(x, y) 2
73 #endif
74
75 #ifndef MEMORY_MOVE_COST
76 #define MEMORY_MOVE_COST(x) 4
77 #endif
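
/* For illustration only: a port where moves between the general and
   floating point register files are slow might override these defaults
   in its tm.h roughly as below.  FLOAT_REGS and the cost values are
   invented for this sketch, not taken from any real port.  */
#if 0
#define REGISTER_MOVE_COST(CLASS1, CLASS2)			\
  ((((CLASS1) == FLOAT_REGS) != ((CLASS2) == FLOAT_REGS)) ? 8 : 2)
#define MEMORY_MOVE_COST(MODE) 6
#endif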
78 \f
79 /* During reload_as_needed, element N contains a REG rtx for the hard reg
80 into which reg N has been reloaded (perhaps for a previous insn). */
81 static rtx *reg_last_reload_reg;
82
83 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
84 for an output reload that stores into reg N. */
85 static char *reg_has_output_reload;
86
87 /* Indicates which hard regs are reload-registers for an output reload
88 in the current insn. */
89 static HARD_REG_SET reg_is_output_reload;
90
91 /* Element N is the constant value to which pseudo reg N is equivalent,
92 or zero if pseudo reg N is not equivalent to a constant.
93 find_reloads looks at this in order to replace pseudo reg N
94 with the constant it stands for. */
95 rtx *reg_equiv_constant;
96
97 /* Element N is a memory location to which pseudo reg N is equivalent,
98 prior to any register elimination (such as frame pointer to stack
99 pointer). Depending on whether or not it is a valid address, this value
100 is transferred to either reg_equiv_address or reg_equiv_mem. */
101 rtx *reg_equiv_memory_loc;
102
103 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
104 This is used when the address is not valid as a memory address
105 (because its displacement is too big for the machine).  */
106 rtx *reg_equiv_address;
107
108 /* Element N is the memory slot to which pseudo reg N is equivalent,
109 or zero if pseudo reg N is not equivalent to a memory slot. */
110 rtx *reg_equiv_mem;
111
112 /* Widest width in which each pseudo reg is referred to (via subreg). */
113 static int *reg_max_ref_width;
114
115 /* Element N is the insn that initialized reg N from its equivalent
116 constant or memory slot. */
117 static rtx *reg_equiv_init;
118
119 /* During reload_as_needed, element N contains the last pseudo regno
120 reloaded into the Nth reload register. This vector is in parallel
121 with spill_regs. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
125
126 /* During reload_as_needed, element N contains the insn for which
127 the Nth reload register was last used. This vector is in parallel
128 with spill_regs, and its contents are significant only when
129 reg_reloaded_contents is significant. */
130 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
131
132 /* Number of spill-regs so far; number of valid elements of spill_regs. */
133 static int n_spills;
134
135 /* In parallel with spill_regs, contains REG rtx's for those regs.
136 Holds the last rtx used for any given reg, or 0 if it has never
137 been used for spilling yet. This rtx is reused, provided it has
138 the proper mode. */
139 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
140
141 /* In parallel with spill_regs, contains nonzero for a spill reg
142 that was stored after the last time it was used.
143 The precise value is the insn generated to do the store. */
144 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
145
146 /* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs. */
150 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
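
/* Equivalently: for 0 <= I < n_spills, spill_reg_order[spill_regs[I]] == I,
   and spill_reg_order[R] is -1 for any hard reg R not currently in
   spill_regs.  */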
151
152 /* This reg set indicates registers that may not be used for retrying global
153 allocation. The registers that may not be used include all spill registers
154 and the frame pointer (if we are using one). */
155 HARD_REG_SET forbidden_regs;
156
157 /* This reg set indicates registers that are not good for spill registers.
158 They will not be used to complete groups of spill registers. This includes
159 all fixed registers, registers that may be eliminated, and, if
160 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
161
162 (spill_reg_order prevents these registers from being used to start a
163 group.) */
164 static HARD_REG_SET bad_spill_regs;
165
166 /* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `spills' is the number of
168 elements that are actually valid; new ones are added at the end. */
169 static short spill_regs[FIRST_PSEUDO_REGISTER];
170
171 /* Describes order of preference for putting regs into spill_regs.
172 Contains the numbers of all the hard regs, in order most preferred first.
173 This order is different for each function.
174 It is set up by order_regs_for_reload.
175 Empty elements at the end contain -1. */
176 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
177
178 /* 1 for a hard register that appears explicitly in the rtl
179 (for example, function value registers, special registers
180 used by insns, structure value pointer registers). */
181 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
182
183 /* Indicates if a register was counted against the need for
184 groups. 0 means it can count against max_nongroup instead. */
185 static HARD_REG_SET counted_for_groups;
186
187 /* Indicates if a register was counted against the need for
188 non-groups. 0 means it can become part of a new group.
189 During choose_reload_regs, 1 here means don't use this reg
190 as part of a group, even if it seems to be otherwise ok. */
191 static HARD_REG_SET counted_for_nongroups;
192
193 /* Indexed by pseudo reg number N,
194 nonzero if we may not delete stores into the real (memory) home of pseudo N.
195 This is set if we already substituted a memory equivalent in some uses,
196 which happens when we have to eliminate the frame pointer from it.  */
197 static char *cannot_omit_stores;
198
199 /* Nonzero if indirect addressing is supported on the machine; this means
200 that spilling (REG n) does not require reloading it into a register in
201 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
202 value indicates the level of indirect addressing supported, e.g., two
203 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
204 a hard register. */
205
206 static char spill_indirect_levels;
207
208 /* Nonzero if indirect addressing is supported when the innermost MEM is
209 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
210 which these are valid is the same as spill_indirect_levels, above. */
211
212 char indirect_symref_ok;
213
214 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
215
216 char double_reg_address_ok;
217
218 /* Record the stack slot for each spilled hard register. */
219
220 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
221
222 /* Width allocated so far for that stack slot. */
223
224 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
225
226 /* Indexed by register class and basic block number, nonzero if there is
227 any need for a spill register of that class in that basic block.
228 The pointer is 0 if we did stupid allocation and don't know
229 the structure of basic blocks. */
230
231 char *basic_block_needs[N_REG_CLASSES];
232
233 /* First uid used by insns created by reload in this function.
234 Used in find_equiv_reg. */
235 int reload_first_uid;
236
237 /* Flag set by local-alloc or global-alloc if anything is live in
238 a call-clobbered reg across calls. */
239
240 int caller_save_needed;
241
242 /* Set to 1 while reload_as_needed is operating.
243 Required by some machines to handle any generated moves differently. */
244
245 int reload_in_progress = 0;
246
247 /* These arrays record the insn_code of insns that may be needed to
248 perform input and output reloads of special objects. They provide a
249 place to pass a scratch register. */
250
251 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
252 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
253
254 /* This obstack is used for allocation of rtl during register elimination.
255 The allocated storage can be freed once find_reloads has processed the
256 insn. */
257
258 struct obstack reload_obstack;
259 char *reload_firstobj;
260
261 #define obstack_chunk_alloc xmalloc
262 #define obstack_chunk_free free
263
264 /* List of labels that must never be deleted. */
265 extern rtx forced_labels;
266 \f
267 /* This structure is used to record information about register eliminations.
268 Each array entry describes one possible way of eliminating a register
269 in favor of another. If there is more than one way of eliminating a
270 particular register, the most preferred should be specified first. */
271
272 static struct elim_table
273 {
274 int from; /* Register number to be eliminated. */
275 int to; /* Register number used as replacement. */
276 int initial_offset; /* Initial difference between values. */
277 int can_eliminate; /* Non-zero if this elimination can be done. */
278 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
279 insns made by reload. */
280 int offset; /* Current offset between the two regs. */
281 int max_offset; /* Maximum offset between the two regs. */
282 int previous_offset; /* Offset at end of previous insn. */
283 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
284 rtx from_rtx; /* REG rtx for the register to be eliminated.
285 We cannot simply compare the number since
286 we might then spuriously replace a hard
287 register corresponding to a pseudo
288 assigned to the reg to be eliminated. */
289 rtx to_rtx; /* REG rtx for the replacement. */
290 } reg_eliminate[] =
291
292 /* If a set of eliminable registers was specified, define the table from it.
293 Otherwise, default to the normal case of the frame pointer being
294 replaced by the stack pointer. */
295
296 #ifdef ELIMINABLE_REGS
297 ELIMINABLE_REGS;
298 #else
299 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
300 #endif
301
302 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
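
/* For illustration only (a typical shape, not quoted from any particular
   port): a target that can also eliminate the argument pointer would
   define something like the following in its tm.h.  */
#if 0
#define ELIMINABLE_REGS						\
{{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM},			\
 { ARG_POINTER_REGNUM,   FRAME_POINTER_REGNUM},			\
 { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}}
#endif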
303
304 /* Record the number of pending eliminations that have an offset not equal
305 to their initial offset. If non-zero, we use a new copy of each
306 replacement result in any insns encountered. */
307 static int num_not_at_initial_offset;
308
309 /* Count the number of registers that we may be able to eliminate. */
310 static int num_eliminable;
311
312 /* For each label, we record the offset of each elimination. If we reach
313 a label by more than one path and an offset differs, we cannot do the
314 elimination. This information is indexed by the number of the label.
315 The first table is an array of flags that records whether we have yet
316 encountered a label and the second table is an array of arrays, one
317 entry in the latter array for each elimination. */
318
319 static char *offsets_known_at;
320 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
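
/* Both pointers are biased by get_first_label_num () when they are set up
   in reload, so they can be indexed directly by CODE_LABEL_NUMBER:
   offsets_known_at[N] records whether label N has been seen, and
   offsets_at[N][I] is the offset of reg_eliminate[I] at label N.  */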
321
322 /* Number of labels in the current function. */
323
324 static int num_labels;
325
326 struct hard_reg_n_uses { int regno; int uses; };
327 \f
328 static int possible_group_p PROTO((int, int *));
329 static void count_possible_groups PROTO((int *, enum machine_mode *,
330 int *));
331 static int modes_equiv_for_class_p PROTO((enum machine_mode,
332 enum machine_mode,
333 enum reg_class));
334 static void spill_failure PROTO((rtx));
335 static int new_spill_reg PROTO((int, int, int *, int *, int,
336 FILE *));
337 static void delete_dead_insn PROTO((rtx));
338 static void alter_reg PROTO((int, int));
339 static void mark_scratch_live PROTO((rtx));
340 static void set_label_offsets PROTO((rtx, rtx, int));
341 static int eliminate_regs_in_insn PROTO((rtx, int));
342 static void mark_not_eliminable PROTO((rtx, rtx));
343 static int spill_hard_reg PROTO((int, int, FILE *, int));
344 static void scan_paradoxical_subregs PROTO((rtx));
345 static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
346 struct hard_reg_n_uses *));
347 static void order_regs_for_reload PROTO((void));
348 static int compare_spill_regs PROTO((short *, short *));
349 static void reload_as_needed PROTO((rtx, int));
350 static void forget_old_reloads_1 PROTO((rtx, rtx));
351 static int reload_reg_class_lower PROTO((short *, short *));
352 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
353 enum machine_mode));
354 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
355 enum machine_mode));
356 static int reload_reg_free_p PROTO((int, int, enum reload_type));
357 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
358 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
359 static int reloads_conflict PROTO((int, int));
360 static int allocate_reload_reg PROTO((int, rtx, int, int));
361 static void choose_reload_regs PROTO((rtx, rtx));
362 static void merge_assigned_reloads PROTO((rtx));
363 static void emit_reload_insns PROTO((rtx));
364 static void delete_output_reload PROTO((rtx, int, rtx));
365 static void inc_for_reload PROTO((rtx, rtx, int));
366 static int constraint_accepts_reg_p PROTO((char *, rtx));
367 static int count_occurrences PROTO((rtx, rtx));
368 \f
369 /* Initialize the reload pass once per compilation. */
370
371 void
372 init_reload ()
373 {
374 register int i;
375
376 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
377 Set spill_indirect_levels to the number of levels such addressing is
378 permitted, zero if it is not permitted at all. */
379
380 register rtx tem
381 = gen_rtx (MEM, Pmode,
382 gen_rtx (PLUS, Pmode,
383 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
384 GEN_INT (4)));
385 spill_indirect_levels = 0;
386
387 while (memory_address_p (QImode, tem))
388 {
389 spill_indirect_levels++;
390 tem = gen_rtx (MEM, Pmode, tem);
391 }
392
393 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
394
395 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
396 indirect_symref_ok = memory_address_p (QImode, tem);
397
398 /* See if reg+reg is a valid (and offsettable) address. */
399
400 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
401 {
402 tem = gen_rtx (PLUS, Pmode,
403 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
404 gen_rtx (REG, Pmode, i));
405 /* This way, we make sure that reg+reg is an offsettable address. */
406 tem = plus_constant (tem, 4);
407
408 if (memory_address_p (QImode, tem))
409 {
410 double_reg_address_ok = 1;
411 break;
412 }
413 }
414
415 /* Initialize obstack for our rtl allocation. */
416 gcc_obstack_init (&reload_obstack);
417 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
418 }
419
420 /* Main entry point for the reload pass.
421
422 FIRST is the first insn of the function being compiled.
423
424 GLOBAL nonzero means we were called from global_alloc
425 and should attempt to reallocate any pseudoregs that we
426 displace from hard regs we will use for reloads.
427 If GLOBAL is zero, we do not have enough information to do that,
428 so any pseudo reg that is spilled must go to the stack.
429
430 DUMPFILE is the global-reg debugging dump file stream, or 0.
431 If it is nonzero, messages are written to it to describe
432 which registers are seized as reload regs, which pseudo regs
433 are spilled from them, and where the pseudo regs are reallocated to.
434
435 Return value is nonzero if reload failed
436 and we must not do any more for this function. */
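
/* Purely illustrative (the real caller lives elsewhere in the compiler and
   is not shown here): a call honoring the interface documented above might
   look like this.  */
#if 0
  /* FIRST is normally the result of get_insns (); GLOBAL is nonzero when
     global_alloc has been run; DUMPFILE may be zero.  */
  if (reload (get_insns (), global, dumpfile))
    {
      /* Nonzero return means reload could not find enough spill regs;
	 the caller must give up on doing anything more for this function.  */
    }
#endif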
437
438 int
439 reload (first, global, dumpfile)
440 rtx first;
441 int global;
442 FILE *dumpfile;
443 {
444 register int class;
445 register int i, j, k;
446 register rtx insn;
447 register struct elim_table *ep;
448
449 int something_changed;
450 int something_needs_reloads;
451 int something_needs_elimination;
452 int new_basic_block_needs;
453 enum reg_class caller_save_spill_class = NO_REGS;
454 int caller_save_group_size = 1;
455
456 /* Nonzero means we couldn't get enough spill regs. */
457 int failure = 0;
458
459 /* The basic block number currently being processed for INSN. */
460 int this_block;
461
462 /* Make sure even insns with volatile mem refs are recognizable. */
463 init_recog ();
464
465 /* Enable find_equiv_reg to distinguish insns made by reload. */
466 reload_first_uid = get_max_uid ();
467
468 for (i = 0; i < N_REG_CLASSES; i++)
469 basic_block_needs[i] = 0;
470
471 #ifdef SECONDARY_MEMORY_NEEDED
472 /* Initialize the secondary memory table. */
473 clear_secondary_mem ();
474 #endif
475
476 /* Remember which hard regs appear explicitly
477 before we merge into `regs_ever_live' the ones in which
478 pseudo regs have been allocated. */
479 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
480
481 /* We don't have a stack slot for any spill reg yet. */
482 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
483 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
484
485 /* Initialize the save area information for caller-save, in case some
486 are needed. */
487 init_save_areas ();
488
489 /* Compute which hard registers are now in use
490 as homes for pseudo registers.
491 This is done here rather than (e.g.) in global_alloc
492 because this point is reached even if not optimizing. */
493
494 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
495 mark_home_live (i);
496
497 for (i = 0; i < scratch_list_length; i++)
498 if (scratch_list[i])
499 mark_scratch_live (scratch_list[i]);
500
501 /* Make sure that the last insn in the chain
502 is not something that needs reloading. */
503 emit_note (NULL_PTR, NOTE_INSN_DELETED);
504
505 /* Find all the pseudo registers that didn't get hard regs
506 but do have known equivalent constants or memory slots.
507 These include parameters (known equivalent to parameter slots)
508 and cse'd or loop-moved constant memory addresses.
509
510 Record constant equivalents in reg_equiv_constant
511 so they will be substituted by find_reloads.
512 Record memory equivalents in reg_equiv_memory_loc so they can
513 be substituted eventually by altering the REG-rtx's. */
514
515 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
516 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
517 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
518 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
519 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
520 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
521 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
522 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
523 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
524 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
525 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
526 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
527 cannot_omit_stores = (char *) alloca (max_regno);
528 bzero (cannot_omit_stores, max_regno);
529
530 #ifdef SMALL_REGISTER_CLASSES
531 CLEAR_HARD_REG_SET (forbidden_regs);
532 #endif
533
534 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
535 Also find all paradoxical subregs and find largest such for each pseudo.
536 On machines with small register classes, record hard registers that
537 are used for user variables. These can never be used for spills. */
538
539 for (insn = first; insn; insn = NEXT_INSN (insn))
540 {
541 rtx set = single_set (insn);
542
543 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
544 {
545 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
546 if (note
547 #ifdef LEGITIMATE_PIC_OPERAND_P
548 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
549 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
550 #endif
551 )
552 {
553 rtx x = XEXP (note, 0);
554 i = REGNO (SET_DEST (set));
555 if (i > LAST_VIRTUAL_REGISTER)
556 {
557 if (GET_CODE (x) == MEM)
558 reg_equiv_memory_loc[i] = x;
559 else if (CONSTANT_P (x))
560 {
561 if (LEGITIMATE_CONSTANT_P (x))
562 reg_equiv_constant[i] = x;
563 else
564 reg_equiv_memory_loc[i]
565 = force_const_mem (GET_MODE (SET_DEST (set)), x);
566 }
567 else
568 continue;
569
570 /* If this register is being made equivalent to a MEM
571 and the MEM is not SET_SRC, the equivalencing insn
572 is one with the MEM as a SET_DEST and it occurs later.
573 So don't mark this insn now. */
574 if (GET_CODE (x) != MEM
575 || rtx_equal_p (SET_SRC (set), x))
576 reg_equiv_init[i] = insn;
577 }
578 }
579 }
580
581 /* If this insn is setting a MEM from a register equivalent to it,
582 this is the equivalencing insn. */
583 else if (set && GET_CODE (SET_DEST (set)) == MEM
584 && GET_CODE (SET_SRC (set)) == REG
585 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
586 && rtx_equal_p (SET_DEST (set),
587 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
588 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
589
590 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
591 scan_paradoxical_subregs (PATTERN (insn));
592 }
593
594 /* Does this function require a frame pointer? */
595
596 frame_pointer_needed = (! flag_omit_frame_pointer
597 #ifdef EXIT_IGNORE_STACK
598 /* ?? If EXIT_IGNORE_STACK is set, we will not save
599 and restore sp for alloca. So we can't eliminate
600 the frame pointer in that case. At some point,
601 we should improve this by emitting the
602 sp-adjusting insns for this case. */
603 || (current_function_calls_alloca
604 && EXIT_IGNORE_STACK)
605 #endif
606 || FRAME_POINTER_REQUIRED);
607
608 num_eliminable = 0;
609
610 /* Initialize the table of registers to eliminate. The way we do this
611 depends on how the eliminable registers were defined. */
612 #ifdef ELIMINABLE_REGS
613 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
614 {
615 ep->can_eliminate = ep->can_eliminate_previous
616 = (CAN_ELIMINATE (ep->from, ep->to)
617 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
618 }
619 #else
620 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
621 = ! frame_pointer_needed;
622 #endif
623
624 /* Count the number of eliminable registers and build the FROM and TO
625 REG rtx's. Note that code in gen_rtx will cause, e.g.,
626 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
627 We depend on this. */
628 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
629 {
630 num_eliminable += ep->can_eliminate;
631 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
632 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
633 }
634
635 num_labels = max_label_num () - get_first_label_num ();
636
637 /* Allocate the tables used to store offset information at labels. */
638 offsets_known_at = (char *) alloca (num_labels);
639 offsets_at
640 = (int (*)[NUM_ELIMINABLE_REGS])
641 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
642
643 offsets_known_at -= get_first_label_num ();
644 offsets_at -= get_first_label_num ();
645
646 /* Alter each pseudo-reg rtx to contain its hard reg number.
647 Assign stack slots to the pseudos that lack hard regs or equivalents.
648 Do not touch virtual registers. */
649
650 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
651 alter_reg (i, -1);
652
653 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
654 because the stack size may be a part of the offset computation for
655 register elimination. */
656 assign_stack_local (BLKmode, 0, 0);
657
658 /* If we have some registers we think can be eliminated, scan all insns to
659 see if there is an insn that sets one of these registers to something
660 other than itself plus a constant. If so, the register cannot be
661 eliminated. Doing this scan here eliminates an extra pass through the
662 main reload loop in the most common case where register elimination
663 cannot be done. */
664 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
665 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
666 || GET_CODE (insn) == CALL_INSN)
667 note_stores (PATTERN (insn), mark_not_eliminable);
668
669 #ifndef REGISTER_CONSTRAINTS
670 /* If all the pseudo regs have hard regs,
671 except for those that are never referenced,
672 we know that no reloads are needed. */
673 /* But that is not true if there are register constraints, since
674 in that case some pseudos might be in the wrong kind of hard reg. */
675
676 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
677 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
678 break;
679
680 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
681 return;
682 #endif
683
684 /* Compute the order of preference for hard registers to spill.
685 Store them by decreasing preference in potential_reload_regs. */
686
687 order_regs_for_reload ();
688
689 /* So far, no hard regs have been spilled. */
690 n_spills = 0;
691 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
692 spill_reg_order[i] = -1;
693
694 /* On most machines, we can't use any register explicitly used in the
695 rtl as a spill register. But on some, we have to. Those will have
696 taken care to keep the life of hard regs as short as possible. */
697
698 #ifndef SMALL_REGISTER_CLASSES
699 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
700 #endif
701
702 /* Spill any hard regs that we know we can't eliminate. */
703 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
704 if (! ep->can_eliminate)
705 spill_hard_reg (ep->from, global, dumpfile, 1);
706
707 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
708 if (frame_pointer_needed)
709 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
710 #endif
711
712 if (global)
713 for (i = 0; i < N_REG_CLASSES; i++)
714 {
715 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
716 bzero (basic_block_needs[i], n_basic_blocks);
717 }
718
719 /* From now on, we need to emit any moves without making new pseudos. */
720 reload_in_progress = 1;
721
722 /* This loop scans the entire function each go-round
723 and repeats until one repetition spills no additional hard regs. */
724
725 /* This flag is set when a pseudo reg is spilled,
726 to require another pass. Note that getting an additional reload
727 reg does not necessarily imply any pseudo reg was spilled;
728 sometimes we find a reload reg that no pseudo reg was allocated in. */
729 something_changed = 1;
730 /* This flag is set if there are any insns that require reloading. */
731 something_needs_reloads = 0;
732 /* This flag is set if there are any insns that require register
733 eliminations. */
734 something_needs_elimination = 0;
735 while (something_changed)
736 {
737 rtx after_call = 0;
738
739 /* For each class, number of reload regs needed in that class.
740 This is the maximum over all insns of the needs in that class
741 of the individual insn. */
742 int max_needs[N_REG_CLASSES];
743 /* For each class, size of group of consecutive regs
744 that is needed for the reloads of this class. */
745 int group_size[N_REG_CLASSES];
746 /* For each class, max number of consecutive groups needed.
747 (Each group contains group_size[CLASS] consecutive registers.) */
748 int max_groups[N_REG_CLASSES];
749 /* For each class, max number needed of regs that don't belong
750 to any of the groups. */
751 int max_nongroups[N_REG_CLASSES];
752 /* For each class, the machine mode which requires consecutive
753 groups of regs of that class.
754 If two different modes ever require groups of one class,
755 they must be the same size and equally restrictive for that class,
756 otherwise we can't handle the complexity. */
757 enum machine_mode group_mode[N_REG_CLASSES];
758 /* Record the insn where each maximum need is first found. */
759 rtx max_needs_insn[N_REG_CLASSES];
760 rtx max_groups_insn[N_REG_CLASSES];
761 rtx max_nongroups_insn[N_REG_CLASSES];
762 rtx x;
763 int starting_frame_size = get_frame_size ();
764 int previous_frame_pointer_needed = frame_pointer_needed;
765 static char *reg_class_names[] = REG_CLASS_NAMES;
766
767 something_changed = 0;
768 bzero ((char *) max_needs, sizeof max_needs);
769 bzero ((char *) max_groups, sizeof max_groups);
770 bzero ((char *) max_nongroups, sizeof max_nongroups);
771 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
772 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
773 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
774 bzero ((char *) group_size, sizeof group_size);
775 for (i = 0; i < N_REG_CLASSES; i++)
776 group_mode[i] = VOIDmode;
777
778 /* Keep track of which basic blocks are needing the reloads. */
779 this_block = 0;
780
781 /* Remember whether any element of basic_block_needs
782 changes from 0 to 1 in this pass. */
783 new_basic_block_needs = 0;
784
785 /* Reset all offsets on eliminable registers to their initial values. */
786 #ifdef ELIMINABLE_REGS
787 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
788 {
789 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
790 ep->previous_offset = ep->offset
791 = ep->max_offset = ep->initial_offset;
792 }
793 #else
794 #ifdef INITIAL_FRAME_POINTER_OFFSET
795 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
796 #else
797 if (!FRAME_POINTER_REQUIRED)
798 abort ();
799 reg_eliminate[0].initial_offset = 0;
800 #endif
801 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
802 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
803 #endif
804
805 num_not_at_initial_offset = 0;
806
807 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
808
809 /* Set a known offset for each forced label to be at the initial offset
810 of each elimination. We do this because we assume that all
811 computed jumps occur from a location where each elimination is
812 at its initial offset. */
813
814 for (x = forced_labels; x; x = XEXP (x, 1))
815 if (XEXP (x, 0))
816 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
817
818 /* For each pseudo register that has an equivalent location defined,
819 try to eliminate any eliminable registers (such as the frame pointer)
820 assuming initial offsets for the replacement register, which
821 is the normal case.
822
823 If the resulting location is directly addressable, substitute
824 the MEM we just got directly for the old REG.
825
826 If it is not addressable but is a constant or the sum of a hard reg
827 and constant, it is probably not addressable because the constant is
828 out of range, in that case record the address; we will generate
829 hairy code to compute the address in a register each time it is
830 needed. Similarly if it is a hard register, but one that is not
831 valid as an address register.
832
833 If the location is not addressable, but does not have one of the
834 above forms, assign a stack slot. We have to do this to avoid the
835 potential of producing lots of reloads if, e.g., a location involves
836 a pseudo that didn't get a hard register and has an equivalent memory
837 location that also involves a pseudo that didn't get a hard register.
838
839 Perhaps at some point we will improve reload_when_needed handling
840 so this problem goes away. But that's very hairy. */
841
842 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
843 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
844 {
845 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
846
847 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
848 XEXP (x, 0)))
849 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
850 else if (CONSTANT_P (XEXP (x, 0))
851 || (GET_CODE (XEXP (x, 0)) == REG
852 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
853 || (GET_CODE (XEXP (x, 0)) == PLUS
854 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
855 && (REGNO (XEXP (XEXP (x, 0), 0))
856 < FIRST_PSEUDO_REGISTER)
857 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
858 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
859 else
860 {
861 /* Make a new stack slot. Then indicate that something
862 changed so we go back and recompute offsets for
863 eliminable registers because the allocation of memory
864 below might change some offset. reg_equiv_{mem,address}
865 will be set up for this pseudo on the next pass around
866 the loop. */
867 reg_equiv_memory_loc[i] = 0;
868 reg_equiv_init[i] = 0;
869 alter_reg (i, -1);
870 something_changed = 1;
871 }
872 }
873
874 /* If we allocated another pseudo to the stack, redo elimination
875 bookkeeping. */
876 if (something_changed)
877 continue;
878
879 /* If caller-saves needs a group, initialize the group to include
880 the size and mode required for caller-saves. */
881
882 if (caller_save_group_size > 1)
883 {
884 group_mode[(int) caller_save_spill_class] = Pmode;
885 group_size[(int) caller_save_spill_class] = caller_save_group_size;
886 }
887
888 /* Compute the most additional registers needed by any instruction.
889 Collect information separately for each class of regs. */
890
891 for (insn = first; insn; insn = NEXT_INSN (insn))
892 {
893 if (global && this_block + 1 < n_basic_blocks
894 && insn == basic_block_head[this_block+1])
895 ++this_block;
896
897 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
898 might include REG_LABEL), we need to see what effects this
899 has on the known offsets at labels. */
900
901 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
902 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
903 && REG_NOTES (insn) != 0))
904 set_label_offsets (insn, insn, 0);
905
906 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
907 {
908 /* Nonzero means don't use a reload reg that overlaps
909 the place where a function value can be returned. */
910 rtx avoid_return_reg = 0;
911
912 rtx old_body = PATTERN (insn);
913 int old_code = INSN_CODE (insn);
914 rtx old_notes = REG_NOTES (insn);
915 int did_elimination = 0;
916
917 /* To compute the number of reload registers of each class
918 needed for an insn, we must simulate what choose_reload_regs
919 can do. We do this by splitting an insn into an "input" and
920 an "output" part. RELOAD_OTHER reloads are used in both.
921 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
922 which must be live over the entire input section of reloads,
923 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
924 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
925 inputs.
926
927 The registers needed for output are RELOAD_OTHER and
928 RELOAD_FOR_OUTPUT, which are live for the entire output
929 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
930 reloads for each operand.
931
932 The total number of registers needed is the maximum of the
933 inputs and outputs. */
934
935 struct needs
936 {
937 /* [0] is normal, [1] is nongroup. */
938 int regs[2][N_REG_CLASSES];
939 int groups[N_REG_CLASSES];
940 };
941
942 /* Each `struct needs' corresponds to one RELOAD_... type. */
943 struct {
944 struct needs other;
945 struct needs input;
946 struct needs output;
947 struct needs insn;
948 struct needs other_addr;
949 struct needs op_addr;
950 struct needs op_addr_reload;
951 struct needs in_addr[MAX_RECOG_OPERANDS];
952 struct needs out_addr[MAX_RECOG_OPERANDS];
953 } insn_needs;
954
955 /* If needed, eliminate any eliminable registers. */
956 if (num_eliminable)
957 did_elimination = eliminate_regs_in_insn (insn, 0);
958
959 #ifdef SMALL_REGISTER_CLASSES
960 /* Set avoid_return_reg if this is an insn
961 that might use the value of a function call. */
962 if (GET_CODE (insn) == CALL_INSN)
963 {
964 if (GET_CODE (PATTERN (insn)) == SET)
965 after_call = SET_DEST (PATTERN (insn));
966 else if (GET_CODE (PATTERN (insn)) == PARALLEL
967 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
968 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
969 else
970 after_call = 0;
971 }
972 else if (after_call != 0
973 && !(GET_CODE (PATTERN (insn)) == SET
974 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
975 {
976 if (reg_referenced_p (after_call, PATTERN (insn)))
977 avoid_return_reg = after_call;
978 after_call = 0;
979 }
980 #endif /* SMALL_REGISTER_CLASSES */
981
982 /* Analyze the instruction. */
983 find_reloads (insn, 0, spill_indirect_levels, global,
984 spill_reg_order);
985
986 /* Remember for later shortcuts which insns had any reloads or
987 register eliminations.
988
989 One might think that it would be worthwhile to mark insns
990 that need register replacements but not reloads, but this is
991 not safe because find_reloads may do some manipulation of
992 the insn (such as swapping commutative operands), which would
993 be lost when we restore the old pattern after register
994 replacement. So the actions of find_reloads must be redone in
995 subsequent passes or in reload_as_needed.
996
997 However, it is safe to mark insns that need reloads
998 but not register replacement. */
999
1000 PUT_MODE (insn, (did_elimination ? QImode
1001 : n_reloads ? HImode
1002 : GET_MODE (insn) == DImode ? DImode
1003 : VOIDmode));
1004
1005 /* Discard any register replacements done. */
1006 if (did_elimination)
1007 {
1008 obstack_free (&reload_obstack, reload_firstobj);
1009 PATTERN (insn) = old_body;
1010 INSN_CODE (insn) = old_code;
1011 REG_NOTES (insn) = old_notes;
1012 something_needs_elimination = 1;
1013 }
1014
1015 /* If this insn has no reloads, we need not do anything except
1016 in the case of a CALL_INSN when we have caller-saves and
1017 caller-save needs reloads. */
1018
1019 if (n_reloads == 0
1020 && ! (GET_CODE (insn) == CALL_INSN
1021 && caller_save_spill_class != NO_REGS))
1022 continue;
1023
1024 something_needs_reloads = 1;
1025 bzero ((char *) &insn_needs, sizeof insn_needs);
1026
1027 /* Count each reload once in every class
1028 containing the reload's own class. */
1029
1030 for (i = 0; i < n_reloads; i++)
1031 {
1032 register enum reg_class *p;
1033 enum reg_class class = reload_reg_class[i];
1034 int size;
1035 enum machine_mode mode;
1036 int nongroup_need;
1037 struct needs *this_needs;
1038
1039 /* Don't count the dummy reloads, for which one of the
1040 regs mentioned in the insn can be used for reloading.
1041 Don't count optional reloads.
1042 Don't count reloads that got combined with others. */
1043 if (reload_reg_rtx[i] != 0
1044 || reload_optional[i] != 0
1045 || (reload_out[i] == 0 && reload_in[i] == 0
1046 && ! reload_secondary_p[i]))
1047 continue;
1048
1049 /* Show that a reload register of this class is needed
1050 in this basic block. We do not use insn_needs and
1051 insn_groups because they are overly conservative for
1052 this purpose. */
1053 if (global && ! basic_block_needs[(int) class][this_block])
1054 {
1055 basic_block_needs[(int) class][this_block] = 1;
1056 new_basic_block_needs = 1;
1057 }
1058
1059
1060 mode = reload_inmode[i];
1061 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1062 mode = reload_outmode[i];
1063 size = CLASS_MAX_NREGS (class, mode);
1064
1065 /* If this class doesn't want a group, determine if we have
1066 a nongroup need or a regular need. We have a nongroup
1067 need if this reload conflicts with a group reload whose
1068 class intersects with this reload's class. */
1069
1070 nongroup_need = 0;
1071 if (size == 1)
1072 for (j = 0; j < n_reloads; j++)
1073 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1074 (GET_MODE_SIZE (reload_outmode[j])
1075 > GET_MODE_SIZE (reload_inmode[j]))
1076 ? reload_outmode[j]
1077 : reload_inmode[j])
1078 > 1)
1079 && (!reload_optional[j])
1080 && (reload_in[j] != 0 || reload_out[j] != 0
1081 || reload_secondary_p[j])
1082 && reloads_conflict (i, j)
1083 && reg_classes_intersect_p (class,
1084 reload_reg_class[j]))
1085 {
1086 nongroup_need = 1;
1087 break;
1088 }
1089
1090 /* Decide which time-of-use to count this reload for. */
1091 switch (reload_when_needed[i])
1092 {
1093 case RELOAD_OTHER:
1094 this_needs = &insn_needs.other;
1095 break;
1096 case RELOAD_FOR_INPUT:
1097 this_needs = &insn_needs.input;
1098 break;
1099 case RELOAD_FOR_OUTPUT:
1100 this_needs = &insn_needs.output;
1101 break;
1102 case RELOAD_FOR_INSN:
1103 this_needs = &insn_needs.insn;
1104 break;
1105 case RELOAD_FOR_OTHER_ADDRESS:
1106 this_needs = &insn_needs.other_addr;
1107 break;
1108 case RELOAD_FOR_INPUT_ADDRESS:
1109 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1110 break;
1111 case RELOAD_FOR_OUTPUT_ADDRESS:
1112 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1113 break;
1114 case RELOAD_FOR_OPERAND_ADDRESS:
1115 this_needs = &insn_needs.op_addr;
1116 break;
1117 case RELOAD_FOR_OPADDR_ADDR:
1118 this_needs = &insn_needs.op_addr_reload;
1119 break;
1120 }
1121
1122 if (size > 1)
1123 {
1124 enum machine_mode other_mode, allocate_mode;
1125
1126 /* Count number of groups needed separately from
1127 number of individual regs needed. */
1128 this_needs->groups[(int) class]++;
1129 p = reg_class_superclasses[(int) class];
1130 while (*p != LIM_REG_CLASSES)
1131 this_needs->groups[(int) *p++]++;
1132
1133 /* Record size and mode of a group of this class. */
1134 /* If more than one size group is needed,
1135 make all groups the largest needed size. */
1136 if (group_size[(int) class] < size)
1137 {
1138 other_mode = group_mode[(int) class];
1139 allocate_mode = mode;
1140
1141 group_size[(int) class] = size;
1142 group_mode[(int) class] = mode;
1143 }
1144 else
1145 {
1146 other_mode = mode;
1147 allocate_mode = group_mode[(int) class];
1148 }
1149
1150 /* Crash if two dissimilar machine modes both need
1151 groups of consecutive regs of the same class. */
1152
1153 if (other_mode != VOIDmode && other_mode != allocate_mode
1154 && ! modes_equiv_for_class_p (allocate_mode,
1155 other_mode, class))
1156 abort ();
1157 }
1158 else if (size == 1)
1159 {
1160 this_needs->regs[nongroup_need][(int) class] += 1;
1161 p = reg_class_superclasses[(int) class];
1162 while (*p != LIM_REG_CLASSES)
1163 this_needs->regs[nongroup_need][(int) *p++] += 1;
1164 }
1165 else
1166 abort ();
1167 }
1168
1169 /* All reloads have been counted for this insn;
1170 now merge the various times of use.
1171 This sets insn_needs, etc., to the maximum total number
1172 of registers needed at any point in this insn. */
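
	    /* A worked example with invented numbers (for exposition only):
	       if, for some class, this insn recorded input = 2, op_addr = 1,
	       output = 1 and a single out_addr = 1, with everything else 0,
	       then in_max = 1 and out_max = 1 below, so input becomes
	       MAX (2 + 1 + 0, 1 + 2) = 3, output becomes 1 + 1 = 2, and
	       other ends up as MAX (3, 2) = 3 registers for the whole insn.  */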
1173
1174 for (i = 0; i < N_REG_CLASSES; i++)
1175 {
1176 int in_max, out_max;
1177
1178 /* Compute normal and nongroup needs. */
1179 for (j = 0; j <= 1; j++)
1180 {
1181 for (in_max = 0, out_max = 0, k = 0;
1182 k < reload_n_operands; k++)
1183 {
1184 in_max
1185 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1186 out_max
1187 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1188 }
1189
1190 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1191 and operand addresses but not things used to reload
1192 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1193 don't conflict with things needed to reload inputs or
1194 outputs. */
1195
1196 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1197 insn_needs.op_addr_reload.regs[j][i]),
1198 in_max);
1199
1200 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1201
1202 insn_needs.input.regs[j][i]
1203 = MAX (insn_needs.input.regs[j][i]
1204 + insn_needs.op_addr.regs[j][i]
1205 + insn_needs.insn.regs[j][i],
1206 in_max + insn_needs.input.regs[j][i]);
1207
1208 insn_needs.output.regs[j][i] += out_max;
1209 insn_needs.other.regs[j][i]
1210 += MAX (MAX (insn_needs.input.regs[j][i],
1211 insn_needs.output.regs[j][i]),
1212 insn_needs.other_addr.regs[j][i]);
1213
1214 }
1215
1216 /* Now compute group needs. */
1217 for (in_max = 0, out_max = 0, j = 0;
1218 j < reload_n_operands; j++)
1219 {
1220 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1221 out_max
1222 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1223 }
1224
1225 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1226 insn_needs.op_addr_reload.groups[i]),
1227 in_max);
1228 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1229
1230 insn_needs.input.groups[i]
1231 = MAX (insn_needs.input.groups[i]
1232 + insn_needs.op_addr.groups[i]
1233 + insn_needs.insn.groups[i],
1234 in_max + insn_needs.input.groups[i]);
1235
1236 insn_needs.output.groups[i] += out_max;
1237 insn_needs.other.groups[i]
1238 += MAX (MAX (insn_needs.input.groups[i],
1239 insn_needs.output.groups[i]),
1240 insn_needs.other_addr.groups[i]);
1241 }
1242
1243 /* If this is a CALL_INSN and caller-saves will need
1244 a spill register, act as if the spill register is
1245 needed for this insn. However, the spill register
1246 can be used by any reload of this insn, so we only
1247 need do something if no need for that class has
1248 been recorded.
1249
1250 The assumption that every CALL_INSN will trigger a
1251 caller-save is highly conservative; however, cases where
1252 caller-saves will need a spill register but a block containing
1253 a CALL_INSN won't need a spill register of that class
1254 should be quite rare.
1255
1256 If a group is needed, the size and mode of the group will
1257 have been set up at the beginning of this loop. */
1258
1259 if (GET_CODE (insn) == CALL_INSN
1260 && caller_save_spill_class != NO_REGS)
1261 {
1262 /* See if this register would conflict with any reload
1263 that needs a group. */
1264 int nongroup_need = 0;
1265 int *caller_save_needs;
1266
1267 for (j = 0; j < n_reloads; j++)
1268 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1269 (GET_MODE_SIZE (reload_outmode[j])
1270 > GET_MODE_SIZE (reload_inmode[j]))
1271 ? reload_outmode[j]
1272 : reload_inmode[j])
1273 > 1)
1274 && reg_classes_intersect_p (caller_save_spill_class,
1275 reload_reg_class[j]))
1276 {
1277 nongroup_need = 1;
1278 break;
1279 }
1280
1281 caller_save_needs
1282 = (caller_save_group_size > 1
1283 ? insn_needs.other.groups
1284 : insn_needs.other.regs[nongroup_need]);
1285
1286 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1287 {
1288 register enum reg_class *p
1289 = reg_class_superclasses[(int) caller_save_spill_class];
1290
1291 caller_save_needs[(int) caller_save_spill_class]++;
1292
1293 while (*p != LIM_REG_CLASSES)
1294 caller_save_needs[(int) *p++] += 1;
1295 }
1296
1297 /* Show that this basic block will need a register of
1298 this class. */
1299
1300 if (global
1301 && ! (basic_block_needs[(int) caller_save_spill_class]
1302 [this_block]))
1303 {
1304 basic_block_needs[(int) caller_save_spill_class]
1305 [this_block] = 1;
1306 new_basic_block_needs = 1;
1307 }
1308 }
1309
1310 #ifdef SMALL_REGISTER_CLASSES
1311 /* If this insn stores the value of a function call,
1312 and that value is in a register that has been spilled,
1313 and if the insn needs a reload in a class
1314 that might use that register as the reload register,
1315 then add an extra need in that class.
1316 This makes sure we have a register available that does
1317 not overlap the return value. */
1318
1319 if (avoid_return_reg)
1320 {
1321 int regno = REGNO (avoid_return_reg);
1322 int nregs
1323 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1324 int r;
1325 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1326
1327 /* First compute the "basic needs", which counts a
1328 need only in the smallest class in which it
1329 is required. */
1330
1331 bcopy (insn_needs.other.regs[0], basic_needs,
1332 sizeof basic_needs);
1333 bcopy (insn_needs.other.groups, basic_groups,
1334 sizeof basic_groups);
1335
1336 for (i = 0; i < N_REG_CLASSES; i++)
1337 {
1338 enum reg_class *p;
1339
1340 if (basic_needs[i] >= 0)
1341 for (p = reg_class_superclasses[i];
1342 *p != LIM_REG_CLASSES; p++)
1343 basic_needs[(int) *p] -= basic_needs[i];
1344
1345 if (basic_groups[i] >= 0)
1346 for (p = reg_class_superclasses[i];
1347 *p != LIM_REG_CLASSES; p++)
1348 basic_groups[(int) *p] -= basic_groups[i];
1349 }
1350
1351 /* Now count extra regs if there might be a conflict with
1352 the return value register.
1353
1354 ??? This is not quite correct because we don't properly
1355 handle the case of groups, but if we end up doing
1356 something wrong, it either will end up not mattering or
1357 we will abort elsewhere. */
1358
1359 for (r = regno; r < regno + nregs; r++)
1360 if (spill_reg_order[r] >= 0)
1361 for (i = 0; i < N_REG_CLASSES; i++)
1362 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1363 {
1364 if (basic_needs[i] > 0 || basic_groups[i] > 0)
1365 {
1366 enum reg_class *p;
1367
1368 insn_needs.other.regs[0][i]++;
1369 p = reg_class_superclasses[i];
1370 while (*p != LIM_REG_CLASSES)
1371 insn_needs.other.regs[0][(int) *p++]++;
1372 }
1373 }
1374 }
1375 #endif /* SMALL_REGISTER_CLASSES */
1376
1377 /* For each class, collect maximum need of any insn. */
1378
1379 for (i = 0; i < N_REG_CLASSES; i++)
1380 {
1381 if (max_needs[i] < insn_needs.other.regs[0][i])
1382 {
1383 max_needs[i] = insn_needs.other.regs[0][i];
1384 max_needs_insn[i] = insn;
1385 }
1386 if (max_groups[i] < insn_needs.other.groups[i])
1387 {
1388 max_groups[i] = insn_needs.other.groups[i];
1389 max_groups_insn[i] = insn;
1390 }
1391 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1392 {
1393 max_nongroups[i] = insn_needs.other.regs[1][i];
1394 max_nongroups_insn[i] = insn;
1395 }
1396 }
1397 }
1398 /* Note that there is a continue statement above. */
1399 }
1400
1401 /* If we allocated any new memory locations, make another pass
1402 since it might have changed elimination offsets. */
1403 if (starting_frame_size != get_frame_size ())
1404 something_changed = 1;
1405
1406 if (dumpfile)
1407 for (i = 0; i < N_REG_CLASSES; i++)
1408 {
1409 if (max_needs[i] > 0)
1410 fprintf (dumpfile,
1411 ";; Need %d reg%s of class %s (for insn %d).\n",
1412 max_needs[i], max_needs[i] == 1 ? "" : "s",
1413 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1414 if (max_nongroups[i] > 0)
1415 fprintf (dumpfile,
1416 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1417 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1418 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1419 if (max_groups[i] > 0)
1420 fprintf (dumpfile,
1421 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1422 max_groups[i], max_groups[i] == 1 ? "" : "s",
1423 mode_name[(int) group_mode[i]],
1424 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1425 }
1426
1427 /* If we have caller-saves, set up the save areas and see if caller-save
1428 will need a spill register. */
1429
1430 if (caller_save_needed
1431 && ! setup_save_areas (&something_changed)
1432 && caller_save_spill_class == NO_REGS)
1433 {
1434 /* The class we will need depends on whether the machine
1435 supports the sum of two registers for an address; see
1436 find_address_reloads for details. */
1437
1438 caller_save_spill_class
1439 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1440 caller_save_group_size
1441 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1442 something_changed = 1;
1443 }
1444
1445 /* See if anything that happened changes which eliminations are valid.
1446 For example, on the Sparc, whether or not the frame pointer can
1447 be eliminated can depend on what registers have been used. We need
1448 not check some conditions again (such as flag_omit_frame_pointer)
1449 since they can't have changed. */
1450
1451 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1452 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1453 #ifdef ELIMINABLE_REGS
1454 || ! CAN_ELIMINATE (ep->from, ep->to)
1455 #endif
1456 )
1457 ep->can_eliminate = 0;
1458
1459 /* Look for the case where we have discovered that we can't replace
1460 register A with register B and that means that we will now be
1461 trying to replace register A with register C. This means we can
1462 no longer replace register C with register B and we need to disable
1463 such an elimination, if it exists. This occurs often with A == ap,
1464 B == sp, and C == fp. */
1465
1466 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1467 {
1468 struct elim_table *op;
1469 register int new_to = -1;
1470
1471 if (! ep->can_eliminate && ep->can_eliminate_previous)
1472 {
1473 /* Find the current elimination for ep->from, if there is a
1474 new one. */
1475 for (op = reg_eliminate;
1476 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1477 if (op->from == ep->from && op->can_eliminate)
1478 {
1479 new_to = op->to;
1480 break;
1481 }
1482
1483 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1484 disable it. */
1485 for (op = reg_eliminate;
1486 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1487 if (op->from == new_to && op->to == ep->to)
1488 op->can_eliminate = 0;
1489 }
1490 }
1491
1492 /* See if any registers that we thought we could eliminate the previous
1493 time are no longer eliminable. If so, something has changed and we
1494 must spill the register. Also, recompute the number of eliminable
1495 registers and see if the frame pointer is needed; it is if there is
1496 no elimination of the frame pointer that we can perform. */
1497
1498 frame_pointer_needed = 1;
1499 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1500 {
1501 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1502 && ep->to != HARD_FRAME_POINTER_REGNUM)
1503 frame_pointer_needed = 0;
1504
1505 if (! ep->can_eliminate && ep->can_eliminate_previous)
1506 {
1507 ep->can_eliminate_previous = 0;
1508 spill_hard_reg (ep->from, global, dumpfile, 1);
1509 something_changed = 1;
1510 num_eliminable--;
1511 }
1512 }
1513
1514 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1515 /* If we didn't need a frame pointer last time, but we do now, spill
1516 the hard frame pointer. */
1517 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1518 {
1519 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1520 something_changed = 1;
1521 }
1522 #endif
1523
1524 /* If all needs are met, we win. */
1525
1526 for (i = 0; i < N_REG_CLASSES; i++)
1527 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1528 break;
1529 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1530 break;
1531
1532 /* Not all needs are met; must spill some hard regs. */
1533
1534 /* Put all registers spilled so far back in potential_reload_regs, but
1535 put them at the front, since we've already spilled most of the
1536 pseudos in them (we might have left some pseudos unspilled if they
1537 were in a block that didn't need any spill registers of a conflicting
1538 class). We used to try to mark off the need for those registers,
1539 but doing so properly is very complex and reallocating them is the
1540 simpler approach. First, "pack" potential_reload_regs by pushing
1541 any nonnegative entries towards the end. That will leave room
1542 for the registers we already spilled.
1543
1544 Also, undo the marking of the spill registers from the last time
1545 around in FORBIDDEN_REGS since we will probably be allocating
1546 them again below.
1547
1548 ??? It is theoretically possible that we might end up not using one
1549 of our previously-spilled registers in this allocation, even though
1550 they are at the head of the list. It's not clear what to do about
1551 this, but it was no better before, when we marked off the needs met
1552 by the previously-spilled registers. With the current code, globals
1553 can be allocated into these registers, but locals cannot. */
1554
1555 if (n_spills)
1556 {
1557 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1558 if (potential_reload_regs[i] != -1)
1559 potential_reload_regs[j--] = potential_reload_regs[i];
1560
1561 for (i = 0; i < n_spills; i++)
1562 {
1563 potential_reload_regs[i] = spill_regs[i];
1564 spill_reg_order[spill_regs[i]] = -1;
1565 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1566 }
1567
1568 n_spills = 0;
1569 }
1570
1571 /* Now find more reload regs to satisfy the remaining need.
1572 Do it by ascending class number, since otherwise a reg
1573 might be spilled for a big class and might fail to count
1574 for a smaller class even though it belongs to that class.
1575
1576 Count spilled regs in `spills', and add entries to
1577 `spill_regs' and `spill_reg_order'.
1578
1579 ??? Note there is a problem here.
1580 When there is a need for a group in a high-numbered class,
1581 and also need for non-group regs that come from a lower class,
1582 the non-group regs are chosen first. If there aren't many regs,
1583 they might leave no room for a group.
1584
1585 This was happening on the 386. To fix it, we added the code
1586 that calls possible_group_p, so that the lower class won't
1587 break up the last possible group.
1588
1589 Really fixing the problem would require changes above
1590 in counting the regs already spilled, and in choose_reload_regs.
1591 It might be hard to avoid introducing bugs there. */
1592
1593 CLEAR_HARD_REG_SET (counted_for_groups);
1594 CLEAR_HARD_REG_SET (counted_for_nongroups);
1595
1596 for (class = 0; class < N_REG_CLASSES; class++)
1597 {
1598 /* First get the groups of registers.
1599 If we got single registers first, we might fragment
1600 possible groups. */
1601 while (max_groups[class] > 0)
1602 {
1603 /* If any single spilled regs happen to form groups,
1604 count them now. Maybe we don't really need
1605 to spill another group. */
1606 count_possible_groups (group_size, group_mode, max_groups);
1607
1608 if (max_groups[class] <= 0)
1609 break;
1610
1611 /* Groups of size 2 (the only groups used on most machines)
1612 are treated specially. */
1613 if (group_size[class] == 2)
1614 {
1615 /* First, look for a register that will complete a group. */
1616 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1617 {
1618 int other;
1619
1620 j = potential_reload_regs[i];
1621 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1622 &&
1623 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1624 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1625 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1626 && HARD_REGNO_MODE_OK (other, group_mode[class])
1627 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1628 other)
1629 /* We don't want one part of another group.
1630 We could get "two groups" that overlap! */
1631 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1632 ||
1633 (j < FIRST_PSEUDO_REGISTER - 1
1634 && (other = j + 1, spill_reg_order[other] >= 0)
1635 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1636 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1637 && HARD_REGNO_MODE_OK (j, group_mode[class])
1638 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1639 other)
1640 && ! TEST_HARD_REG_BIT (counted_for_groups,
1641 other))))
1642 {
1643 register enum reg_class *p;
1644
1645 /* We have found one that will complete a group,
1646 so count off one group as provided. */
1647 max_groups[class]--;
1648 p = reg_class_superclasses[class];
1649 while (*p != LIM_REG_CLASSES)
1650 max_groups[(int) *p++]--;
1651
1652 /* Indicate both these regs are part of a group. */
1653 SET_HARD_REG_BIT (counted_for_groups, j);
1654 SET_HARD_REG_BIT (counted_for_groups, other);
1655 break;
1656 }
1657 }
1658 /* We can't complete a group, so start one. */
1659 #ifdef SMALL_REGISTER_CLASSES
1660 /* Look for a pair neither of which is explicitly used. */
1661 if (i == FIRST_PSEUDO_REGISTER)
1662 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1663 {
1664 int k;
1665 j = potential_reload_regs[i];
1666 /* Verify that J+1 is a potential reload reg. */
1667 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1668 if (potential_reload_regs[k] == j + 1)
1669 break;
1670 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1671 && k < FIRST_PSEUDO_REGISTER
1672 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1673 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1674 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1675 && HARD_REGNO_MODE_OK (j, group_mode[class])
1676 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1677 j + 1)
1678 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1679 /* Reject J at this stage
1680 if J+1 was explicitly used. */
1681 && ! regs_explicitly_used[j + 1])
1682 break;
1683 }
1684 #endif
1685 /* Now try any group at all
1686 whose registers are not in bad_spill_regs. */
1687 if (i == FIRST_PSEUDO_REGISTER)
1688 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1689 {
1690 int k;
1691 j = potential_reload_regs[i];
1692 /* Verify that J+1 is a potential reload reg. */
1693 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1694 if (potential_reload_regs[k] == j + 1)
1695 break;
1696 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1697 && k < FIRST_PSEUDO_REGISTER
1698 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1699 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1700 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1701 && HARD_REGNO_MODE_OK (j, group_mode[class])
1702 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1703 j + 1)
1704 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1705 break;
1706 }
1707
1708 /* I should be the index in potential_reload_regs
1709 of the new reload reg we have found. */
1710
1711 if (i >= FIRST_PSEUDO_REGISTER)
1712 {
1713 /* There are no groups left to spill. */
1714 spill_failure (max_groups_insn[class]);
1715 failure = 1;
1716 goto failed;
1717 }
1718 else
1719 something_changed
1720 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1721 global, dumpfile);
1722 }
1723 else
1724 {
1725 /* For groups of more than 2 registers,
1726 look for a sufficient sequence of unspilled registers,
1727 and spill them all at once. */
1728 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1729 {
1730 int k;
1731
1732 j = potential_reload_regs[i];
1733 if (j >= 0
1734 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1735 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1736 {
1737 /* Check each reg in the sequence. */
1738 for (k = 0; k < group_size[class]; k++)
1739 if (! (spill_reg_order[j + k] < 0
1740 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1741 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1742 break;
1743 /* We got a full sequence, so spill them all. */
1744 if (k == group_size[class])
1745 {
1746 register enum reg_class *p;
1747 for (k = 0; k < group_size[class]; k++)
1748 {
1749 int idx;
1750 SET_HARD_REG_BIT (counted_for_groups, j + k);
1751 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1752 if (potential_reload_regs[idx] == j + k)
1753 break;
1754 something_changed
1755 |= new_spill_reg (idx, class,
1756 max_needs, NULL_PTR,
1757 global, dumpfile);
1758 }
1759
1760 /* We have found one that will complete a group,
1761 so count off one group as provided. */
1762 max_groups[class]--;
1763 p = reg_class_superclasses[class];
1764 while (*p != LIM_REG_CLASSES)
1765 max_groups[(int) *p++]--;
1766
1767 break;
1768 }
1769 }
1770 }
1771 /* We couldn't find any registers for this reload.
1772 Avoid going into an infinite loop. */
1773 if (i >= FIRST_PSEUDO_REGISTER)
1774 {
1775 /* There are no groups left. */
1776 spill_failure (max_groups_insn[class]);
1777 failure = 1;
1778 goto failed;
1779 }
1780 }
1781 }
1782
1783 /* Now similarly satisfy all need for single registers. */
1784
1785 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1786 {
1787 #ifdef SMALL_REGISTER_CLASSES
1788 /* This should be right for all machines, but only the 386
1789 is known to need it, so this conditional plays safe.
1790 ??? For 2.5, try making this unconditional. */
1791 /* If we spilled enough regs, but they weren't counted
1792 against the non-group need, see if we can count them now.
1793 If so, we can avoid some actual spilling. */
1794 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1795 for (i = 0; i < n_spills; i++)
1796 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1797 spill_regs[i])
1798 && !TEST_HARD_REG_BIT (counted_for_groups,
1799 spill_regs[i])
1800 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1801 spill_regs[i])
1802 && max_nongroups[class] > 0)
1803 {
1804 register enum reg_class *p;
1805
1806 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1807 max_nongroups[class]--;
1808 p = reg_class_superclasses[class];
1809 while (*p != LIM_REG_CLASSES)
1810 max_nongroups[(int) *p++]--;
1811 }
1812 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1813 break;
1814 #endif
1815
1816 /* Consider the potential reload regs that aren't
1817 yet in use as reload regs, in order of preference.
1818 Find the most preferred one that's in this class. */
1819
1820 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1821 if (potential_reload_regs[i] >= 0
1822 && TEST_HARD_REG_BIT (reg_class_contents[class],
1823 potential_reload_regs[i])
1824 /* If this reg will not be available for groups,
1825 pick one that does not foreclose possible groups.
1826 This is a kludge, and not very general,
1827 but it should be sufficient to make the 386 work,
1828 and the problem should not occur on machines with
1829 more registers. */
1830 && (max_nongroups[class] == 0
1831 || possible_group_p (potential_reload_regs[i], max_groups)))
1832 break;
1833
1834 /* If we couldn't get a register, try to get one even if we
1835 might foreclose possible groups. This may cause problems
1836 later, but that's better than aborting now, since it is
1837 possible that we will, in fact, be able to form the needed
1838 group even with this allocation. */
1839
1840 if (i >= FIRST_PSEUDO_REGISTER
1841 && (asm_noperands (max_needs[class] > 0
1842 ? max_needs_insn[class]
1843 : max_nongroups_insn[class])
1844 < 0))
1845 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1846 if (potential_reload_regs[i] >= 0
1847 && TEST_HARD_REG_BIT (reg_class_contents[class],
1848 potential_reload_regs[i]))
1849 break;
1850
1851 /* I should be the index in potential_reload_regs
1852 of the new reload reg we have found. */
1853
1854 if (i >= FIRST_PSEUDO_REGISTER)
1855 {
1856 /* There are no possible registers left to spill. */
1857 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1858 : max_nongroups_insn[class]);
1859 failure = 1;
1860 goto failed;
1861 }
1862 else
1863 something_changed
1864 |= new_spill_reg (i, class, max_needs, max_nongroups,
1865 global, dumpfile);
1866 }
1867 }
1868 }
1869
1870 /* If global-alloc was run, notify it of any register eliminations we have
1871 done. */
1872 if (global)
1873 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1874 if (ep->can_eliminate)
1875 mark_elimination (ep->from, ep->to);
1876
1877 /* Insert code to save and restore call-clobbered hard regs
1878 around calls. Tell what mode to use so that we will process
1879 those insns in reload_as_needed if we have to. */
1880
1881 if (caller_save_needed)
1882 save_call_clobbered_regs (num_eliminable ? QImode
1883 : caller_save_spill_class != NO_REGS ? HImode
1884 : VOIDmode);
1885
1886 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1887 If that insn didn't set the register (i.e., it copied the register to
1888 memory), just delete that insn instead of the equivalencing insn plus
1889 anything now dead. If we call delete_dead_insn on that insn, we may
1890 delete the insn that actually sets the register if the register dies
1891 there, and that is incorrect. */
1892
1893 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1894 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1895 && GET_CODE (reg_equiv_init[i]) != NOTE)
1896 {
1897 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1898 delete_dead_insn (reg_equiv_init[i]);
1899 else
1900 {
1901 PUT_CODE (reg_equiv_init[i], NOTE);
1902 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1903 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1904 }
1905 }
1906
1907 /* Use the reload registers where necessary
1908 by generating move instructions to move the must-be-register
1909 values into or out of the reload registers. */
1910
1911 if (something_needs_reloads || something_needs_elimination
1912 || (caller_save_needed && num_eliminable)
1913 || caller_save_spill_class != NO_REGS)
1914 reload_as_needed (first, global);
1915
1916 /* If we were able to eliminate the frame pointer, show that it is no
1917 longer live at the start of any basic block. If it is live by
1918 virtue of being in a pseudo, that pseudo will be marked live
1919 and hence the frame pointer will be known to be live via that
1920 pseudo. */
1921
1922 if (! frame_pointer_needed)
1923 for (i = 0; i < n_basic_blocks; i++)
1924 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1925 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1926 % REGSET_ELT_BITS));
1927
1928 /* Come here (with failure set nonzero) if we can't get enough spill regs
1929 and we decide not to abort about it. */
1930 failed:
1931
1932 reload_in_progress = 0;
1933
1934 /* Now eliminate all pseudo regs by modifying them into
1935 their equivalent memory references.
1936 The REG-rtx's for the pseudos are modified in place,
1937 so all insns that used to refer to them now refer to memory.
1938
1939 For a reg that has a reg_equiv_address, all those insns
1940 were changed by reloading so that no insns refer to it any longer;
1941 but the DECL_RTL of a variable decl may refer to it,
1942 and if so this causes the debugging info to mention the variable. */
1943
1944 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1945 {
1946 rtx addr = 0;
1947 int in_struct = 0;
1948 if (reg_equiv_mem[i])
1949 {
1950 addr = XEXP (reg_equiv_mem[i], 0);
1951 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1952 }
1953 if (reg_equiv_address[i])
1954 addr = reg_equiv_address[i];
1955 if (addr)
1956 {
1957 if (reg_renumber[i] < 0)
1958 {
1959 rtx reg = regno_reg_rtx[i];
1960 XEXP (reg, 0) = addr;
1961 REG_USERVAR_P (reg) = 0;
1962 MEM_IN_STRUCT_P (reg) = in_struct;
1963 PUT_CODE (reg, MEM);
1964 }
1965 else if (reg_equiv_mem[i])
1966 XEXP (reg_equiv_mem[i], 0) = addr;
1967 }
1968 }
1969
1970 #ifdef PRESERVE_DEATH_INFO_REGNO_P
1971 /* Make a pass over all the insns and remove death notes for things that
1972 are no longer registers or no longer die in the insn (e.g., an input
1973 and output pseudo being tied). */
1974
1975 for (insn = first; insn; insn = NEXT_INSN (insn))
1976 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1977 {
1978 rtx note, next;
1979
1980 for (note = REG_NOTES (insn); note; note = next)
1981 {
1982 next = XEXP (note, 1);
1983 if (REG_NOTE_KIND (note) == REG_DEAD
1984 && (GET_CODE (XEXP (note, 0)) != REG
1985 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1986 remove_note (insn, note);
1987 }
1988 }
1989 #endif
1990
1991 /* Indicate that we no longer have known memory locations or constants. */
1992 reg_equiv_constant = 0;
1993 reg_equiv_memory_loc = 0;
1994
1995 if (scratch_list)
1996 free (scratch_list);
1997 scratch_list = 0;
1998 if (scratch_block)
1999 free (scratch_block);
2000 scratch_block = 0;
2001
2002 return failure;
2003 }
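
/* The helper below is purely illustrative and is not called by reload;
   it restates the "repacking" of potential_reload_regs described in the
   big comment above over small plain arrays so the two steps can be read
   in isolation.  SKETCH_N_REGS, ORDER and CHOSEN are names invented for
   the example; it assumes, as reload does, that exactly N_CHOSEN entries
   of ORDER were previously marked -1 when they became spill regs.  */

#define SKETCH_N_REGS 8

static void
sketch_repack_order (order, chosen, n_chosen)
     int *order;
     int *chosen;
     int n_chosen;
{
  int i, j;

  /* Pack the surviving (nonnegative) entries toward the end,
     keeping their relative order.  */
  for (i = j = SKETCH_N_REGS - 1; i >= 0; i--)
    if (order[i] != -1)
      order[j--] = order[i];

  /* Slots 0 .. N_CHOSEN-1 are now free; refill them with the registers
     spilled last time so they are preferred on the next iteration.  */
  for (i = 0; i < n_chosen; i++)
    order[i] = chosen[i];
}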
2004 \f
2005 /* Nonzero if, after spilling reg REGNO for non-groups,
2006 it will still be possible to find a group if we still need one. */
2007
2008 static int
2009 possible_group_p (regno, max_groups)
2010 int regno;
2011 int *max_groups;
2012 {
2013 int i;
2014 int class = (int) NO_REGS;
2015
2016 for (i = 0; i < (int) N_REG_CLASSES; i++)
2017 if (max_groups[i] > 0)
2018 {
2019 class = i;
2020 break;
2021 }
2022
2023 if (class == (int) NO_REGS)
2024 return 1;
2025
2026 /* Consider each pair of consecutive registers. */
2027 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2028 {
2029 /* Ignore pairs that include reg REGNO. */
2030 if (i == regno || i + 1 == regno)
2031 continue;
2032
2033 /* Ignore pairs that are outside the class that needs the group.
2034 ??? Here we fail to handle the case where two different classes
2035 independently need groups. But this never happens with our
2036 current machine descriptions. */
2037 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2038 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2039 continue;
2040
2041 /* A pair of consecutive regs we can still spill does the trick. */
2042 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2043 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2044 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2045 return 1;
2046
2047 /* A pair of one already spilled and one we can spill does it
2048 provided the one already spilled is not otherwise reserved. */
2049 if (spill_reg_order[i] < 0
2050 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2051 && spill_reg_order[i + 1] >= 0
2052 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2053 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2054 return 1;
2055 if (spill_reg_order[i + 1] < 0
2056 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2057 && spill_reg_order[i] >= 0
2058 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2059 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2060 return 1;
2061 }
2062
2063 return 0;
2064 }
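
/* Purely illustrative, never called: the same pair-by-pair test that
   possible_group_p makes, restated over plain char arrays so the three
   ways a pair can still yield a group stand out.  The parameter names
   (IN_CLASS, SPILLED, BAD, RESERVED) are invented for the example;
   RESERVED lumps together counted_for_groups and counted_for_nongroups.  */

static int
sketch_pair_still_possible (regno, n_regs, in_class, spilled, bad, reserved)
     int regno;
     int n_regs;
     char *in_class;
     char *spilled;
     char *bad;
     char *reserved;
{
  int i;

  for (i = 0; i < n_regs - 1; i++)
    {
      /* Skip pairs that include REGNO itself or fall outside the class.  */
      if (i == regno || i + 1 == regno
          || ! in_class[i] || ! in_class[i + 1])
        continue;

      /* Both registers can still be spilled.  */
      if (! spilled[i] && ! bad[i] && ! spilled[i + 1] && ! bad[i + 1])
        return 1;

      /* One can still be spilled; the other already is a spill reg
         but has not been reserved for any other need.  */
      if (! spilled[i] && ! bad[i] && spilled[i + 1] && ! reserved[i + 1])
        return 1;
      if (! spilled[i + 1] && ! bad[i + 1] && spilled[i] && ! reserved[i])
        return 1;
    }

  return 0;
}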
2065 \f
2066 /* Count any groups that can be formed from the registers recently spilled.
2067 This is done class by class, in order of ascending class number. */
2068
2069 static void
2070 count_possible_groups (group_size, group_mode, max_groups)
2071 int *group_size;
2072 enum machine_mode *group_mode;
2073 int *max_groups;
2074 {
2075 int i;
2076 /* Now find all consecutive groups of spilled registers
2077 and mark each group off against the need for such groups.
2078 But don't count them against ordinary need, yet. */
2079
2080 for (i = 0; i < N_REG_CLASSES; i++)
2081 if (group_size[i] > 1)
2082 {
2083 HARD_REG_SET new;
2084 int j;
2085
2086 CLEAR_HARD_REG_SET (new);
2087
2088 /* Make a mask of all the regs that are spill regs in class I. */
2089 for (j = 0; j < n_spills; j++)
2090 if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
2091 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
2092 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
2093 spill_regs[j]))
2094 SET_HARD_REG_BIT (new, spill_regs[j]);
2095
2096 /* Find each consecutive group of them. */
2097 for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
2098 if (TEST_HARD_REG_BIT (new, j)
2099 && j + group_size[i] <= FIRST_PSEUDO_REGISTER
2100 /* Next line in case group-mode for this class
2101 demands an even-odd pair. */
2102 && HARD_REGNO_MODE_OK (j, group_mode[i]))
2103 {
2104 int k;
2105 for (k = 1; k < group_size[i]; k++)
2106 if (! TEST_HARD_REG_BIT (new, j + k))
2107 break;
2108 if (k == group_size[i])
2109 {
2110 /* We found a group. Mark it off against this class's
2111 need for groups, and against each superclass too. */
2112 register enum reg_class *p;
2113 max_groups[i]--;
2114 p = reg_class_superclasses[i];
2115 while (*p != LIM_REG_CLASSES)
2116 max_groups[(int) *p++]--;
2117 /* Don't count these registers again. */
2118 for (k = 0; k < group_size[i]; k++)
2119 SET_HARD_REG_BIT (counted_for_groups, j + k);
2120 }
2121 /* Skip to the last reg in this group. When j is incremented
2122 above, it will then point to the first reg of the next
2123 possible group. */
2124 j += k - 1;
2125 }
2126 }
2127
2128 }
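
/* Purely illustrative, never called: the run-scan of count_possible_groups
   reduced to a plain char array.  AVAIL marks registers that are spill regs
   not yet counted for anything; each run of SIZE consecutive available
   registers is counted once and then marked used.  The names are invented
   for the example, and the HARD_REGNO_MODE_OK test of the real code is
   omitted.  */

static int
sketch_count_runs (avail, n_regs, size)
     char *avail;
     int n_regs;
     int size;
{
  int i, k, found = 0;

  for (i = 0; i + size <= n_regs; i++)
    if (avail[i])
      {
        for (k = 1; k < size; k++)
          if (! avail[i + k])
            break;
        if (k == size)
          {
            found++;
            /* Don't let these registers form a second run.  */
            for (k = 0; k < size; k++)
              avail[i + k] = 0;
          }
        /* Skip to the last reg examined; the loop increment then moves
           to the first candidate for the next run.  */
        i += k - 1;
      }

  return found;
}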
2129 \f
2130 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2131 another mode that needs to be reloaded for the same register class CLASS.
2132 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2133 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2134
2135 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2136 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2137 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2138 causes unnecessary failures on machines requiring alignment of register
2139 groups when the two modes are different sizes, because the larger mode has
2140 more strict alignment rules than the smaller mode. */
2141
2142 static int
2143 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2144 enum machine_mode allocate_mode, other_mode;
2145 enum reg_class class;
2146 {
2147 register int regno;
2148 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2149 {
2150 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2151 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2152 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2153 return 0;
2154 }
2155 return 1;
2156 }
2157
2158 /* Handle the failure to find a register to spill.
2159 INSN should be one of the insns which needed this particular spill reg. */
2160
2161 static void
2162 spill_failure (insn)
2163 rtx insn;
2164 {
2165 if (asm_noperands (PATTERN (insn)) >= 0)
2166 error_for_asm (insn, "`asm' needs too many reloads");
2167 else
2168 abort ();
2169 }
2170
2171 /* Add a new register to the tables of available spill-registers
2172 (as well as spilling all pseudos allocated to the register).
2173 I is the index of this register in potential_reload_regs.
2174 CLASS is the regclass whose need is being satisfied.
2175 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2176 so that this register can count off against them.
2177 MAX_NONGROUPS is 0 if this register is part of a group.
2178 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2179
2180 static int
2181 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2182 int i;
2183 int class;
2184 int *max_needs;
2185 int *max_nongroups;
2186 int global;
2187 FILE *dumpfile;
2188 {
2189 register enum reg_class *p;
2190 int val;
2191 int regno = potential_reload_regs[i];
2192
2193 if (i >= FIRST_PSEUDO_REGISTER)
2194 abort (); /* Caller failed to find any register. */
2195
2196 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2197 fatal ("fixed or forbidden register was spilled.\n\
2198 This may be due to a compiler bug or to impossible asm\n\
2199 statements or clauses.");
2200
2201 /* Make reg REGNO an additional reload reg. */
2202
2203 potential_reload_regs[i] = -1;
2204 spill_regs[n_spills] = regno;
2205 spill_reg_order[regno] = n_spills;
2206 if (dumpfile)
2207 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2208
2209 /* Clear off the needs we just satisfied. */
2210
2211 max_needs[class]--;
2212 p = reg_class_superclasses[class];
2213 while (*p != LIM_REG_CLASSES)
2214 max_needs[(int) *p++]--;
2215
2216 if (max_nongroups && max_nongroups[class] > 0)
2217 {
2218 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2219 max_nongroups[class]--;
2220 p = reg_class_superclasses[class];
2221 while (*p != LIM_REG_CLASSES)
2222 max_nongroups[(int) *p++]--;
2223 }
2224
2225 /* Spill every pseudo reg that was allocated to this reg
2226 or to something that overlaps this reg. */
2227
2228 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2229
2230 /* If there are some registers still to eliminate and this register
2231 wasn't ever used before, additional stack space may have to be
2232 allocated to store this register. Thus, we may have changed the offset
2233 between the stack and frame pointers, so mark that something has changed.
2234 (If new pseudos were spilled, thus requiring more space, VAL would have
2235 been set non-zero by the call to spill_hard_reg above since additional
2236 reloads may be needed in that case.)
2237
2238 One might think that we need only set VAL to 1 if this is a call-used
2239 register. However, the set of registers that must be saved by the
2240 prologue is not identical to the call-used set. For example, the
2241 register used by the call insn for the return PC is a call-used register,
2242 but must be saved by the prologue. */
2243 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2244 val = 1;
2245
2246 regs_ever_live[spill_regs[n_spills]] = 1;
2247 n_spills++;
2248
2249 return val;
2250 }
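
/* Purely illustrative, never called: the "count it off against every
   superclass too" step used by new_spill_reg and by the group-counting
   loops above, with a -1-terminated list standing in for
   reg_class_superclasses and LIM_REG_CLASSES.  The names are invented
   for the example.  */

static void
sketch_charge_class (needs, class, superclasses)
     int *needs;
     int class;
     int *superclasses;
{
  int *p;

  needs[class]--;
  for (p = superclasses; *p != -1; p++)
    needs[*p]--;
}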
2251 \f
2252 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2253 data that is dead in INSN. */
2254
2255 static void
2256 delete_dead_insn (insn)
2257 rtx insn;
2258 {
2259 rtx prev = prev_real_insn (insn);
2260 rtx prev_dest;
2261
2262 /* If the previous insn sets a register that dies in our insn, delete it
2263 too. */
2264 if (prev && GET_CODE (PATTERN (prev)) == SET
2265 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2266 && reg_mentioned_p (prev_dest, PATTERN (insn))
2267 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2268 delete_dead_insn (prev);
2269
2270 PUT_CODE (insn, NOTE);
2271 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2272 NOTE_SOURCE_FILE (insn) = 0;
2273 }
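
/* Purely illustrative, never called: the backward walk of delete_dead_insn
   over a toy insn record.  PREV, SETS_REG and DIES_HERE stand in, loosely,
   for the insn chain, the SET destination and the REG_DEAD note tested
   above; all of the names are invented for the example and the
   reg_mentioned_p test is folded into DIES_HERE.  */

#define SKETCH_MAX_REG 32

struct sketch_dead_insn
{
  struct sketch_dead_insn *prev;
  int sets_reg;                         /* register set here, or -1 */
  char dies_here[SKETCH_MAX_REG];       /* nonzero if that register dies here */
  int deleted;                          /* stands in for becoming a NOTE */
};

static void
sketch_delete_dead (insn)
     struct sketch_dead_insn *insn;
{
  struct sketch_dead_insn *prev = insn->prev;

  /* If the previous insn exists only to load a register that dies in
     INSN, it becomes dead as well once INSN is deleted.  */
  if (prev != 0 && prev->sets_reg >= 0 && prev->sets_reg < SKETCH_MAX_REG
      && insn->dies_here[prev->sets_reg])
    sketch_delete_dead (prev);

  insn->deleted = 1;
}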
2274
2275 /* Modify the home of pseudo-reg I.
2276 The new home is present in reg_renumber[I].
2277
2278 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2279 or it may be -1, meaning there is none or it is not relevant.
2280 This is used so that all pseudos spilled from a given hard reg
2281 can share one stack slot. */
2282
2283 static void
2284 alter_reg (i, from_reg)
2285 register int i;
2286 int from_reg;
2287 {
2288 /* When outputting an inline function, this can happen
2289 for a reg that isn't actually used. */
2290 if (regno_reg_rtx[i] == 0)
2291 return;
2292
2293 /* If the reg got changed to a MEM at rtl-generation time,
2294 ignore it. */
2295 if (GET_CODE (regno_reg_rtx[i]) != REG)
2296 return;
2297
2298 /* Modify the reg-rtx to contain the new hard reg
2299 number or else to contain its pseudo reg number. */
2300 REGNO (regno_reg_rtx[i])
2301 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2302
2303 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2304 allocate a stack slot for it. */
2305
2306 if (reg_renumber[i] < 0
2307 && reg_n_refs[i] > 0
2308 && reg_equiv_constant[i] == 0
2309 && reg_equiv_memory_loc[i] == 0)
2310 {
2311 register rtx x;
2312 int inherent_size = PSEUDO_REGNO_BYTES (i);
2313 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2314 int adjust = 0;
2315
2316 /* Each pseudo reg has an inherent size which comes from its own mode,
2317 and a total size which provides room for paradoxical subregs
2318 which refer to the pseudo reg in wider modes.
2319
2320 We can use a slot already allocated if it provides both
2321 enough inherent space and enough total space.
2322 Otherwise, we allocate a new slot, making sure that it has no less
2323 inherent space, and no less total space, than the previous slot. */
2324 if (from_reg == -1)
2325 {
2326 /* No known place to spill from => no slot to reuse. */
2327 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2328 #if BYTES_BIG_ENDIAN
2329 /* Cancel the big-endian correction done in assign_stack_local.
2330 Get the address of the beginning of the slot.
2331 This is so we can do a big-endian correction unconditionally
2332 below. */
2333 adjust = inherent_size - total_size;
2334 #endif
2335 }
2336 /* Reuse a stack slot if possible. */
2337 else if (spill_stack_slot[from_reg] != 0
2338 && spill_stack_slot_width[from_reg] >= total_size
2339 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2340 >= inherent_size))
2341 x = spill_stack_slot[from_reg];
2342 /* Allocate a bigger slot. */
2343 else
2344 {
2345 /* Compute maximum size needed, both for inherent size
2346 and for total size. */
2347 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2348 if (spill_stack_slot[from_reg])
2349 {
2350 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2351 > inherent_size)
2352 mode = GET_MODE (spill_stack_slot[from_reg]);
2353 if (spill_stack_slot_width[from_reg] > total_size)
2354 total_size = spill_stack_slot_width[from_reg];
2355 }
2356 /* Make a slot with that size. */
2357 x = assign_stack_local (mode, total_size, -1);
2358 #if BYTES_BIG_ENDIAN
2359 /* Cancel the big-endian correction done in assign_stack_local.
2360 Get the address of the beginning of the slot.
2361 This is so we can do a big-endian correction unconditionally
2362 below. */
2363 adjust = GET_MODE_SIZE (mode) - total_size;
2364 #endif
2365 spill_stack_slot[from_reg] = x;
2366 spill_stack_slot_width[from_reg] = total_size;
2367 }
2368
2369 #if BYTES_BIG_ENDIAN
2370 /* On a big endian machine, the "address" of the slot
2371 is the address of the low part that fits its inherent mode. */
2372 if (inherent_size < total_size)
2373 adjust += (total_size - inherent_size);
2374 #endif /* BYTES_BIG_ENDIAN */
2375
2376 /* If we have any adjustment to make, or if the stack slot is the
2377 wrong mode, make a new stack slot. */
2378 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2379 {
2380 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2381 plus_constant (XEXP (x, 0), adjust));
2382 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2383 }
2384
2385 /* Save the stack slot for later. */
2386 reg_equiv_memory_loc[i] = x;
2387 }
2388 }
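
/* Purely illustrative, never called: the big-endian offset rule used by
   alter_reg, as plain arithmetic.  A pseudo whose own mode occupies
   INHERENT_SIZE bytes of a TOTAL_SIZE-byte slot has its low-order part at
   the high end of the slot on a big-endian machine, so the MEM address is
   offset by the difference; with an 8-byte slot and a 4-byte register the
   offset is 4.  The function and parameter names are invented for the
   example.  */

static int
sketch_low_part_offset (inherent_size, total_size, bytes_big_endian)
     int inherent_size;
     int total_size;
     int bytes_big_endian;
{
  if (bytes_big_endian && inherent_size < total_size)
    return total_size - inherent_size;
  return 0;
}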
2389
2390 /* Mark the slots in regs_ever_live for the hard regs
2391 used by pseudo-reg number REGNO. */
2392
2393 void
2394 mark_home_live (regno)
2395 int regno;
2396 {
2397 register int i, lim;
2398 i = reg_renumber[regno];
2399 if (i < 0)
2400 return;
2401 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2402 while (i < lim)
2403 regs_ever_live[i++] = 1;
2404 }
2405
2406 /* Mark the registers used in SCRATCH as being live. */
2407
2408 static void
2409 mark_scratch_live (scratch)
2410 rtx scratch;
2411 {
2412 register int i;
2413 int regno = REGNO (scratch);
2414 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2415
2416 for (i = regno; i < lim; i++)
2417 regs_ever_live[i] = 1;
2418 }
2419 \f
2420 /* This function handles the tracking of elimination offsets around branches.
2421
2422 X is a piece of RTL being scanned.
2423
2424 INSN is the insn that it came from, if any.
2425
2426 INITIAL_P is non-zero if we are to set the offset to be the initial
2427 offset and zero if we are setting the offset of the label to be the
2428 current offset. */
2429
2430 static void
2431 set_label_offsets (x, insn, initial_p)
2432 rtx x;
2433 rtx insn;
2434 int initial_p;
2435 {
2436 enum rtx_code code = GET_CODE (x);
2437 rtx tem;
2438 int i;
2439 struct elim_table *p;
2440
2441 switch (code)
2442 {
2443 case LABEL_REF:
2444 if (LABEL_REF_NONLOCAL_P (x))
2445 return;
2446
2447 x = XEXP (x, 0);
2448
2449 /* ... fall through ... */
2450
2451 case CODE_LABEL:
2452 /* If we know nothing about this label, set the desired offsets. Note
2453 that this sets the offset at a label to be the offset before a label
2454 if we don't know anything about the label. This is not correct for
2455 the label after a BARRIER, but is the best guess we can make. If
2456 we guessed wrong, we will suppress an elimination that might have
2457 been possible had we been able to guess correctly. */
2458
2459 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2460 {
2461 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2462 offsets_at[CODE_LABEL_NUMBER (x)][i]
2463 = (initial_p ? reg_eliminate[i].initial_offset
2464 : reg_eliminate[i].offset);
2465 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2466 }
2467
2468 /* Otherwise, if this is the definition of a label and it is
2469 preceded by a BARRIER, set our offsets to the known offset of
2470 that label. */
2471
2472 else if (x == insn
2473 && (tem = prev_nonnote_insn (insn)) != 0
2474 && GET_CODE (tem) == BARRIER)
2475 {
2476 num_not_at_initial_offset = 0;
2477 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2478 {
2479 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2480 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2481 if (reg_eliminate[i].can_eliminate
2482 && (reg_eliminate[i].offset
2483 != reg_eliminate[i].initial_offset))
2484 num_not_at_initial_offset++;
2485 }
2486 }
2487
2488 else
2489 /* If neither of the above cases is true, compare each offset
2490 with those previously recorded and suppress any eliminations
2491 where the offsets disagree. */
2492
2493 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2494 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2495 != (initial_p ? reg_eliminate[i].initial_offset
2496 : reg_eliminate[i].offset))
2497 reg_eliminate[i].can_eliminate = 0;
2498
2499 return;
2500
2501 case JUMP_INSN:
2502 set_label_offsets (PATTERN (insn), insn, initial_p);
2503
2504 /* ... fall through ... */
2505
2506 case INSN:
2507 case CALL_INSN:
2508 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2509 and hence must have all eliminations at their initial offsets. */
2510 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2511 if (REG_NOTE_KIND (tem) == REG_LABEL)
2512 set_label_offsets (XEXP (tem, 0), insn, 1);
2513 return;
2514
2515 case ADDR_VEC:
2516 case ADDR_DIFF_VEC:
2517 /* Each of the labels in the address vector must be at their initial
2518 offsets. We want the first field for ADDR_VEC and the second
2519 field for ADDR_DIFF_VEC. */
2520
2521 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2522 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2523 insn, initial_p);
2524 return;
2525
2526 case SET:
2527 /* We only care about setting PC. If the source is not RETURN,
2528 IF_THEN_ELSE, or a label, disable any eliminations not at
2529 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2530 isn't one of those possibilities. For branches to a label,
2531 call ourselves recursively.
2532
2533 Note that this can disable elimination unnecessarily when we have
2534 a non-local goto since it will look like a non-constant jump to
2535 someplace in the current function. This isn't a significant
2536 problem since such jumps will normally be when all elimination
2537 pairs are back to their initial offsets. */
2538
2539 if (SET_DEST (x) != pc_rtx)
2540 return;
2541
2542 switch (GET_CODE (SET_SRC (x)))
2543 {
2544 case PC:
2545 case RETURN:
2546 return;
2547
2548 case LABEL_REF:
2549 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2550 return;
2551
2552 case IF_THEN_ELSE:
2553 tem = XEXP (SET_SRC (x), 1);
2554 if (GET_CODE (tem) == LABEL_REF)
2555 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2556 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2557 break;
2558
2559 tem = XEXP (SET_SRC (x), 2);
2560 if (GET_CODE (tem) == LABEL_REF)
2561 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2562 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2563 break;
2564 return;
2565 }
2566
2567 /* If we reach here, all eliminations must be at their initial
2568 offset because we are doing a jump to a variable address. */
2569 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2570 if (p->offset != p->initial_offset)
2571 p->can_eliminate = 0;
2572 }
2573 }
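
/* Purely illustrative, never called: the bookkeeping pattern of
   set_label_offsets reduced to a single elimination and plain arrays.
   SKETCH_OFFSET_KNOWN and SKETCH_OFFSET_AT stand in for offsets_known_at
   and offsets_at; CAN_ELIMINATE stands in for the per-elimination flag.
   The names are invented for the example, and the BARRIER special case
   above is omitted.  */

#define SKETCH_N_LABELS 32

static char sketch_offset_known[SKETCH_N_LABELS];
static int sketch_offset_at[SKETCH_N_LABELS];

static void
sketch_note_label (label, current_offset, can_eliminate)
     int label;
     int current_offset;
     int *can_eliminate;
{
  if (! sketch_offset_known[label])
    {
      /* First time this label is seen: record the offset in force.  */
      sketch_offset_at[label] = current_offset;
      sketch_offset_known[label] = 1;
    }
  else if (sketch_offset_at[label] != current_offset)
    /* The label can be reached with two different offsets, so this
       elimination must be given up.  */
    *can_eliminate = 0;
}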
2574 \f
2575 /* Used for communication between the next two functions to properly share
2576 the vector for an ASM_OPERANDS. */
2577
2578 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2579
2580 /* Scan X and replace any eliminable registers (such as fp) with a
2581 replacement (such as sp), plus an offset.
2582
2583 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2584 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2585 MEM, we are allowed to replace a sum of a register and the constant zero
2586 with the register, which we cannot do outside a MEM. In addition, we need
2587 to record the fact that a register is referenced outside a MEM.
2588
2589 If INSN is an insn, it is the insn containing X. If we replace a REG
2590 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2591 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2592 the REG is being modified.
2593
2594 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2595 That's used when we eliminate in expressions stored in notes.
2596 This means, do not set ref_outside_mem even if the reference
2597 is outside of MEMs.
2598
2599 If we see a modification to a register we know about, take the
2600 appropriate action (see case SET, below).
2601
2602 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2603 replacements done assuming all offsets are at their initial values. If
2604 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2605 encounter, return the actual location so that find_reloads will do
2606 the proper thing. */
2607
2608 rtx
2609 eliminate_regs (x, mem_mode, insn)
2610 rtx x;
2611 enum machine_mode mem_mode;
2612 rtx insn;
2613 {
2614 enum rtx_code code = GET_CODE (x);
2615 struct elim_table *ep;
2616 int regno;
2617 rtx new;
2618 int i, j;
2619 char *fmt;
2620 int copied = 0;
2621
2622 switch (code)
2623 {
2624 case CONST_INT:
2625 case CONST_DOUBLE:
2626 case CONST:
2627 case SYMBOL_REF:
2628 case CODE_LABEL:
2629 case PC:
2630 case CC0:
2631 case ASM_INPUT:
2632 case ADDR_VEC:
2633 case ADDR_DIFF_VEC:
2634 case RETURN:
2635 return x;
2636
2637 case REG:
2638 regno = REGNO (x);
2639
2640 /* First handle the case where we encounter a bare register that
2641 is eliminable. Replace it with a PLUS. */
2642 if (regno < FIRST_PSEUDO_REGISTER)
2643 {
2644 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2645 ep++)
2646 if (ep->from_rtx == x && ep->can_eliminate)
2647 {
2648 if (! mem_mode
2649 /* Refs inside notes don't count for this purpose. */
2650 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2651 || GET_CODE (insn) == INSN_LIST)))
2652 ep->ref_outside_mem = 1;
2653 return plus_constant (ep->to_rtx, ep->previous_offset);
2654 }
2655
2656 }
2657 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2658 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2659 {
2660 /* In this case, find_reloads would attempt to either use an
2661 incorrect address (if something is not at its initial offset)
2662 or substitute an replaced address into an insn (which loses
2663 if the offset is changed by some later action). So we simply
2664 return the replaced stack slot (assuming it is changed by
2665 elimination) and ignore the fact that this is actually a
2666 reference to the pseudo. Ensure we make a copy of the
2667 address in case it is shared. */
2668 new = eliminate_regs (reg_equiv_memory_loc[regno],
2669 mem_mode, insn);
2670 if (new != reg_equiv_memory_loc[regno])
2671 {
2672 cannot_omit_stores[regno] = 1;
2673 return copy_rtx (new);
2674 }
2675 }
2676 return x;
2677
2678 case PLUS:
2679 /* If this is the sum of an eliminable register and a constant, rework
2680 the sum. */
2681 if (GET_CODE (XEXP (x, 0)) == REG
2682 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2683 && CONSTANT_P (XEXP (x, 1)))
2684 {
2685 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2686 ep++)
2687 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2688 {
2689 if (! mem_mode
2690 /* Refs inside notes don't count for this purpose. */
2691 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2692 || GET_CODE (insn) == INSN_LIST)))
2693 ep->ref_outside_mem = 1;
2694
2695 /* The only time we want to replace a PLUS with a REG (this
2696 occurs when the constant operand of the PLUS is the negative
2697 of the offset) is when we are inside a MEM. We won't want
2698 to do so at other times because that would change the
2699 structure of the insn in a way that reload can't handle.
2700 We special-case the commonest situation in
2701 eliminate_regs_in_insn, so just replace a PLUS with a
2702 PLUS here, unless inside a MEM. */
2703 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2704 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2705 return ep->to_rtx;
2706 else
2707 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2708 plus_constant (XEXP (x, 1),
2709 ep->previous_offset));
2710 }
2711
2712 /* If the register is not eliminable, we are done since the other
2713 operand is a constant. */
2714 return x;
2715 }
2716
2717 /* If this is part of an address, we want to bring any constant to the
2718 outermost PLUS. We will do this by doing register replacement in
2719 our operands and seeing if a constant shows up in one of them.
2720
2721 We assume here this is part of an address (or a "load address" insn)
2722 since an eliminable register is not likely to appear in any other
2723 context.
2724
2725 If we have (plus (eliminable) (reg)), we want to produce
2726 (plus (plus (replacement) (reg)) (const)). If this was part of a
2727 normal add insn, (plus (replacement) (reg)) will be pushed as a
2728 reload. This is the desired action. */
2729
2730 {
2731 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2732 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2733
2734 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2735 {
2736 /* If one side is a PLUS and the other side is a pseudo that
2737 didn't get a hard register but has a reg_equiv_constant,
2738 we must replace the constant here since it may no longer
2739 be in the position of any operand. */
2740 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2741 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2742 && reg_renumber[REGNO (new1)] < 0
2743 && reg_equiv_constant != 0
2744 && reg_equiv_constant[REGNO (new1)] != 0)
2745 new1 = reg_equiv_constant[REGNO (new1)];
2746 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2747 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2748 && reg_renumber[REGNO (new0)] < 0
2749 && reg_equiv_constant[REGNO (new0)] != 0)
2750 new0 = reg_equiv_constant[REGNO (new0)];
2751
2752 new = form_sum (new0, new1);
2753
2754 /* As above, if we are not inside a MEM we do not want to
2755 turn a PLUS into something else. We might try to do so here
2756 for an addition of 0 if we aren't optimizing. */
2757 if (! mem_mode && GET_CODE (new) != PLUS)
2758 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2759 else
2760 return new;
2761 }
2762 }
2763 return x;
2764
2765 case MULT:
2766 /* If this is the product of an eliminable register and a
2767 constant, apply the distributive law and move the constant out
2768 so that we have (plus (mult ..) ..). This is needed in order
2769 to keep load-address insns valid. This case is pathological.
2770 We ignore the possibility of overflow here. */
2771 if (GET_CODE (XEXP (x, 0)) == REG
2772 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2773 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2774 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2775 ep++)
2776 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2777 {
2778 if (! mem_mode
2779 /* Refs inside notes don't count for this purpose. */
2780 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2781 || GET_CODE (insn) == INSN_LIST)))
2782 ep->ref_outside_mem = 1;
2783
2784 return
2785 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2786 ep->previous_offset * INTVAL (XEXP (x, 1)));
2787 }
2788
2789 /* ... fall through ... */
2790
2791 case CALL:
2792 case COMPARE:
2793 case MINUS:
2794 case DIV: case UDIV:
2795 case MOD: case UMOD:
2796 case AND: case IOR: case XOR:
2797 case ROTATERT: case ROTATE:
2798 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2799 case NE: case EQ:
2800 case GE: case GT: case GEU: case GTU:
2801 case LE: case LT: case LEU: case LTU:
2802 {
2803 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2804 rtx new1
2805 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2806
2807 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2808 return gen_rtx (code, GET_MODE (x), new0, new1);
2809 }
2810 return x;
2811
2812 case EXPR_LIST:
2813 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2814 if (XEXP (x, 0))
2815 {
2816 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2817 if (new != XEXP (x, 0))
2818 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2819 }
2820
2821 /* ... fall through ... */
2822
2823 case INSN_LIST:
2824 /* Now do eliminations in the rest of the chain. If this was
2825 an EXPR_LIST, this might result in allocating more memory than is
2826 strictly needed, but it simplifies the code. */
2827 if (XEXP (x, 1))
2828 {
2829 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2830 if (new != XEXP (x, 1))
2831 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2832 }
2833 return x;
2834
2835 case PRE_INC:
2836 case POST_INC:
2837 case PRE_DEC:
2838 case POST_DEC:
2839 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2840 if (ep->to_rtx == XEXP (x, 0))
2841 {
2842 int size = GET_MODE_SIZE (mem_mode);
2843
2844 /* If more bytes than MEM_MODE are pushed, account for them. */
2845 #ifdef PUSH_ROUNDING
2846 if (ep->to_rtx == stack_pointer_rtx)
2847 size = PUSH_ROUNDING (size);
2848 #endif
2849 if (code == PRE_DEC || code == POST_DEC)
2850 ep->offset += size;
2851 else
2852 ep->offset -= size;
2853 }
2854
2855 /* Fall through to generic unary operation case. */
2856 case USE:
2857 case STRICT_LOW_PART:
2858 case NEG: case NOT:
2859 case SIGN_EXTEND: case ZERO_EXTEND:
2860 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2861 case FLOAT: case FIX:
2862 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2863 case ABS:
2864 case SQRT:
2865 case FFS:
2866 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2867 if (new != XEXP (x, 0))
2868 return gen_rtx (code, GET_MODE (x), new);
2869 return x;
2870
2871 case SUBREG:
2872 /* Similar to above processing, but preserve SUBREG_WORD.
2873 Convert (subreg (mem)) to (mem) if not paradoxical.
2874 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2875 pseudo didn't get a hard reg, we must replace this with the
2876 eliminated version of the memory location because push_reloads
2877 may do the replacement in certain circumstances. */
2878 if (GET_CODE (SUBREG_REG (x)) == REG
2879 && (GET_MODE_SIZE (GET_MODE (x))
2880 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2881 && reg_equiv_memory_loc != 0
2882 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2883 {
2884 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2885 mem_mode, insn);
2886
2887 /* If we didn't change anything, we must retain the pseudo. */
2888 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2889 new = SUBREG_REG (x);
2890 else
2891 {
2892 /* Otherwise, ensure NEW isn't shared in case we have to reload
2893 it. */
2894 new = copy_rtx (new);
2895
2896 /* In this case, we must show that the pseudo is used in this
2897 insn so that delete_output_reload will do the right thing. */
2898 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2899 && GET_CODE (insn) != INSN_LIST)
2900 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
2901 insn);
2902 }
2903 }
2904 else
2905 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2906
2907 if (new != XEXP (x, 0))
2908 {
2909 if (GET_CODE (new) == MEM
2910 && (GET_MODE_SIZE (GET_MODE (x))
2911 <= GET_MODE_SIZE (GET_MODE (new)))
2912 #ifdef LOAD_EXTEND_OP
2913 /* On these machines we will be reloading what is
2914 inside the SUBREG if it originally was a pseudo and
2915 the inner and outer modes are both a word or
2916 smaller. So leave the SUBREG then. */
2917 && ! (GET_CODE (SUBREG_REG (x)) == REG
2918 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2919 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD)
2920 #endif
2921 )
2922 {
2923 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2924 enum machine_mode mode = GET_MODE (x);
2925
2926 #if BYTES_BIG_ENDIAN
2927 offset += (MIN (UNITS_PER_WORD,
2928 GET_MODE_SIZE (GET_MODE (new)))
2929 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2930 #endif
2931
2932 PUT_MODE (new, mode);
2933 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2934 return new;
2935 }
2936 else
2937 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2938 }
2939
2940 return x;
2941
2942 case CLOBBER:
2943 /* If clobbering a register that is the replacement register for an
2944 elimination we still think can be performed, note that it cannot
2945 be performed. Otherwise, we need not be concerned about it. */
2946 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2947 if (ep->to_rtx == XEXP (x, 0))
2948 ep->can_eliminate = 0;
2949
2950 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2951 if (new != XEXP (x, 0))
2952 return gen_rtx (code, GET_MODE (x), new);
2953 return x;
2954
2955 case ASM_OPERANDS:
2956 {
2957 rtx *temp_vec;
2958 /* Properly handle sharing input and constraint vectors. */
2959 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2960 {
2961 /* When we come to a new vector not seen before,
2962 scan all its elements; keep the old vector if none
2963 of them changes; otherwise, make a copy. */
2964 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2965 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2966 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2967 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
2968 mem_mode, insn);
2969
2970 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2971 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2972 break;
2973
2974 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2975 new_asm_operands_vec = old_asm_operands_vec;
2976 else
2977 new_asm_operands_vec
2978 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2979 }
2980
2981 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2982 if (new_asm_operands_vec == old_asm_operands_vec)
2983 return x;
2984
2985 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2986 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2987 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2988 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2989 ASM_OPERANDS_SOURCE_FILE (x),
2990 ASM_OPERANDS_SOURCE_LINE (x));
2991 new->volatil = x->volatil;
2992 return new;
2993 }
2994
2995 case SET:
2996 /* Check for setting a register that we know about. */
2997 if (GET_CODE (SET_DEST (x)) == REG)
2998 {
2999 /* See if this is setting the replacement register for an
3000 elimination.
3001
3002 If DEST is the hard frame pointer, we do nothing because we
3003 assume that all assignments to the frame pointer are for
3004 non-local gotos and are being done at a time when they are valid
3005 and do not disturb anything else. Some machines want to
3006 eliminate a fake argument pointer (or even a fake frame pointer)
3007 with either the real frame or the stack pointer. Assignments to
3008 the hard frame pointer must not prevent this elimination. */
3009
3010 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3011 ep++)
3012 if (ep->to_rtx == SET_DEST (x)
3013 && SET_DEST (x) != hard_frame_pointer_rtx)
3014 {
3015 /* If it is being incremented, adjust the offset. Otherwise,
3016 this elimination can't be done. */
3017 rtx src = SET_SRC (x);
3018
3019 if (GET_CODE (src) == PLUS
3020 && XEXP (src, 0) == SET_DEST (x)
3021 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3022 ep->offset -= INTVAL (XEXP (src, 1));
3023 else
3024 ep->can_eliminate = 0;
3025 }
3026
3027 /* Now check to see if we are assigning to a register that can be
3028 eliminated. If so, it must be as part of a PARALLEL, since we
3029 will not have been called if this is a single SET. So indicate
3030 that we can no longer eliminate this reg. */
3031 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3032 ep++)
3033 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3034 ep->can_eliminate = 0;
3035 }
3036
3037 /* Now avoid the loop below in this common case. */
3038 {
3039 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3040 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3041
3042 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3043 write a CLOBBER insn. */
3044 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3045 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3046 && GET_CODE (insn) != INSN_LIST)
3047 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3048
3049 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3050 return gen_rtx (SET, VOIDmode, new0, new1);
3051 }
3052
3053 return x;
3054
3055 case MEM:
3056 /* Our only special processing is to pass the mode of the MEM to our
3057 recursive call and copy the flags. While we are here, handle this
3058 case more efficiently. */
3059 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3060 if (new != XEXP (x, 0))
3061 {
3062 new = gen_rtx (MEM, GET_MODE (x), new);
3063 new->volatil = x->volatil;
3064 new->unchanging = x->unchanging;
3065 new->in_struct = x->in_struct;
3066 return new;
3067 }
3068 else
3069 return x;
3070 }
3071
3072 /* Process each of our operands recursively. If any have changed, make a
3073 copy of the rtx. */
3074 fmt = GET_RTX_FORMAT (code);
3075 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3076 {
3077 if (*fmt == 'e')
3078 {
3079 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3080 if (new != XEXP (x, i) && ! copied)
3081 {
3082 rtx new_x = rtx_alloc (code);
3083 bcopy ((char *) x, (char *) new_x,
3084 (sizeof (*new_x) - sizeof (new_x->fld)
3085 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3086 x = new_x;
3087 copied = 1;
3088 }
3089 XEXP (x, i) = new;
3090 }
3091 else if (*fmt == 'E')
3092 {
3093 int copied_vec = 0;
3094 for (j = 0; j < XVECLEN (x, i); j++)
3095 {
3096 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3097 if (new != XVECEXP (x, i, j) && ! copied_vec)
3098 {
3099 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3100 &XVECEXP (x, i, 0));
3101 if (! copied)
3102 {
3103 rtx new_x = rtx_alloc (code);
3104 bcopy ((char *) x, (char *) new_x,
3105 (sizeof (*new_x) - sizeof (new_x->fld)
3106 + (sizeof (new_x->fld[0])
3107 * GET_RTX_LENGTH (code))));
3108 x = new_x;
3109 copied = 1;
3110 }
3111 XVEC (x, i) = new_v;
3112 copied_vec = 1;
3113 }
3114 XVECEXP (x, i, j) = new;
3115 }
3116 }
3117 }
3118
3119 return x;
3120 }
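
/* Purely illustrative, never called: the two simplest rewrites made by
   eliminate_regs, restated over a toy "base register plus constant" pair
   instead of rtl.  A bare FROM becomes TO plus the current elimination
   offset; FROM plus a constant keeps the sum and folds the offset into the
   constant.  The struct and every name in it are invented for the
   example.  */

struct sketch_address
{
  int base_reg;                 /* base register number */
  int constant;                 /* constant displacement */
};

static struct sketch_address
sketch_eliminate_base (addr, from, to, offset)
     struct sketch_address addr;
     int from, to, offset;
{
  if (addr.base_reg == from)
    {
      /* (reg FROM)          -> (plus (reg TO) OFFSET)
         (plus (reg FROM) C) -> (plus (reg TO) (C + OFFSET))  */
      addr.base_reg = to;
      addr.constant += offset;
    }
  return addr;
}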
3121 \f
3122 /* Scan INSN and eliminate all eliminable registers in it.
3123
3124 If REPLACE is nonzero, do the replacement destructively. Also
3125 delete the insn as dead if it is setting an eliminable register.
3126
3127 If REPLACE is zero, do all our allocations in reload_obstack.
3128
3129 If no eliminations were done and this insn doesn't require any elimination
3130 processing (these are not identical conditions: it might be updating sp,
3131 but not referencing fp; this needs to be seen during reload_as_needed so
3132 that the offset between fp and sp can be taken into consideration), zero
3133 is returned. Otherwise, 1 is returned. */
3134
3135 static int
3136 eliminate_regs_in_insn (insn, replace)
3137 rtx insn;
3138 int replace;
3139 {
3140 rtx old_body = PATTERN (insn);
3141 rtx old_set = single_set (insn);
3142 rtx new_body;
3143 int val = 0;
3144 struct elim_table *ep;
3145
3146 if (! replace)
3147 push_obstacks (&reload_obstack, &reload_obstack);
3148
3149 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3150 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3151 {
3152 /* Check for setting an eliminable register. */
3153 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3154 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3155 {
3156 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3157 /* If this is setting the frame pointer register to the
3158 hardware frame pointer register and this is an elimination
3159 that will be done (tested above), this insn is really
3160 adjusting the frame pointer downward to compensate for
3161 the adjustment done before a nonlocal goto. */
3162 if (ep->from == FRAME_POINTER_REGNUM
3163 && ep->to == HARD_FRAME_POINTER_REGNUM)
3164 {
3165 rtx src = SET_SRC (old_set);
3166 int offset, ok = 0;
3167
3168 if (src == ep->to_rtx)
3169 offset = 0, ok = 1;
3170 else if (GET_CODE (src) == PLUS
3171 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3172 offset = INTVAL (XEXP (src, 0)), ok = 1;
3173
3174 if (ok)
3175 {
3176 if (replace)
3177 {
3178 rtx src
3179 = plus_constant (ep->to_rtx, offset - ep->offset);
3180
3181 /* First see if this insn remains valid when we
3182 make the change. If not, keep the INSN_CODE
3183 the same and let reload fix it up. */
3184 validate_change (insn, &SET_SRC (old_set), src, 1);
3185 validate_change (insn, &SET_DEST (old_set),
3186 ep->to_rtx, 1);
3187 if (! apply_change_group ())
3188 {
3189 SET_SRC (old_set) = src;
3190 SET_DEST (old_set) = ep->to_rtx;
3191 }
3192 }
3193
3194 val = 1;
3195 goto done;
3196 }
3197 }
3198 #endif
3199
3200 /* In this case this insn isn't serving a useful purpose. We
3201 will delete it in reload_as_needed once we know that this
3202 elimination is, in fact, being done.
3203
3204 If REPLACE isn't set, we can't delete this insn, but needn't
3205 process it since it won't be used unless something changes. */
3206 if (replace)
3207 delete_dead_insn (insn);
3208 val = 1;
3209 goto done;
3210 }
3211
3212 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3213 in the insn is the negative of the offset in FROM. Substitute
3214 (set (reg) (reg to)) for the insn and change its code.
3215
3216 We have to do this here, rather than in eliminate_regs, so that we can
3217 change the insn code. */
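	 /* An illustrative sketch (the offset and pseudo number below are
	    hypothetical): if FROM is the frame pointer, TO is the stack
	    pointer and the current elimination offset is 16 (i.e. fp is
	    sp + 16), then
		(set (reg 117) (plus (reg fp) (const_int -16)))
	    is rewritten as
		(set (reg 117) (reg sp))
	    since the constant exactly cancels the offset, and the insn is
	    then a simple move that can be re-recognized.  */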
3218
3219 if (GET_CODE (SET_SRC (old_set)) == PLUS
3220 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3221 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3222 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3223 ep++)
3224 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3225 && ep->can_eliminate)
3226 {
3227 /* We must stop at the first elimination that will be used.
3228 If this one would replace the PLUS with a REG, do it
3229 now. Otherwise, quit the loop and let eliminate_regs
3230 do its normal replacement. */
3231 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3232 {
3233 /* We assume here that we don't need a PARALLEL of
3234 any CLOBBERs for this assignment. There's not
3235 much we can do if we do need it. */
3236 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3237 SET_DEST (old_set), ep->to_rtx);
3238 INSN_CODE (insn) = -1;
3239 val = 1;
3240 goto done;
3241 }
3242
3243 break;
3244 }
3245 }
3246
3247 old_asm_operands_vec = 0;
3248
3249 /* Replace the body of this insn with a substituted form. If we changed
3250 something, return non-zero.
3251
3252 If we are replacing a body that was a (set X (plus Y Z)), try to
3253 re-recognize the insn. We do this in case we had a simple addition
3254 but now can do this as a load-address. This saves an insn in this
3255 common case. */
3256
3257 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3258 if (new_body != old_body)
3259 {
3260 /* If we aren't replacing things permanently and we changed something,
3261 make another copy to ensure that all the RTL is new. Otherwise
3262 things can go wrong if find_reloads swaps commutative operands
3263 and one is inside RTL that has been copied while the other is not. */
3264
3265 /* Don't copy an asm_operands because (1) there's no need and (2)
3266 copy_rtx can't do it properly when there are multiple outputs. */
3267 if (! replace && asm_noperands (old_body) < 0)
3268 new_body = copy_rtx (new_body);
3269
3270 /* If we had a move insn but now we don't, rerecognize it. This will
3271 cause spurious re-recognition if the old move had a PARALLEL since
3272 the new one still will, but we can't call single_set without
3273 having put NEW_BODY into the insn and the re-recognition won't
3274 hurt in this rare case. */
3275 if (old_set != 0
3276 && ((GET_CODE (SET_SRC (old_set)) == REG
3277 && (GET_CODE (new_body) != SET
3278 || GET_CODE (SET_SRC (new_body)) != REG))
3279 /* If this was a load from or store to memory, compare
3280 the MEM in recog_operand to the one in the insn. If they
3281 are not equal, then rerecognize the insn. */
3282 || (old_set != 0
3283 && ((GET_CODE (SET_SRC (old_set)) == MEM
3284 && SET_SRC (old_set) != recog_operand[1])
3285 || (GET_CODE (SET_DEST (old_set)) == MEM
3286 && SET_DEST (old_set) != recog_operand[0])))
3287 /* If this was an add insn before, rerecognize. */
3288 || GET_CODE (SET_SRC (old_set)) == PLUS))
3289 {
3290 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3291 /* If recognition fails, store the new body anyway.
3292 It's normal to have recognition failures here
3293 due to bizarre memory addresses; reloading will fix them. */
3294 PATTERN (insn) = new_body;
3295 }
3296 else
3297 PATTERN (insn) = new_body;
3298
3299 val = 1;
3300 }
3301
3302 /* Loop through all elimination pairs. See if any have changed and
3303 recalculate the number not at initial offset.
3304
3305 Compute the maximum offset (minimum offset if the stack does not
3306 grow downward) for each elimination pair.
3307
3308 We also detect cases where register elimination cannot be done,
3309 namely, if a register would be both changed and referenced outside a MEM
3310 in the resulting insn since such an insn is often undefined and, even if
3311 not, we cannot know what meaning will be given to it. Note that it is
3312 valid to have a register used in an address in an insn that changes it
3313 (presumably with a pre- or post-increment or decrement).
3314
3315 If anything changes, return nonzero. */
3316
3317 num_not_at_initial_offset = 0;
3318 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3319 {
3320 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3321 ep->can_eliminate = 0;
3322
3323 ep->ref_outside_mem = 0;
3324
3325 if (ep->previous_offset != ep->offset)
3326 val = 1;
3327
3328 ep->previous_offset = ep->offset;
3329 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3330 num_not_at_initial_offset++;
3331
3332 #ifdef STACK_GROWS_DOWNWARD
3333 ep->max_offset = MAX (ep->max_offset, ep->offset);
3334 #else
3335 ep->max_offset = MIN (ep->max_offset, ep->offset);
3336 #endif
3337 }
3338
3339 done:
3340 /* If we changed something, perform elimination in REG_NOTES. This is
3341 needed even when REPLACE is zero because a REG_DEAD note might refer
3342 to a register that we eliminate and could cause a different number
3343 of spill registers to be needed in the final reload pass than in
3344 the pre-passes. */
3345 if (val && REG_NOTES (insn) != 0)
3346 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3347
3348 if (! replace)
3349 pop_obstacks ();
3350
3351 return val;
3352 }
3353
3354 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3355 replacement we currently believe is valid, mark it as not eliminable if X
3356 modifies DEST in any way other than by adding a constant integer to it.
3357
3358 If DEST is the hard frame pointer, we do nothing because we assume that
3359 all assignments to the hard frame pointer are nonlocal gotos and are being
3360 done at a time when they are valid and do not disturb anything else.
3361 Some machines want to eliminate a fake argument pointer with either the
3362 frame or stack pointer. Assignments to the hard frame pointer must not
3363 prevent this elimination.
3364
3365 Called via note_stores from reload before starting its passes to scan
3366 the insns of the function. */
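/* An illustrative sketch of what does and does not kill an elimination
   (register numbers hypothetical): a stack adjustment such as
	(set (reg sp) (plus (reg sp) (const_int -8)))
   leaves eliminations whose target is the stack pointer valid, since it
   only adds a constant, whereas something like
	(set (reg sp) (reg 3))
   forces every elimination whose target is sp to be marked not
   eliminable.  */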
3367
3368 static void
3369 mark_not_eliminable (dest, x)
3370 rtx dest;
3371 rtx x;
3372 {
3373 register int i;
3374
3375 /* A SUBREG of a hard register here is just changing its mode. We should
3376 not see a SUBREG of an eliminable hard register, but check just in
3377 case. */
3378 if (GET_CODE (dest) == SUBREG)
3379 dest = SUBREG_REG (dest);
3380
3381 if (dest == hard_frame_pointer_rtx)
3382 return;
3383
3384 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3385 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3386 && (GET_CODE (x) != SET
3387 || GET_CODE (SET_SRC (x)) != PLUS
3388 || XEXP (SET_SRC (x), 0) != dest
3389 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3390 {
3391 reg_eliminate[i].can_eliminate_previous
3392 = reg_eliminate[i].can_eliminate = 0;
3393 num_eliminable--;
3394 }
3395 }
3396 \f
3397 /* Kick all pseudos out of hard register REGNO.
3398 If GLOBAL is nonzero, try to find someplace else to put them.
3399 If DUMPFILE is nonzero, log actions taken on that file.
3400
3401 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3402 because we found we can't eliminate some register. In this case, no pseudos
3403 are allowed to be in the register, even if they are only in a block that
3404 doesn't require spill registers, unlike the case when we are spilling this
3405 hard reg to produce another spill register.
3406
3407 Return nonzero if any pseudos needed to be kicked out. */
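/* For example (numbers hypothetical), if pseudo 70 is DImode and was
   allocated to hard reg 4 on a target where HARD_REGNO_NREGS (4, DImode)
   is 2, it occupies hard regs 4 and 5; spilling either hard reg 4 or
   hard reg 5 therefore kicks pseudo 70 out, because the test below covers
   the whole range of hard regs the pseudo occupies.  */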
3408
3409 static int
3410 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3411 register int regno;
3412 int global;
3413 FILE *dumpfile;
3414 int cant_eliminate;
3415 {
3416 enum reg_class class = REGNO_REG_CLASS (regno);
3417 int something_changed = 0;
3418 register int i;
3419
3420 SET_HARD_REG_BIT (forbidden_regs, regno);
3421
3422 if (cant_eliminate)
3423 regs_ever_live[regno] = 1;
3424
3425 /* Spill every pseudo reg that was allocated to this reg
3426 or to something that overlaps this reg. */
3427
3428 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3429 if (reg_renumber[i] >= 0
3430 && reg_renumber[i] <= regno
3431 && (reg_renumber[i]
3432 + HARD_REGNO_NREGS (reg_renumber[i],
3433 PSEUDO_REGNO_MODE (i))
3434 > regno))
3435 {
3436 /* If this register belongs solely to a basic block which needed no
3437 spilling of any class that this register is contained in,
3438 leave it be, unless we are spilling this register because
3439 it was a hard register that can't be eliminated. */
3440
3441 if (! cant_eliminate
3442 && basic_block_needs[0]
3443 && reg_basic_block[i] >= 0
3444 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3445 {
3446 enum reg_class *p;
3447
3448 for (p = reg_class_superclasses[(int) class];
3449 *p != LIM_REG_CLASSES; p++)
3450 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3451 break;
3452
3453 if (*p == LIM_REG_CLASSES)
3454 continue;
3455 }
3456
3457 /* Mark it as no longer having a hard register home. */
3458 reg_renumber[i] = -1;
3459 /* We will need to scan everything again. */
3460 something_changed = 1;
3461 if (global)
3462 retry_global_alloc (i, forbidden_regs);
3463
3464 alter_reg (i, regno);
3465 if (dumpfile)
3466 {
3467 if (reg_renumber[i] == -1)
3468 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3469 else
3470 fprintf (dumpfile, " Register %d now in %d.\n\n",
3471 i, reg_renumber[i]);
3472 }
3473 }
3474 for (i = 0; i < scratch_list_length; i++)
3475 {
3476 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3477 {
3478 if (! cant_eliminate && basic_block_needs[0]
3479 && ! basic_block_needs[(int) class][scratch_block[i]])
3480 {
3481 enum reg_class *p;
3482
3483 for (p = reg_class_superclasses[(int) class];
3484 *p != LIM_REG_CLASSES; p++)
3485 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3486 break;
3487
3488 if (*p == LIM_REG_CLASSES)
3489 continue;
3490 }
3491 PUT_CODE (scratch_list[i], SCRATCH);
3492 scratch_list[i] = 0;
3493 something_changed = 1;
3494 continue;
3495 }
3496 }
3497
3498 return something_changed;
3499 }
3500 \f
3501 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3502 Also mark any hard registers used to store user variables as
3503 forbidden from being used for spill registers. */
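/* For instance (pseudo number hypothetical), on a target with 32-bit words
   the subreg (subreg:DI (reg:SI 65) 0) is paradoxical: the outer DImode is
   wider than the inner SImode, so reg_max_ref_width[65] is raised to
   GET_MODE_SIZE (DImode), ensuring that any stack slot later assigned to
   pseudo 65 is wide enough for the access.  */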
3504
3505 static void
3506 scan_paradoxical_subregs (x)
3507 register rtx x;
3508 {
3509 register int i;
3510 register char *fmt;
3511 register enum rtx_code code = GET_CODE (x);
3512
3513 switch (code)
3514 {
3515 case REG:
3516 #ifdef SMALL_REGISTER_CLASSES
3517 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3518 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3519 #endif
3520 return;
3521
3522 case CONST_INT:
3523 case CONST:
3524 case SYMBOL_REF:
3525 case LABEL_REF:
3526 case CONST_DOUBLE:
3527 case CC0:
3528 case PC:
3529 case USE:
3530 case CLOBBER:
3531 return;
3532
3533 case SUBREG:
3534 if (GET_CODE (SUBREG_REG (x)) == REG
3535 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3536 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3537 = GET_MODE_SIZE (GET_MODE (x));
3538 return;
3539 }
3540
3541 fmt = GET_RTX_FORMAT (code);
3542 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3543 {
3544 if (fmt[i] == 'e')
3545 scan_paradoxical_subregs (XEXP (x, i));
3546 else if (fmt[i] == 'E')
3547 {
3548 register int j;
3549 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3550 scan_paradoxical_subregs (XVECEXP (x, i, j));
3551 }
3552 }
3553 }
3554 \f
3555 static int
3556 hard_reg_use_compare (p1, p2)
3557 struct hard_reg_n_uses *p1, *p2;
3558 {
3559 int tem = p1->uses - p2->uses;
3560 if (tem != 0) return tem;
3561 /* If regs are equally good, sort by regno,
3562 so that the results of qsort leave nothing to chance. */
3563 return p1->regno - p2->regno;
3564 }
3565
3566 /* Choose the order to consider regs for use as reload registers
3567 based on how much trouble would be caused by spilling one.
3568 Store them in order of decreasing preference in potential_reload_regs. */
3569
3570 static void
3571 order_regs_for_reload ()
3572 {
3573 register int i;
3574 register int o = 0;
3575 int large = 0;
3576
3577 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3578
3579 CLEAR_HARD_REG_SET (bad_spill_regs);
3580
3581 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3582 potential_reload_regs[i] = -1;
3583
3584 /* Count number of uses of each hard reg by pseudo regs allocated to it
3585 and then order them by decreasing use. */
3586
3587 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3588 {
3589 hard_reg_n_uses[i].uses = 0;
3590 hard_reg_n_uses[i].regno = i;
3591 }
3592
3593 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3594 {
3595 int regno = reg_renumber[i];
3596 if (regno >= 0)
3597 {
3598 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3599 while (regno < lim)
3600 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3601 }
3602 large += reg_n_refs[i];
3603 }
3604
3605 /* Now fixed registers (which cannot safely be used for reloading)
3606 get a very high use count so they will be considered least desirable.
3607 Registers used explicitly in the rtl code are almost as bad. */
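  /* A worked example (the count is hypothetical): if the pseudos in this
     function have 300 references in total, `large' is 300, so a fixed
     register gets 2*300 + 2 = 602 added to its use count and an explicitly
     used one gets 300 + 1 = 301, which keeps both from ever sorting ahead
     of an ordinary register below.  */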
3608
3609 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3610 {
3611 if (fixed_regs[i])
3612 {
3613 hard_reg_n_uses[i].uses += 2 * large + 2;
3614 SET_HARD_REG_BIT (bad_spill_regs, i);
3615 }
3616 else if (regs_explicitly_used[i])
3617 {
3618 hard_reg_n_uses[i].uses += large + 1;
3619 #ifndef SMALL_REGISTER_CLASSES
3620 /* ??? We are doing this here because of the potential that
3621 bad code may be generated if a register explicitly used in
3622 an insn was used as a spill register for that insn. But
3623 not using these as spill registers may lose on some machines.
3624 We'll have to see how this works out. */
3625 SET_HARD_REG_BIT (bad_spill_regs, i);
3626 #endif
3627 }
3628 }
3629 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3630 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3631
3632 #ifdef ELIMINABLE_REGS
3633 /* If registers other than the frame pointer are eliminable, mark them as
3634 poor choices. */
3635 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3636 {
3637 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3638 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3639 }
3640 #endif
3641
3642 /* Prefer registers not so far used, for use in temporary loading.
3643 Among them, if REG_ALLOC_ORDER is defined, use that order.
3644 Otherwise, prefer registers not preserved by calls. */
3645
3646 #ifdef REG_ALLOC_ORDER
3647 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3648 {
3649 int regno = reg_alloc_order[i];
3650
3651 if (hard_reg_n_uses[regno].uses == 0)
3652 potential_reload_regs[o++] = regno;
3653 }
3654 #else
3655 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3656 {
3657 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3658 potential_reload_regs[o++] = i;
3659 }
3660 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3661 {
3662 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3663 potential_reload_regs[o++] = i;
3664 }
3665 #endif
3666
3667 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3668 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3669
3670 /* Now add the regs that are already used,
3671 preferring those used less often. The fixed and otherwise forbidden
3672 registers will be at the end of this list. */
3673
3674 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3675 if (hard_reg_n_uses[i].uses != 0)
3676 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3677 }
3678 \f
3679 /* Used in reload_as_needed to sort the spilled regs. */
3680
3681 static int
3682 compare_spill_regs (r1, r2)
3683 short *r1, *r2;
3684 {
3685 return *r1 - *r2;
3686 }
3687
3688 /* Reload pseudo-registers into hard regs around each insn as needed.
3689 Additional register load insns are output before the insn that needs it
3690 and perhaps store insns after insns that modify the reloaded pseudo reg.
3691
3692 reg_last_reload_reg and reg_reloaded_contents keep track of
3693 which registers are already available in reload registers.
3694 We update these for the reloads that we perform,
3695 as the insns are scanned. */
3696
3697 static void
3698 reload_as_needed (first, live_known)
3699 rtx first;
3700 int live_known;
3701 {
3702 register rtx insn;
3703 register int i;
3704 int this_block = 0;
3705 rtx x;
3706 rtx after_call = 0;
3707
3708 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3709 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
3710 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3711 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
3712 reg_has_output_reload = (char *) alloca (max_regno);
3713 for (i = 0; i < n_spills; i++)
3714 {
3715 reg_reloaded_contents[i] = -1;
3716 reg_reloaded_insn[i] = 0;
3717 }
3718
3719 /* Reset all offsets on eliminable registers to their initial values. */
3720 #ifdef ELIMINABLE_REGS
3721 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3722 {
3723 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3724 reg_eliminate[i].initial_offset);
3725 reg_eliminate[i].previous_offset
3726 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3727 }
3728 #else
3729 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3730 reg_eliminate[0].previous_offset
3731 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3732 #endif
3733
3734 num_not_at_initial_offset = 0;
3735
3736 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
3737 pack registers with group needs. */
3738 if (n_spills > 1)
3739 {
3740 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
3741 for (i = 0; i < n_spills; i++)
3742 spill_reg_order[spill_regs[i]] = i;
3743 }
3744
3745 for (insn = first; insn;)
3746 {
3747 register rtx next = NEXT_INSN (insn);
3748
3749 /* Notice when we move to a new basic block. */
3750 if (live_known && this_block + 1 < n_basic_blocks
3751 && insn == basic_block_head[this_block+1])
3752 ++this_block;
3753
3754 /* If we pass a label, copy the offsets from the label information
3755 into the current offsets of each elimination. */
3756 if (GET_CODE (insn) == CODE_LABEL)
3757 {
3758 num_not_at_initial_offset = 0;
3759 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3760 {
3761 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3762 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3763 if (reg_eliminate[i].can_eliminate
3764 && (reg_eliminate[i].offset
3765 != reg_eliminate[i].initial_offset))
3766 num_not_at_initial_offset++;
3767 }
3768 }
3769
3770 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3771 {
3772 rtx avoid_return_reg = 0;
3773
3774 #ifdef SMALL_REGISTER_CLASSES
3775 /* Set avoid_return_reg if this is an insn
3776 that might use the value of a function call. */
3777 if (GET_CODE (insn) == CALL_INSN)
3778 {
3779 if (GET_CODE (PATTERN (insn)) == SET)
3780 after_call = SET_DEST (PATTERN (insn));
3781 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3782 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3783 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3784 else
3785 after_call = 0;
3786 }
3787 else if (after_call != 0
3788 && !(GET_CODE (PATTERN (insn)) == SET
3789 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3790 {
3791 if (reg_referenced_p (after_call, PATTERN (insn)))
3792 avoid_return_reg = after_call;
3793 after_call = 0;
3794 }
3795 #endif /* SMALL_REGISTER_CLASSES */
3796
3797 /* If this is a USE or CLOBBER of a MEM, ensure that any
3798 references to eliminable registers have been removed. */
3799
3800 if ((GET_CODE (PATTERN (insn)) == USE
3801 || GET_CODE (PATTERN (insn)) == CLOBBER)
3802 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3803 XEXP (XEXP (PATTERN (insn), 0), 0)
3804 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3805 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
3806
3807 /* If we need to do register elimination processing, do so.
3808 This might delete the insn, in which case we are done. */
3809 if (num_eliminable && GET_MODE (insn) == QImode)
3810 {
3811 eliminate_regs_in_insn (insn, 1);
3812 if (GET_CODE (insn) == NOTE)
3813 {
3814 insn = next;
3815 continue;
3816 }
3817 }
3818
3819 if (GET_MODE (insn) == VOIDmode)
3820 n_reloads = 0;
3821 /* First find the pseudo regs that must be reloaded for this insn.
3822 This info is returned in the tables reload_... (see reload.h).
3823 Also modify the body of INSN by substituting RELOAD
3824 rtx's for those pseudo regs. */
3825 else
3826 {
3827 bzero (reg_has_output_reload, max_regno);
3828 CLEAR_HARD_REG_SET (reg_is_output_reload);
3829
3830 find_reloads (insn, 1, spill_indirect_levels, live_known,
3831 spill_reg_order);
3832 }
3833
3834 if (n_reloads > 0)
3835 {
3836 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3837 rtx p;
3838 int class;
3839
3840 /* If this block has not had spilling done for a
3841 particular class and we have any non-optionals that need a
3842 spill reg in that class, abort. */
3843
3844 for (class = 0; class < N_REG_CLASSES; class++)
3845 if (basic_block_needs[class] != 0
3846 && basic_block_needs[class][this_block] == 0)
3847 for (i = 0; i < n_reloads; i++)
3848 if (class == (int) reload_reg_class[i]
3849 && reload_reg_rtx[i] == 0
3850 && ! reload_optional[i]
3851 && (reload_in[i] != 0 || reload_out[i] != 0
3852 || reload_secondary_p[i] != 0))
3853 abort ();
3854
3855 /* Now compute which reload regs to reload them into. Perhaps
3856 reusing reload regs from previous insns, or else output
3857 load insns to reload them. Maybe output store insns too.
3858 Record the choices of reload reg in reload_reg_rtx. */
3859 choose_reload_regs (insn, avoid_return_reg);
3860
3861 #ifdef SMALL_REGISTER_CLASSES
3862 /* Merge any reloads that we didn't combine for fear of
3863 increasing the number of spill registers needed but now
3864 discover can be safely merged. */
3865 merge_assigned_reloads (insn);
3866 #endif
3867
3868 /* Generate the insns to reload operands into or out of
3869 their reload regs. */
3870 emit_reload_insns (insn);
3871
3872 /* Substitute the chosen reload regs from reload_reg_rtx
3873 into the insn's body (or perhaps into the bodies of other
3874 load and store insns that we just made for reloading
3875 and that we moved the structure into). */
3876 subst_reloads ();
3877
3878 /* If this was an ASM, make sure that all the reload insns
3879 we have generated are valid. If not, give an error
3880 and delete them. */
3881
3882 if (asm_noperands (PATTERN (insn)) >= 0)
3883 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3884 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3885 && (recog_memoized (p) < 0
3886 || (insn_extract (p),
3887 ! constrain_operands (INSN_CODE (p), 1))))
3888 {
3889 error_for_asm (insn,
3890 "`asm' operand requires impossible reload");
3891 PUT_CODE (p, NOTE);
3892 NOTE_SOURCE_FILE (p) = 0;
3893 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3894 }
3895 }
3896 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3897 is no longer validly lying around to save a future reload.
3898 Note that this does not detect pseudos that were reloaded
3899 for this insn in order to be stored into
3900 (obeying register constraints). That is correct; such reload
3901 registers ARE still valid. */
3902 note_stores (PATTERN (insn), forget_old_reloads_1);
3903
3904 /* There may have been CLOBBER insns placed after INSN. So scan
3905 between INSN and NEXT and use them to forget old reloads. */
3906 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3907 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3908 note_stores (PATTERN (x), forget_old_reloads_1);
3909
3910 #ifdef AUTO_INC_DEC
3911 /* Likewise for regs altered by auto-increment in this insn.
3912 But note that the reg-notes are not changed by reloading:
3913 they still contain the pseudo-regs, not the spill regs. */
3914 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3915 if (REG_NOTE_KIND (x) == REG_INC)
3916 {
3917 /* See if this pseudo reg was reloaded in this insn.
3918 If so, its last-reload info is still valid
3919 because it is based on this insn's reload. */
3920 for (i = 0; i < n_reloads; i++)
3921 if (reload_out[i] == XEXP (x, 0))
3922 break;
3923
3924 if (i == n_reloads)
3925 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
3926 }
3927 #endif
3928 }
3929 /* A reload reg's contents are unknown after a label. */
3930 if (GET_CODE (insn) == CODE_LABEL)
3931 for (i = 0; i < n_spills; i++)
3932 {
3933 reg_reloaded_contents[i] = -1;
3934 reg_reloaded_insn[i] = 0;
3935 }
3936
3937 /* Don't assume a reload reg is still good after a call insn
3938 if it is a call-used reg. */
3939 else if (GET_CODE (insn) == CALL_INSN)
3940 for (i = 0; i < n_spills; i++)
3941 if (call_used_regs[spill_regs[i]])
3942 {
3943 reg_reloaded_contents[i] = -1;
3944 reg_reloaded_insn[i] = 0;
3945 }
3946
3947 /* In case registers overlap, allow certain insns to invalidate
3948 particular hard registers. */
3949
3950 #ifdef INSN_CLOBBERS_REGNO_P
3951 for (i = 0 ; i < n_spills ; i++)
3952 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3953 {
3954 reg_reloaded_contents[i] = -1;
3955 reg_reloaded_insn[i] = 0;
3956 }
3957 #endif
3958
3959 insn = next;
3960
3961 #ifdef USE_C_ALLOCA
3962 alloca (0);
3963 #endif
3964 }
3965 }
3966
3967 /* Discard all record of any value reloaded from X,
3968 or reloaded in X from someplace else;
3969 unless X is an output reload reg of the current insn.
3970
3971 X may be a hard reg (the reload reg)
3972 or it may be a pseudo reg that was reloaded from. */
3973
3974 static void
3975 forget_old_reloads_1 (x, ignored)
3976 rtx x;
3977 rtx ignored;
3978 {
3979 register int regno;
3980 int nr;
3981 int offset = 0;
3982
3983 /* note_stores does give us subregs of hard regs. */
3984 while (GET_CODE (x) == SUBREG)
3985 {
3986 offset += SUBREG_WORD (x);
3987 x = SUBREG_REG (x);
3988 }
3989
3990 if (GET_CODE (x) != REG)
3991 return;
3992
3993 regno = REGNO (x) + offset;
3994
3995 if (regno >= FIRST_PSEUDO_REGISTER)
3996 nr = 1;
3997 else
3998 {
3999 int i;
4000 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4001 /* Storing into a spilled-reg invalidates its contents.
4002 This can happen if a block-local pseudo is allocated to that reg
4003 and it wasn't spilled because this block's total need is 0.
4004 Then some insn might have an optional reload and use this reg. */
4005 for (i = 0; i < nr; i++)
4006 if (spill_reg_order[regno + i] >= 0
4007 /* But don't do this if the reg actually serves as an output
4008 reload reg in the current instruction. */
4009 && (n_reloads == 0
4010 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4011 {
4012 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4013 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4014 }
4015 }
4016
4017 /* Since value of X has changed,
4018 forget any value previously copied from it. */
4019
4020 while (nr-- > 0)
4021 /* But don't forget a copy if this is the output reload
4022 that establishes the copy's validity. */
4023 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4024 reg_last_reload_reg[regno + nr] = 0;
4025 }
4026 \f
4027 /* For each reload, the mode of the reload register. */
4028 static enum machine_mode reload_mode[MAX_RELOADS];
4029
4030 /* For each reload, the largest number of registers it will require. */
4031 static int reload_nregs[MAX_RELOADS];
4032
4033 /* Comparison function for qsort to decide which of two reloads
4034 should be handled first. *P1 and *P2 are the reload numbers. */
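/* For example, with three hypothetical reloads A (optional), B (required,
   needing a two-register group in GENERAL_REGS) and C (required, in a
   class containing a single hard register), the order chosen is C, B, A:
   required before optional, solitary classes first, then the largest
   groups, with the reload number as the final tie-break so the qsort is
   deterministic.  */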
4035
4036 static int
4037 reload_reg_class_lower (p1, p2)
4038 short *p1, *p2;
4039 {
4040 register int r1 = *p1, r2 = *p2;
4041 register int t;
4042
4043 /* Consider required reloads before optional ones. */
4044 t = reload_optional[r1] - reload_optional[r2];
4045 if (t != 0)
4046 return t;
4047
4048 /* Count all solitary classes before non-solitary ones. */
4049 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4050 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4051 if (t != 0)
4052 return t;
4053
4054 /* Aside from solitaires, consider all multi-reg groups first. */
4055 t = reload_nregs[r2] - reload_nregs[r1];
4056 if (t != 0)
4057 return t;
4058
4059 /* Consider reloads in order of increasing reg-class number. */
4060 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4061 if (t != 0)
4062 return t;
4063
4064 /* If reloads are equally urgent, sort by reload number,
4065 so that the results of qsort leave nothing to chance. */
4066 return r1 - r2;
4067 }
4068 \f
4069 /* The following HARD_REG_SETs indicate when each hard register is
4070 used for a reload of various parts of the current insn. */
4071
4072 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4073 static HARD_REG_SET reload_reg_used;
4074 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4075 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4076 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4077 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4078 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4079 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4080 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4081 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4082 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4083 static HARD_REG_SET reload_reg_used_in_op_addr;
4084 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4085 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4086 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4087 static HARD_REG_SET reload_reg_used_in_insn;
4088 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4089 static HARD_REG_SET reload_reg_used_in_other_addr;
4090
4091 /* If reg is in use as a reload reg for any sort of reload. */
4092 static HARD_REG_SET reload_reg_used_at_all;
4093
4094 /* If reg is in use as an inherited reload. We just mark the first register
4095 in the group. */
4096 static HARD_REG_SET reload_reg_used_for_inherit;
4097
4098 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4099 TYPE. MODE is used to indicate how many consecutive regs are
4100 actually used. */
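/* A sketch with hypothetical numbers: marking hard reg 4 for a
   RELOAD_FOR_INPUT reload of operand 1 in DFmode, on a target where
   HARD_REGNO_NREGS (4, DFmode) is 2, sets bits 4 and 5 both in
   reload_reg_used_in_input[1] and in reload_reg_used_at_all.  */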
4101
4102 static void
4103 mark_reload_reg_in_use (regno, opnum, type, mode)
4104 int regno;
4105 int opnum;
4106 enum reload_type type;
4107 enum machine_mode mode;
4108 {
4109 int nregs = HARD_REGNO_NREGS (regno, mode);
4110 int i;
4111
4112 for (i = regno; i < nregs + regno; i++)
4113 {
4114 switch (type)
4115 {
4116 case RELOAD_OTHER:
4117 SET_HARD_REG_BIT (reload_reg_used, i);
4118 break;
4119
4120 case RELOAD_FOR_INPUT_ADDRESS:
4121 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4122 break;
4123
4124 case RELOAD_FOR_OUTPUT_ADDRESS:
4125 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4126 break;
4127
4128 case RELOAD_FOR_OPERAND_ADDRESS:
4129 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4130 break;
4131
4132 case RELOAD_FOR_OPADDR_ADDR:
4133 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4134 break;
4135
4136 case RELOAD_FOR_OTHER_ADDRESS:
4137 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4138 break;
4139
4140 case RELOAD_FOR_INPUT:
4141 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4142 break;
4143
4144 case RELOAD_FOR_OUTPUT:
4145 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4146 break;
4147
4148 case RELOAD_FOR_INSN:
4149 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4150 break;
4151 }
4152
4153 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4154 }
4155 }
4156
4157 /* Similarly, but show REGNO is no longer in use for a reload. */
4158
4159 static void
4160 clear_reload_reg_in_use (regno, opnum, type, mode)
4161 int regno;
4162 int opnum;
4163 enum reload_type type;
4164 enum machine_mode mode;
4165 {
4166 int nregs = HARD_REGNO_NREGS (regno, mode);
4167 int i;
4168
4169 for (i = regno; i < nregs + regno; i++)
4170 {
4171 switch (type)
4172 {
4173 case RELOAD_OTHER:
4174 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4175 break;
4176
4177 case RELOAD_FOR_INPUT_ADDRESS:
4178 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4179 break;
4180
4181 case RELOAD_FOR_OUTPUT_ADDRESS:
4182 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4183 break;
4184
4185 case RELOAD_FOR_OPERAND_ADDRESS:
4186 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4187 break;
4188
4189 case RELOAD_FOR_OPADDR_ADDR:
4190 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4191 break;
4192
4193 case RELOAD_FOR_OTHER_ADDRESS:
4194 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4195 break;
4196
4197 case RELOAD_FOR_INPUT:
4198 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4199 break;
4200
4201 case RELOAD_FOR_OUTPUT:
4202 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4203 break;
4204
4205 case RELOAD_FOR_INSN:
4206 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4207 break;
4208 }
4209 }
4210 }
4211
4212 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4213 specified by OPNUM and TYPE. */
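/* For instance, a register already claimed by a RELOAD_FOR_INPUT reload of
   operand 1 is still free for a RELOAD_FOR_INPUT_ADDRESS reload of operand
   0 or 1, but not of operand 2: operand 2's address is computed after
   operand 1's input has been loaded into the register.  */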
4214
4215 static int
4216 reload_reg_free_p (regno, opnum, type)
4217 int regno;
4218 int opnum;
4219 enum reload_type type;
4220 {
4221 int i;
4222
4223 /* In use for a RELOAD_OTHER means it's not available for anything except
4224 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4225 to be used only for inputs. */
4226
4227 if (type != RELOAD_FOR_OTHER_ADDRESS
4228 && TEST_HARD_REG_BIT (reload_reg_used, regno))
4229 return 0;
4230
4231 switch (type)
4232 {
4233 case RELOAD_OTHER:
4234 /* In use for anything except RELOAD_FOR_OTHER_ADDRESS means
4235 we can't use it for RELOAD_OTHER. */
4236 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4237 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4238 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4239 return 0;
4240
4241 for (i = 0; i < reload_n_operands; i++)
4242 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4243 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4244 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4245 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4246 return 0;
4247
4248 return 1;
4249
4250 case RELOAD_FOR_INPUT:
4251 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4252 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4253 return 0;
4254
4255 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4256 return 0;
4257
4258 /* If it is used for some other input, can't use it. */
4259 for (i = 0; i < reload_n_operands; i++)
4260 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4261 return 0;
4262
4263 /* If it is used in a later operand's address, can't use it. */
4264 for (i = opnum + 1; i < reload_n_operands; i++)
4265 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4266 return 0;
4267
4268 return 1;
4269
4270 case RELOAD_FOR_INPUT_ADDRESS:
4271 /* Can't use a register if it is used for an input address for this
4272 operand or used as an input in an earlier one. */
4273 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4274 return 0;
4275
4276 for (i = 0; i < opnum; i++)
4277 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4278 return 0;
4279
4280 return 1;
4281
4282 case RELOAD_FOR_OUTPUT_ADDRESS:
4283 /* Can't use a register if it is used for an output address for this
4284 operand or used as an output in this or a later operand. */
4285 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4286 return 0;
4287
4288 for (i = opnum; i < reload_n_operands; i++)
4289 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4290 return 0;
4291
4292 return 1;
4293
4294 case RELOAD_FOR_OPERAND_ADDRESS:
4295 for (i = 0; i < reload_n_operands; i++)
4296 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4297 return 0;
4298
4299 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4300 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4301
4302 case RELOAD_FOR_OPADDR_ADDR:
4303 for (i = 0; i < reload_n_operands; i++)
4304 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4305 return 0;
4306
4307 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4308
4309 case RELOAD_FOR_OUTPUT:
4310 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4311 outputs, or an operand address for this or an earlier output. */
4312 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4313 return 0;
4314
4315 for (i = 0; i < reload_n_operands; i++)
4316 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4317 return 0;
4318
4319 for (i = 0; i <= opnum; i++)
4320 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4321 return 0;
4322
4323 return 1;
4324
4325 case RELOAD_FOR_INSN:
4326 for (i = 0; i < reload_n_operands; i++)
4327 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4328 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4329 return 0;
4330
4331 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4332 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4333
4334 case RELOAD_FOR_OTHER_ADDRESS:
4335 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4336 }
4337 abort ();
4338 }
4339
4340 /* Return 1 if the value in reload reg REGNO, as used by a reload
4341 needed for the part of the insn specified by OPNUM and TYPE,
4342 is not in use for a reload in any prior part of the insn.
4343
4344 We can assume that the reload reg was already tested for availability
4345 at the time it is needed, and we should not check this again,
4346 in case the reg has already been marked in use. */
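/* For instance, a RELOAD_FOR_INPUT reload of operand 2 only has to check
   registers used by the RELOAD_FOR_INPUT_ADDRESS reloads of operands 0
   through 2 and by RELOAD_FOR_OTHER_ADDRESS reloads; nothing else in the
   insn happens before this input is loaded.  */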
4347
4348 static int
4349 reload_reg_free_before_p (regno, opnum, type)
4350 int regno;
4351 int opnum;
4352 enum reload_type type;
4353 {
4354 int i;
4355
4356 switch (type)
4357 {
4358 case RELOAD_FOR_OTHER_ADDRESS:
4359 /* These always come first. */
4360 return 1;
4361
4362 case RELOAD_OTHER:
4363 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4364
4365 /* If this use is for part of the insn,
4366 check the reg is not in use for any prior part. It is tempting
4367 to try to do this by falling through from objects that occur
4368 later in the insn to ones that occur earlier, but that will not
4369 correctly take into account the fact that here we MUST ignore
4370 things that would prevent the register from being allocated in
4371 the first place, since we know that it was allocated. */
4372
4373 case RELOAD_FOR_OUTPUT_ADDRESS:
4374 /* Earlier reloads are for earlier outputs or their addresses,
4375 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4376 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4377 RELOAD_OTHER). */
4378 for (i = 0; i < opnum; i++)
4379 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4380 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4381 return 0;
4382
4383 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4384 return 0;
4385
4386 for (i = 0; i < reload_n_operands; i++)
4387 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4388 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4389 return 0;
4390
4391 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4392 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4393 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4394
4395 case RELOAD_FOR_OUTPUT:
4396 /* This can't be used in the output address for this operand and
4397 anything that can't be used for it, except that we've already
4398 tested for RELOAD_FOR_INSN objects. */
4399
4400 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4401 return 0;
4402
4403 for (i = 0; i < opnum; i++)
4404 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4405 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4406 return 0;
4407
4408 for (i = 0; i < reload_n_operands; i++)
4409 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4410 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4411 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4412 return 0;
4413
4414 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4415
4416 case RELOAD_FOR_OPERAND_ADDRESS:
4417 case RELOAD_FOR_OPADDR_ADDR:
4418 case RELOAD_FOR_INSN:
4419 /* These can't conflict with inputs, or each other, so all we have to
4420 test is input addresses and the addresses of OTHER items. */
4421
4422 for (i = 0; i < reload_n_operands; i++)
4423 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4424 return 0;
4425
4426 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4427
4428 case RELOAD_FOR_INPUT:
4429 /* The only things earlier are the address for this and
4430 earlier inputs, other inputs (which we know we don't conflict
4431 with), and addresses of RELOAD_OTHER objects. */
4432
4433 for (i = 0; i <= opnum; i++)
4434 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4435 return 0;
4436
4437 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4438
4439 case RELOAD_FOR_INPUT_ADDRESS:
4440 /* Similarly, all we have to check is for use in earlier inputs'
4441 addresses. */
4442 for (i = 0; i < opnum; i++)
4443 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4444 return 0;
4445
4446 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4447 }
4448 abort ();
4449 }
4450
4451 /* Return 1 if the value in reload reg REGNO, as used by a reload
4452 needed for the part of the insn specified by OPNUM and TYPE,
4453 is still available in REGNO at the end of the insn.
4454
4455 We can assume that the reload reg was already tested for availability
4456 at the time it is needed, and we should not check this again,
4457 in case the reg has already been marked in use. */
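/* For example, the value left in a register by a RELOAD_FOR_OUTPUT reload
   of operand 1 reaches the end of the insn unless that register is also
   used by a RELOAD_FOR_OUTPUT_ADDRESS reload of a later operand, which is
   the only thing that can still come after it.  */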
4458
4459 static int
4460 reload_reg_reaches_end_p (regno, opnum, type)
4461 int regno;
4462 int opnum;
4463 enum reload_type type;
4464 {
4465 int i;
4466
4467 switch (type)
4468 {
4469 case RELOAD_OTHER:
4470 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4471 its value must reach the end. */
4472 return 1;
4473
4474 /* If this use is for part of the insn,
4475 its value reaches if no subsequent part uses the same register.
4476 Just like the above function, don't try to do this with lots
4477 of fallthroughs. */
4478
4479 case RELOAD_FOR_OTHER_ADDRESS:
4480 /* Here we check for everything else, since these don't conflict
4481 with anything else and everything comes later. */
4482
4483 for (i = 0; i < reload_n_operands; i++)
4484 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4485 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4486 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4487 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4488 return 0;
4489
4490 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4491 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4492 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4493
4494 case RELOAD_FOR_INPUT_ADDRESS:
4495 /* Similar, except that we check only for this and subsequent inputs
4496 and the address of only subsequent inputs and we do not need
4497 to check for RELOAD_OTHER objects since they are known not to
4498 conflict. */
4499
4500 for (i = opnum; i < reload_n_operands; i++)
4501 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4502 return 0;
4503
4504 for (i = opnum + 1; i < reload_n_operands; i++)
4505 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4506 return 0;
4507
4508 for (i = 0; i < reload_n_operands; i++)
4509 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4510 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4511 return 0;
4512
4513 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4514 return 0;
4515
4516 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4517 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4518
4519 case RELOAD_FOR_INPUT:
4520 /* Similar to input address, except we start at the next operand for
4521 both input and input address and we do not check for
4522 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4523 would conflict. */
4524
4525 for (i = opnum + 1; i < reload_n_operands; i++)
4526 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4527 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4528 return 0;
4529
4530 /* ... fall through ... */
4531
4532 case RELOAD_FOR_OPERAND_ADDRESS:
4533 /* Check outputs and their addresses. */
4534
4535 for (i = 0; i < reload_n_operands; i++)
4536 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4537 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4538 return 0;
4539
4540 return 1;
4541
4542 case RELOAD_FOR_OPADDR_ADDR:
4543 for (i = 0; i < reload_n_operands; i++)
4544 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4545 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4546 return 0;
4547
4548 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4549 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4550
4551 case RELOAD_FOR_INSN:
4552 /* These conflict with other outputs with RELOAD_OTHER. So
4553 we need only check for output addresses. */
4554
4555 opnum = -1;
4556
4557 /* ... fall through ... */
4558
4559 case RELOAD_FOR_OUTPUT:
4560 case RELOAD_FOR_OUTPUT_ADDRESS:
4561 /* We already know these can't conflict with a later output. So the
4562 only thing to check are later output addresses. */
4563 for (i = opnum + 1; i < reload_n_operands; i++)
4564 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4565 return 0;
4566
4567 return 1;
4568 }
4569
4570 abort ();
4571 }
4572 \f
4573 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4574 Return 0 otherwise.
4575
4576 This function uses the same algorithm as reload_reg_free_p above. */
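/* For example, a RELOAD_FOR_INPUT reload of operand 0 conflicts with a
   RELOAD_FOR_INPUT_ADDRESS reload of operand 1 (that address is computed
   only after operand 0's input is already sitting in its reload reg), but
   not with a RELOAD_FOR_INPUT_ADDRESS reload of operand 0 itself.  */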
4577
4578 static int
4579 reloads_conflict (r1, r2)
4580 int r1, r2;
4581 {
4582 enum reload_type r1_type = reload_when_needed[r1];
4583 enum reload_type r2_type = reload_when_needed[r2];
4584 int r1_opnum = reload_opnum[r1];
4585 int r2_opnum = reload_opnum[r2];
4586
4587 /* RELOAD_OTHER conflicts with everything except RELOAD_FOR_OTHER_ADDRESS. */
4588
4589 if (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS)
4590 return 1;
4591
4592 /* Otherwise, check conflicts differently for each type. */
4593
4594 switch (r1_type)
4595 {
4596 case RELOAD_FOR_INPUT:
4597 return (r2_type == RELOAD_FOR_INSN
4598 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4599 || r2_type == RELOAD_FOR_OPADDR_ADDR
4600 || r2_type == RELOAD_FOR_INPUT
4601 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4602
4603 case RELOAD_FOR_INPUT_ADDRESS:
4604 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4605 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4606
4607 case RELOAD_FOR_OUTPUT_ADDRESS:
4608 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4609 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4610
4611 case RELOAD_FOR_OPERAND_ADDRESS:
4612 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4613 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4614
4615 case RELOAD_FOR_OPADDR_ADDR:
4616 return (r2_type == RELOAD_FOR_INPUT
4617 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4618
4619 case RELOAD_FOR_OUTPUT:
4620 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4621 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4622 && r2_opnum >= r1_opnum));
4623
4624 case RELOAD_FOR_INSN:
4625 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4626 || r2_type == RELOAD_FOR_INSN
4627 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4628
4629 case RELOAD_FOR_OTHER_ADDRESS:
4630 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4631
4632 case RELOAD_OTHER:
4633 return r2_type != RELOAD_FOR_OTHER_ADDRESS;
4634
4635 default:
4636 abort ();
4637 }
4638 }
4639 \f
4640 /* Vector of reload-numbers showing the order in which the reloads should
4641 be processed. */
4642 short reload_order[MAX_RELOADS];
4643
4644 /* Indexed by reload number, 1 if incoming value
4645 inherited from previous insns. */
4646 char reload_inherited[MAX_RELOADS];
4647
4648 /* For an inherited reload, this is the insn the reload was inherited from,
4649 if we know it. Otherwise, this is 0. */
4650 rtx reload_inheritance_insn[MAX_RELOADS];
4651
4652 /* If non-zero, this is a place to get the value of the reload,
4653 rather than using reload_in. */
4654 rtx reload_override_in[MAX_RELOADS];
4655
4656 /* For each reload, the index in spill_regs of the spill register used,
4657 or -1 if we did not need one of the spill registers for this reload. */
4658 int reload_spill_index[MAX_RELOADS];
4659
4660 /* Index of last register assigned as a spill register. We allocate in
4661 a round-robin fashion.
4662
4663 static int last_spill_reg = 0;
4664
4665 /* Find a spill register to use as a reload register for reload R.
4666 LAST_RELOAD is non-zero if this is the last reload for the insn being
4667 processed.
4668
4669 Set reload_reg_rtx[R] to the register allocated.
4670
4671 If NOERROR is nonzero, we return 1 if successful,
4672 or 0 if we couldn't find a spill reg and we didn't change anything. */
4673
4674 static int
4675 allocate_reload_reg (r, insn, last_reload, noerror)
4676 int r;
4677 rtx insn;
4678 int last_reload;
4679 int noerror;
4680 {
4681 int i;
4682 int pass;
4683 int count;
4684 rtx new;
4685 int regno;
4686
4687 /* If we put this reload ahead, thinking it is a group,
4688 then insist on finding a group. Otherwise we can grab a
4689 reg that some other reload needs.
4690 (That can happen when we have a 68000 DATA_OR_FP_REG
4691 which is a group of data regs or one fp reg.)
4692 We need not be so restrictive if there are no more reloads
4693 for this insn.
4694
4695 ??? Really it would be nicer to have smarter handling
4696 for that kind of reg class, where a problem like this is normal.
4697 Perhaps those classes should be avoided for reloading
4698 by use of more alternatives. */
4699
4700 int force_group = reload_nregs[r] > 1 && ! last_reload;
4701
4702 /* If we want a single register and haven't yet found one,
4703 take any reg in the right class and not in use.
4704 If we want a consecutive group, here is where we look for it.
4705
4706 We use two passes so we can first look for reload regs to
4707 reuse, which are already in use for other reloads in this insn,
4708 and only then use additional registers.
4709 I think that maximizing reuse is needed to make sure we don't
4710 run out of reload regs. Suppose we have three reloads, and
4711 reloads A and B can share regs. These need two regs.
4712 Suppose A and B are given different regs.
4713 That leaves none for C. */
4714 for (pass = 0; pass < 2; pass++)
4715 {
4716 /* I is the index in spill_regs.
4717 We advance it round-robin between insns to use all spill regs
4718 equally, so that inherited reloads have a chance
4719 of leapfrogging each other. Don't do this, however, when we have
4720 group needs and failure would be fatal; if we only have a relatively
4721 small number of spill registers, and more than one of them has
4722 group needs, then by starting in the middle, we may end up
4723 allocating the first one in such a way that we are not left with
4724 sufficient groups to handle the rest. */
4725
4726 if (noerror || ! force_group)
4727 i = last_spill_reg;
4728 else
4729 i = -1;
4730
4731 for (count = 0; count < n_spills; count++)
4732 {
4733 int class = (int) reload_reg_class[r];
4734
4735 i = (i + 1) % n_spills;
4736
4737 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4738 reload_when_needed[r])
4739 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4740 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4741 /* Look first for regs to share, then for unshared. But
4742 don't share regs used for inherited reloads; they are
4743 the ones we want to preserve. */
4744 && (pass
4745 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4746 spill_regs[i])
4747 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4748 spill_regs[i]))))
4749 {
4750 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4751 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4752 (on 68000) got us two FP regs. If NR is 1,
4753 we would reject both of them. */
4754 if (force_group)
4755 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4756 /* If we need only one reg, we have already won. */
4757 if (nr == 1)
4758 {
4759 /* But reject a single reg if we demand a group. */
4760 if (force_group)
4761 continue;
4762 break;
4763 }
4764 /* Otherwise check that as many consecutive regs as we need
4765 are available here.
4766 Also, don't use for a group registers that are
4767 needed for nongroups. */
4768 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4769 while (nr > 1)
4770 {
4771 regno = spill_regs[i] + nr - 1;
4772 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4773 && spill_reg_order[regno] >= 0
4774 && reload_reg_free_p (regno, reload_opnum[r],
4775 reload_when_needed[r])
4776 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4777 regno)))
4778 break;
4779 nr--;
4780 }
4781 if (nr == 1)
4782 break;
4783 }
4784 }
4785
4786 /* If we found something on pass 1, omit pass 2. */
4787 if (count < n_spills)
4788 break;
4789 }
4790
4791 /* We should have found a spill register by now. */
4792 if (count == n_spills)
4793 {
4794 if (noerror)
4795 return 0;
4796 goto failure;
4797 }
4798
4799 /* I is the index in SPILL_REG_RTX of the reload register we are to
4800 allocate. Get an rtx for it and find its register number. */
4801
4802 new = spill_reg_rtx[i];
4803
4804 if (new == 0 || GET_MODE (new) != reload_mode[r])
4805 spill_reg_rtx[i] = new
4806 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4807
4808 regno = true_regnum (new);
4809
4810 /* Detect when the reload reg can't hold the reload mode.
4811 This used to be one `if', but Sequent compiler can't handle that. */
4812 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4813 {
4814 enum machine_mode test_mode = VOIDmode;
4815 if (reload_in[r])
4816 test_mode = GET_MODE (reload_in[r]);
4817 /* If reload_in[r] has VOIDmode, it means we will load it
4818 in whatever mode the reload reg has: to wit, reload_mode[r].
4819 We have already tested that for validity. */
4820 /* Aside from that, we need to test that the expressions
4821 to reload from or into have modes which are valid for this
4822 reload register. Otherwise the reload insns would be invalid. */
4823 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4824 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4825 if (! (reload_out[r] != 0
4826 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
4827 {
4828 /* The reg is OK. */
4829 last_spill_reg = i;
4830
4831 /* Mark as in use for this insn the reload regs we use
4832 for this. */
4833 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4834 reload_when_needed[r], reload_mode[r]);
4835
4836 reload_reg_rtx[r] = new;
4837 reload_spill_index[r] = i;
4838 return 1;
4839 }
4840 }
4841
4842 /* The reg is not OK. */
4843 if (noerror)
4844 return 0;
4845
4846 failure:
4847 if (asm_noperands (PATTERN (insn)) < 0)
4848 /* It's the compiler's fault. */
4849 abort ();
4850
4851 /* It's the user's fault; the operand's mode and constraint
4852 don't match. Disable this reload so we don't crash in final. */
4853 error_for_asm (insn,
4854 "`asm' operand constraint incompatible with operand size");
4855 reload_in[r] = 0;
4856 reload_out[r] = 0;
4857 reload_reg_rtx[r] = 0;
4858 reload_optional[r] = 1;
4859 reload_secondary_p[r] = 1;
4860
4861 return 1;
4862 }
4863 \f
4864 /* Assign hard reg targets for the pseudo-registers we must reload
4865 into hard regs for this insn.
4866 Also output the instructions to copy them in and out of the hard regs.
4867
4868 For machines with register classes, we are responsible for
4869 finding a reload reg in the proper class. */
4870
4871 static void
4872 choose_reload_regs (insn, avoid_return_reg)
4873 rtx insn;
4874 rtx avoid_return_reg;
4875 {
4876 register int i, j;
4877 int max_group_size = 1;
4878 enum reg_class group_class = NO_REGS;
4879 int inheritance;
4880
4881 rtx save_reload_reg_rtx[MAX_RELOADS];
4882 char save_reload_inherited[MAX_RELOADS];
4883 rtx save_reload_inheritance_insn[MAX_RELOADS];
4884 rtx save_reload_override_in[MAX_RELOADS];
4885 int save_reload_spill_index[MAX_RELOADS];
4886 HARD_REG_SET save_reload_reg_used;
4887 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4888 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4889 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4890 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4891 HARD_REG_SET save_reload_reg_used_in_op_addr;
4892 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
4893 HARD_REG_SET save_reload_reg_used_in_insn;
4894 HARD_REG_SET save_reload_reg_used_in_other_addr;
4895 HARD_REG_SET save_reload_reg_used_at_all;
4896
4897 bzero (reload_inherited, MAX_RELOADS);
4898 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4899 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
4900
4901 CLEAR_HARD_REG_SET (reload_reg_used);
4902 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4903 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4904 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
4905 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4906 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
4907
4908 for (i = 0; i < reload_n_operands; i++)
4909 {
4910 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4911 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4912 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4913 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4914 }
4915
4916 #ifdef SMALL_REGISTER_CLASSES
4917 /* Don't bother with avoiding the return reg
4918 if we have no mandatory reload that could use it. */
4919 if (avoid_return_reg)
4920 {
4921 int do_avoid = 0;
4922 int regno = REGNO (avoid_return_reg);
4923 int nregs
4924 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4925 int r;
4926
4927 for (r = regno; r < regno + nregs; r++)
4928 if (spill_reg_order[r] >= 0)
4929 for (j = 0; j < n_reloads; j++)
4930 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4931 && (reload_in[j] != 0 || reload_out[j] != 0
4932 || reload_secondary_p[j])
4933 &&
4934 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4935 do_avoid = 1;
4936 if (!do_avoid)
4937 avoid_return_reg = 0;
4938 }
4939 #endif /* SMALL_REGISTER_CLASSES */
4940
4941 #if 0 /* Not needed, now that we can always retry without inheritance. */
4942 /* See if we have more mandatory reloads than spill regs.
4943 If so, then we cannot risk optimizations that could prevent
4944 reloads from sharing one spill register.
4945
4946 Since we will try finding a better register than reload_reg_rtx
4947 unless it is equal to reload_in or reload_out, count such reloads. */
4948
4949 {
4950 int tem = 0;
4951 #ifdef SMALL_REGISTER_CLASSES
4952 tem = (avoid_return_reg != 0);
4953 #endif
4954 for (j = 0; j < n_reloads; j++)
4955 if (! reload_optional[j]
4956 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4957 && (reload_reg_rtx[j] == 0
4958 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4959 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4960 tem++;
4961 if (tem > n_spills)
4962 must_reuse = 1;
4963 }
4964 #endif
4965
4966 #ifdef SMALL_REGISTER_CLASSES
4967 /* Don't use the subroutine call return reg for a reload
4968 if we are supposed to avoid it. */
4969 if (avoid_return_reg)
4970 {
4971 int regno = REGNO (avoid_return_reg);
4972 int nregs
4973 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4974 int r;
4975
4976 for (r = regno; r < regno + nregs; r++)
4977 if (spill_reg_order[r] >= 0)
4978 SET_HARD_REG_BIT (reload_reg_used, r);
4979 }
4980 #endif /* SMALL_REGISTER_CLASSES */
4981
4982 /* In order to be certain of getting the registers we need,
4983 we must sort the reloads into order of increasing register class.
4984 Then our grabbing of reload registers will parallel the process
4985 that provided the reload registers.
4986
4987 Also note whether any of the reloads wants a consecutive group of regs.
4988 If so, record the maximum size of the group desired and what
4989 register class contains all the groups needed by this insn. */
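/* Roughly speaking, the more restrictive classes are handled first,
   so that (on the 68000, say) a reload that must have an address
   register gets one before a reload that will accept any general
   register takes the last free address reg.  */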
4990
4991 for (j = 0; j < n_reloads; j++)
4992 {
4993 reload_order[j] = j;
4994 reload_spill_index[j] = -1;
4995
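/* The reload register must be wide enough for both the input and the
   output side of the reload, so use the wider of the two modes; a
   VOIDmode input (e.g. a constant) simply takes the output mode.  */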
4996 reload_mode[j]
4997 = (reload_inmode[j] == VOIDmode
4998 || (GET_MODE_SIZE (reload_outmode[j])
4999 > GET_MODE_SIZE (reload_inmode[j])))
5000 ? reload_outmode[j] : reload_inmode[j];
5001
5002 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5003
5004 if (reload_nregs[j] > 1)
5005 {
5006 max_group_size = MAX (reload_nregs[j], max_group_size);
5007 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5008 }
5009
5010 /* If we have already decided to use a certain register,
5011 don't use it in another way. */
5012 if (reload_reg_rtx[j])
5013 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5014 reload_when_needed[j], reload_mode[j]);
5015 }
5016
5017 if (n_reloads > 1)
5018 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5019
5020 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5021 sizeof reload_reg_rtx);
5022 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5023 bcopy ((char *) reload_inheritance_insn,
5024 (char *) save_reload_inheritance_insn,
5025 sizeof reload_inheritance_insn);
5026 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5027 sizeof reload_override_in);
5028 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5029 sizeof reload_spill_index);
5030 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5031 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5032 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5033 reload_reg_used_in_op_addr);
5034
5035 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5036 reload_reg_used_in_op_addr_reload);
5037
5038 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5039 reload_reg_used_in_insn);
5040 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5041 reload_reg_used_in_other_addr);
5042
5043 for (i = 0; i < reload_n_operands; i++)
5044 {
5045 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5046 reload_reg_used_in_output[i]);
5047 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5048 reload_reg_used_in_input[i]);
5049 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5050 reload_reg_used_in_input_addr[i]);
5051 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5052 reload_reg_used_in_output_addr[i]);
5053 }
5054
5055 /* If -O, try first with inheritance, then turning it off.
5056 If not -O, don't do inheritance.
5057 Using inheritance when not optimizing leads to paradoxes
5058 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5059 because one side of the comparison might be inherited. */
5060
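/* The first iteration tries to inherit reload registers from previous
   insns (when optimizing); if that attempt fails, the `fail:' code
   below restores the saved state and we retry with inheritance
   turned off.  */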
5061 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5062 {
5063 /* Process the reloads in order of preference just found.
5064 Beyond this point, subregs can be found in reload_reg_rtx.
5065
5066 This used to look for an existing reloaded home for all
5067 of the reloads, and only then perform any new reloads.
5068 But that could lose if the reloads were done out of reg-class order
5069 because a later reload with a looser constraint might have an old
5070 home in a register needed by an earlier reload with a tighter constraint.
5071
5072 To solve this, we make two passes over the reloads, in the order
5073 described above. In the first pass we try to inherit a reload
5074 from a previous insn. If there is a later reload that needs a
5075 class that is a proper subset of the class being processed, we must
5076 also allocate a spill register during the first pass.
5077
5078 Then make a second pass over the reloads to allocate any reloads
5079 that haven't been given registers yet. */
5080
5081 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5082
5083 for (j = 0; j < n_reloads; j++)
5084 {
5085 register int r = reload_order[j];
5086
5087 /* Ignore reloads that got marked inoperative. */
5088 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5089 continue;
5090
5091 /* If find_reloads chose to use reload_in or reload_out as a reload
5092 register, we don't need to choose one. Otherwise, try even if it found
5093 one since we might save an insn if we find the value lying around. */
5094 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5095 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5096 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5097 continue;
5098
5099 #if 0 /* No longer needed for correct operation.
5100 It might give better code, or might not; worth an experiment? */
5101 /* If this is an optional reload, we can't inherit from earlier insns
5102 until we are sure that any non-optional reloads have been allocated.
5103 The following code takes advantage of the fact that optional reloads
5104 are at the end of reload_order. */
5105 if (reload_optional[r] != 0)
5106 for (i = 0; i < j; i++)
5107 if ((reload_out[reload_order[i]] != 0
5108 || reload_in[reload_order[i]] != 0
5109 || reload_secondary_p[reload_order[i]])
5110 && ! reload_optional[reload_order[i]]
5111 && reload_reg_rtx[reload_order[i]] == 0)
5112 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5113 #endif
5114
5115 /* First see if this pseudo is already available as reloaded
5116 for a previous insn. We cannot try to inherit for reloads
5117 that are smaller than the maximum number of registers needed
5118 for groups unless the register we would allocate cannot be used
5119 for the groups.
5120
5121 We could check here to see if this is a secondary reload for
5122 an object that is already in a register of the desired class.
5123 This would avoid the need for the secondary reload register.
5124 But this is complex because we can't easily determine what
5125 objects might want to be loaded via this reload. So let a register
5126 be allocated here. In `emit_reload_insns' we suppress one of the
5127 loads in the case described above. */
5128
5129 if (inheritance)
5130 {
5131 register int regno = -1;
5132 enum machine_mode mode;
5133
5134 if (reload_in[r] == 0)
5135 ;
5136 else if (GET_CODE (reload_in[r]) == REG)
5137 {
5138 regno = REGNO (reload_in[r]);
5139 mode = GET_MODE (reload_in[r]);
5140 }
5141 else if (GET_CODE (reload_in_reg[r]) == REG)
5142 {
5143 regno = REGNO (reload_in_reg[r]);
5144 mode = GET_MODE (reload_in_reg[r]);
5145 }
5146 #if 0
5147 /* This won't work, since REGNO can be a pseudo reg number.
5148 Also, it takes much more hair to keep track of all the things
5149 that can invalidate an inherited reload of part of a pseudoreg. */
5150 else if (GET_CODE (reload_in[r]) == SUBREG
5151 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5152 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5153 #endif
5154
5155 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5156 {
5157 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5158
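/* Use the inherited register only if it still holds this pseudo's
   value, is at least as wide as we need, is usable in the mode and
   class of this reload, is not a register we must save for a group
   (unless this reload is itself a full-size group), and is free both
   at the point where this reload is used and earlier in the insn.  */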
5159 if (reg_reloaded_contents[i] == regno
5160 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5161 >= GET_MODE_SIZE (mode))
5162 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5163 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5164 spill_regs[i])
5165 && (reload_nregs[r] == max_group_size
5166 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5167 spill_regs[i]))
5168 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5169 reload_when_needed[r])
5170 && reload_reg_free_before_p (spill_regs[i],
5171 reload_opnum[r],
5172 reload_when_needed[r]))
5173 {
5174 /* If a group is needed, verify that all the subsequent
5175 registers still have their values intact. */
5176 int nr
5177 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5178 int k;
5179
5180 for (k = 1; k < nr; k++)
5181 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5182 != regno)
5183 break;
5184
5185 if (k == nr)
5186 {
5187 int i1;
5188
5189 /* We found a register that contains the
5190 value we need. If this register is the
5191 same as an `earlyclobber' operand of the
5192 current insn, just mark it as a place to
5193 reload from since we can't use it as the
5194 reload register itself. */
5195
5196 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5197 if (reg_overlap_mentioned_for_reload_p
5198 (reg_last_reload_reg[regno],
5199 reload_earlyclobbers[i1]))
5200 break;
5201
5202 if (i1 != n_earlyclobbers
5203 /* Don't really use the inherited spill reg
5204 if we need it wider than we've got it. */
5205 || (GET_MODE_SIZE (reload_mode[r])
5206 > GET_MODE_SIZE (mode)))
5207 reload_override_in[r] = reg_last_reload_reg[regno];
5208 else
5209 {
5210 int k;
5211 /* We can use this as a reload reg. */
5212 /* Mark the register as in use for this part of
5213 the insn. */
5214 mark_reload_reg_in_use (spill_regs[i],
5215 reload_opnum[r],
5216 reload_when_needed[r],
5217 reload_mode[r]);
5218 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5219 reload_inherited[r] = 1;
5220 reload_inheritance_insn[r]
5221 = reg_reloaded_insn[i];
5222 reload_spill_index[r] = i;
5223 for (k = 0; k < nr; k++)
5224 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5225 spill_regs[i + k]);
5226 }
5227 }
5228 }
5229 }
5230 }
5231
5232 /* Here's another way to see if the value is already lying around. */
5233 if (inheritance
5234 && reload_in[r] != 0
5235 && ! reload_inherited[r]
5236 && reload_out[r] == 0
5237 && (CONSTANT_P (reload_in[r])
5238 || GET_CODE (reload_in[r]) == PLUS
5239 || GET_CODE (reload_in[r]) == REG
5240 || GET_CODE (reload_in[r]) == MEM)
5241 && (reload_nregs[r] == max_group_size
5242 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5243 {
5244 register rtx equiv
5245 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5246 -1, NULL_PTR, 0, reload_mode[r]);
5247 int regno;
5248
5249 if (equiv != 0)
5250 {
5251 if (GET_CODE (equiv) == REG)
5252 regno = REGNO (equiv);
5253 else if (GET_CODE (equiv) == SUBREG)
5254 {
5255 /* This must be a SUBREG of a hard register.
5256 Make a new REG since this might be used in an
5257 address and not all machines support SUBREGs
5258 there. */
5259 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5260 equiv = gen_rtx (REG, reload_mode[r], regno);
5261 }
5262 else
5263 abort ();
5264 }
5265
5266 /* If we found a spill reg, reject it unless it is free
5267 and of the desired class. */
5268 if (equiv != 0
5269 && ((spill_reg_order[regno] >= 0
5270 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5271 reload_when_needed[r]))
5272 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5273 regno)))
5274 equiv = 0;
5275
5276 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5277 equiv = 0;
5278
5279 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5280 equiv = 0;
5281
5282 /* We found a register that contains the value we need.
5283 If this register is the same as an `earlyclobber' operand
5284 of the current insn, just mark it as a place to reload from
5285 since we can't use it as the reload register itself. */
5286
5287 if (equiv != 0)
5288 for (i = 0; i < n_earlyclobbers; i++)
5289 if (reg_overlap_mentioned_for_reload_p (equiv,
5290 reload_earlyclobbers[i]))
5291 {
5292 reload_override_in[r] = equiv;
5293 equiv = 0;
5294 break;
5295 }
5296
5297 /* JRV: If the equiv register we have found is explicitly
5298 clobbered in the current insn, mark but don't use, as above. */
5299
5300 if (equiv != 0 && regno_clobbered_p (regno, insn))
5301 {
5302 reload_override_in[r] = equiv;
5303 equiv = 0;
5304 }
5305
5306 /* If we found an equivalent reg, say no code need be generated
5307 to load it, and use it as our reload reg. */
5308 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5309 {
5310 reload_reg_rtx[r] = equiv;
5311 reload_inherited[r] = 1;
5312 /* If it is a spill reg,
5313 mark the spill reg as in use for this insn. */
5314 i = spill_reg_order[regno];
5315 if (i >= 0)
5316 {
5317 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5318 int k;
5319 mark_reload_reg_in_use (regno, reload_opnum[r],
5320 reload_when_needed[r],
5321 reload_mode[r]);
5322 for (k = 0; k < nr; k++)
5323 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
5324 }
5325 }
5326 }
5327
5328 /* If we found a register to use already, or if this is an optional
5329 reload, we are done. */
5330 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5331 continue;
5332
5333 #if 0 /* No longer needed for correct operation. Might or might not
5334 give better code on the average. Want to experiment? */
5335
5336 /* See if there is a later reload that has a class different from our
5337 class that intersects our class or that requires less register
5338 than our reload. If so, we must allocate a register to this
5339 reload now, since that reload might inherit a previous reload
5340 and take the only available register in our class. Don't do this
5341 for optional reloads since they will force all previous reloads
5342 to be allocated. Also don't do this for reloads that have been
5343 turned off. */
5344
5345 for (i = j + 1; i < n_reloads; i++)
5346 {
5347 int s = reload_order[i];
5348
5349 if ((reload_in[s] == 0 && reload_out[s] == 0
5350 && ! reload_secondary_p[s])
5351 || reload_optional[s])
5352 continue;
5353
5354 if ((reload_reg_class[s] != reload_reg_class[r]
5355 && reg_classes_intersect_p (reload_reg_class[r],
5356 reload_reg_class[s]))
5357 || reload_nregs[s] < reload_nregs[r])
5358 break;
5359 }
5360
5361 if (i == n_reloads)
5362 continue;
5363
5364 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5365 #endif
5366 }
5367
5368 /* Now allocate reload registers for anything non-optional that
5369 didn't get one yet. */
5370 for (j = 0; j < n_reloads; j++)
5371 {
5372 register int r = reload_order[j];
5373
5374 /* Ignore reloads that got marked inoperative. */
5375 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5376 continue;
5377
5378 /* Skip reloads that already have a register allocated or are
5379 optional. */
5380 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5381 continue;
5382
5383 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5384 break;
5385 }
5386
5387 /* If that loop got all the way, we have won. */
5388 if (j == n_reloads)
5389 break;
5390
5391 fail:
5392 /* Loop around and try without any inheritance. */
5393 /* First undo everything done by the failed attempt
5394 to allocate with inheritance. */
5395 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5396 sizeof reload_reg_rtx);
5397 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5398 sizeof reload_inherited);
5399 bcopy ((char *) save_reload_inheritance_insn,
5400 (char *) reload_inheritance_insn,
5401 sizeof reload_inheritance_insn);
5402 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5403 sizeof reload_override_in);
5404 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5405 sizeof reload_spill_index);
5406 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5407 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5408 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5409 save_reload_reg_used_in_op_addr);
5410 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5411 save_reload_reg_used_in_op_addr_reload);
5412 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5413 save_reload_reg_used_in_insn);
5414 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5415 save_reload_reg_used_in_other_addr);
5416
5417 for (i = 0; i < reload_n_operands; i++)
5418 {
5419 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5420 save_reload_reg_used_in_input[i]);
5421 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5422 save_reload_reg_used_in_output[i]);
5423 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5424 save_reload_reg_used_in_input_addr[i]);
5425 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5426 save_reload_reg_used_in_output_addr[i]);
5427 }
5428 }
5429
5430 /* If we thought we could inherit a reload, because it seemed that
5431 nothing else wanted the same reload register earlier in the insn,
5432 verify that assumption, now that all reloads have been assigned. */
5433
5434 for (j = 0; j < n_reloads; j++)
5435 {
5436 register int r = reload_order[j];
5437
5438 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5439 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5440 reload_opnum[r],
5441 reload_when_needed[r]))
5442 reload_inherited[r] = 0;
5443
5444 /* If we found a better place to reload from,
5445 validate it in the same fashion, if it is a reload reg. */
5446 if (reload_override_in[r]
5447 && (GET_CODE (reload_override_in[r]) == REG
5448 || GET_CODE (reload_override_in[r]) == SUBREG))
5449 {
5450 int regno = true_regnum (reload_override_in[r]);
5451 if (spill_reg_order[regno] >= 0
5452 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5453 reload_when_needed[r]))
5454 reload_override_in[r] = 0;
5455 }
5456 }
5457
5458 /* Now that reload_override_in is known valid,
5459 actually override reload_in. */
5460 for (j = 0; j < n_reloads; j++)
5461 if (reload_override_in[j])
5462 reload_in[j] = reload_override_in[j];
5463
5464 /* If this reload won't be done because it has been cancelled or is
5465 optional and not inherited, clear reload_reg_rtx so other
5466 routines (such as subst_reloads) don't get confused. */
5467 for (j = 0; j < n_reloads; j++)
5468 if (reload_reg_rtx[j] != 0
5469 && ((reload_optional[j] && ! reload_inherited[j])
5470 || (reload_in[j] == 0 && reload_out[j] == 0
5471 && ! reload_secondary_p[j])))
5472 {
5473 int regno = true_regnum (reload_reg_rtx[j]);
5474
5475 if (spill_reg_order[regno] >= 0)
5476 clear_reload_reg_in_use (regno, reload_opnum[j],
5477 reload_when_needed[j], reload_mode[j]);
5478 reload_reg_rtx[j] = 0;
5479 }
5480
5481 /* Record which pseudos and which spill regs have output reloads. */
5482 for (j = 0; j < n_reloads; j++)
5483 {
5484 register int r = reload_order[j];
5485
5486 i = reload_spill_index[r];
5487
5488 /* I is nonneg if this reload used one of the spill regs.
5489 If reload_reg_rtx[r] is 0, this is an optional reload
5490 that we opted to ignore. */
5491 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5492 && reload_reg_rtx[r] != 0)
5493 {
5494 register int nregno = REGNO (reload_out[r]);
5495 int nr = 1;
5496
5497 if (nregno < FIRST_PSEUDO_REGISTER)
5498 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5499
5500 while (--nr >= 0)
5501 reg_has_output_reload[nregno + nr] = 1;
5502
5503 if (i >= 0)
5504 {
5505 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5506 while (--nr >= 0)
5507 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5508 }
5509
5510 if (reload_when_needed[r] != RELOAD_OTHER
5511 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5512 && reload_when_needed[r] != RELOAD_FOR_INSN)
5513 abort ();
5514 }
5515 }
5516 }
5517 \f
5518 /* If SMALL_REGISTER_CLASSES is defined, we may not have merged two
5519 reloads of the same item for fear that we might not have enough reload
5520 registers. However, normally they will get the same reload register
5521 and hence actually need not be loaded twice.
5522
5523 Here we check for the most common case of this phenomenon: when we have
5524 a number of reloads for the same object, each of which was allocated
5525 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5526 reload, and is not modified in the insn itself. If we find such,
5527 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5528 This will not increase the number of spill registers needed and will
5529 prevent redundant code. */
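/* For example, an insn that uses the same pseudo both as an operand
   and inside an address may get two separate input reloads for it;
   if both were assigned the same reload register, loading that
   register once (as RELOAD_OTHER, ahead of everything else) is
   enough.  */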
5530
5531 #ifdef SMALL_REGISTER_CLASSES
5532
5533 static void
5534 merge_assigned_reloads (insn)
5535 rtx insn;
5536 {
5537 int i, j;
5538
5539 /* Scan all the reloads looking for ones that only load values and
5540 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5541 assigned and not modified by INSN. */
5542
5543 for (i = 0; i < n_reloads; i++)
5544 {
5545 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5546 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5547 || reg_set_p (reload_reg_rtx[i], insn))
5548 continue;
5549
5550 /* Look at all other reloads. Ensure that the only use of this
5551 reload_reg_rtx is in a reload that just loads the same value
5552 as we do. Note that any secondary reloads must be of the identical
5553 class since the values, modes, and result registers are the
5554 same, so we need not do anything with any secondary reloads. */
5555
5556 for (j = 0; j < n_reloads; j++)
5557 {
5558 if (i == j || reload_reg_rtx[j] == 0
5559 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5560 reload_reg_rtx[i]))
5561 continue;
5562
5563 /* If the reload regs aren't exactly the same (e.g., different modes)
5564 or if the values are different, we can't merge anything with this
5565 reload register. */
5566
5567 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5568 || reload_out[j] != 0 || reload_in[j] == 0
5569 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5570 break;
5571 }
5572
5573 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5574 we, in fact, found any matching reloads. */
5575
5576 if (j == n_reloads)
5577 {
5578 for (j = 0; j < n_reloads; j++)
5579 if (i != j && reload_reg_rtx[j] != 0
5580 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5581 {
5582 reload_when_needed[i] = RELOAD_OTHER;
5583 reload_in[j] = 0;
5584 transfer_replacements (i, j);
5585 }
5586
5587 /* If this is now RELOAD_OTHER, look for any reloads that load
5588 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5589 if they were for inputs, RELOAD_OTHER for outputs. Note that
5590 this test is equivalent to looking for reloads for this operand
5591 number. */
5592
5593 if (reload_when_needed[i] == RELOAD_OTHER)
5594 for (j = 0; j < n_reloads; j++)
5595 if (reload_in[j] != 0
5596 && reload_when_needed[j] != RELOAD_OTHER
5597 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5598 reload_in[i]))
5599 reload_when_needed[j]
5600 = reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
5601 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5602 }
5603 }
5604 }
5605 #endif /* SMALL_REGISTER_CLASSES */
5606 \f
5607 /* Output insns to reload values in and out of the chosen reload regs. */
5608
5609 static void
5610 emit_reload_insns (insn)
5611 rtx insn;
5612 {
5613 register int j;
5614 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5615 rtx other_input_address_reload_insns = 0;
5616 rtx other_input_reload_insns = 0;
5617 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5618 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5619 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5620 rtx operand_reload_insns = 0;
5621 rtx other_operand_reload_insns = 0;
5622 rtx following_insn = NEXT_INSN (insn);
5623 rtx before_insn = insn;
5624 int special;
5625 /* Values to be put in spill_reg_store are put here first. */
5626 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5627
5628 for (j = 0; j < reload_n_operands; j++)
5629 input_reload_insns[j] = input_address_reload_insns[j]
5630 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5631
5632 /* Now output the instructions to copy the data into and out of the
5633 reload registers. Do these in the order that the reloads were reported,
5634 since reloads of base and index registers precede reloads of operands
5635 and the operands may need the base and index registers reloaded. */
5636
5637 for (j = 0; j < n_reloads; j++)
5638 {
5639 register rtx old;
5640 rtx oldequiv_reg = 0;
5641 rtx store_insn = 0;
5642
5643 old = reload_in[j];
5644 if (old != 0 && ! reload_inherited[j]
5645 && ! rtx_equal_p (reload_reg_rtx[j], old)
5646 && reload_reg_rtx[j] != 0)
5647 {
5648 register rtx reloadreg = reload_reg_rtx[j];
5649 rtx oldequiv = 0;
5650 enum machine_mode mode;
5651 rtx *where;
5652
5653 /* Determine the mode to reload in.
5654 This is very tricky because we have three to choose from.
5655 There is the mode the insn operand wants (reload_inmode[J]).
5656 There is the mode of the reload register RELOADREG.
5657 There is the intrinsic mode of the operand, which we could find
5658 by stripping some SUBREGs.
5659 It turns out that RELOADREG's mode is irrelevant:
5660 we can change that arbitrarily.
5661
5662 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5663 then the reload reg may not support QImode moves, so use SImode.
5664 If foo is in memory due to spilling a pseudo reg, this is safe,
5665 because the QImode value is in the least significant part of a
5666 slot big enough for a SImode. If foo is some other sort of
5667 memory reference, then it is impossible to reload this case,
5668 so previous passes had better make sure this never happens.
5669
5670 Then consider a one-word union which has SImode and one of its
5671 members is a float, being fetched as (SUBREG:SF union:SI).
5672 We must fetch that as SFmode because we could be loading into
5673 a float-only register. In this case OLD's mode is correct.
5674
5675 Consider an immediate integer: it has VOIDmode. Here we need
5676 to get a mode from something else.
5677
5678 In some cases, there is a fourth mode, the operand's
5679 containing mode. If the insn specifies a containing mode for
5680 this operand, it overrides all others.
5681
5682 I am not sure whether the algorithm here is always right,
5683 but it does the right things in those cases. */
5684
5685 mode = GET_MODE (old);
5686 if (mode == VOIDmode)
5687 mode = reload_inmode[j];
5688
5689 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5690 /* If we need a secondary register for this operation, see if
5691 the value is already in a register in that class. Don't
5692 do this if the secondary register will be used as a scratch
5693 register. */
5694
5695 if (reload_secondary_in_reload[j] >= 0
5696 && reload_secondary_in_icode[j] == CODE_FOR_nothing
5697 && optimize)
5698 oldequiv
5699 = find_equiv_reg (old, insn,
5700 reload_reg_class[reload_secondary_in_reload[j]],
5701 -1, NULL_PTR, 0, mode);
5702 #endif
5703
5704 /* If reloading from memory, see if there is a register
5705 that already holds the same value. If so, reload from there.
5706 We can pass 0 as the reload_reg_p argument because
5707 any other reload has either already been emitted,
5708 in which case find_equiv_reg will see the reload-insn,
5709 or has yet to be emitted, in which case it doesn't matter
5710 because we will use this equiv reg right away. */
5711
5712 if (oldequiv == 0 && optimize
5713 && (GET_CODE (old) == MEM
5714 || (GET_CODE (old) == REG
5715 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5716 && reg_renumber[REGNO (old)] < 0)))
5717 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5718 -1, NULL_PTR, 0, mode);
5719
5720 if (oldequiv)
5721 {
5722 int regno = true_regnum (oldequiv);
5723
5724 /* If OLDEQUIV is a spill register, don't use it for this
5725 if any other reload needs it at an earlier stage of this insn
5726 or at this stage. */
5727 if (spill_reg_order[regno] >= 0
5728 && (! reload_reg_free_p (regno, reload_opnum[j],
5729 reload_when_needed[j])
5730 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5731 reload_when_needed[j])))
5732 oldequiv = 0;
5733
5734 /* If OLDEQUIV is not a spill register,
5735 don't use it if any other reload wants it. */
5736 if (spill_reg_order[regno] < 0)
5737 {
5738 int k;
5739 for (k = 0; k < n_reloads; k++)
5740 if (reload_reg_rtx[k] != 0 && k != j
5741 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5742 oldequiv))
5743 {
5744 oldequiv = 0;
5745 break;
5746 }
5747 }
5748
5749 /* If it is no cheaper to copy from OLDEQUIV into the
5750 reload register than it would be to move from memory,
5751 don't use it. Likewise, if we need a secondary register
5752 or memory. */
5753
5754 if (oldequiv != 0
5755 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5756 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5757 reload_reg_class[j])
5758 >= MEMORY_MOVE_COST (mode)))
5759 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5760 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5761 mode, oldequiv)
5762 != NO_REGS)
5763 #endif
5764 #ifdef SECONDARY_MEMORY_NEEDED
5765 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5766 REGNO_REG_CLASS (regno),
5767 mode)
5768 #endif
5769 ))
5770 oldequiv = 0;
5771 }
5772
5773 if (oldequiv == 0)
5774 oldequiv = old;
5775 else if (GET_CODE (oldequiv) == REG)
5776 oldequiv_reg = oldequiv;
5777 else if (GET_CODE (oldequiv) == SUBREG)
5778 oldequiv_reg = SUBREG_REG (oldequiv);
5779
5780 /* If we are reloading from a register that was recently stored in
5781 with an output-reload, see if we can prove there was
5782 actually no need to store the old value in it. */
5783
5784 if (optimize && GET_CODE (oldequiv) == REG
5785 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5786 && spill_reg_order[REGNO (oldequiv)] >= 0
5787 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
5788 && find_reg_note (insn, REG_DEAD, reload_in[j])
5789 /* This is unsafe if operand occurs more than once in current
5790 insn. Perhaps some occurrences weren't reloaded. */
5791 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5792 delete_output_reload
5793 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5794
5795 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5796 then load RELOADREG from OLDEQUIV. Note that we cannot use
5797 gen_lowpart_common since it can do the wrong thing when
5798 RELOADREG has a multi-word mode. Note that RELOADREG
5799 must always be a REG here. */
5800
5801 if (GET_MODE (reloadreg) != mode)
5802 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5803 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5804 oldequiv = SUBREG_REG (oldequiv);
5805 if (GET_MODE (oldequiv) != VOIDmode
5806 && mode != GET_MODE (oldequiv))
5807 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5808
5809 /* Switch to the right place to emit the reload insns. */
5810 switch (reload_when_needed[j])
5811 {
5812 case RELOAD_OTHER:
5813 where = &other_input_reload_insns;
5814 break;
5815 case RELOAD_FOR_INPUT:
5816 where = &input_reload_insns[reload_opnum[j]];
5817 break;
5818 case RELOAD_FOR_INPUT_ADDRESS:
5819 where = &input_address_reload_insns[reload_opnum[j]];
5820 break;
5821 case RELOAD_FOR_OUTPUT_ADDRESS:
5822 where = &output_address_reload_insns[reload_opnum[j]];
5823 break;
5824 case RELOAD_FOR_OPERAND_ADDRESS:
5825 where = &operand_reload_insns;
5826 break;
5827 case RELOAD_FOR_OPADDR_ADDR:
5828 where = &other_operand_reload_insns;
5829 break;
5830 case RELOAD_FOR_OTHER_ADDRESS:
5831 where = &other_input_address_reload_insns;
5832 break;
5833 default:
5834 abort ();
5835 }
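/* WHERE now points at the insn sequence being accumulated for this
   kind of reload; the sequences are spliced in around INSN, in the
   order described later in this function, once all reloads have
   been processed.  */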
5836
5837 push_to_sequence (*where);
5838 special = 0;
5839
5840 /* Auto-increment addresses must be reloaded in a special way. */
5841 if (GET_CODE (oldequiv) == POST_INC
5842 || GET_CODE (oldequiv) == POST_DEC
5843 || GET_CODE (oldequiv) == PRE_INC
5844 || GET_CODE (oldequiv) == PRE_DEC)
5845 {
5846 /* We are not going to bother supporting the case where an
5847 incremented register can't be copied directly from
5848 OLDEQUIV since this seems highly unlikely. */
5849 if (reload_secondary_in_reload[j] >= 0)
5850 abort ();
5851 /* Prevent normal processing of this reload. */
5852 special = 1;
5853 /* Output a special code sequence for this case. */
5854 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5855 }
5856
5857 /* If we are reloading a pseudo-register that was set by the previous
5858 insn, see if we can get rid of that pseudo-register entirely
5859 by redirecting the previous insn into our reload register. */
5860
5861 else if (optimize && GET_CODE (old) == REG
5862 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5863 && dead_or_set_p (insn, old)
5864 /* This is unsafe if some other reload
5865 uses the same reg first. */
5866 && reload_reg_free_before_p (REGNO (reloadreg),
5867 reload_opnum[j],
5868 reload_when_needed[j]))
5869 {
5870 rtx temp = PREV_INSN (insn);
5871 while (temp && GET_CODE (temp) == NOTE)
5872 temp = PREV_INSN (temp);
5873 if (temp
5874 && GET_CODE (temp) == INSN
5875 && GET_CODE (PATTERN (temp)) == SET
5876 && SET_DEST (PATTERN (temp)) == old
5877 /* Make sure we can access insn_operand_constraint. */
5878 && asm_noperands (PATTERN (temp)) < 0
5879 /* This is unsafe if prev insn rejects our reload reg. */
5880 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5881 reloadreg)
5882 /* This is unsafe if operand occurs more than once in current
5883 insn. Perhaps some occurrences aren't reloaded. */
5884 && count_occurrences (PATTERN (insn), old) == 1
5885 /* Don't risk splitting a matching pair of operands. */
5886 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5887 {
5888 /* Store into the reload register instead of the pseudo. */
5889 SET_DEST (PATTERN (temp)) = reloadreg;
5890 /* If these are the only uses of the pseudo reg,
5891 pretend for GDB it lives in the reload reg we used. */
5892 if (reg_n_deaths[REGNO (old)] == 1
5893 && reg_n_sets[REGNO (old)] == 1)
5894 {
5895 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5896 alter_reg (REGNO (old), -1);
5897 }
5898 special = 1;
5899 }
5900 }
5901
5902 /* We can't do that, so output an insn to load RELOADREG. */
5903
5904 if (! special)
5905 {
5906 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5907 rtx second_reload_reg = 0;
5908 enum insn_code icode;
5909
5910 /* If we have a secondary reload, pick up the secondary register
5911 and icode, if any. If OLDEQUIV and OLD are different or
5912 if this is an in-out reload, recompute whether or not we
5913 still need a secondary register and what the icode should
5914 be. If we still need a secondary register and the class or
5915 icode is different, go back to reloading from OLD if using
5916 OLDEQUIV means that we got the wrong type of register. We
5917 cannot have different class or icode due to an in-out reload
5918 because we don't make such reloads when both the input and
5919 output need secondary reload registers. */
5920
5921 if (reload_secondary_in_reload[j] >= 0)
5922 {
5923 int secondary_reload = reload_secondary_in_reload[j];
5924 rtx real_oldequiv = oldequiv;
5925 rtx real_old = old;
5926
5927 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5928 and similarly for OLD.
5929 See comments in get_secondary_reload in reload.c. */
5930 if (GET_CODE (oldequiv) == REG
5931 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5932 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5933 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5934
5935 if (GET_CODE (old) == REG
5936 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5937 && reg_equiv_mem[REGNO (old)] != 0)
5938 real_old = reg_equiv_mem[REGNO (old)];
5939
5940 second_reload_reg = reload_reg_rtx[secondary_reload];
5941 icode = reload_secondary_in_icode[j];
5942
5943 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5944 || (reload_in[j] != 0 && reload_out[j] != 0))
5945 {
5946 enum reg_class new_class
5947 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5948 mode, real_oldequiv);
5949
5950 if (new_class == NO_REGS)
5951 second_reload_reg = 0;
5952 else
5953 {
5954 enum insn_code new_icode;
5955 enum machine_mode new_mode;
5956
5957 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5958 REGNO (second_reload_reg)))
5959 oldequiv = old, real_oldequiv = real_old;
5960 else
5961 {
5962 new_icode = reload_in_optab[(int) mode];
5963 if (new_icode != CODE_FOR_nothing
5964 && ((insn_operand_predicate[(int) new_icode][0]
5965 && ! ((*insn_operand_predicate[(int) new_icode][0])
5966 (reloadreg, mode)))
5967 || (insn_operand_predicate[(int) new_icode][1]
5968 && ! ((*insn_operand_predicate[(int) new_icode][1])
5969 (real_oldequiv, mode)))))
5970 new_icode = CODE_FOR_nothing;
5971
5972 if (new_icode == CODE_FOR_nothing)
5973 new_mode = mode;
5974 else
5975 new_mode = insn_operand_mode[(int) new_icode][2];
5976
5977 if (GET_MODE (second_reload_reg) != new_mode)
5978 {
5979 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5980 new_mode))
5981 oldequiv = old, real_oldequiv = real_old;
5982 else
5983 second_reload_reg
5984 = gen_rtx (REG, new_mode,
5985 REGNO (second_reload_reg));
5986 }
5987 }
5988 }
5989 }
5990
5991 /* If we still need a secondary reload register, check
5992 to see if it is being used as a scratch or intermediate
5993 register and generate code appropriately. If we need
5994 a scratch register, use REAL_OLDEQUIV since the form of
5995 the insn may depend on the actual address if it is
5996 a MEM. */
5997
5998 if (second_reload_reg)
5999 {
6000 if (icode != CODE_FOR_nothing)
6001 {
6002 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6003 second_reload_reg));
6004 special = 1;
6005 }
6006 else
6007 {
6008 /* See if we need a scratch register to load the
6009 intermediate register (a tertiary reload). */
6010 enum insn_code tertiary_icode
6011 = reload_secondary_in_icode[secondary_reload];
6012
6013 if (tertiary_icode != CODE_FOR_nothing)
6014 {
6015 rtx third_reload_reg
6016 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6017
6018 emit_insn ((GEN_FCN (tertiary_icode)
6019 (second_reload_reg, real_oldequiv,
6020 third_reload_reg)));
6021 }
6022 else
6023 gen_input_reload (second_reload_reg, oldequiv,
6024 reload_opnum[j],
6025 reload_when_needed[j]);
6026
6027 oldequiv = second_reload_reg;
6028 }
6029 }
6030 }
6031 #endif
6032
6033 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6034 gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
6035 reload_when_needed[j]);
6036
6037 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6038 /* We may have to make a REG_DEAD note for the secondary reload
6039 register in the insns we just made. Find the last insn that
6040 mentioned the register. */
6041 if (! special && second_reload_reg
6042 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6043 {
6044 rtx prev;
6045
6046 for (prev = get_last_insn (); prev;
6047 prev = PREV_INSN (prev))
6048 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6049 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6050 PATTERN (prev)))
6051 {
6052 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6053 second_reload_reg,
6054 REG_NOTES (prev));
6055 break;
6056 }
6057 }
6058 #endif
6059 }
6060
6061 /* End this sequence. */
6062 *where = get_insns ();
6063 end_sequence ();
6064 }
6065
6066 /* Add a note saying the input reload reg
6067 dies in this insn, if anyone cares. */
6068 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6069 if (old != 0
6070 && reload_reg_rtx[j] != old
6071 && reload_reg_rtx[j] != 0
6072 && reload_out[j] == 0
6073 && ! reload_inherited[j]
6074 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6075 {
6076 register rtx reloadreg = reload_reg_rtx[j];
6077
6078 #if 0
6079 /* We can't abort here because we need to support this for sched.c.
6080 It's not terrible to miss a REG_DEAD note, but we should try
6081 to figure out how to do this correctly. */
6082 /* The code below is incorrect for address-only reloads. */
6083 if (reload_when_needed[j] != RELOAD_OTHER
6084 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6085 abort ();
6086 #endif
6087
6088 /* Add a death note to this insn, for an input reload. */
6089
6090 if ((reload_when_needed[j] == RELOAD_OTHER
6091 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6092 && ! dead_or_set_p (insn, reloadreg))
6093 REG_NOTES (insn)
6094 = gen_rtx (EXPR_LIST, REG_DEAD,
6095 reloadreg, REG_NOTES (insn));
6096 }
6097
6098 /* When we inherit a reload, the last marked death of the reload reg
6099 may no longer really be a death. */
6100 if (reload_reg_rtx[j] != 0
6101 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6102 && reload_inherited[j])
6103 {
6104 /* Handle inheriting an output reload.
6105 Remove the death note from the output reload insn. */
6106 if (reload_spill_index[j] >= 0
6107 && GET_CODE (reload_in[j]) == REG
6108 && spill_reg_store[reload_spill_index[j]] != 0
6109 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6110 REG_DEAD, REGNO (reload_reg_rtx[j])))
6111 remove_death (REGNO (reload_reg_rtx[j]),
6112 spill_reg_store[reload_spill_index[j]]);
6113 /* Likewise for input reloads that were inherited. */
6114 else if (reload_spill_index[j] >= 0
6115 && GET_CODE (reload_in[j]) == REG
6116 && spill_reg_store[reload_spill_index[j]] == 0
6117 && reload_inheritance_insn[j] != 0
6118 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6119 REGNO (reload_reg_rtx[j])))
6120 remove_death (REGNO (reload_reg_rtx[j]),
6121 reload_inheritance_insn[j]);
6122 else
6123 {
6124 rtx prev;
6125
6126 /* We got this register from find_equiv_reg.
6127 Search back for its last death note and get rid of it.
6128 But don't search back too far.
6129 Don't go past a place where this reg is set,
6130 since a death note before that remains valid. */
6131 for (prev = PREV_INSN (insn);
6132 prev && GET_CODE (prev) != CODE_LABEL;
6133 prev = PREV_INSN (prev))
6134 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6135 && dead_or_set_p (prev, reload_reg_rtx[j]))
6136 {
6137 if (find_regno_note (prev, REG_DEAD,
6138 REGNO (reload_reg_rtx[j])))
6139 remove_death (REGNO (reload_reg_rtx[j]), prev);
6140 break;
6141 }
6142 }
6143 }
6144
6145 /* We might have used find_equiv_reg above to choose an alternate
6146 place from which to reload. If so, and it died, we need to remove
6147 that death and move it to one of the insns we just made. */
6148
6149 if (oldequiv_reg != 0
6150 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6151 {
6152 rtx prev, prev1;
6153
6154 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6155 prev = PREV_INSN (prev))
6156 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6157 && dead_or_set_p (prev, oldequiv_reg))
6158 {
6159 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6160 {
6161 for (prev1 = this_reload_insn;
6162 prev1; prev1 = PREV_INSN (prev1))
6163 if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
6164 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6165 PATTERN (prev1)))
6166 {
6167 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6168 oldequiv_reg,
6169 REG_NOTES (prev1));
6170 break;
6171 }
6172 remove_death (REGNO (oldequiv_reg), prev);
6173 }
6174 break;
6175 }
6176 }
6177 #endif
6178
6179 /* If we are reloading a register that was recently stored in with an
6180 output-reload, see if we can prove there was
6181 actually no need to store the old value in it. */
6182
6183 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6184 && reload_in[j] != 0
6185 && GET_CODE (reload_in[j]) == REG
6186 #if 0
6187 /* There doesn't seem to be any reason to restrict this to pseudos
6188 and doing so loses in the case where we are copying from a
6189 register of the wrong class. */
6190 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6191 #endif
6192 && spill_reg_store[reload_spill_index[j]] != 0
6193 /* This is unsafe if some other reload uses the same reg first. */
6194 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6195 reload_opnum[j], reload_when_needed[j])
6196 && dead_or_set_p (insn, reload_in[j])
6197 /* This is unsafe if operand occurs more than once in current
6198 insn. Perhaps some occurrences weren't reloaded. */
6199 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6200 delete_output_reload (insn, j,
6201 spill_reg_store[reload_spill_index[j]]);
6202
6203 /* Input-reloading is done. Now do output-reloading,
6204 storing the value from the reload-register after the main insn
6205 if reload_out[j] is nonzero.
6206
6207 ??? At some point we need to support handling output reloads of
6208 JUMP_INSNs or insns that set cc0. */
6209 old = reload_out[j];
6210 if (old != 0
6211 && reload_reg_rtx[j] != old
6212 && reload_reg_rtx[j] != 0)
6213 {
6214 register rtx reloadreg = reload_reg_rtx[j];
6215 register rtx second_reloadreg = 0;
6216 rtx note, p;
6217 enum machine_mode mode;
6218 int special = 0;
6219
6220 /* An output operand that dies right away does need a reload,
6221 but need not be copied from it. Show the new location in the
6222 REG_UNUSED note. */
6223 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6224 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6225 {
6226 XEXP (note, 0) = reload_reg_rtx[j];
6227 continue;
6228 }
6229 else if (GET_CODE (old) == SCRATCH)
6230 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6231 but we don't want to make an output reload. */
6232 continue;
6233
6234 #if 0
6235 /* Strip off of OLD any size-increasing SUBREGs such as
6236 (SUBREG:SI foo:QI 0). */
6237
6238 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6239 && (GET_MODE_SIZE (GET_MODE (old))
6240 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6241 old = SUBREG_REG (old);
6242 #endif
6243
6244 /* If this is a JUMP_INSN, we can't support output reloads yet. */
6245 if (GET_CODE (insn) == JUMP_INSN)
6246 abort ();
6247
6248 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6249
6250 /* Determine the mode to reload in.
6251 See comments above (for input reloading). */
6252
6253 mode = GET_MODE (old);
6254 if (mode == VOIDmode)
6255 {
6256 /* VOIDmode should never happen for an output. */
6257 if (asm_noperands (PATTERN (insn)) < 0)
6258 /* It's the compiler's fault. */
6259 abort ();
6260 error_for_asm (insn, "output operand is constant in `asm'");
6261 /* Prevent crash--use something we know is valid. */
6262 mode = word_mode;
6263 old = gen_rtx (REG, mode, REGNO (reloadreg));
6264 }
6265
6266 if (GET_MODE (reloadreg) != mode)
6267 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6268
6269 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6270
6271 /* If we need two reload regs, set RELOADREG to the intermediate
6272 one, since it will be stored into OUT. We might need a secondary
6273 register only for an input reload, so check again here. */
6274
6275 if (reload_secondary_out_reload[j] >= 0)
6276 {
6277 rtx real_old = old;
6278
6279 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6280 && reg_equiv_mem[REGNO (old)] != 0)
6281 real_old = reg_equiv_mem[REGNO (old)];
6282
6283 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6284 mode, real_old)
6285 != NO_REGS))
6286 {
6287 second_reloadreg = reloadreg;
6288 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6289
6290 /* See if RELOADREG is to be used as a scratch register
6291 or as an intermediate register. */
6292 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6293 {
6294 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6295 (real_old, second_reloadreg, reloadreg)));
6296 special = 1;
6297 }
6298 else
6299 {
6300 /* See if we need both a scratch and intermediate reload
6301 register. */
6302 int secondary_reload = reload_secondary_out_reload[j];
6303 enum insn_code tertiary_icode
6304 = reload_secondary_out_icode[secondary_reload];
6305 rtx pat;
6306
6307 if (GET_MODE (reloadreg) != mode)
6308 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6309
6310 if (tertiary_icode != CODE_FOR_nothing)
6311 {
6312 rtx third_reloadreg
6313 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6314 pat = (GEN_FCN (tertiary_icode)
6315 (reloadreg, second_reloadreg, third_reloadreg));
6316 }
6317 #ifdef SECONDARY_MEMORY_NEEDED
6318 /* If we need a memory location to do the move, do it that way. */
6319 else if (GET_CODE (reloadreg) == REG
6320 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
6321 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
6322 REGNO_REG_CLASS (REGNO (second_reloadreg)),
6323 GET_MODE (second_reloadreg)))
6324 {
6325 /* Get the memory to use and rewrite both registers
6326 to its mode. */
6327 rtx loc
6328 = get_secondary_mem (reloadreg,
6329 GET_MODE (second_reloadreg),
6330 reload_opnum[j],
6331 reload_when_needed[j]);
6332 rtx tmp_reloadreg;
6333
6334 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
6335 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
6336 REGNO (second_reloadreg));
6337
6338 if (GET_MODE (loc) != GET_MODE (reloadreg))
6339 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6340 REGNO (reloadreg));
6341 else
6342 tmp_reloadreg = reloadreg;
6343
6344 emit_move_insn (loc, second_reloadreg);
6345 pat = gen_move_insn (tmp_reloadreg, loc);
6346 }
6347 #endif
6348 else
6349 pat = gen_move_insn (reloadreg, second_reloadreg);
6350
6351 emit_insn (pat);
6352 }
6353 }
6354 }
6355 #endif
6356
6357 /* Output the last reload insn. */
6358 if (! special)
6359 {
6360 #ifdef SECONDARY_MEMORY_NEEDED
6361 /* If we need a memory location to do the move, do it that way. */
6362 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
6363 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
6364 REGNO_REG_CLASS (REGNO (reloadreg)),
6365 GET_MODE (reloadreg)))
6366 {
6367 /* Get the memory to use and rewrite both registers to
6368 its mode. */
6369 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
6370 reload_opnum[j],
6371 reload_when_needed[j]);
6372
6373 if (GET_MODE (loc) != GET_MODE (reloadreg))
6374 reloadreg = gen_rtx (REG, GET_MODE (loc),
6375 REGNO (reloadreg));
6376
6377 if (GET_MODE (loc) != GET_MODE (old))
6378 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
6379
6380 emit_insn (gen_move_insn (loc, reloadreg));
6381 emit_insn (gen_move_insn (old, loc));
6382 }
6383 else
6384 #endif
6385 emit_insn (gen_move_insn (old, reloadreg));
6386 }
6387
6388 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6389 /* If final will look at death notes for this reg,
6390 put one on the last output-reload insn to use it. Similarly
6391 for any secondary register. */
6392 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6393 for (p = get_last_insn (); p; p = PREV_INSN (p))
6394 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6395 && reg_overlap_mentioned_for_reload_p (reloadreg,
6396 PATTERN (p)))
6397 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6398 reloadreg, REG_NOTES (p));
6399
6400 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6401 if (! special && second_reloadreg
6402 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6403 for (p = get_last_insn (); p; p = PREV_INSN (p))
6404 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6405 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6406 PATTERN (p)))
6407 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6408 second_reloadreg, REG_NOTES (p));
6409 #endif
6410 #endif
6411 /* Look at all insns we emitted, just to be safe. */
6412 for (p = get_insns (); p; p = NEXT_INSN (p))
6413 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6414 {
6415 /* If this output reload doesn't come from a spill reg,
6416 clear any memory of reloaded copies of the pseudo reg.
6417 If this output reload comes from a spill reg,
6418 reg_has_output_reload will make this do nothing. */
6419 note_stores (PATTERN (p), forget_old_reloads_1);
6420
6421 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
6422 store_insn = p;
6423 }
6424
6425 output_reload_insns[reload_opnum[j]] = get_insns ();
6426 end_sequence ();
6427
6428 }
6429
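/* Record the insn that stored into this spill register, so that
   later insns can inherit from it or delete the store if it turns
   out to be dead (see delete_output_reload).  */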
6430 if (reload_spill_index[j] >= 0)
6431 new_spill_reg_store[reload_spill_index[j]] = store_insn;
6432 }
6433
6434 /* Now write all the insns we made for reloads in the order expected by
6435 the allocation functions. Prior to the insn being reloaded, we write
6436 the following reloads:
6437
6438 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6439
6440 RELOAD_OTHER reloads.
6441
6442 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6443 the RELOAD_FOR_INPUT reload for the operand.
6444
6445 RELOAD_FOR_OPADDR_ADDRS reloads.
6446
6447 RELOAD_FOR_OPERAND_ADDRESS reloads.
6448
6449 After the insn being reloaded, we write the following:
6450
6451 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6452 the RELOAD_FOR_OUTPUT reload for that operand. */
6453
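/* As a rough illustration (not actual emitted RTL), for a hypothetical
   two-operand insn the code below lays out the stream approximately as:

	<RELOAD_FOR_OTHER_ADDRESS reloads>
	<RELOAD_OTHER reloads>
	<operand 0 input-address reloads>  <operand 0 input reload>
	<operand 1 input-address reloads>  <operand 1 input reload>
	<RELOAD_FOR_OPADDR_ADDRS reloads>
	<RELOAD_FOR_OPERAND_ADDRESS reloads>
	INSN
	<operand 0 output-address reloads>  <operand 0 output reload>
	<operand 1 output-address reloads>  <operand 1 output reload>  */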
6454 emit_insns_before (other_input_address_reload_insns, before_insn);
6455 emit_insns_before (other_input_reload_insns, before_insn);
6456
6457 for (j = 0; j < reload_n_operands; j++)
6458 {
6459 emit_insns_before (input_address_reload_insns[j], before_insn);
6460 emit_insns_before (input_reload_insns[j], before_insn);
6461 }
6462
6463 emit_insns_before (other_operand_reload_insns, before_insn);
6464 emit_insns_before (operand_reload_insns, before_insn);
6465
6466 for (j = 0; j < reload_n_operands; j++)
6467 {
6468 emit_insns_before (output_address_reload_insns[j], following_insn);
6469 emit_insns_before (output_reload_insns[j], following_insn);
6470 }
6471
6472 /* Move death notes from INSN
6473 to output-operand-address and output reload insns. */
6474 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6475 {
6476 rtx insn1;
6477 /* Loop over those insns, last ones first. */
6478 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6479 insn1 = PREV_INSN (insn1))
6480 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6481 {
6482 rtx source = SET_SRC (PATTERN (insn1));
6483 rtx dest = SET_DEST (PATTERN (insn1));
6484
6485 /* The note we will examine next. */
6486 rtx reg_notes = REG_NOTES (insn);
6487 /* The place that pointed to this note. */
6488 rtx *prev_reg_note = &REG_NOTES (insn);
6489
6490 /* If the note is for something used in the source of this
6491 reload insn, or in the output address, move the note. */
6492 while (reg_notes)
6493 {
6494 rtx next_reg_notes = XEXP (reg_notes, 1);
6495 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6496 && GET_CODE (XEXP (reg_notes, 0)) == REG
6497 && ((GET_CODE (dest) != REG
6498 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6499 dest))
6500 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6501 source)))
6502 {
6503 *prev_reg_note = next_reg_notes;
6504 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6505 REG_NOTES (insn1) = reg_notes;
6506 }
6507 else
6508 prev_reg_note = &XEXP (reg_notes, 1);
6509
6510 reg_notes = next_reg_notes;
6511 }
6512 }
6513 }
6514 #endif
6515
6516 /* For all the spill regs newly reloaded in this instruction,
6517 record what they were reloaded from, so subsequent instructions
6518 can inherit the reloads.
6519
6520 Update spill_reg_store for the reloads of this insn.
6521 Copy the elements that were updated in the loop above. */
6522
6523 for (j = 0; j < n_reloads; j++)
6524 {
6525 register int r = reload_order[j];
6526 register int i = reload_spill_index[r];
6527
6528 /* I is nonneg if this reload used one of the spill regs.
6529 If reload_reg_rtx[r] is 0, this is an optional reload
6530 that we opted to ignore.
6531
6532 Also ignore reloads that don't reach the end of the insn,
6533 since we will eventually see the one that does. */
6534
6535 if (i >= 0 && reload_reg_rtx[r] != 0
6536 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6537 reload_when_needed[r]))
6538 {
6539 /* First, clear out memory of what used to be in this spill reg.
6540 If consecutive registers are used, clear them all. */
6541 int nr
6542 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6543 int k;
6544
6545 for (k = 0; k < nr; k++)
6546 {
6547 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6548 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6549 }
6550
6551 /* Maybe the spill reg contains a copy of reload_out. */
6552 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6553 {
6554 register int nregno = REGNO (reload_out[r]);
6555 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6556 : HARD_REGNO_NREGS (nregno,
6557 GET_MODE (reload_reg_rtx[r])));
6558
6559 spill_reg_store[i] = new_spill_reg_store[i];
6560 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6561
6562 /* If NREGNO is a hard register, it may occupy more than
6563 one register. If it does, record what is in the rest of
6564 the registers, provided the output register and the reload
6565 register occupy the same number of hard registers. If they
6566 do not, invalidate the subsequent registers. */
6567
6568 if (nregno < FIRST_PSEUDO_REGISTER)
6569 for (k = 1; k < nnr; k++)
6570 reg_last_reload_reg[nregno + k]
6571 = (nr == nnr ? gen_rtx (REG,
6572 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6573 REGNO (reload_reg_rtx[r]) + k)
6574 : 0);
6575
6576 /* Now record the inverse mapping: which value each spill reg now holds. */
6577 for (k = 0; k < nr; k++)
6578 {
6579 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6580 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6581 : nregno + k);
6582 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6583 }
6584 }
6585
6586 /* Maybe the spill reg contains a copy of reload_in. Only do
6587 something if there will not be an output reload for
6588 the register being reloaded. */
6589 else if (reload_out[r] == 0
6590 && reload_in[r] != 0
6591 && (((GET_CODE (reload_in[r]) == REG
6592 && ! reg_has_output_reload[REGNO (reload_in[r])])
6593 || (GET_CODE (reload_in_reg[r]) == REG
6594 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
6595 {
6596 register int nregno;
6597 int nnr;
6598
6599 if (GET_CODE (reload_in[r]) == REG)
6600 nregno = REGNO (reload_in[r]);
6601 else
6602 nregno = REGNO (reload_in_reg[r]);
6603
6604 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6605 : HARD_REGNO_NREGS (nregno,
6606 GET_MODE (reload_reg_rtx[r])));
6607
6608 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6609
6610 if (nregno < FIRST_PSEUDO_REGISTER)
6611 for (k = 1; k < nnr; k++)
6612 reg_last_reload_reg[nregno + k]
6613 = (nr == nnr ? gen_rtx (REG,
6614 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6615 REGNO (reload_reg_rtx[r]) + k)
6616 : 0);
6617
6618 /* Unless we inherited this reload, show we haven't
6619 recently done a store. */
6620 if (! reload_inherited[r])
6621 spill_reg_store[i] = 0;
6622
6623 for (k = 0; k < nr; k++)
6624 {
6625 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6626 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6627 : nregno + k);
6628 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6629 = insn;
6630 }
6631 }
6632 }
6633
6634 /* The following if-statement was #if 0'd in 1.34 (or before...).
6635 It's reenabled in 1.35 because supposedly nothing else
6636 deals with this problem. */
6637
6638 /* If a register gets output-reloaded from a non-spill register,
6639 that invalidates any previous reloaded copy of it.
6640 But forget_old_reloads_1 won't get to see it, because
6641 it thinks only about the original insn. So invalidate it here. */
6642 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6643 {
6644 register int nregno = REGNO (reload_out[r]);
6645 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
6646
6647 while (num_regs-- > 0)
6648 reg_last_reload_reg[nregno + num_regs] = 0;
6649 }
6650 }
6651 }
6652 \f
6653 /* Emit code to perform an input reload of IN to RELOADREG. IN is from
6654 operand OPNUM with reload type TYPE.
6655
6656 Returns first insn emitted. */
6657
6658 rtx
6659 gen_input_reload (reloadreg, in, opnum, type)
6660 rtx reloadreg;
6661 rtx in;
6662 int opnum;
6663 enum reload_type type;
6664 {
6665 rtx last = get_last_insn ();
6666
6667 /* How to do this reload can get quite tricky. Normally, we are being
6668 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6669 register that didn't get a hard register. In that case we can just
6670 call emit_move_insn.
6671
6672 We can also be asked to reload a PLUS that adds a register or a MEM to
6673 another register, constant or MEM. This can occur during frame pointer
6674 elimination and while reloading addresses. This case is handled by
6675 trying to emit a single insn to perform the add. If it is not valid,
6676 we use a two insn sequence.
6677
6678 Finally, we could be called to handle an 'o' constraint by putting
6679 an address into a register. In that case, we first try to do this
6680 with a named pattern of "reload_load_address". If no such pattern
6681 exists, we just emit a SET insn and hope for the best (it will normally
6682 be valid on machines that use 'o').
6683
6684 This entire process is made complex both because reload will never
6685 process the insns we generate here, so we must ensure that they
6686 satisfy their constraints, and because parts of IN might be being
6687 reloaded separately and replaced with spill registers.
6688 Because of this, we are, in some sense, just guessing the right approach
6689 here. The one listed above seems to work.
6690
6691 ??? At some point, this whole thing needs to be rethought. */
6692
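/* For instance (an illustrative sketch with made-up register numbers):
   after frame pointer elimination we might be asked to reload
   IN = (plus (reg 6) (const_int 4096)) into RELOADREG = (reg 2). We first
   try the single insn (set (reg 2) (plus (reg 6) (const_int 4096))); if
   that is not recognized or does not satisfy its constraints, we fall back
   to (set (reg 2) (const_int 4096)) followed by an add of (reg 6) into
   (reg 2), as the code below does. */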
6693 if (GET_CODE (in) == PLUS
6694 && (GET_CODE (XEXP (in, 0)) == REG
6695 || GET_CODE (XEXP (in, 0)) == MEM)
6696 && (GET_CODE (XEXP (in, 1)) == REG
6697 || CONSTANT_P (XEXP (in, 1))
6698 || GET_CODE (XEXP (in, 1)) == MEM))
6699 {
6700 /* We need to compute the sum of a register or a MEM and another
6701 register, constant, or MEM, and put it into the reload
6702 register. The best possible way of doing this is if the machine
6703 has a three-operand ADD insn that accepts the required operands.
6704
6705 The simplest approach is to try to generate such an insn and see if it
6706 is recognized and matches its constraints. If so, it can be used.
6707
6708 It might be better not to actually emit the insn unless it is valid,
6709 but we need to pass the insn as an operand to `recog' and
6710 `insn_extract' and it is simpler to emit and then delete the insn if
6711 not valid than to dummy things up. */
6712
6713 rtx op0, op1, tem, insn;
6714 int code;
6715
6716 op0 = find_replacement (&XEXP (in, 0));
6717 op1 = find_replacement (&XEXP (in, 1));
6718
6719 /* Since constraint checking is strict, commutativity won't be
6720 checked, so we need to do that here to avoid spurious failure
6721 if the add instruction is two-address and the second operand
6722 of the add is the same as the reload reg, which is frequently
6723 the case. If the insn would be A = B + A, rearrange it so
6724 it will be A = A + B as constrain_operands expects. */
6725
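/* Illustrative sketch: if IN is (plus (reg B) (reg R)) and R is the reload
   register, the swap below makes the attempted insn
   (set (reg R) (plus (reg R) (reg B))), the A = A + B form that a
   two-address add pattern (and constrain_operands) will accept. */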
6726 if (GET_CODE (XEXP (in, 1)) == REG
6727 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
6728 tem = op0, op0 = op1, op1 = tem;
6729
6730 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6731 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
6732
6733 insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6734 code = recog_memoized (insn);
6735
6736 if (code >= 0)
6737 {
6738 insn_extract (insn);
6739 /* We want constrain_operands to treat this insn strictly in
6740 its validity determination, i.e., the way it would after reload
6741 has completed. */
6742 if (constrain_operands (code, 1))
6743 return insn;
6744 }
6745
6746 delete_insns_since (last);
6747
6748 /* If that failed, we must use a conservative two-insn sequence.
6749 Use a move to copy the constant, MEM, or pseudo register to the
6750 reload register, since "move" can handle an arbitrary operand,
6751 unlike add, which in general cannot. Then add the registers.
6752
6753 If there is another way to do this for a specific machine, a
6754 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6755 we emit below. */
6756
6757 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6758 || (GET_CODE (op1) == REG
6759 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6760 tem = op0, op0 = op1, op1 = tem;
6761
6762 emit_insn (gen_move_insn (reloadreg, op0));
6763
6764 /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
6765 This fixes a problem on the 32K where the stack pointer cannot
6766 be used as an operand of an add insn. */
6767
6768 if (rtx_equal_p (op0, op1))
6769 op1 = reloadreg;
6770
6771 insn = emit_insn (gen_add2_insn (reloadreg, op1));
6772
6773 /* If that failed, copy the address register to the reload register.
6774 Then add the constant to the reload register. */
6775
6776 code = recog_memoized (insn);
6777
6778 if (code >= 0)
6779 {
6780 insn_extract (insn);
6781 /* We want constrain_operands to treat this insn strictly in
6782 its validity determination, i.e., the way it would after reload
6783 has completed. */
6784 if (constrain_operands (code, 1))
6785 return insn;
6786 }
6787
6788 delete_insns_since (last);
6789
6790 emit_insn (gen_move_insn (reloadreg, op1));
6791 emit_insn (gen_add2_insn (reloadreg, op0));
6792 }
6793
6794 #ifdef SECONDARY_MEMORY_NEEDED
6795 /* If we need a memory location to do the move, do it that way. */
6796 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6797 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6798 REGNO_REG_CLASS (REGNO (reloadreg)),
6799 GET_MODE (reloadreg)))
6800 {
6801 /* Get the memory to use and rewrite both registers to its mode. */
6802 rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);
6803
6804 if (GET_MODE (loc) != GET_MODE (reloadreg))
6805 reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
6806
6807 if (GET_MODE (loc) != GET_MODE (in))
6808 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6809
6810 emit_insn (gen_move_insn (loc, in));
6811 emit_insn (gen_move_insn (reloadreg, loc));
6812 }
6813 #endif
6814
6815 /* If IN is a simple operand, use gen_move_insn. */
6816 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
6817 emit_insn (gen_move_insn (reloadreg, in));
6818
6819 #ifdef HAVE_reload_load_address
6820 else if (HAVE_reload_load_address)
6821 emit_insn (gen_reload_load_address (reloadreg, in));
6822 #endif
6823
6824 /* Otherwise, just write (set RELOADREG IN) and hope for the best. */
6825 else
6826 emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6827
6828 /* Return the first insn emitted.
6829 We cannot just return get_last_insn, because there may have
6830 been multiple instructions emitted. Also note that gen_move_insn may
6831 emit more than one insn itself, so we cannot assume that there is one
6832 insn emitted per emit_insn call. */
6833
6834 return last ? NEXT_INSN (last) : get_insns ();
6835 }
6836 \f
6837 /* Delete a previously made output-reload
6838 whose result we now believe is not needed.
6839 First we double-check.
6840
6841 INSN is the insn now being processed.
6842 OUTPUT_RELOAD_INSN is the insn of the output reload.
6843 J is the reload-number for this insn. */
6844
6845 static void
6846 delete_output_reload (insn, j, output_reload_insn)
6847 rtx insn;
6848 int j;
6849 rtx output_reload_insn;
6850 {
6851 register rtx i1;
6852
6853 /* Get the raw pseudo-register referred to. */
6854
6855 rtx reg = reload_in[j];
6856 while (GET_CODE (reg) == SUBREG)
6857 reg = SUBREG_REG (reg);
6858
6859 /* If the pseudo-reg we are reloading is no longer referenced
6860 anywhere between the store into it and here,
6861 and no jumps or labels intervene, then the value can get
6862 here through the reload reg alone.
6863 Otherwise, give up--return. */
6864 for (i1 = NEXT_INSN (output_reload_insn);
6865 i1 != insn; i1 = NEXT_INSN (i1))
6866 {
6867 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6868 return;
6869 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6870 && reg_mentioned_p (reg, PATTERN (i1)))
6871 return;
6872 }
6873
6874 if (cannot_omit_stores[REGNO (reg)])
6875 return;
6876
6877 /* If this insn will store in the pseudo again,
6878 the previous store can be removed. */
6879 if (reload_out[j] == reload_in[j])
6880 delete_insn (output_reload_insn);
6881
6882 /* See if the pseudo reg has been completely replaced
6883 with reload regs. If so, delete the store insn
6884 and forget we had a stack slot for the pseudo. */
6885 else if (reg_n_deaths[REGNO (reg)] == 1
6886 && reg_basic_block[REGNO (reg)] >= 0
6887 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
6888 {
6889 rtx i2;
6890
6891 /* We know that it was used only between here
6892 and the beginning of the current basic block.
6893 (We also know that the last use before INSN was
6894 the output reload we are thinking of deleting, but never mind that.)
6895 Search that range; see if any ref remains. */
6896 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6897 {
6898 rtx set = single_set (i2);
6899
6900 /* Uses which just store in the pseudo don't count,
6901 since if they are the only uses, they are dead. */
6902 if (set != 0 && SET_DEST (set) == reg)
6903 continue;
6904 if (GET_CODE (i2) == CODE_LABEL
6905 || GET_CODE (i2) == JUMP_INSN)
6906 break;
6907 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
6908 && reg_mentioned_p (reg, PATTERN (i2)))
6909 /* Some other ref remains;
6910 we can't do anything. */
6911 return;
6912 }
6913
6914 /* Delete the now-dead stores into this pseudo. */
6915 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6916 {
6917 rtx set = single_set (i2);
6918
6919 if (set != 0 && SET_DEST (set) == reg)
6920 delete_insn (i2);
6921 if (GET_CODE (i2) == CODE_LABEL
6922 || GET_CODE (i2) == JUMP_INSN)
6923 break;
6924 }
6925
6926 /* For the debugging info,
6927 say the pseudo lives in this reload reg. */
6928 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
6929 alter_reg (REGNO (reg), -1);
6930 }
6931 }
6932 \f
6933 /* Output reload-insns to reload VALUE into RELOADREG.
6934 VALUE is an autoincrement or autodecrement RTX whose operand
6935 is a register or memory location;
6936 so reloading involves incrementing that location.
6937
6938 INC_AMOUNT is the number to increment or decrement by (always positive).
6939 This cannot be deduced from VALUE. */
6940
6941 static void
6942 inc_for_reload (reloadreg, value, inc_amount)
6943 rtx reloadreg;
6944 rtx value;
6945 int inc_amount;
6946 {
6947 /* REG or MEM to be copied and incremented. */
6948 rtx incloc = XEXP (value, 0);
6949 /* Nonzero if increment after copying. */
6950 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
6951 rtx last;
6952 rtx inc;
6953 rtx add_insn;
6954 int code;
6955
6956 /* No hard register is equivalent to this register after
6957 the inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
6958 we could inc/dec that register as well (maybe even using it for
6959 the source), but I'm not sure it's worth worrying about. */
6960 if (GET_CODE (incloc) == REG)
6961 reg_last_reload_reg[REGNO (incloc)] = 0;
6962
6963 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
6964 inc_amount = - inc_amount;
6965
6966 inc = GEN_INT (inc_amount);
6967
6968 /* If this is post-increment, first copy the location to the reload reg. */
6969 if (post)
6970 emit_insn (gen_move_insn (reloadreg, incloc));
6971
6972 /* See if we can directly increment INCLOC. Use a method similar to that
6973 in gen_input_reload. */
6974
6975 last = get_last_insn ();
6976 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
6977 gen_rtx (PLUS, GET_MODE (incloc),
6978 incloc, inc)));
6979
6980 code = recog_memoized (add_insn);
6981 if (code >= 0)
6982 {
6983 insn_extract (add_insn);
6984 if (constrain_operands (code, 1))
6985 {
6986 /* If this is a pre-increment and we have incremented the value
6987 where it lives, copy the incremented value to RELOADREG to
6988 be used as an address. */
6989
6990 if (! post)
6991 emit_insn (gen_move_insn (reloadreg, incloc));
6992
6993 return;
6994 }
6995 }
6996
6997 delete_insns_since (last);
6998
6999 /* If we couldn't do the increment directly, we must increment in RELOADREG.
7000 The way we do this depends on whether this is pre- or post-increment.
7001 For pre-increment, copy INCLOC to the reload register, increment it
7002 there, then store it back. */
7003
7004 if (! post)
7005 {
7006 emit_insn (gen_move_insn (reloadreg, incloc));
7007 emit_insn (gen_add2_insn (reloadreg, inc));
7008 emit_insn (gen_move_insn (incloc, reloadreg));
7009 }
7010 else
7011 {
7012 /* Postincrement.
7013 Because this might be a jump insn or a compare, and because RELOADREG
7014 may not be available after the insn in an input reload, we must do
7015 the incrementation before the insn being reloaded for.
7016
7017 We have already copied INCLOC to RELOADREG. Increment the copy in
7018 RELOADREG, save that back, then decrement RELOADREG so it has
7019 the original value. */
7020
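/* A sketch of the emitted sequence, writing INC for the signed increment:

	reloadreg <- incloc		(done above, before trying to
					 increment INCLOC in place)
	reloadreg <- reloadreg + INC
	incloc    <- reloadreg
	reloadreg <- reloadreg - INC	(reloadreg again holds the original,
					 pre-increment value)  */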
7021 emit_insn (gen_add2_insn (reloadreg, inc));
7022 emit_insn (gen_move_insn (incloc, reloadreg));
7023 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7024 }
7025
7026 return;
7027 }
7028 \f
7029 /* Return 1 if we are certain that the constraint-string STRING allows
7030 the hard register REG. Return 0 if we can't be sure of this. */
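/* For example (assuming the usual convention that REG_CLASS_FROM_LETTER
   maps non-class letters such as `m' to NO_REGS): for a general hard
   register REG, the string "r,g" yields 1, since every alternative accepts
   REG, while "r,m" yields 0, because the second alternative contains no
   letter known to accept it. */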
7031
7032 static int
7033 constraint_accepts_reg_p (string, reg)
7034 char *string;
7035 rtx reg;
7036 {
7037 int value = 0;
7038 int regno = true_regnum (reg);
7039 int c;
7040
7041 /* Initialize for first alternative. */
7042 value = 0;
7043 /* Check that each alternative contains a constraint letter that accepts REG. */
7044 while (1)
7045 switch (c = *string++)
7046 {
7047 case 0:
7048 /* End of string: the result is whether the final alternative accepted REG. */
7049 return value;
7050 case ',':
7051 /* If the alternative just scanned never accepted REG, we lose. */
7052 if (value == 0)
7053 return 0;
7054 /* Initialize for next alternative. */
7055 value = 0;
7056 break;
7057 case 'g':
7058 case 'r':
7059 /* Any general reg wins for this alternative. */
7060 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7061 value = 1;
7062 break;
7063 default:
7064 /* Any reg in specified class wins for this alternative. */
7065 {
7066 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7067
7068 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7069 value = 1;
7070 }
7071 }
7072 }
7073 \f
7074 /* Return the number of places FIND appears within X, but don't count
7075 an occurrence if some SET_DEST is FIND. */
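/* For example, if X is (set (reg 5) (plus (reg 5) (reg 6))) and FIND is
   that same (reg 5) rtx (comparison is by pointer equality), the result is
   1: the SET_DEST occurrence is skipped and only the use inside the PLUS
   is counted. */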
7076
7077 static int
7078 count_occurrences (x, find)
7079 register rtx x, find;
7080 {
7081 register int i, j;
7082 register enum rtx_code code;
7083 register char *format_ptr;
7084 int count;
7085
7086 if (x == find)
7087 return 1;
7088 if (x == 0)
7089 return 0;
7090
7091 code = GET_CODE (x);
7092
7093 switch (code)
7094 {
7095 case REG:
7096 case QUEUED:
7097 case CONST_INT:
7098 case CONST_DOUBLE:
7099 case SYMBOL_REF:
7100 case CODE_LABEL:
7101 case PC:
7102 case CC0:
7103 return 0;
7104
7105 case SET:
7106 if (SET_DEST (x) == find)
7107 return count_occurrences (SET_SRC (x), find);
7108 break;
7109 }
7110
7111 format_ptr = GET_RTX_FORMAT (code);
7112 count = 0;
7113
7114 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7115 {
7116 switch (*format_ptr++)
7117 {
7118 case 'e':
7119 count += count_occurrences (XEXP (x, i), find);
7120 break;
7121
7122 case 'E':
7123 if (XVEC (x, i) != NULL)
7124 {
7125 for (j = 0; j < XVECLEN (x, i); j++)
7126 count += count_occurrences (XVECEXP (x, i, j), find);
7127 }
7128 break;
7129 }
7130 }
7131 return count;
7132 }