gcc/reload1.c
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92, 93, 94, 1995 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include <stdio.h>
23 #include "config.h"
24 #include "rtl.h"
25 #include "obstack.h"
26 #include "insn-config.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
29 #include "flags.h"
30 #include "expr.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "reload.h"
34 #include "recog.h"
35 #include "basic-block.h"
36 #include "output.h"
37
38 /* This file contains the reload pass of the compiler, which is
39 run after register allocation has been done. It checks that
40 each insn is valid (operands required to be in registers really
41 are in registers of the proper class) and fixes up invalid ones
42 by copying values temporarily into registers for the insns
43 that need them.
44
45 The results of register allocation are described by the vector
46 reg_renumber; the insns still contain pseudo regs, but reg_renumber
47 can be used to find which hard reg, if any, a pseudo reg is in.
48
49 The technique we always use is to free up a few hard regs that are
50 called ``reload regs'', and for each place where a pseudo reg
51 must be in a hard reg, copy it temporarily into one of the reload regs.
52
53 All the pseudos that were formerly allocated to the hard regs that
54 are now in use as reload regs must be ``spilled''. This means
55 that they go to other hard regs, or to stack slots if no other
56 available hard regs can be found. Spilling can invalidate more
57 insns, creating additional needs for reloads, so we must keep checking
58 until the process stabilizes.
59
60 For machines with different classes of registers, we must keep track
61 of the register class needed for each reload, and make sure that
62 we allocate enough reload registers of each class.
63
64 The file reload.c contains the code that checks one insn for
65 validity and reports the reloads that it needs. This file
66 is in charge of scanning the entire rtl code, accumulating the
67 reload needs, spilling, assigning reload registers to use for
68 fixing up each insn, and generating the new insns to copy values
69 into the reload registers. */
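
/* Purely as an orienting sketch (the authoritative logic is the code in
   reload () and reload_as_needed () below; nothing here names real
   variables), the pass proceeds roughly like this:

	scan all insns, recording REG_EQUIV equivalences;
	do
	  {
	    for each insn, run find_reloads () and accumulate the
	      maximum number of reload regs needed in each class;
	    spill enough hard regs (and the pseudos living in them)
	      to cover those maximums;
	  }
	while (spilling or frame growth changed anything);
	finally, reload_as_needed () emits the actual reload insns.  */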
70
71
72 #ifndef REGISTER_MOVE_COST
73 #define REGISTER_MOVE_COST(x, y) 2
74 #endif
75
76 #ifndef MEMORY_MOVE_COST
77 #define MEMORY_MOVE_COST(x) 4
78 #endif
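
/* Targets normally override both macros in their tm.h; the values above
   are only fallbacks.  Purely as a hypothetical example (not taken from
   any real machine description), a machine where moves between different
   register classes are slower, and memory slower still, might use:

	#define REGISTER_MOVE_COST(CLASS1, CLASS2)	\
	  ((CLASS1) != (CLASS2) ? 4 : 2)
	#define MEMORY_MOVE_COST(MODE) 6
   */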
79 \f
80 /* During reload_as_needed, element N contains a REG rtx for the hard reg
81 into which reg N has been reloaded (perhaps for a previous insn). */
82 static rtx *reg_last_reload_reg;
83
84 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
85 for an output reload that stores into reg N. */
86 static char *reg_has_output_reload;
87
88 /* Indicates which hard regs are reload-registers for an output reload
89 in the current insn. */
90 static HARD_REG_SET reg_is_output_reload;
91
92 /* Element N is the constant value to which pseudo reg N is equivalent,
93 or zero if pseudo reg N is not equivalent to a constant.
94 find_reloads looks at this in order to replace pseudo reg N
95 with the constant it stands for. */
96 rtx *reg_equiv_constant;
97
98 /* Element N is a memory location to which pseudo reg N is equivalent,
99 prior to any register elimination (such as frame pointer to stack
100 pointer). Depending on whether or not it is a valid address, this value
101 is transferred to either reg_equiv_address or reg_equiv_mem. */
102 rtx *reg_equiv_memory_loc;
103
104 /* Element N is the address of the stack slot to which pseudo reg N is equivalent.
105 This is used when the address is not valid as a memory address
106 (because its displacement is too big for the machine.) */
107 rtx *reg_equiv_address;
108
109 /* Element N is the memory slot to which pseudo reg N is equivalent,
110 or zero if pseudo reg N is not equivalent to a memory slot. */
111 rtx *reg_equiv_mem;
112
113 /* Widest width in which each pseudo reg is referred to (via subreg). */
114 static int *reg_max_ref_width;
115
116 /* Element N is the insn that initialized reg N from its equivalent
117 constant or memory slot. */
118 static rtx *reg_equiv_init;
119
120 /* During reload_as_needed, element N contains the last pseudo regno
121 reloaded into the Nth reload register. This vector is in parallel
122 with spill_regs. If that pseudo reg occupied more than one register,
123 reg_reloaded_contents points to that pseudo for each spill register in
124 use; all of these must remain set for an inheritance to occur. */
125 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
126
127 /* During reload_as_needed, element N contains the insn for which
128 the Nth reload register was last used. This vector is in parallel
129 with spill_regs, and its contents are significant only when
130 reg_reloaded_contents is significant. */
131 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
132
133 /* Number of spill-regs so far; number of valid elements of spill_regs. */
134 static int n_spills;
135
136 /* In parallel with spill_regs, contains REG rtx's for those regs.
137 Holds the last rtx used for any given reg, or 0 if it has never
138 been used for spilling yet. This rtx is reused, provided it has
139 the proper mode. */
140 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
141
142 /* In parallel with spill_regs, contains nonzero for a spill reg
143 that was stored after the last time it was used.
144 The precise value is the insn generated to do the store. */
145 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
146
147 /* This table is the inverse mapping of spill_regs:
148 indexed by hard reg number,
149 it contains the position of that reg in spill_regs,
150 or -1 for something that is not in spill_regs. */
151 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
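
/* For instance (with made-up numbers): if n_spills were 2 and spill_regs
   were {5, 9}, then spill_reg_order[5] == 0, spill_reg_order[9] == 1, and
   every other element is -1.  */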
152
153 /* This reg set indicates registers that may not be used for retrying global
154 allocation. The registers that may not be used include all spill registers
155 and the frame pointer (if we are using one). */
156 HARD_REG_SET forbidden_regs;
157
158 /* This reg set indicates registers that are not good for spill registers.
159 They will not be used to complete groups of spill registers. This includes
160 all fixed registers, registers that may be eliminated, and, if
161 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
162
163 (spill_reg_order prevents these registers from being used to start a
164 group.) */
165 static HARD_REG_SET bad_spill_regs;
166
167 /* Describes order of use of registers for reloading
168 of spilled pseudo-registers. `spills' is the number of
169 elements that are actually valid; new ones are added at the end. */
170 static short spill_regs[FIRST_PSEUDO_REGISTER];
171
172 /* Index of last register assigned as a spill register. We allocate in
173 a round-robin fashion. */
174
175 static int last_spill_reg;
176
177 /* Describes order of preference for putting regs into spill_regs.
178 Contains the numbers of all the hard regs, in order most preferred first.
179 This order is different for each function.
180 It is set up by order_regs_for_reload.
181 Empty elements at the end contain -1. */
182 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
183
184 /* 1 for a hard register that appears explicitly in the rtl
185 (for example, function value registers, special registers
186 used by insns, structure value pointer registers). */
187 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
188
189 /* Indicates if a register was counted against the need for
190 groups. 0 means it can count against max_nongroup instead. */
191 static HARD_REG_SET counted_for_groups;
192
193 /* Indicates if a register was counted against the need for
194 non-groups. 0 means it can become part of a new group.
195 During choose_reload_regs, 1 here means don't use this reg
196 as part of a group, even if it seems to be otherwise ok. */
197 static HARD_REG_SET counted_for_nongroups;
198
199 /* Indexed by pseudo reg number N,
200 says may not delete stores into the real (memory) home of pseudo N.
201 This is set if we already substituted a memory equivalent in some uses,
202 which happens when we have to eliminate the fp from it. */
203 static char *cannot_omit_stores;
204
205 /* Nonzero if indirect addressing is supported on the machine; this means
206 that spilling (REG n) does not require reloading it into a register in
207 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
208 value indicates the level of indirect addressing supported, e.g., two
209 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
210 a hard register. */
211
212 static char spill_indirect_levels;
213
214 /* Nonzero if indirect addressing is supported when the innermost MEM is
215 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
216 which these are valid is the same as spill_indirect_levels, above. */
217
218 char indirect_symref_ok;
219
220 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
221
222 char double_reg_address_ok;
223
224 /* Record the stack slot for each spilled hard register. */
225
226 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
227
228 /* Width allocated so far for that stack slot. */
229
230 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
231
232 /* Indexed by register class and basic block number, nonzero if there is
233 any need for a spill register of that class in that basic block.
234 The pointer is 0 if we did stupid allocation and don't know
235 the structure of basic blocks. */
236
237 char *basic_block_needs[N_REG_CLASSES];
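
/* Illustration of the indexing (see the uses later in this file):
   basic_block_needs[(int) some_class][block] is nonzero when some insn in
   basic block BLOCK needs a spill register of class SOME_CLASS; SOME_CLASS
   and BLOCK here are just placeholder names.  */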
238
239 /* First uid used by insns created by reload in this function.
240 Used in find_equiv_reg. */
241 int reload_first_uid;
242
243 /* Flag set by local-alloc or global-alloc if anything is live in
244 a call-clobbered reg across calls. */
245
246 int caller_save_needed;
247
248 /* Set to 1 while reload_as_needed is operating.
249 Required by some machines to handle any generated moves differently. */
250
251 int reload_in_progress = 0;
252
253 /* These arrays record the insn_code of insns that may be needed to
254 perform input and output reloads of special objects. They provide a
255 place to pass a scratch register. */
256
257 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
258 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
259
260 /* This obstack is used for allocation of rtl during register elimination.
261 The allocated storage can be freed once find_reloads has processed the
262 insn. */
263
264 struct obstack reload_obstack;
265 char *reload_firstobj;
266
267 #define obstack_chunk_alloc xmalloc
268 #define obstack_chunk_free free
269
270 /* List of labels that must never be deleted. */
271 extern rtx forced_labels;
272 \f
273 /* This structure is used to record information about register eliminations.
274 Each array entry describes one possible way of eliminating a register
275 in favor of another. If there is more than one way of eliminating a
276 particular register, the most preferred should be specified first. */
277
278 static struct elim_table
279 {
280 int from; /* Register number to be eliminated. */
281 int to; /* Register number used as replacement. */
282 int initial_offset; /* Initial difference between values. */
283 int can_eliminate; /* Non-zero if this elimination can be done. */
284 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
285 insns made by reload. */
286 int offset; /* Current offset between the two regs. */
287 int max_offset; /* Maximum offset between the two regs. */
288 int previous_offset; /* Offset at end of previous insn. */
289 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
290 rtx from_rtx; /* REG rtx for the register to be eliminated.
291 We cannot simply compare the number since
292 we might then spuriously replace a hard
293 register corresponding to a pseudo
294 assigned to the reg to be eliminated. */
295 rtx to_rtx; /* REG rtx for the replacement. */
296 } reg_eliminate[] =
297
298 /* If a set of eliminable registers was specified, define the table from it.
299 Otherwise, default to the normal case of the frame pointer being
300 replaced by the stack pointer. */
301
302 #ifdef ELIMINABLE_REGS
303 ELIMINABLE_REGS;
304 #else
305 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
306 #endif
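
/* For reference, a target that defines ELIMINABLE_REGS typically gives it
   a shape like the following in its tm.h, together with CAN_ELIMINATE and
   INITIAL_ELIMINATION_OFFSET (this is only a representative sketch, not
   the definition from any particular machine):

	#define ELIMINABLE_REGS					\
	 {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },	\
	  { ARG_POINTER_REGNUM,   FRAME_POINTER_REGNUM },	\
	  { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}
   */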
307
308 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
309
310 /* Record the number of pending eliminations that have an offset not equal
311 to their initial offset. If non-zero, we use a new copy of each
312 replacement result in any insns encountered. */
313 static int num_not_at_initial_offset;
314
315 /* Count the number of registers that we may be able to eliminate. */
316 static int num_eliminable;
317
318 /* For each label, we record the offset of each elimination. If we reach
319 a label by more than one path and an offset differs, we cannot do the
320 elimination. This information is indexed by the number of the label.
321 The first table is an array of flags that records whether we have yet
322 encountered a label; the second table is an array of arrays, one
323 entry in the latter array for each elimination. */
324
325 static char *offsets_known_at;
326 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
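
/* As an illustration of the indexing (after the bias applied in reload ()
   below, which subtracts get_first_label_num () from both pointers):
   offsets_known_at[CODE_LABEL_NUMBER (lab)] says whether label LAB has
   been reached yet, and offsets_at[CODE_LABEL_NUMBER (lab)][e] is the
   recorded offset of elimination number E there.  LAB and E are
   placeholder names; see set_label_offsets for the real uses.  */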
327
328 /* Number of labels in the current function. */
329
330 static int num_labels;
331
332 struct hard_reg_n_uses { int regno; int uses; };
333 \f
334 static int possible_group_p PROTO((int, int *));
335 static void count_possible_groups PROTO((int *, enum machine_mode *,
336 int *, int));
337 static int modes_equiv_for_class_p PROTO((enum machine_mode,
338 enum machine_mode,
339 enum reg_class));
340 static void spill_failure PROTO((rtx));
341 static int new_spill_reg PROTO((int, int, int *, int *, int,
342 FILE *));
343 static void delete_dead_insn PROTO((rtx));
344 static void alter_reg PROTO((int, int));
345 static void mark_scratch_live PROTO((rtx));
346 static void set_label_offsets PROTO((rtx, rtx, int));
347 static int eliminate_regs_in_insn PROTO((rtx, int));
348 static void mark_not_eliminable PROTO((rtx, rtx));
349 static int spill_hard_reg PROTO((int, int, FILE *, int));
350 static void scan_paradoxical_subregs PROTO((rtx));
351 static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
352 struct hard_reg_n_uses *));
353 static void order_regs_for_reload PROTO((void));
354 static int compare_spill_regs PROTO((short *, short *));
355 static void reload_as_needed PROTO((rtx, int));
356 static void forget_old_reloads_1 PROTO((rtx, rtx));
357 static int reload_reg_class_lower PROTO((short *, short *));
358 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
359 enum machine_mode));
360 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
361 enum machine_mode));
362 static int reload_reg_free_p PROTO((int, int, enum reload_type));
363 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
364 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
365 static int reloads_conflict PROTO((int, int));
366 static int allocate_reload_reg PROTO((int, rtx, int, int));
367 static void choose_reload_regs PROTO((rtx, rtx));
368 static void merge_assigned_reloads PROTO((rtx));
369 static void emit_reload_insns PROTO((rtx));
370 static void delete_output_reload PROTO((rtx, int, rtx));
371 static void inc_for_reload PROTO((rtx, rtx, int));
372 static int constraint_accepts_reg_p PROTO((char *, rtx));
373 static int count_occurrences PROTO((rtx, rtx));
374 \f
375 /* Initialize the reload pass once per compilation. */
376
377 void
378 init_reload ()
379 {
380 register int i;
381
382 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
383 Set spill_indirect_levels to the number of levels such addressing is
384 permitted, zero if it is not permitted at all. */
385
386 register rtx tem
387 = gen_rtx (MEM, Pmode,
388 gen_rtx (PLUS, Pmode,
389 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
390 GEN_INT (4)));
391 spill_indirect_levels = 0;
392
393 while (memory_address_p (QImode, tem))
394 {
395 spill_indirect_levels++;
396 tem = gen_rtx (MEM, Pmode, tem);
397 }
398
399 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
400
401 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
402 indirect_symref_ok = memory_address_p (QImode, tem);
403
404 /* See if reg+reg is a valid (and offsettable) address. */
405
406 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
407 {
408 tem = gen_rtx (PLUS, Pmode,
409 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
410 gen_rtx (REG, Pmode, i));
411 /* This way, we make sure that reg+reg is an offsettable address. */
412 tem = plus_constant (tem, 4);
413
414 if (memory_address_p (QImode, tem))
415 {
416 double_reg_address_ok = 1;
417 break;
418 }
419 }
420
421 /* Initialize obstack for our rtl allocation. */
422 gcc_obstack_init (&reload_obstack);
423 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
424 }
425
426 /* Main entry point for the reload pass.
427
428 FIRST is the first insn of the function being compiled.
429
430 GLOBAL nonzero means we were called from global_alloc
431 and should attempt to reallocate any pseudoregs that we
432 displace from hard regs we will use for reloads.
433 If GLOBAL is zero, we do not have enough information to do that,
434 so any pseudo reg that is spilled must go to the stack.
435
436 DUMPFILE is the global-reg debugging dump file stream, or 0.
437 If it is nonzero, messages are written to it to describe
438 which registers are seized as reload regs, which pseudo regs
439 are spilled from them, and where the pseudo regs are reallocated to.
440
441 Return value is nonzero if reload failed
442 and we must not do any more for this function. */
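
/* As an illustration of the interface only (the names below are
   placeholders, not the actual code; the real caller is the pass driver
   in toplev.c), a caller uses this roughly as:

	failure = reload (get_insns (), used_global_alloc, reload_dump);
	if (failure)
	  ... abandon further work on this function ...
   */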
443
444 int
445 reload (first, global, dumpfile)
446 rtx first;
447 int global;
448 FILE *dumpfile;
449 {
450 register int class;
451 register int i, j, k;
452 register rtx insn;
453 register struct elim_table *ep;
454
455 int something_changed;
456 int something_needs_reloads;
457 int something_needs_elimination;
458 int new_basic_block_needs;
459 enum reg_class caller_save_spill_class = NO_REGS;
460 int caller_save_group_size = 1;
461
462 /* Nonzero means we couldn't get enough spill regs. */
463 int failure = 0;
464
465 /* The basic block number currently being processed for INSN. */
466 int this_block;
467
468 /* Make sure even insns with volatile mem refs are recognizable. */
469 init_recog ();
470
471 /* Enable find_equiv_reg to distinguish insns made by reload. */
472 reload_first_uid = get_max_uid ();
473
474 for (i = 0; i < N_REG_CLASSES; i++)
475 basic_block_needs[i] = 0;
476
477 #ifdef SECONDARY_MEMORY_NEEDED
478 /* Initialize the secondary memory table. */
479 clear_secondary_mem ();
480 #endif
481
482 /* Remember which hard regs appear explicitly
483 before we merge into `regs_ever_live' the ones in which
484 pseudo regs have been allocated. */
485 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
486
487 /* We don't have a stack slot for any spill reg yet. */
488 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
489 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
490
491 /* Initialize the save area information for caller-save, in case some
492 are needed. */
493 init_save_areas ();
494
495 /* Compute which hard registers are now in use
496 as homes for pseudo registers.
497 This is done here rather than (e.g.) in global_alloc
498 because this point is reached even if not optimizing. */
499
500 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
501 mark_home_live (i);
502
503 for (i = 0; i < scratch_list_length; i++)
504 if (scratch_list[i])
505 mark_scratch_live (scratch_list[i]);
506
507 /* Make sure that the last insn in the chain
508 is not something that needs reloading. */
509 emit_note (NULL_PTR, NOTE_INSN_DELETED);
510
511 /* Find all the pseudo registers that didn't get hard regs
512 but do have known equivalent constants or memory slots.
513 These include parameters (known equivalent to parameter slots)
514 and cse'd or loop-moved constant memory addresses.
515
516 Record constant equivalents in reg_equiv_constant
517 so they will be substituted by find_reloads.
518 Record memory equivalents in reg_equiv_memory_loc so they can
519 be substituted eventually by altering the REG-rtx's. */
520
521 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
522 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
523 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
524 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
525 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
526 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
527 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
528 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
529 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
530 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
531 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
532 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
533 cannot_omit_stores = (char *) alloca (max_regno);
534 bzero (cannot_omit_stores, max_regno);
535
536 #ifdef SMALL_REGISTER_CLASSES
537 CLEAR_HARD_REG_SET (forbidden_regs);
538 #endif
539
540 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
541 Also find all paradoxical subregs and find largest such for each pseudo.
542 On machines with small register classes, record hard registers that
543 are used for user variables. These can never be used for spills. */
544
545 for (insn = first; insn; insn = NEXT_INSN (insn))
546 {
547 rtx set = single_set (insn);
548
549 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
550 {
551 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
552 if (note
553 #ifdef LEGITIMATE_PIC_OPERAND_P
554 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
555 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
556 #endif
557 )
558 {
559 rtx x = XEXP (note, 0);
560 i = REGNO (SET_DEST (set));
561 if (i > LAST_VIRTUAL_REGISTER)
562 {
563 if (GET_CODE (x) == MEM)
564 reg_equiv_memory_loc[i] = x;
565 else if (CONSTANT_P (x))
566 {
567 if (LEGITIMATE_CONSTANT_P (x))
568 reg_equiv_constant[i] = x;
569 else
570 reg_equiv_memory_loc[i]
571 = force_const_mem (GET_MODE (SET_DEST (set)), x);
572 }
573 else
574 continue;
575
576 /* If this register is being made equivalent to a MEM
577 and the MEM is not SET_SRC, the equivalencing insn
578 is one with the MEM as a SET_DEST and it occurs later.
579 So don't mark this insn now. */
580 if (GET_CODE (x) != MEM
581 || rtx_equal_p (SET_SRC (set), x))
582 reg_equiv_init[i] = insn;
583 }
584 }
585 }
586
587 /* If this insn is setting a MEM from a register equivalent to it,
588 this is the equivalencing insn. */
589 else if (set && GET_CODE (SET_DEST (set)) == MEM
590 && GET_CODE (SET_SRC (set)) == REG
591 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
592 && rtx_equal_p (SET_DEST (set),
593 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
594 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
595
596 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
597 scan_paradoxical_subregs (PATTERN (insn));
598 }
599
600 /* Does this function require a frame pointer? */
601
602 frame_pointer_needed = (! flag_omit_frame_pointer
603 #ifdef EXIT_IGNORE_STACK
604 /* ?? If EXIT_IGNORE_STACK is set, we will not save
605 and restore sp for alloca. So we can't eliminate
606 the frame pointer in that case. At some point,
607 we should improve this by emitting the
608 sp-adjusting insns for this case. */
609 || (current_function_calls_alloca
610 && EXIT_IGNORE_STACK)
611 #endif
612 || FRAME_POINTER_REQUIRED);
613
614 num_eliminable = 0;
615
616 /* Initialize the table of registers to eliminate. The way we do this
617 depends on how the eliminable registers were defined. */
618 #ifdef ELIMINABLE_REGS
619 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
620 {
621 ep->can_eliminate = ep->can_eliminate_previous
622 = (CAN_ELIMINATE (ep->from, ep->to)
623 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
624 }
625 #else
626 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
627 = ! frame_pointer_needed;
628 #endif
629
630 /* Count the number of eliminable registers and build the FROM and TO
631 REG rtx's. Note that code in gen_rtx will cause, e.g.,
632 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
633 We depend on this. */
634 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
635 {
636 num_eliminable += ep->can_eliminate;
637 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
638 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
639 }
640
641 num_labels = max_label_num () - get_first_label_num ();
642
643 /* Allocate the tables used to store offset information at labels. */
644 offsets_known_at = (char *) alloca (num_labels);
645 offsets_at
646 = (int (*)[NUM_ELIMINABLE_REGS])
647 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
648
649 offsets_known_at -= get_first_label_num ();
650 offsets_at -= get_first_label_num ();
651
652 /* Alter each pseudo-reg rtx to contain its hard reg number.
653 Assign stack slots to the pseudos that lack hard regs or equivalents.
654 Do not touch virtual registers. */
655
656 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
657 alter_reg (i, -1);
658
659 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
660 because the stack size may be a part of the offset computation for
661 register elimination. */
662 assign_stack_local (BLKmode, 0, 0);
663
664 /* If we have some registers we think can be eliminated, scan all insns to
665 see if there is an insn that sets one of these registers to something
666 other than itself plus a constant. If so, the register cannot be
667 eliminated. Doing this scan here eliminates an extra pass through the
668 main reload loop in the most common case where register elimination
669 cannot be done. */
670 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
671 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
672 || GET_CODE (insn) == CALL_INSN)
673 note_stores (PATTERN (insn), mark_not_eliminable);
674
675 #ifndef REGISTER_CONSTRAINTS
676 /* If all the pseudo regs have hard regs,
677 except for those that are never referenced,
678 we know that no reloads are needed. */
679 /* But that is not true if there are register constraints, since
680 in that case some pseudos might be in the wrong kind of hard reg. */
681
682 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
683 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
684 break;
685
686 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
687 return;
688 #endif
689
690 /* Compute the order of preference for hard registers to spill.
691 Store them by decreasing preference in potential_reload_regs. */
692
693 order_regs_for_reload ();
694
695 /* So far, no hard regs have been spilled. */
696 n_spills = 0;
697 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
698 spill_reg_order[i] = -1;
699
700 /* Initialize to -1, which means take the first spill register. */
701 last_spill_reg = -1;
702
703 /* On most machines, we can't use any register explicitly used in the
704 rtl as a spill register. But on some, we have to. Those will have
705 taken care to keep the life of hard regs as short as possible. */
706
707 #ifndef SMALL_REGISTER_CLASSES
708 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
709 #endif
710
711 /* Spill any hard regs that we know we can't eliminate. */
712 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
713 if (! ep->can_eliminate)
714 spill_hard_reg (ep->from, global, dumpfile, 1);
715
716 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
717 if (frame_pointer_needed)
718 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
719 #endif
720
721 if (global)
722 for (i = 0; i < N_REG_CLASSES; i++)
723 {
724 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
725 bzero (basic_block_needs[i], n_basic_blocks);
726 }
727
728 /* From now on, we need to emit any moves without making new pseudos. */
729 reload_in_progress = 1;
730
731 /* This loop scans the entire function each go-round
732 and repeats until one repetition spills no additional hard regs. */
733
734 /* This flag is set when a pseudo reg is spilled,
735 to require another pass. Note that getting an additional reload
736 reg does not necessarily imply any pseudo reg was spilled;
737 sometimes we find a reload reg that no pseudo reg was allocated in. */
738 something_changed = 1;
739 /* This flag is set if there are any insns that require reloading. */
740 something_needs_reloads = 0;
741 /* This flag is set if there are any insns that require register
742 eliminations. */
743 something_needs_elimination = 0;
744 while (something_changed)
745 {
746 rtx after_call = 0;
747
748 /* For each class, number of reload regs needed in that class.
749 This is the maximum over all insns of the needs in that class
750 of the individual insn. */
751 int max_needs[N_REG_CLASSES];
752 /* For each class, size of group of consecutive regs
753 that is needed for the reloads of this class. */
754 int group_size[N_REG_CLASSES];
755 /* For each class, max number of consecutive groups needed.
756 (Each group contains group_size[CLASS] consecutive registers.) */
757 int max_groups[N_REG_CLASSES];
758 /* For each class, max number needed of regs that don't belong
759 to any of the groups. */
760 int max_nongroups[N_REG_CLASSES];
761 /* For each class, the machine mode which requires consecutive
762 groups of regs of that class.
763 If two different modes ever require groups of one class,
764 they must be the same size and equally restrictive for that class,
765 otherwise we can't handle the complexity. */
766 enum machine_mode group_mode[N_REG_CLASSES];
767 /* Record the insn where each maximum need is first found. */
768 rtx max_needs_insn[N_REG_CLASSES];
769 rtx max_groups_insn[N_REG_CLASSES];
770 rtx max_nongroups_insn[N_REG_CLASSES];
771 rtx x;
772 int starting_frame_size = get_frame_size ();
773 int previous_frame_pointer_needed = frame_pointer_needed;
774 static char *reg_class_names[] = REG_CLASS_NAMES;
775
776 something_changed = 0;
777 bzero ((char *) max_needs, sizeof max_needs);
778 bzero ((char *) max_groups, sizeof max_groups);
779 bzero ((char *) max_nongroups, sizeof max_nongroups);
780 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
781 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
782 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
783 bzero ((char *) group_size, sizeof group_size);
784 for (i = 0; i < N_REG_CLASSES; i++)
785 group_mode[i] = VOIDmode;
786
787 /* Keep track of which basic blocks need the reloads. */
788 this_block = 0;
789
790 /* Remember whether any element of basic_block_needs
791 changes from 0 to 1 in this pass. */
792 new_basic_block_needs = 0;
793
794 /* Reset all offsets on eliminable registers to their initial values. */
795 #ifdef ELIMINABLE_REGS
796 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
797 {
798 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
799 ep->previous_offset = ep->offset
800 = ep->max_offset = ep->initial_offset;
801 }
802 #else
803 #ifdef INITIAL_FRAME_POINTER_OFFSET
804 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
805 #else
806 if (!FRAME_POINTER_REQUIRED)
807 abort ();
808 reg_eliminate[0].initial_offset = 0;
809 #endif
810 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
811 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
812 #endif
813
814 num_not_at_initial_offset = 0;
815
816 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
817
818 /* Set a known offset for each forced label to be at the initial offset
819 of each elimination. We do this because we assume that all
820 computed jumps occur from a location where each elimination is
821 at its initial offset. */
822
823 for (x = forced_labels; x; x = XEXP (x, 1))
824 if (XEXP (x, 0))
825 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
826
827 /* For each pseudo register that has an equivalent location defined,
828 try to eliminate any eliminable registers (such as the frame pointer)
829 assuming initial offsets for the replacement register, which
830 is the normal case.
831
832 If the resulting location is directly addressable, substitute
833 the MEM we just got directly for the old REG.
834
835 If it is not addressable but is a constant or the sum of a hard reg
836 and constant, it is probably not addressable because the constant is
837 out of range, in that case record the address; we will generate
838 hairy code to compute the address in a register each time it is
839 needed. Similarly if it is a hard register, but one that is not
840 valid as an address register.
841
842 If the location is not addressable, but does not have one of the
843 above forms, assign a stack slot. We have to do this to avoid the
844 potential of producing lots of reloads if, e.g., a location involves
845 a pseudo that didn't get a hard register and has an equivalent memory
846 location that also involves a pseudo that didn't get a hard register.
847
848 Perhaps at some point we will improve reload_when_needed handling
849 so this problem goes away. But that's very hairy. */
850
851 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
852 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
853 {
854 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
855
856 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
857 XEXP (x, 0)))
858 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
859 else if (CONSTANT_P (XEXP (x, 0))
860 || (GET_CODE (XEXP (x, 0)) == REG
861 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
862 || (GET_CODE (XEXP (x, 0)) == PLUS
863 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
864 && (REGNO (XEXP (XEXP (x, 0), 0))
865 < FIRST_PSEUDO_REGISTER)
866 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
867 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
868 else
869 {
870 /* Make a new stack slot. Then indicate that something
871 changed so we go back and recompute offsets for
872 eliminable registers because the allocation of memory
873 below might change some offset. reg_equiv_{mem,address}
874 will be set up for this pseudo on the next pass around
875 the loop. */
876 reg_equiv_memory_loc[i] = 0;
877 reg_equiv_init[i] = 0;
878 alter_reg (i, -1);
879 something_changed = 1;
880 }
881 }
882
883 /* If we allocated another pseudo to the stack, redo elimination
884 bookkeeping. */
885 if (something_changed)
886 continue;
887
888 /* If caller-saves needs a group, initialize the group to include
889 the size and mode required for caller-saves. */
890
891 if (caller_save_group_size > 1)
892 {
893 group_mode[(int) caller_save_spill_class] = Pmode;
894 group_size[(int) caller_save_spill_class] = caller_save_group_size;
895 }
896
897 /* Compute the maximum number of additional registers needed by any instruction.
898 Collect information separately for each class of regs. */
899
900 for (insn = first; insn; insn = NEXT_INSN (insn))
901 {
902 if (global && this_block + 1 < n_basic_blocks
903 && insn == basic_block_head[this_block+1])
904 ++this_block;
905
906 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
907 might include REG_LABEL), we need to see what effects this
908 has on the known offsets at labels. */
909
910 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
911 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
912 && REG_NOTES (insn) != 0))
913 set_label_offsets (insn, insn, 0);
914
915 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
916 {
917 /* Nonzero means don't use a reload reg that overlaps
918 the place where a function value can be returned. */
919 rtx avoid_return_reg = 0;
920
921 rtx old_body = PATTERN (insn);
922 int old_code = INSN_CODE (insn);
923 rtx old_notes = REG_NOTES (insn);
924 int did_elimination = 0;
925
926 /* To compute the number of reload registers of each class
927 needed for an insn, we must simulate what choose_reload_regs
928 can do. We do this by splitting an insn into an "input" and
929 an "output" part. RELOAD_OTHER reloads are used in both.
930 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
931 which must be live over the entire input section of reloads,
932 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
933 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
934 inputs.
935
936 The registers needed for output are RELOAD_OTHER and
937 RELOAD_FOR_OUTPUT, which are live for the entire output
938 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
939 reloads for each operand.
940
941 The total number of registers needed is the maximum of the
942 inputs and outputs. */
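
	    /* A small worked example of this merging, with made-up counts
	       for one class: suppose an insn has two RELOAD_FOR_INPUT
	       reloads, one RELOAD_FOR_INPUT_ADDRESS reload, one
	       RELOAD_FOR_OPERAND_ADDRESS reload and one RELOAD_FOR_OUTPUT
	       reload, and nothing else.  The input-address and
	       operand-address needs merge to MAX (1, 1) = 1, which must be
	       live together with the two inputs, so the input part needs 3
	       registers; the output part needs 1; and the total recorded
	       for the insn is MAX (3, 1) = 3.  */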
943
944 struct needs
945 {
946 /* [0] is normal, [1] is nongroup. */
947 int regs[2][N_REG_CLASSES];
948 int groups[N_REG_CLASSES];
949 };
950
951 /* Each `struct needs' corresponds to one RELOAD_... type. */
952 struct {
953 struct needs other;
954 struct needs input;
955 struct needs output;
956 struct needs insn;
957 struct needs other_addr;
958 struct needs op_addr;
959 struct needs op_addr_reload;
960 struct needs in_addr[MAX_RECOG_OPERANDS];
961 struct needs out_addr[MAX_RECOG_OPERANDS];
962 } insn_needs;
963
964 /* If needed, eliminate any eliminable registers. */
965 if (num_eliminable)
966 did_elimination = eliminate_regs_in_insn (insn, 0);
967
968 #ifdef SMALL_REGISTER_CLASSES
969 /* Set avoid_return_reg if this is an insn
970 that might use the value of a function call. */
971 if (GET_CODE (insn) == CALL_INSN)
972 {
973 if (GET_CODE (PATTERN (insn)) == SET)
974 after_call = SET_DEST (PATTERN (insn));
975 else if (GET_CODE (PATTERN (insn)) == PARALLEL
976 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
977 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
978 else
979 after_call = 0;
980 }
981 else if (after_call != 0
982 && !(GET_CODE (PATTERN (insn)) == SET
983 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
984 {
985 if (reg_referenced_p (after_call, PATTERN (insn)))
986 avoid_return_reg = after_call;
987 after_call = 0;
988 }
989 #endif /* SMALL_REGISTER_CLASSES */
990
991 /* Analyze the instruction. */
992 find_reloads (insn, 0, spill_indirect_levels, global,
993 spill_reg_order);
994
995 /* Remember for later shortcuts which insns had any reloads or
996 register eliminations.
997
998 One might think that it would be worthwhile to mark insns
999 that need register replacements but not reloads, but this is
1000 not safe because find_reloads may do some manipulation of
1001 the insn (such as swapping commutative operands), which would
1002 be lost when we restore the old pattern after register
1003 replacement. So the actions of find_reloads must be redone in
1004 subsequent passes or in reload_as_needed.
1005
1006 However, it is safe to mark insns that need reloads
1007 but not register replacement. */
1008
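	    /* The insn's mode is reused here as a cheap per-insn marker:
	       QImode means the insn needed register elimination, HImode
	       means it needed reloads; if neither applies, an existing
	       DImode marking is kept and anything else is cleared to
	       VOIDmode.  */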
1009 PUT_MODE (insn, (did_elimination ? QImode
1010 : n_reloads ? HImode
1011 : GET_MODE (insn) == DImode ? DImode
1012 : VOIDmode));
1013
1014 /* Discard any register replacements done. */
1015 if (did_elimination)
1016 {
1017 obstack_free (&reload_obstack, reload_firstobj);
1018 PATTERN (insn) = old_body;
1019 INSN_CODE (insn) = old_code;
1020 REG_NOTES (insn) = old_notes;
1021 something_needs_elimination = 1;
1022 }
1023
1024 /* If this insn has no reloads, we need not do anything except
1025 in the case of a CALL_INSN when we have caller-saves and
1026 caller-save needs reloads. */
1027
1028 if (n_reloads == 0
1029 && ! (GET_CODE (insn) == CALL_INSN
1030 && caller_save_spill_class != NO_REGS))
1031 continue;
1032
1033 something_needs_reloads = 1;
1034 bzero ((char *) &insn_needs, sizeof insn_needs);
1035
1036 /* Count each reload once in every class
1037 containing the reload's own class. */
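
	    /* For example (class names are generic): a reload whose class
	       is GENERAL_REGS is counted toward GENERAL_REGS and also
	       toward every superclass listed for it in
	       reg_class_superclasses, typically ending with ALL_REGS.  */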
1038
1039 for (i = 0; i < n_reloads; i++)
1040 {
1041 register enum reg_class *p;
1042 enum reg_class class = reload_reg_class[i];
1043 int size;
1044 enum machine_mode mode;
1045 int nongroup_need;
1046 struct needs *this_needs;
1047
1048 /* Don't count the dummy reloads, for which one of the
1049 regs mentioned in the insn can be used for reloading.
1050 Don't count optional reloads.
1051 Don't count reloads that got combined with others. */
1052 if (reload_reg_rtx[i] != 0
1053 || reload_optional[i] != 0
1054 || (reload_out[i] == 0 && reload_in[i] == 0
1055 && ! reload_secondary_p[i]))
1056 continue;
1057
1058 /* Show that a reload register of this class is needed
1059 in this basic block. We do not use insn_needs and
1060 insn_groups because they are overly conservative for
1061 this purpose. */
1062 if (global && ! basic_block_needs[(int) class][this_block])
1063 {
1064 basic_block_needs[(int) class][this_block] = 1;
1065 new_basic_block_needs = 1;
1066 }
1067
1068
1069 mode = reload_inmode[i];
1070 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1071 mode = reload_outmode[i];
1072 size = CLASS_MAX_NREGS (class, mode);
1073
1074 /* If this class doesn't want a group, determine if we have
1075 a nongroup need or a regular need. We have a nongroup
1076 need if this reload conflicts with a group reload whose
1077 class intersects with this reload's class. */
1078
1079 nongroup_need = 0;
1080 if (size == 1)
1081 for (j = 0; j < n_reloads; j++)
1082 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1083 (GET_MODE_SIZE (reload_outmode[j])
1084 > GET_MODE_SIZE (reload_inmode[j]))
1085 ? reload_outmode[j]
1086 : reload_inmode[j])
1087 > 1)
1088 && (!reload_optional[j])
1089 && (reload_in[j] != 0 || reload_out[j] != 0
1090 || reload_secondary_p[j])
1091 && reloads_conflict (i, j)
1092 && reg_classes_intersect_p (class,
1093 reload_reg_class[j]))
1094 {
1095 nongroup_need = 1;
1096 break;
1097 }
1098
1099 /* Decide which time-of-use to count this reload for. */
1100 switch (reload_when_needed[i])
1101 {
1102 case RELOAD_OTHER:
1103 this_needs = &insn_needs.other;
1104 break;
1105 case RELOAD_FOR_INPUT:
1106 this_needs = &insn_needs.input;
1107 break;
1108 case RELOAD_FOR_OUTPUT:
1109 this_needs = &insn_needs.output;
1110 break;
1111 case RELOAD_FOR_INSN:
1112 this_needs = &insn_needs.insn;
1113 break;
1114 case RELOAD_FOR_OTHER_ADDRESS:
1115 this_needs = &insn_needs.other_addr;
1116 break;
1117 case RELOAD_FOR_INPUT_ADDRESS:
1118 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1119 break;
1120 case RELOAD_FOR_OUTPUT_ADDRESS:
1121 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1122 break;
1123 case RELOAD_FOR_OPERAND_ADDRESS:
1124 this_needs = &insn_needs.op_addr;
1125 break;
1126 case RELOAD_FOR_OPADDR_ADDR:
1127 this_needs = &insn_needs.op_addr_reload;
1128 break;
1129 }
1130
1131 if (size > 1)
1132 {
1133 enum machine_mode other_mode, allocate_mode;
1134
1135 /* Count number of groups needed separately from
1136 number of individual regs needed. */
1137 this_needs->groups[(int) class]++;
1138 p = reg_class_superclasses[(int) class];
1139 while (*p != LIM_REG_CLASSES)
1140 this_needs->groups[(int) *p++]++;
1141
1142 /* Record size and mode of a group of this class. */
1143 /* If more than one size group is needed,
1144 make all groups the largest needed size. */
1145 if (group_size[(int) class] < size)
1146 {
1147 other_mode = group_mode[(int) class];
1148 allocate_mode = mode;
1149
1150 group_size[(int) class] = size;
1151 group_mode[(int) class] = mode;
1152 }
1153 else
1154 {
1155 other_mode = mode;
1156 allocate_mode = group_mode[(int) class];
1157 }
1158
1159 /* Crash if two dissimilar machine modes both need
1160 groups of consecutive regs of the same class. */
1161
1162 if (other_mode != VOIDmode && other_mode != allocate_mode
1163 && ! modes_equiv_for_class_p (allocate_mode,
1164 other_mode, class))
1165 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1166 insn);
1167 }
1168 else if (size == 1)
1169 {
1170 this_needs->regs[nongroup_need][(int) class] += 1;
1171 p = reg_class_superclasses[(int) class];
1172 while (*p != LIM_REG_CLASSES)
1173 this_needs->regs[nongroup_need][(int) *p++] += 1;
1174 }
1175 else
1176 abort ();
1177 }
1178
1179 /* All reloads have been counted for this insn;
1180 now merge the various times of use.
1181 This sets insn_needs, etc., to the maximum total number
1182 of registers needed at any point in this insn. */
1183
1184 for (i = 0; i < N_REG_CLASSES; i++)
1185 {
1186 int in_max, out_max;
1187
1188 /* Compute normal and nongroup needs. */
1189 for (j = 0; j <= 1; j++)
1190 {
1191 for (in_max = 0, out_max = 0, k = 0;
1192 k < reload_n_operands; k++)
1193 {
1194 in_max
1195 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1196 out_max
1197 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1198 }
1199
1200 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1201 and operand addresses but not things used to reload
1202 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1203 don't conflict with things needed to reload inputs or
1204 outputs. */
1205
1206 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1207 insn_needs.op_addr_reload.regs[j][i]),
1208 in_max);
1209
1210 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1211
1212 insn_needs.input.regs[j][i]
1213 = MAX (insn_needs.input.regs[j][i]
1214 + insn_needs.op_addr.regs[j][i]
1215 + insn_needs.insn.regs[j][i],
1216 in_max + insn_needs.input.regs[j][i]);
1217
1218 insn_needs.output.regs[j][i] += out_max;
1219 insn_needs.other.regs[j][i]
1220 += MAX (MAX (insn_needs.input.regs[j][i],
1221 insn_needs.output.regs[j][i]),
1222 insn_needs.other_addr.regs[j][i]);
1223
1224 }
1225
1226 /* Now compute group needs. */
1227 for (in_max = 0, out_max = 0, j = 0;
1228 j < reload_n_operands; j++)
1229 {
1230 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1231 out_max
1232 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1233 }
1234
1235 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1236 insn_needs.op_addr_reload.groups[i]),
1237 in_max);
1238 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1239
1240 insn_needs.input.groups[i]
1241 = MAX (insn_needs.input.groups[i]
1242 + insn_needs.op_addr.groups[i]
1243 + insn_needs.insn.groups[i],
1244 in_max + insn_needs.input.groups[i]);
1245
1246 insn_needs.output.groups[i] += out_max;
1247 insn_needs.other.groups[i]
1248 += MAX (MAX (insn_needs.input.groups[i],
1249 insn_needs.output.groups[i]),
1250 insn_needs.other_addr.groups[i]);
1251 }
1252
1253 /* If this is a CALL_INSN and caller-saves will need
1254 a spill register, act as if the spill register is
1255 needed for this insn. However, the spill register
1256 can be used by any reload of this insn, so we only
1257 need do something if no need for that class has
1258 been recorded.
1259
1260 The assumption that every CALL_INSN will trigger a
1261 caller-save is highly conservative; however, the number
1262 of cases where caller-saves will need a spill register but
1263 a block containing a CALL_INSN won't need a spill register
1264 of that class should be quite rare.
1265
1266 If a group is needed, the size and mode of the group will
1267 have been set up at the beginning of this loop. */
1268
1269 if (GET_CODE (insn) == CALL_INSN
1270 && caller_save_spill_class != NO_REGS)
1271 {
1272 /* See if this register would conflict with any reload
1273 that needs a group. */
1274 int nongroup_need = 0;
1275 int *caller_save_needs;
1276
1277 for (j = 0; j < n_reloads; j++)
1278 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1279 (GET_MODE_SIZE (reload_outmode[j])
1280 > GET_MODE_SIZE (reload_inmode[j]))
1281 ? reload_outmode[j]
1282 : reload_inmode[j])
1283 > 1)
1284 && reg_classes_intersect_p (caller_save_spill_class,
1285 reload_reg_class[j]))
1286 {
1287 nongroup_need = 1;
1288 break;
1289 }
1290
1291 caller_save_needs
1292 = (caller_save_group_size > 1
1293 ? insn_needs.other.groups
1294 : insn_needs.other.regs[nongroup_need]);
1295
1296 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1297 {
1298 register enum reg_class *p
1299 = reg_class_superclasses[(int) caller_save_spill_class];
1300
1301 caller_save_needs[(int) caller_save_spill_class]++;
1302
1303 while (*p != LIM_REG_CLASSES)
1304 caller_save_needs[(int) *p++] += 1;
1305 }
1306
1307 /* Show that this basic block will need a register of
1308 this class. */
1309
1310 if (global
1311 && ! (basic_block_needs[(int) caller_save_spill_class]
1312 [this_block]))
1313 {
1314 basic_block_needs[(int) caller_save_spill_class]
1315 [this_block] = 1;
1316 new_basic_block_needs = 1;
1317 }
1318 }
1319
1320 #ifdef SMALL_REGISTER_CLASSES
1321 /* If this insn stores the value of a function call,
1322 and that value is in a register that has been spilled,
1323 and if the insn needs a reload in a class
1324 that might use that register as the reload register,
1325 then add an extra need in that class.
1326 This makes sure we have a register available that does
1327 not overlap the return value. */
1328
1329 if (avoid_return_reg)
1330 {
1331 int regno = REGNO (avoid_return_reg);
1332 int nregs
1333 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1334 int r;
1335 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1336
1337 /* First compute the "basic needs", which counts a
1338 need only in the smallest class in which it
1339 is required. */
1340
1341 bcopy ((char *) insn_needs.other.regs[0],
1342 (char *) basic_needs, sizeof basic_needs);
1343 bcopy ((char *) insn_needs.other.groups,
1344 (char *) basic_groups, sizeof basic_groups);
1345
1346 for (i = 0; i < N_REG_CLASSES; i++)
1347 {
1348 enum reg_class *p;
1349
1350 if (basic_needs[i] >= 0)
1351 for (p = reg_class_superclasses[i];
1352 *p != LIM_REG_CLASSES; p++)
1353 basic_needs[(int) *p] -= basic_needs[i];
1354
1355 if (basic_groups[i] >= 0)
1356 for (p = reg_class_superclasses[i];
1357 *p != LIM_REG_CLASSES; p++)
1358 basic_groups[(int) *p] -= basic_groups[i];
1359 }
1360
1361 /* Now count extra regs if there might be a conflict with
1362 the return value register. */
1363
1364 for (r = regno; r < regno + nregs; r++)
1365 if (spill_reg_order[r] >= 0)
1366 for (i = 0; i < N_REG_CLASSES; i++)
1367 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1368 {
1369 if (basic_needs[i] > 0)
1370 {
1371 enum reg_class *p;
1372
1373 insn_needs.other.regs[0][i]++;
1374 p = reg_class_superclasses[i];
1375 while (*p != LIM_REG_CLASSES)
1376 insn_needs.other.regs[0][(int) *p++]++;
1377 }
1378 if (basic_groups[i] > 0)
1379 {
1380 enum reg_class *p;
1381
1382 insn_needs.other.groups[i]++;
1383 p = reg_class_superclasses[i];
1384 while (*p != LIM_REG_CLASSES)
1385 insn_needs.other.groups[(int) *p++]++;
1386 }
1387 }
1388 }
1389 #endif /* SMALL_REGISTER_CLASSES */
1390
1391 /* For each class, collect maximum need of any insn. */
1392
1393 for (i = 0; i < N_REG_CLASSES; i++)
1394 {
1395 if (max_needs[i] < insn_needs.other.regs[0][i])
1396 {
1397 max_needs[i] = insn_needs.other.regs[0][i];
1398 max_needs_insn[i] = insn;
1399 }
1400 if (max_groups[i] < insn_needs.other.groups[i])
1401 {
1402 max_groups[i] = insn_needs.other.groups[i];
1403 max_groups_insn[i] = insn;
1404 }
1405 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1406 {
1407 max_nongroups[i] = insn_needs.other.regs[1][i];
1408 max_nongroups_insn[i] = insn;
1409 }
1410 }
1411 }
1412 /* Note that there is a continue statement above. */
1413 }
1414
1415 /* If we allocated any new memory locations, make another pass
1416 since it might have changed elimination offsets. */
1417 if (starting_frame_size != get_frame_size ())
1418 something_changed = 1;
1419
1420 if (dumpfile)
1421 for (i = 0; i < N_REG_CLASSES; i++)
1422 {
1423 if (max_needs[i] > 0)
1424 fprintf (dumpfile,
1425 ";; Need %d reg%s of class %s (for insn %d).\n",
1426 max_needs[i], max_needs[i] == 1 ? "" : "s",
1427 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1428 if (max_nongroups[i] > 0)
1429 fprintf (dumpfile,
1430 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1431 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1432 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1433 if (max_groups[i] > 0)
1434 fprintf (dumpfile,
1435 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1436 max_groups[i], max_groups[i] == 1 ? "" : "s",
1437 mode_name[(int) group_mode[i]],
1438 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1439 }
1440
1441 /* If we have caller-saves, set up the save areas and see if caller-save
1442 will need a spill register. */
1443
1444 if (caller_save_needed
1445 && ! setup_save_areas (&something_changed)
1446 && caller_save_spill_class == NO_REGS)
1447 {
1448 /* The class we will need depends on whether the machine
1449 supports the sum of two registers for an address; see
1450 find_address_reloads for details. */
1451
1452 caller_save_spill_class
1453 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1454 caller_save_group_size
1455 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1456 something_changed = 1;
1457 }
1458
1459 /* See if anything that happened changes which eliminations are valid.
1460 For example, on the Sparc, whether or not the frame pointer can
1461 be eliminated can depend on what registers have been used. We need
1462 not check some conditions again (such as flag_omit_frame_pointer)
1463 since they can't have changed. */
1464
1465 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1466 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1467 #ifdef ELIMINABLE_REGS
1468 || ! CAN_ELIMINATE (ep->from, ep->to)
1469 #endif
1470 )
1471 ep->can_eliminate = 0;
1472
1473 /* Look for the case where we have discovered that we can't replace
1474 register A with register B and that means that we will now be
1475 trying to replace register A with register C. This means we can
1476 no longer replace register C with register B and we need to disable
1477 such an elimination, if it exists. This occurs often with A == ap,
1478 B == sp, and C == fp. */
1479
1480 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1481 {
1482 struct elim_table *op;
1483 register int new_to = -1;
1484
1485 if (! ep->can_eliminate && ep->can_eliminate_previous)
1486 {
1487 /* Find the current elimination for ep->from, if there is a
1488 new one. */
1489 for (op = reg_eliminate;
1490 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1491 if (op->from == ep->from && op->can_eliminate)
1492 {
1493 new_to = op->to;
1494 break;
1495 }
1496
1497 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1498 disable it. */
1499 for (op = reg_eliminate;
1500 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1501 if (op->from == new_to && op->to == ep->to)
1502 op->can_eliminate = 0;
1503 }
1504 }
1505
1506 /* See if any registers that we thought we could eliminate the previous
1507 time are no longer eliminable. If so, something has changed and we
1508 must spill the register. Also, recompute the number of eliminable
1509 registers and see if the frame pointer is needed; it is if there is
1510 no elimination of the frame pointer that we can perform. */
1511
1512 frame_pointer_needed = 1;
1513 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1514 {
1515 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1516 && ep->to != HARD_FRAME_POINTER_REGNUM)
1517 frame_pointer_needed = 0;
1518
1519 if (! ep->can_eliminate && ep->can_eliminate_previous)
1520 {
1521 ep->can_eliminate_previous = 0;
1522 spill_hard_reg (ep->from, global, dumpfile, 1);
1523 something_changed = 1;
1524 num_eliminable--;
1525 }
1526 }
1527
1528 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1529 /* If we didn't need a frame pointer last time, but we do now, spill
1530 the hard frame pointer. */
1531 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1532 {
1533 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1534 something_changed = 1;
1535 }
1536 #endif
1537
1538 /* If all needs are met, we win. */
1539
1540 for (i = 0; i < N_REG_CLASSES; i++)
1541 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1542 break;
1543 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1544 break;
1545
1546 /* Not all needs are met; must spill some hard regs. */
1547
1548 /* Put all registers spilled so far back in potential_reload_regs, but
1549 put them at the front, since we've already spilled most of the
1550 pseudos in them (we might have left some pseudos unspilled if they
1551 were in a block that didn't need any spill registers of a conflicting
1552      class). We used to try to mark off the need for those registers,
1553 but doing so properly is very complex and reallocating them is the
1554 simpler approach. First, "pack" potential_reload_regs by pushing
1555 any nonnegative entries towards the end. That will leave room
1556 for the registers we already spilled.
1557
1558 Also, undo the marking of the spill registers from the last time
1559      around in FORBIDDEN_REGS since we will probably be allocating
1560 them again below.
1561
1562 ??? It is theoretically possible that we might end up not using one
1563 of our previously-spilled registers in this allocation, even though
1564 they are at the head of the list. It's not clear what to do about
1565 this, but it was no better before, when we marked off the needs met
1566 by the previously-spilled registers. With the current code, globals
1567 can be allocated into these registers, but locals cannot. */
1568
1569 if (n_spills)
1570 {
1571 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1572 if (potential_reload_regs[i] != -1)
1573 potential_reload_regs[j--] = potential_reload_regs[i];
1574
1575 for (i = 0; i < n_spills; i++)
1576 {
1577 potential_reload_regs[i] = spill_regs[i];
1578 spill_reg_order[spill_regs[i]] = -1;
1579 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1580 }
1581
1582 n_spills = 0;
1583 }
1584
1585       /* Now find more reload regs to satisfy the remaining need.
1586 Do it by ascending class number, since otherwise a reg
1587 might be spilled for a big class and might fail to count
1588 for a smaller class even though it belongs to that class.
1589
1590 	 Count spilled regs in `n_spills', and add entries to
1591 `spill_regs' and `spill_reg_order'.
1592
1593 ??? Note there is a problem here.
1594 When there is a need for a group in a high-numbered class,
1595 and also need for non-group regs that come from a lower class,
1596 the non-group regs are chosen first. If there aren't many regs,
1597 they might leave no room for a group.
1598
1599 This was happening on the 386. To fix it, we added the code
1600 that calls possible_group_p, so that the lower class won't
1601 break up the last possible group.
1602
1603 Really fixing the problem would require changes above
1604 in counting the regs already spilled, and in choose_reload_regs.
1605 It might be hard to avoid introducing bugs there. */
1606
1607 CLEAR_HARD_REG_SET (counted_for_groups);
1608 CLEAR_HARD_REG_SET (counted_for_nongroups);
1609
1610 for (class = 0; class < N_REG_CLASSES; class++)
1611 {
1612 /* First get the groups of registers.
1613 If we got single registers first, we might fragment
1614 possible groups. */
1615 while (max_groups[class] > 0)
1616 {
1617 /* If any single spilled regs happen to form groups,
1618 count them now. Maybe we don't really need
1619 to spill another group. */
1620 count_possible_groups (group_size, group_mode, max_groups,
1621 class);
1622
1623 if (max_groups[class] <= 0)
1624 break;
1625
1626 /* Groups of size 2 (the only groups used on most machines)
1627 are treated specially. */
1628 if (group_size[class] == 2)
1629 {
1630 /* First, look for a register that will complete a group. */
1631 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1632 {
1633 int other;
1634
1635 j = potential_reload_regs[i];
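		  /* J can complete a group if the register just below it (J-1)
		     or just above it (J+1) is already a spill register, both
		     registers are in CLASS, the lower register of the pair can
		     hold GROUP_MODE, and the neighbor is not already counted
		     for another group or for a non-group need. */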
1636 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1637 &&
1638 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1639 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1640 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1641 && HARD_REGNO_MODE_OK (other, group_mode[class])
1642 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1643 other)
1644 /* We don't want one part of another group.
1645 We could get "two groups" that overlap! */
1646 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1647 ||
1648 (j < FIRST_PSEUDO_REGISTER - 1
1649 && (other = j + 1, spill_reg_order[other] >= 0)
1650 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1651 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1652 && HARD_REGNO_MODE_OK (j, group_mode[class])
1653 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1654 other)
1655 && ! TEST_HARD_REG_BIT (counted_for_groups,
1656 other))))
1657 {
1658 register enum reg_class *p;
1659
1660 /* We have found one that will complete a group,
1661 so count off one group as provided. */
1662 max_groups[class]--;
1663 p = reg_class_superclasses[class];
1664 while (*p != LIM_REG_CLASSES)
1665 {
1666 if (group_size [(int) *p] <= group_size [class])
1667 max_groups[(int) *p]--;
1668 p++;
1669 }
1670
1671 /* Indicate both these regs are part of a group. */
1672 SET_HARD_REG_BIT (counted_for_groups, j);
1673 SET_HARD_REG_BIT (counted_for_groups, other);
1674 break;
1675 }
1676 }
1677 /* We can't complete a group, so start one. */
1678 #ifdef SMALL_REGISTER_CLASSES
1679 /* Look for a pair neither of which is explicitly used. */
1680 if (i == FIRST_PSEUDO_REGISTER)
1681 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1682 {
1683 int k;
1684 j = potential_reload_regs[i];
1685 /* Verify that J+1 is a potential reload reg. */
1686 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1687 if (potential_reload_regs[k] == j + 1)
1688 break;
1689 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1690 && k < FIRST_PSEUDO_REGISTER
1691 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1692 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1693 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1694 && HARD_REGNO_MODE_OK (j, group_mode[class])
1695 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1696 j + 1)
1697 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1698 /* Reject J at this stage
1699 if J+1 was explicitly used. */
1700 && ! regs_explicitly_used[j + 1])
1701 break;
1702 }
1703 #endif
1704 /* Now try any group at all
1705 whose registers are not in bad_spill_regs. */
1706 if (i == FIRST_PSEUDO_REGISTER)
1707 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1708 {
1709 int k;
1710 j = potential_reload_regs[i];
1711 /* Verify that J+1 is a potential reload reg. */
1712 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1713 if (potential_reload_regs[k] == j + 1)
1714 break;
1715 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1716 && k < FIRST_PSEUDO_REGISTER
1717 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1718 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1719 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1720 && HARD_REGNO_MODE_OK (j, group_mode[class])
1721 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1722 j + 1)
1723 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1724 break;
1725 }
1726
1727 /* I should be the index in potential_reload_regs
1728 of the new reload reg we have found. */
1729
1730 if (i >= FIRST_PSEUDO_REGISTER)
1731 {
1732 /* There are no groups left to spill. */
1733 spill_failure (max_groups_insn[class]);
1734 failure = 1;
1735 goto failed;
1736 }
1737 else
1738 something_changed
1739 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1740 global, dumpfile);
1741 }
1742 else
1743 {
1744 /* For groups of more than 2 registers,
1745 look for a sufficient sequence of unspilled registers,
1746 and spill them all at once. */
1747 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1748 {
1749 int k;
1750
1751 j = potential_reload_regs[i];
1752 if (j >= 0
1753 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1754 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1755 {
1756 /* Check each reg in the sequence. */
1757 for (k = 0; k < group_size[class]; k++)
1758 if (! (spill_reg_order[j + k] < 0
1759 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1760 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1761 break;
1762 /* We got a full sequence, so spill them all. */
1763 if (k == group_size[class])
1764 {
1765 register enum reg_class *p;
1766 for (k = 0; k < group_size[class]; k++)
1767 {
1768 int idx;
1769 SET_HARD_REG_BIT (counted_for_groups, j + k);
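			  /* new_spill_reg wants an index into
			     potential_reload_regs, so locate register J+K
			     in that array. */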
1770 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1771 if (potential_reload_regs[idx] == j + k)
1772 break;
1773 something_changed
1774 |= new_spill_reg (idx, class,
1775 max_needs, NULL_PTR,
1776 global, dumpfile);
1777 }
1778
1779 /* We have found one that will complete a group,
1780 so count off one group as provided. */
1781 max_groups[class]--;
1782 p = reg_class_superclasses[class];
1783 while (*p != LIM_REG_CLASSES)
1784 {
1785 if (group_size [(int) *p]
1786 <= group_size [class])
1787 max_groups[(int) *p]--;
1788 p++;
1789 }
1790 break;
1791 }
1792 }
1793 }
1794 /* We couldn't find any registers for this reload.
1795 Avoid going into an infinite loop. */
1796 if (i >= FIRST_PSEUDO_REGISTER)
1797 {
1798 /* There are no groups left. */
1799 spill_failure (max_groups_insn[class]);
1800 failure = 1;
1801 goto failed;
1802 }
1803 }
1804 }
1805
1806 /* Now similarly satisfy all need for single registers. */
1807
1808 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1809 {
1810 #ifdef SMALL_REGISTER_CLASSES
1811 /* This should be right for all machines, but only the 386
1812 is known to need it, so this conditional plays safe.
1813 ??? For 2.5, try making this unconditional. */
1814 /* If we spilled enough regs, but they weren't counted
1815 against the non-group need, see if we can count them now.
1816 If so, we can avoid some actual spilling. */
1817 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1818 for (i = 0; i < n_spills; i++)
1819 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1820 spill_regs[i])
1821 && !TEST_HARD_REG_BIT (counted_for_groups,
1822 spill_regs[i])
1823 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1824 spill_regs[i])
1825 && max_nongroups[class] > 0)
1826 {
1827 register enum reg_class *p;
1828
1829 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1830 max_nongroups[class]--;
1831 p = reg_class_superclasses[class];
1832 while (*p != LIM_REG_CLASSES)
1833 max_nongroups[(int) *p++]--;
1834 }
1835 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1836 break;
1837 #endif
1838
1839 /* Consider the potential reload regs that aren't
1840 yet in use as reload regs, in order of preference.
1841 Find the most preferred one that's in this class. */
1842
1843 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1844 if (potential_reload_regs[i] >= 0
1845 && TEST_HARD_REG_BIT (reg_class_contents[class],
1846 potential_reload_regs[i])
1847 /* If this reg will not be available for groups,
1848 pick one that does not foreclose possible groups.
1849 This is a kludge, and not very general,
1850 but it should be sufficient to make the 386 work,
1851 and the problem should not occur on machines with
1852 more registers. */
1853 && (max_nongroups[class] == 0
1854 || possible_group_p (potential_reload_regs[i], max_groups)))
1855 break;
1856
1857 /* If we couldn't get a register, try to get one even if we
1858 might foreclose possible groups. This may cause problems
1859 later, but that's better than aborting now, since it is
1860 possible that we will, in fact, be able to form the needed
1861 group even with this allocation. */
1862
1863 if (i >= FIRST_PSEUDO_REGISTER
1864 && (asm_noperands (max_needs[class] > 0
1865 ? max_needs_insn[class]
1866 : max_nongroups_insn[class])
1867 < 0))
1868 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1869 if (potential_reload_regs[i] >= 0
1870 && TEST_HARD_REG_BIT (reg_class_contents[class],
1871 potential_reload_regs[i]))
1872 break;
1873
1874 /* I should be the index in potential_reload_regs
1875 of the new reload reg we have found. */
1876
1877 if (i >= FIRST_PSEUDO_REGISTER)
1878 {
1879 /* There are no possible registers left to spill. */
1880 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1881 : max_nongroups_insn[class]);
1882 failure = 1;
1883 goto failed;
1884 }
1885 else
1886 something_changed
1887 |= new_spill_reg (i, class, max_needs, max_nongroups,
1888 global, dumpfile);
1889 }
1890 }
1891 }
1892
1893 /* If global-alloc was run, notify it of any register eliminations we have
1894 done. */
1895 if (global)
1896 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1897 if (ep->can_eliminate)
1898 mark_elimination (ep->from, ep->to);
1899
1900 /* Insert code to save and restore call-clobbered hard regs
1901      around calls. Tell it what mode to use so that we will process
1902 those insns in reload_as_needed if we have to. */
1903
1904 if (caller_save_needed)
1905 save_call_clobbered_regs (num_eliminable ? QImode
1906 : caller_save_spill_class != NO_REGS ? HImode
1907 : VOIDmode);
1908
1909 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1910 If that insn didn't set the register (i.e., it copied the register to
1911 memory), just delete that insn instead of the equivalencing insn plus
1912 anything now dead. If we call delete_dead_insn on that insn, we may
1913      delete the insn that actually sets the register if the register dies
1914      there, and that is incorrect. */
1915
1916 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1917 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1918 && GET_CODE (reg_equiv_init[i]) != NOTE)
1919 {
1920 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1921 delete_dead_insn (reg_equiv_init[i]);
1922 else
1923 {
1924 PUT_CODE (reg_equiv_init[i], NOTE);
1925 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1926 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1927 }
1928 }
1929
1930 /* Use the reload registers where necessary
1931 by generating move instructions to move the must-be-register
1932 values into or out of the reload registers. */
1933
1934 if (something_needs_reloads || something_needs_elimination
1935 || (caller_save_needed && num_eliminable)
1936 || caller_save_spill_class != NO_REGS)
1937 reload_as_needed (first, global);
1938
1939 /* If we were able to eliminate the frame pointer, show that it is no
1940      longer live at the start of any basic block. If it is live by
1941 virtue of being in a pseudo, that pseudo will be marked live
1942 and hence the frame pointer will be known to be live via that
1943 pseudo. */
1944
1945 if (! frame_pointer_needed)
1946 for (i = 0; i < n_basic_blocks; i++)
1947 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1948 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1949 % REGSET_ELT_BITS));
1950
1951 /* Come here (with failure set nonzero) if we can't get enough spill regs
1952 and we decide not to abort about it. */
1953 failed:
1954
1955 reload_in_progress = 0;
1956
1957 /* Now eliminate all pseudo regs by modifying them into
1958 their equivalent memory references.
1959 The REG-rtx's for the pseudos are modified in place,
1960 so all insns that used to refer to them now refer to memory.
1961
1962 For a reg that has a reg_equiv_address, all those insns
1963 were changed by reloading so that no insns refer to it any longer;
1964 but the DECL_RTL of a variable decl may refer to it,
1965 and if so this causes the debugging info to mention the variable. */
1966
1967 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1968 {
1969 rtx addr = 0;
1970 int in_struct = 0;
1971 if (reg_equiv_mem[i])
1972 {
1973 addr = XEXP (reg_equiv_mem[i], 0);
1974 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1975 }
1976 if (reg_equiv_address[i])
1977 addr = reg_equiv_address[i];
1978 if (addr)
1979 {
1980 if (reg_renumber[i] < 0)
1981 {
1982 rtx reg = regno_reg_rtx[i];
1983 XEXP (reg, 0) = addr;
1984 REG_USERVAR_P (reg) = 0;
1985 MEM_IN_STRUCT_P (reg) = in_struct;
1986 PUT_CODE (reg, MEM);
1987 }
1988 else if (reg_equiv_mem[i])
1989 XEXP (reg_equiv_mem[i], 0) = addr;
1990 }
1991 }
1992
1993 #ifdef PRESERVE_DEATH_INFO_REGNO_P
1994 /* Make a pass over all the insns and remove death notes for things that
1995 are no longer registers or no longer die in the insn (e.g., an input
1996 and output pseudo being tied). */
1997
1998 for (insn = first; insn; insn = NEXT_INSN (insn))
1999 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2000 {
2001 rtx note, next;
2002
2003 for (note = REG_NOTES (insn); note; note = next)
2004 {
2005 next = XEXP (note, 1);
2006 if (REG_NOTE_KIND (note) == REG_DEAD
2007 && (GET_CODE (XEXP (note, 0)) != REG
2008 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2009 remove_note (insn, note);
2010 }
2011 }
2012 #endif
2013
2014 /* Indicate that we no longer have known memory locations or constants. */
2015 reg_equiv_constant = 0;
2016 reg_equiv_memory_loc = 0;
2017
2018 if (scratch_list)
2019 free (scratch_list);
2020 scratch_list = 0;
2021 if (scratch_block)
2022 free (scratch_block);
2023 scratch_block = 0;
2024
2025 return failure;
2026 }
2027 \f
2028 /* Nonzero if, after spilling reg REGNO for non-groups,
2029 it will still be possible to find a group if we still need one. */
2030
2031 static int
2032 possible_group_p (regno, max_groups)
2033 int regno;
2034 int *max_groups;
2035 {
2036 int i;
2037 int class = (int) NO_REGS;
2038
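  /* Find a class that still needs a group; if none does, spilling REGNO
     cannot hurt. Only one such class is handled here; see the ??? comment
     below. */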
2039 for (i = 0; i < (int) N_REG_CLASSES; i++)
2040 if (max_groups[i] > 0)
2041 {
2042 class = i;
2043 break;
2044 }
2045
2046 if (class == (int) NO_REGS)
2047 return 1;
2048
2049 /* Consider each pair of consecutive registers. */
2050 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2051 {
2052 /* Ignore pairs that include reg REGNO. */
2053 if (i == regno || i + 1 == regno)
2054 continue;
2055
2056 /* Ignore pairs that are outside the class that needs the group.
2057 ??? Here we fail to handle the case where two different classes
2058 independently need groups. But this never happens with our
2059 current machine descriptions. */
2060 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2061 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2062 continue;
2063
2064 /* A pair of consecutive regs we can still spill does the trick. */
2065 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2066 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2067 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2068 return 1;
2069
2070 /* A pair of one already spilled and one we can spill does it
2071 provided the one already spilled is not otherwise reserved. */
2072 if (spill_reg_order[i] < 0
2073 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2074 && spill_reg_order[i + 1] >= 0
2075 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2076 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2077 return 1;
2078 if (spill_reg_order[i + 1] < 0
2079 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2080 && spill_reg_order[i] >= 0
2081 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2082 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2083 return 1;
2084 }
2085
2086 return 0;
2087 }
2088 \f
2089 /* Count any groups of CLASS that can be formed from the registers recently
2090 spilled. */
2091
2092 static void
2093 count_possible_groups (group_size, group_mode, max_groups, class)
2094 int *group_size;
2095 enum machine_mode *group_mode;
2096 int *max_groups;
2097 int class;
2098 {
2099 HARD_REG_SET new;
2100 int i, j;
2101
2102 /* Now find all consecutive groups of spilled registers
2103 and mark each group off against the need for such groups.
2104 But don't count them against ordinary need, yet. */
2105
2106 if (group_size[class] == 0)
2107 return;
2108
2109 CLEAR_HARD_REG_SET (new);
2110
2111   /* Make a mask of all the regs that are spill regs in CLASS. */
2112 for (i = 0; i < n_spills; i++)
2113 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2114 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2115 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2116 SET_HARD_REG_BIT (new, spill_regs[i]);
2117
2118 /* Find each consecutive group of them. */
2119 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2120 if (TEST_HARD_REG_BIT (new, i)
2121 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2122 && HARD_REGNO_MODE_OK (i, group_mode[class]))
2123 {
2124 for (j = 1; j < group_size[class]; j++)
2125 if (! TEST_HARD_REG_BIT (new, i + j))
2126 break;
2127
2128 if (j == group_size[class])
2129 {
2130 /* We found a group. Mark it off against this class's need for
2131 groups, and against each superclass too. */
2132 register enum reg_class *p;
2133
2134 max_groups[class]--;
2135 p = reg_class_superclasses[class];
2136 while (*p != LIM_REG_CLASSES)
2137 {
2138 if (group_size [(int) *p] <= group_size [class])
2139 max_groups[(int) *p]--;
2140 p++;
2141 }
2142
2143 /* Don't count these registers again. */
2144 for (j = 0; j < group_size[class]; j++)
2145 SET_HARD_REG_BIT (counted_for_groups, i + j);
2146 }
2147
2148 /* Skip to the last reg in this group. When i is incremented above,
2149 it will then point to the first reg of the next possible group. */
2150 i += j - 1;
2151 }
2152 }
2153 \f
2154 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2155 another mode that needs to be reloaded for the same register class CLASS.
2156 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2157 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2158
2159 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2160 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2161 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2162 causes unnecessary failures on machines requiring alignment of register
2163 groups when the two modes are different sizes, because the larger mode has
2164 more strict alignment rules than the smaller mode. */
2165
2166 static int
2167 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2168 enum machine_mode allocate_mode, other_mode;
2169 enum reg_class class;
2170 {
2171 register int regno;
2172 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2173 {
2174 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2175 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2176 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2177 return 0;
2178 }
2179 return 1;
2180 }
2181
2182 /* Handle the failure to find a register to spill.
2183 INSN should be one of the insns which needed this particular spill reg. */
2184
2185 static void
2186 spill_failure (insn)
2187 rtx insn;
2188 {
2189 if (asm_noperands (PATTERN (insn)) >= 0)
2190 error_for_asm (insn, "`asm' needs too many reloads");
2191 else
2192 fatal_insn ("Unable to find a register to spill.", insn);
2193 }
2194
2195 /* Add a new register to the tables of available spill-registers
2196 (as well as spilling all pseudos allocated to the register).
2197 I is the index of this register in potential_reload_regs.
2198 CLASS is the regclass whose need is being satisfied.
2199 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2200 so that this register can count off against them.
2201 MAX_NONGROUPS is 0 if this register is part of a group.
2202 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2203
2204 static int
2205 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2206 int i;
2207 int class;
2208 int *max_needs;
2209 int *max_nongroups;
2210 int global;
2211 FILE *dumpfile;
2212 {
2213 register enum reg_class *p;
2214 int val;
2215 int regno = potential_reload_regs[i];
2216
2217 if (i >= FIRST_PSEUDO_REGISTER)
2218 abort (); /* Caller failed to find any register. */
2219
2220 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2221 fatal ("fixed or forbidden register was spilled.\n\
2222 This may be due to a compiler bug or to impossible asm\n\
2223 statements or clauses.");
2224
2225 /* Make reg REGNO an additional reload reg. */
2226
2227 potential_reload_regs[i] = -1;
2228 spill_regs[n_spills] = regno;
2229 spill_reg_order[regno] = n_spills;
2230 if (dumpfile)
2231 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2232
2233 /* Clear off the needs we just satisfied. */
2234
2235 max_needs[class]--;
2236 p = reg_class_superclasses[class];
2237 while (*p != LIM_REG_CLASSES)
2238 max_needs[(int) *p++]--;
2239
2240 if (max_nongroups && max_nongroups[class] > 0)
2241 {
2242 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2243 max_nongroups[class]--;
2244 p = reg_class_superclasses[class];
2245 while (*p != LIM_REG_CLASSES)
2246 max_nongroups[(int) *p++]--;
2247 }
2248
2249 /* Spill every pseudo reg that was allocated to this reg
2250 or to something that overlaps this reg. */
2251
2252 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2253
2254 /* If there are some registers still to eliminate and this register
2255 wasn't ever used before, additional stack space may have to be
2256 allocated to store this register. Thus, we may have changed the offset
2257 between the stack and frame pointers, so mark that something has changed.
2258 (If new pseudos were spilled, thus requiring more space, VAL would have
2259 been set non-zero by the call to spill_hard_reg above since additional
2260      reloads may be needed in that case.)
2261
2262 One might think that we need only set VAL to 1 if this is a call-used
2263 register. However, the set of registers that must be saved by the
2264 prologue is not identical to the call-used set. For example, the
2265 register used by the call insn for the return PC is a call-used register,
2266 but must be saved by the prologue. */
2267 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2268 val = 1;
2269
2270 regs_ever_live[spill_regs[n_spills]] = 1;
2271 n_spills++;
2272
2273 return val;
2274 }
2275 \f
2276 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2277 data that is dead in INSN. */
2278
2279 static void
2280 delete_dead_insn (insn)
2281 rtx insn;
2282 {
2283 rtx prev = prev_real_insn (insn);
2284 rtx prev_dest;
2285
2286 /* If the previous insn sets a register that dies in our insn, delete it
2287 too. */
2288 if (prev && GET_CODE (PATTERN (prev)) == SET
2289 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2290 && reg_mentioned_p (prev_dest, PATTERN (insn))
2291 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2292 delete_dead_insn (prev);
2293
2294 PUT_CODE (insn, NOTE);
2295 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2296 NOTE_SOURCE_FILE (insn) = 0;
2297 }
2298
2299 /* Modify the home of pseudo-reg I.
2300 The new home is present in reg_renumber[I].
2301
2302 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2303 or it may be -1, meaning there is none or it is not relevant.
2304 This is used so that all pseudos spilled from a given hard reg
2305 can share one stack slot. */
2306
2307 static void
2308 alter_reg (i, from_reg)
2309 register int i;
2310 int from_reg;
2311 {
2312 /* When outputting an inline function, this can happen
2313 for a reg that isn't actually used. */
2314 if (regno_reg_rtx[i] == 0)
2315 return;
2316
2317 /* If the reg got changed to a MEM at rtl-generation time,
2318 ignore it. */
2319 if (GET_CODE (regno_reg_rtx[i]) != REG)
2320 return;
2321
2322 /* Modify the reg-rtx to contain the new hard reg
2323 number or else to contain its pseudo reg number. */
2324 REGNO (regno_reg_rtx[i])
2325 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2326
2327 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2328 allocate a stack slot for it. */
2329
2330 if (reg_renumber[i] < 0
2331 && reg_n_refs[i] > 0
2332 && reg_equiv_constant[i] == 0
2333 && reg_equiv_memory_loc[i] == 0)
2334 {
2335 register rtx x;
2336 int inherent_size = PSEUDO_REGNO_BYTES (i);
2337 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2338 int adjust = 0;
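      /* ADJUST is a byte offset applied to the slot's address before it is
	 recorded for this pseudo; it is nonzero only when a big-endian
	 correction is involved. */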
2339
2340 /* Each pseudo reg has an inherent size which comes from its own mode,
2341 and a total size which provides room for paradoxical subregs
2342 which refer to the pseudo reg in wider modes.
2343
2344 We can use a slot already allocated if it provides both
2345 enough inherent space and enough total space.
2346 Otherwise, we allocate a new slot, making sure that it has no less
2347 	 inherent space, and no less total space, than the previous slot. */
2348 if (from_reg == -1)
2349 {
2350 /* No known place to spill from => no slot to reuse. */
2351 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2352 if (BYTES_BIG_ENDIAN)
2353 /* Cancel the big-endian correction done in assign_stack_local.
2354 Get the address of the beginning of the slot.
2355 This is so we can do a big-endian correction unconditionally
2356 below. */
2357 adjust = inherent_size - total_size;
2358
2359 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2360 }
2361 /* Reuse a stack slot if possible. */
2362 else if (spill_stack_slot[from_reg] != 0
2363 && spill_stack_slot_width[from_reg] >= total_size
2364 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2365 >= inherent_size))
2366 x = spill_stack_slot[from_reg];
2367 /* Allocate a bigger slot. */
2368 else
2369 {
2370 /* Compute maximum size needed, both for inherent size
2371 and for total size. */
2372 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2373 rtx stack_slot;
2374 if (spill_stack_slot[from_reg])
2375 {
2376 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2377 > inherent_size)
2378 mode = GET_MODE (spill_stack_slot[from_reg]);
2379 if (spill_stack_slot_width[from_reg] > total_size)
2380 total_size = spill_stack_slot_width[from_reg];
2381 }
2382 /* Make a slot with that size. */
2383 x = assign_stack_local (mode, total_size, -1);
2384 stack_slot = x;
2385 if (BYTES_BIG_ENDIAN)
2386 {
2387 /* Cancel the big-endian correction done in assign_stack_local.
2388 Get the address of the beginning of the slot.
2389 This is so we can do a big-endian correction unconditionally
2390 below. */
2391 adjust = GET_MODE_SIZE (mode) - total_size;
2392 if (adjust)
2393 stack_slot = gen_rtx (MEM, mode_for_size (total_size
2394 * BITS_PER_UNIT,
2395 MODE_INT, 1),
2396 plus_constant (XEXP (x, 0), adjust));
2397 }
2398 spill_stack_slot[from_reg] = stack_slot;
2399 spill_stack_slot_width[from_reg] = total_size;
2400 }
2401
2402 /* On a big endian machine, the "address" of the slot
2403 is the address of the low part that fits its inherent mode. */
2404 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2405 adjust += (total_size - inherent_size);
2406
2407 /* If we have any adjustment to make, or if the stack slot is the
2408 wrong mode, make a new stack slot. */
2409 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2410 {
2411 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2412 plus_constant (XEXP (x, 0), adjust));
2413 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2414 }
2415
2416 /* Save the stack slot for later. */
2417 reg_equiv_memory_loc[i] = x;
2418 }
2419 }
2420
2421 /* Mark the slots in regs_ever_live for the hard regs
2422 used by pseudo-reg number REGNO. */
2423
2424 void
2425 mark_home_live (regno)
2426 int regno;
2427 {
2428 register int i, lim;
2429 i = reg_renumber[regno];
2430 if (i < 0)
2431 return;
2432 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2433 while (i < lim)
2434 regs_ever_live[i++] = 1;
2435 }
2436
2437 /* Mark the registers used in SCRATCH as being live. */
2438
2439 static void
2440 mark_scratch_live (scratch)
2441 rtx scratch;
2442 {
2443 register int i;
2444 int regno = REGNO (scratch);
2445 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2446
2447 for (i = regno; i < lim; i++)
2448 regs_ever_live[i] = 1;
2449 }
2450 \f
2451 /* This function handles the tracking of elimination offsets around branches.
2452
2453 X is a piece of RTL being scanned.
2454
2455 INSN is the insn that it came from, if any.
2456
2457 INITIAL_P is non-zero if we are to set the offset to be the initial
2458 offset and zero if we are setting the offset of the label to be the
2459 current offset. */
2460
2461 static void
2462 set_label_offsets (x, insn, initial_p)
2463 rtx x;
2464 rtx insn;
2465 int initial_p;
2466 {
2467 enum rtx_code code = GET_CODE (x);
2468 rtx tem;
2469 int i;
2470 struct elim_table *p;
2471
2472 switch (code)
2473 {
2474 case LABEL_REF:
2475 if (LABEL_REF_NONLOCAL_P (x))
2476 return;
2477
2478 x = XEXP (x, 0);
2479
2480 /* ... fall through ... */
2481
2482 case CODE_LABEL:
2483 /* If we know nothing about this label, set the desired offsets. Note
2484 that this sets the offset at a label to be the offset before a label
2485 if we don't know anything about the label. This is not correct for
2486 the label after a BARRIER, but is the best guess we can make. If
2487 we guessed wrong, we will suppress an elimination that might have
2488 been possible had we been able to guess correctly. */
2489
2490 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2491 {
2492 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2493 offsets_at[CODE_LABEL_NUMBER (x)][i]
2494 = (initial_p ? reg_eliminate[i].initial_offset
2495 : reg_eliminate[i].offset);
2496 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2497 }
2498
2499 /* Otherwise, if this is the definition of a label and it is
2500 preceded by a BARRIER, set our offsets to the known offset of
2501 that label. */
2502
2503 else if (x == insn
2504 && (tem = prev_nonnote_insn (insn)) != 0
2505 && GET_CODE (tem) == BARRIER)
2506 {
2507 num_not_at_initial_offset = 0;
2508 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2509 {
2510 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2511 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2512 if (reg_eliminate[i].can_eliminate
2513 && (reg_eliminate[i].offset
2514 != reg_eliminate[i].initial_offset))
2515 num_not_at_initial_offset++;
2516 }
2517 }
2518
2519 else
2520 /* If neither of the above cases is true, compare each offset
2521 with those previously recorded and suppress any eliminations
2522 where the offsets disagree. */
2523
2524 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2525 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2526 != (initial_p ? reg_eliminate[i].initial_offset
2527 : reg_eliminate[i].offset))
2528 reg_eliminate[i].can_eliminate = 0;
2529
2530 return;
2531
2532 case JUMP_INSN:
2533 set_label_offsets (PATTERN (insn), insn, initial_p);
2534
2535 /* ... fall through ... */
2536
2537 case INSN:
2538 case CALL_INSN:
2539 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2540 and hence must have all eliminations at their initial offsets. */
2541 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2542 if (REG_NOTE_KIND (tem) == REG_LABEL)
2543 set_label_offsets (XEXP (tem, 0), insn, 1);
2544 return;
2545
2546 case ADDR_VEC:
2547 case ADDR_DIFF_VEC:
2548 /* Each of the labels in the address vector must be at their initial
2549      offsets. We want the first field for ADDR_VEC and the second
2550 field for ADDR_DIFF_VEC. */
2551
2552 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2553 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2554 insn, initial_p);
2555 return;
2556
2557 case SET:
2558 /* We only care about setting PC. If the source is not RETURN,
2559 IF_THEN_ELSE, or a label, disable any eliminations not at
2560 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2561 isn't one of those possibilities. For branches to a label,
2562 call ourselves recursively.
2563
2564 Note that this can disable elimination unnecessarily when we have
2565 a non-local goto since it will look like a non-constant jump to
2566 someplace in the current function. This isn't a significant
2567 problem since such jumps will normally be when all elimination
2568 pairs are back to their initial offsets. */
2569
2570 if (SET_DEST (x) != pc_rtx)
2571 return;
2572
2573 switch (GET_CODE (SET_SRC (x)))
2574 {
2575 case PC:
2576 case RETURN:
2577 return;
2578
2579 case LABEL_REF:
2580 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2581 return;
2582
2583 case IF_THEN_ELSE:
2584 tem = XEXP (SET_SRC (x), 1);
2585 if (GET_CODE (tem) == LABEL_REF)
2586 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2587 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2588 break;
2589
2590 tem = XEXP (SET_SRC (x), 2);
2591 if (GET_CODE (tem) == LABEL_REF)
2592 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2593 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2594 break;
2595 return;
2596 }
2597
2598 /* If we reach here, all eliminations must be at their initial
2599 offset because we are doing a jump to a variable address. */
2600 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2601 if (p->offset != p->initial_offset)
2602 p->can_eliminate = 0;
2603 }
2604 }
2605 \f
2606 /* Used for communication between the next two functions to properly share
2607 the vector for an ASM_OPERANDS. */
2608
2609 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2610
2611 /* Scan X and replace any eliminable registers (such as fp) with a
2612 replacement (such as sp), plus an offset.
2613
2614 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2615 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2616 MEM, we are allowed to replace a sum of a register and the constant zero
2617 with the register, which we cannot do outside a MEM. In addition, we need
2618 to record the fact that a register is referenced outside a MEM.
2619
2620 If INSN is an insn, it is the insn containing X. If we replace a REG
2621 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2622    CLOBBER of the pseudo after INSN so find_equiv_regs will know
2623    that the REG is being modified.
2624
2625 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2626 That's used when we eliminate in expressions stored in notes.
2627 This means, do not set ref_outside_mem even if the reference
2628 is outside of MEMs.
2629
2630 If we see a modification to a register we know about, take the
2631 appropriate action (see case SET, below).
2632
2633    REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2634 replacements done assuming all offsets are at their initial values. If
2635 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2636 encounter, return the actual location so that find_reloads will do
2637 the proper thing. */
2638
2639 rtx
2640 eliminate_regs (x, mem_mode, insn)
2641 rtx x;
2642 enum machine_mode mem_mode;
2643 rtx insn;
2644 {
2645 enum rtx_code code = GET_CODE (x);
2646 struct elim_table *ep;
2647 int regno;
2648 rtx new;
2649 int i, j;
2650 char *fmt;
2651 int copied = 0;
2652
2653 switch (code)
2654 {
2655 case CONST_INT:
2656 case CONST_DOUBLE:
2657 case CONST:
2658 case SYMBOL_REF:
2659 case CODE_LABEL:
2660 case PC:
2661 case CC0:
2662 case ASM_INPUT:
2663 case ADDR_VEC:
2664 case ADDR_DIFF_VEC:
2665 case RETURN:
2666 return x;
2667
2668 case REG:
2669 regno = REGNO (x);
2670
2671 /* First handle the case where we encounter a bare register that
2672 is eliminable. Replace it with a PLUS. */
2673 if (regno < FIRST_PSEUDO_REGISTER)
2674 {
2675 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2676 ep++)
2677 if (ep->from_rtx == x && ep->can_eliminate)
2678 {
2679 if (! mem_mode
2680 /* Refs inside notes don't count for this purpose. */
2681 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2682 || GET_CODE (insn) == INSN_LIST)))
2683 ep->ref_outside_mem = 1;
2684 return plus_constant (ep->to_rtx, ep->previous_offset);
2685 }
2686
2687 }
2688 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2689 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2690 {
2691 /* In this case, find_reloads would attempt to either use an
2692 incorrect address (if something is not at its initial offset)
2693 	     or substitute a replaced address into an insn (which loses
2694 if the offset is changed by some later action). So we simply
2695 return the replaced stack slot (assuming it is changed by
2696 elimination) and ignore the fact that this is actually a
2697 reference to the pseudo. Ensure we make a copy of the
2698 address in case it is shared. */
2699 new = eliminate_regs (reg_equiv_memory_loc[regno],
2700 mem_mode, insn);
2701 if (new != reg_equiv_memory_loc[regno])
2702 {
2703 cannot_omit_stores[regno] = 1;
2704 return copy_rtx (new);
2705 }
2706 }
2707 return x;
2708
2709 case PLUS:
2710 /* If this is the sum of an eliminable register and a constant, rework
2711 the sum. */
2712 if (GET_CODE (XEXP (x, 0)) == REG
2713 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2714 && CONSTANT_P (XEXP (x, 1)))
2715 {
2716 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2717 ep++)
2718 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2719 {
2720 if (! mem_mode
2721 /* Refs inside notes don't count for this purpose. */
2722 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2723 || GET_CODE (insn) == INSN_LIST)))
2724 ep->ref_outside_mem = 1;
2725
2726 /* The only time we want to replace a PLUS with a REG (this
2727 occurs when the constant operand of the PLUS is the negative
2728 of the offset) is when we are inside a MEM. We won't want
2729 to do so at other times because that would change the
2730 structure of the insn in a way that reload can't handle.
2731 We special-case the commonest situation in
2732 eliminate_regs_in_insn, so just replace a PLUS with a
2733 PLUS here, unless inside a MEM. */
2734 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2735 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2736 return ep->to_rtx;
2737 else
2738 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2739 plus_constant (XEXP (x, 1),
2740 ep->previous_offset));
2741 }
2742
2743 /* If the register is not eliminable, we are done since the other
2744 operand is a constant. */
2745 return x;
2746 }
2747
2748 /* If this is part of an address, we want to bring any constant to the
2749 outermost PLUS. We will do this by doing register replacement in
2750 our operands and seeing if a constant shows up in one of them.
2751
2752 We assume here this is part of an address (or a "load address" insn)
2753 since an eliminable register is not likely to appear in any other
2754 context.
2755
2756 If we have (plus (eliminable) (reg)), we want to produce
2757      (plus (plus (replacement) (reg)) (const)). If this was part of a
2758 normal add insn, (plus (replacement) (reg)) will be pushed as a
2759 reload. This is the desired action. */
2760
2761 {
2762 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2763 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2764
2765 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2766 {
2767 /* If one side is a PLUS and the other side is a pseudo that
2768 didn't get a hard register but has a reg_equiv_constant,
2769 we must replace the constant here since it may no longer
2770 be in the position of any operand. */
2771 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2772 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2773 && reg_renumber[REGNO (new1)] < 0
2774 && reg_equiv_constant != 0
2775 && reg_equiv_constant[REGNO (new1)] != 0)
2776 new1 = reg_equiv_constant[REGNO (new1)];
2777 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2778 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2779 && reg_renumber[REGNO (new0)] < 0
2780 && reg_equiv_constant[REGNO (new0)] != 0)
2781 new0 = reg_equiv_constant[REGNO (new0)];
2782
2783 new = form_sum (new0, new1);
2784
2785 /* As above, if we are not inside a MEM we do not want to
2786 turn a PLUS into something else. We might try to do so here
2787 for an addition of 0 if we aren't optimizing. */
2788 if (! mem_mode && GET_CODE (new) != PLUS)
2789 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2790 else
2791 return new;
2792 }
2793 }
2794 return x;
2795
2796 case MULT:
2797 /* If this is the product of an eliminable register and a
2798 	 constant, apply the distributive law and move the constant out
2799 so that we have (plus (mult ..) ..). This is needed in order
2800 to keep load-address insns valid. This case is pathological.
2801 We ignore the possibility of overflow here. */
2802 if (GET_CODE (XEXP (x, 0)) == REG
2803 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2804 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2805 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2806 ep++)
2807 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2808 {
2809 if (! mem_mode
2810 /* Refs inside notes don't count for this purpose. */
2811 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2812 || GET_CODE (insn) == INSN_LIST)))
2813 ep->ref_outside_mem = 1;
2814
2815 return
2816 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2817 ep->previous_offset * INTVAL (XEXP (x, 1)));
2818 }
2819
2820 /* ... fall through ... */
2821
2822 case CALL:
2823 case COMPARE:
2824 case MINUS:
2825 case DIV: case UDIV:
2826 case MOD: case UMOD:
2827 case AND: case IOR: case XOR:
2828 case ROTATERT: case ROTATE:
2829 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2830 case NE: case EQ:
2831 case GE: case GT: case GEU: case GTU:
2832 case LE: case LT: case LEU: case LTU:
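      /* For these binary operations, do the eliminations in both operands
	 and build a new rtx only if either operand changed. */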
2833 {
2834 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2835 rtx new1
2836 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2837
2838 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2839 return gen_rtx (code, GET_MODE (x), new0, new1);
2840 }
2841 return x;
2842
2843 case EXPR_LIST:
2844 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2845 if (XEXP (x, 0))
2846 {
2847 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2848 if (new != XEXP (x, 0))
2849 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2850 }
2851
2852 /* ... fall through ... */
2853
2854 case INSN_LIST:
2855 /* Now do eliminations in the rest of the chain. If this was
2856 an EXPR_LIST, this might result in allocating more memory than is
2857 strictly needed, but it simplifies the code. */
2858 if (XEXP (x, 1))
2859 {
2860 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2861 if (new != XEXP (x, 1))
2862 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2863 }
2864 return x;
2865
2866 case PRE_INC:
2867 case POST_INC:
2868 case PRE_DEC:
2869 case POST_DEC:
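      /* If the register being auto-incremented or auto-decremented is the
	 replacement register of an elimination, the push or pop changes the
	 offset of that elimination; record the adjustment here. */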
2870 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2871 if (ep->to_rtx == XEXP (x, 0))
2872 {
2873 int size = GET_MODE_SIZE (mem_mode);
2874
2875 /* If more bytes than MEM_MODE are pushed, account for them. */
2876 #ifdef PUSH_ROUNDING
2877 if (ep->to_rtx == stack_pointer_rtx)
2878 size = PUSH_ROUNDING (size);
2879 #endif
2880 if (code == PRE_DEC || code == POST_DEC)
2881 ep->offset += size;
2882 else
2883 ep->offset -= size;
2884 }
2885
2886 /* Fall through to generic unary operation case. */
2887 case USE:
2888 case STRICT_LOW_PART:
2889 case NEG: case NOT:
2890 case SIGN_EXTEND: case ZERO_EXTEND:
2891 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2892 case FLOAT: case FIX:
2893 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2894 case ABS:
2895 case SQRT:
2896 case FFS:
2897 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2898 if (new != XEXP (x, 0))
2899 return gen_rtx (code, GET_MODE (x), new);
2900 return x;
2901
2902 case SUBREG:
2903 /* Similar to above processing, but preserve SUBREG_WORD.
2904 Convert (subreg (mem)) to (mem) if not paradoxical.
2905 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2906 pseudo didn't get a hard reg, we must replace this with the
2907 eliminated version of the memory location because push_reloads
2908 may do the replacement in certain circumstances. */
2909 if (GET_CODE (SUBREG_REG (x)) == REG
2910 && (GET_MODE_SIZE (GET_MODE (x))
2911 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2912 && reg_equiv_memory_loc != 0
2913 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2914 {
2915 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2916 mem_mode, insn);
2917
2918 /* If we didn't change anything, we must retain the pseudo. */
2919 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2920 new = SUBREG_REG (x);
2921 else
2922 {
2923 /* Otherwise, ensure NEW isn't shared in case we have to reload
2924 it. */
2925 new = copy_rtx (new);
2926
2927 /* In this case, we must show that the pseudo is used in this
2928 insn so that delete_output_reload will do the right thing. */
2929 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2930 && GET_CODE (insn) != INSN_LIST)
2931 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
2932 insn);
2933 }
2934 }
2935 else
2936 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2937
2938 if (new != XEXP (x, 0))
2939 {
2940 if (GET_CODE (new) == MEM
2941 && (GET_MODE_SIZE (GET_MODE (x))
2942 <= GET_MODE_SIZE (GET_MODE (new)))
2943 #ifdef LOAD_EXTEND_OP
2944 /* On these machines we will be reloading what is
2945 inside the SUBREG if it originally was a pseudo and
2946 the inner and outer modes are both a word or
2947 smaller. So leave the SUBREG then. */
2948 && ! (GET_CODE (SUBREG_REG (x)) == REG
2949 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2950 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD
2951 && (GET_MODE_SIZE (GET_MODE (x))
2952 > GET_MODE_SIZE (GET_MODE (new)))
2953 && INTEGRAL_MODE_P (GET_MODE (new))
2954 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
2955 #endif
2956 )
2957 {
2958 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2959 enum machine_mode mode = GET_MODE (x);
2960
2961 if (BYTES_BIG_ENDIAN)
2962 offset += (MIN (UNITS_PER_WORD,
2963 GET_MODE_SIZE (GET_MODE (new)))
2964 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2965
2966 PUT_MODE (new, mode);
2967 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2968 return new;
2969 }
2970 else
2971 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2972 }
2973
2974 return x;
2975
2976 case CLOBBER:
2977 /* If clobbering a register that is the replacement register for an
2978 elimination we still think can be performed, note that it cannot
2979 be performed. Otherwise, we need not be concerned about it. */
2980 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2981 if (ep->to_rtx == XEXP (x, 0))
2982 ep->can_eliminate = 0;
2983
2984 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2985 if (new != XEXP (x, 0))
2986 return gen_rtx (code, GET_MODE (x), new);
2987 return x;
2988
2989 case ASM_OPERANDS:
2990 {
2991 rtx *temp_vec;
2992 /* Properly handle sharing input and constraint vectors. */
2993 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2994 {
2995 /* When we come to a new vector not seen before,
2996 scan all its elements; keep the old vector if none
2997 of them changes; otherwise, make a copy. */
2998 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2999 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3000 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3001 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
3002 mem_mode, insn);
3003
3004 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3005 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3006 break;
3007
3008 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3009 new_asm_operands_vec = old_asm_operands_vec;
3010 else
3011 new_asm_operands_vec
3012 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3013 }
3014
3015 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3016 if (new_asm_operands_vec == old_asm_operands_vec)
3017 return x;
3018
3019 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3020 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3021 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
3022 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3023 ASM_OPERANDS_SOURCE_FILE (x),
3024 ASM_OPERANDS_SOURCE_LINE (x));
3025 new->volatil = x->volatil;
3026 return new;
3027 }
3028
3029 case SET:
3030 /* Check for setting a register that we know about. */
3031 if (GET_CODE (SET_DEST (x)) == REG)
3032 {
3033 /* See if this is setting the replacement register for an
3034 elimination.
3035
3036 If DEST is the hard frame pointer, we do nothing because we
3037 assume that all assignments to the frame pointer are for
3038 non-local gotos and are being done at a time when they are valid
3039 and do not disturb anything else. Some machines want to
3040 eliminate a fake argument pointer (or even a fake frame pointer)
3041 with either the real frame or the stack pointer. Assignments to
3042 the hard frame pointer must not prevent this elimination. */
3043
3044 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3045 ep++)
3046 if (ep->to_rtx == SET_DEST (x)
3047 && SET_DEST (x) != hard_frame_pointer_rtx)
3048 {
3049 /* If it is being incremented, adjust the offset. Otherwise,
3050 this elimination can't be done. */
3051 rtx src = SET_SRC (x);
3052
3053 if (GET_CODE (src) == PLUS
3054 && XEXP (src, 0) == SET_DEST (x)
3055 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3056 ep->offset -= INTVAL (XEXP (src, 1));
3057 else
3058 ep->can_eliminate = 0;
3059 }
3060
3061 	  /* Now check to see whether we are assigning to a register that can be
3062 eliminated. If so, it must be as part of a PARALLEL, since we
3063 will not have been called if this is a single SET. So indicate
3064 that we can no longer eliminate this reg. */
3065 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3066 ep++)
3067 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3068 ep->can_eliminate = 0;
3069 }
3070
3071 /* Now avoid the loop below in this common case. */
3072 {
3073 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3074 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3075
3076 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3077 write a CLOBBER insn. */
3078 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3079 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3080 && GET_CODE (insn) != INSN_LIST)
3081 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3082
3083 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3084 return gen_rtx (SET, VOIDmode, new0, new1);
3085 }
3086
3087 return x;
3088
3089 case MEM:
3090 /* Our only special processing is to pass the mode of the MEM to our
3091 recursive call and copy the flags. While we are here, handle this
3092 case more efficiently. */
3093 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3094 if (new != XEXP (x, 0))
3095 {
3096 new = gen_rtx (MEM, GET_MODE (x), new);
3097 new->volatil = x->volatil;
3098 new->unchanging = x->unchanging;
3099 new->in_struct = x->in_struct;
3100 return new;
3101 }
3102 else
3103 return x;
3104 }
3105
3106 /* Process each of our operands recursively. If any have changed, make a
3107 copy of the rtx. */
3108 fmt = GET_RTX_FORMAT (code);
3109 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3110 {
3111 if (*fmt == 'e')
3112 {
3113 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3114 if (new != XEXP (x, i) && ! copied)
3115 {
3116 rtx new_x = rtx_alloc (code);
3117 bcopy ((char *) x, (char *) new_x,
3118 (sizeof (*new_x) - sizeof (new_x->fld)
3119 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3120 x = new_x;
3121 copied = 1;
3122 }
3123 XEXP (x, i) = new;
3124 }
3125 else if (*fmt == 'E')
3126 {
3127 int copied_vec = 0;
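	  /* Copy the vector lazily: only when one of its elements
	     actually changes. */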
3128 for (j = 0; j < XVECLEN (x, i); j++)
3129 {
3130 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3131 if (new != XVECEXP (x, i, j) && ! copied_vec)
3132 {
3133 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3134 &XVECEXP (x, i, 0));
3135 if (! copied)
3136 {
3137 rtx new_x = rtx_alloc (code);
3138 bcopy ((char *) x, (char *) new_x,
3139 (sizeof (*new_x) - sizeof (new_x->fld)
3140 + (sizeof (new_x->fld[0])
3141 * GET_RTX_LENGTH (code))));
3142 x = new_x;
3143 copied = 1;
3144 }
3145 XVEC (x, i) = new_v;
3146 copied_vec = 1;
3147 }
3148 XVECEXP (x, i, j) = new;
3149 }
3150 }
3151 }
3152
3153 return x;
3154 }
3155 \f
3156 /* Scan INSN and eliminate all eliminable registers in it.
3157
3158 If REPLACE is nonzero, do the replacement destructively. Also
3159    delete the insn as dead if it is setting an eliminable register.
3160
3161 If REPLACE is zero, do all our allocations in reload_obstack.
3162
3163 If no eliminations were done and this insn doesn't require any elimination
3164 processing (these are not identical conditions: it might be updating sp,
3165 but not referencing fp; this needs to be seen during reload_as_needed so
3166 that the offset between fp and sp can be taken into consideration), zero
3167 is returned. Otherwise, 1 is returned. */
3168
3169 static int
3170 eliminate_regs_in_insn (insn, replace)
3171 rtx insn;
3172 int replace;
3173 {
3174 rtx old_body = PATTERN (insn);
3175 rtx old_set = single_set (insn);
3176 rtx new_body;
3177 int val = 0;
3178 struct elim_table *ep;
3179
3180 if (! replace)
3181 push_obstacks (&reload_obstack, &reload_obstack);
3182
3183 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3184 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3185 {
3186 /* Check for setting an eliminable register. */
3187 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3188 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3189 {
3190 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3191 /* If this is setting the frame pointer register to the
3192 hardware frame pointer register and this is an elimination
3193 that will be done (tested above), this insn is really
3194 adjusting the frame pointer downward to compensate for
3195 the adjustment done before a nonlocal goto. */
3196 if (ep->from == FRAME_POINTER_REGNUM
3197 && ep->to == HARD_FRAME_POINTER_REGNUM)
3198 {
3199 rtx src = SET_SRC (old_set);
3200 int offset, ok = 0;
3201
3202 if (src == ep->to_rtx)
3203 offset = 0, ok = 1;
3204 else if (GET_CODE (src) == PLUS
3205 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3206 offset = INTVAL (XEXP (src, 0)), ok = 1;
3207
3208 if (ok)
3209 {
3210 if (replace)
3211 {
3212 rtx src
3213 = plus_constant (ep->to_rtx, offset - ep->offset);
3214
3215 /* First see if this insn remains valid when we
3216 make the change. If not, keep the INSN_CODE
3217 the same and let reload fix it up. */
3218 validate_change (insn, &SET_SRC (old_set), src, 1);
3219 validate_change (insn, &SET_DEST (old_set),
3220 ep->to_rtx, 1);
3221 if (! apply_change_group ())
3222 {
3223 SET_SRC (old_set) = src;
3224 SET_DEST (old_set) = ep->to_rtx;
3225 }
3226 }
3227
3228 val = 1;
3229 goto done;
3230 }
3231 }
3232 #endif
3233
3234 /* In this case this insn isn't serving a useful purpose. We
3235 will delete it in reload_as_needed once we know that this
3236 elimination is, in fact, being done.
3237
3238 If REPLACE isn't set, we can't delete this insn, but needn't
3239 process it since it won't be used unless something changes. */
3240 if (replace)
3241 delete_dead_insn (insn);
3242 val = 1;
3243 goto done;
3244 }
3245
3246 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3247 in the insn is the negative of the offset in FROM. Substitute
3248 (set (reg) (reg to)) for the insn and change its code.
3249
3250 We have to do this here, rather than in eliminate_regs, so that we can
3251 change the insn code. */
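/* For example (hypothetical offsets): if the frame-pointer-to-stack-pointer
   elimination currently has ep->offset == 12, an insn whose body is

	(set (reg:SI 3) (plus:SI (reg fp) (const_int -12)))

   is replaced outright by (set (reg:SI 3) (reg sp)) here, instead of being
   left for eliminate_regs to rewrite as a PLUS.  */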
3252
3253 if (GET_CODE (SET_SRC (old_set)) == PLUS
3254 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3255 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3256 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3257 ep++)
3258 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3259 && ep->can_eliminate)
3260 {
3261 /* We must stop at the first elimination that will be used.
3262 If this one would replace the PLUS with a REG, do it
3263 now. Otherwise, quit the loop and let eliminate_regs
3264 do its normal replacement. */
3265 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3266 {
3267 /* We assume here that we don't need a PARALLEL of
3268 any CLOBBERs for this assignment. There's not
3269 much we can do if we do need it. */
3270 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3271 SET_DEST (old_set), ep->to_rtx);
3272 INSN_CODE (insn) = -1;
3273 val = 1;
3274 goto done;
3275 }
3276
3277 break;
3278 }
3279 }
3280
3281 old_asm_operands_vec = 0;
3282
3283 /* Replace the body of this insn with a substituted form. If we changed
3284 something, return non-zero.
3285
3286 If we are replacing a body that was a (set X (plus Y Z)), try to
3287 re-recognize the insn. We do this in case we had a simple addition
3288 but now can do this as a load-address. This saves an insn in this
3289 common case. */
3290
3291 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3292 if (new_body != old_body)
3293 {
3294 /* If we aren't replacing things permanently and we changed something,
3295 make another copy to ensure that all the RTL is new. Otherwise
3296 things can go wrong if find_reloads swaps commutative operands
3297 and one is inside RTL that has been copied while the other is not. */
3298
3299 /* Don't copy an asm_operands because (1) there's no need and (2)
3300 copy_rtx can't do it properly when there are multiple outputs. */
3301 if (! replace && asm_noperands (old_body) < 0)
3302 new_body = copy_rtx (new_body);
3303
3304 /* If we had a move insn but now we don't, rerecognize it. This will
3305 cause spurious re-recognition if the old move had a PARALLEL since
3306 the new one still will, but we can't call single_set without
3307 having put NEW_BODY into the insn and the re-recognition won't
3308 hurt in this rare case. */
3309 if (old_set != 0
3310 && ((GET_CODE (SET_SRC (old_set)) == REG
3311 && (GET_CODE (new_body) != SET
3312 || GET_CODE (SET_SRC (new_body)) != REG))
3313 /* If this was a load from or store to memory, compare
3314 the MEM in recog_operand to the one in the insn. If they
3315 are not equal, then rerecognize the insn. */
3316 || (old_set != 0
3317 && ((GET_CODE (SET_SRC (old_set)) == MEM
3318 && SET_SRC (old_set) != recog_operand[1])
3319 || (GET_CODE (SET_DEST (old_set)) == MEM
3320 && SET_DEST (old_set) != recog_operand[0])))
3321 /* If this was an add insn before, rerecognize. */
3322 || GET_CODE (SET_SRC (old_set)) == PLUS))
3323 {
3324 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3325 /* If recognition fails, store the new body anyway.
3326 It's normal to have recognition failures here
3327 due to bizarre memory addresses; reloading will fix them. */
3328 PATTERN (insn) = new_body;
3329 }
3330 else
3331 PATTERN (insn) = new_body;
3332
3333 val = 1;
3334 }
3335
3336 /* Loop through all elimination pairs. See if any have changed and
3337 recalculate the number not at initial offset.
3338
3339 Compute the maximum offset (minimum offset if the stack does not
3340 grow downward) for each elimination pair.
3341
3342 We also detect cases where register elimination cannot be done,
3343 namely, if a register would be both changed and referenced outside a MEM
3344 in the resulting insn since such an insn is often undefined and, even if
3345 not, we cannot know what meaning will be given to it. Note that it is
3346 valid to have a register used in an address in an insn that changes it
3347 (presumably with a pre- or post-increment or decrement).
3348
3349 If anything changes, return nonzero. */
3350
3351 num_not_at_initial_offset = 0;
3352 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3353 {
3354 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3355 ep->can_eliminate = 0;
3356
3357 ep->ref_outside_mem = 0;
3358
3359 if (ep->previous_offset != ep->offset)
3360 val = 1;
3361
3362 ep->previous_offset = ep->offset;
3363 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3364 num_not_at_initial_offset++;
3365
3366 #ifdef STACK_GROWS_DOWNWARD
3367 ep->max_offset = MAX (ep->max_offset, ep->offset);
3368 #else
3369 ep->max_offset = MIN (ep->max_offset, ep->offset);
3370 #endif
3371 }
3372
3373 done:
3374 /* If we changed something, perform elimination in REG_NOTES. This is
3375 needed even when REPLACE is zero because a REG_DEAD note might refer
3376 to a register that we eliminate and could cause a different number
3377 of spill registers to be needed in the final reload pass than in
3378 the pre-passes. */
3379 if (val && REG_NOTES (insn) != 0)
3380 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3381
3382 if (! replace)
3383 pop_obstacks ();
3384
3385 return val;
3386 }
3387
3388 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3389 replacement we currently believe is valid, mark it as not eliminable if X
3390 modifies DEST in any way other than by adding a constant integer to it.
3391
3392 If DEST is the frame pointer, we do nothing because we assume that
3393 all assignments to the hard frame pointer are nonlocal gotos and are being
3394 done at a time when they are valid and do not disturb anything else.
3395 Some machines want to eliminate a fake argument pointer with either the
3396 frame or stack pointer. Assignments to the hard frame pointer must not
3397 prevent this elimination.
3398
3399 Called via note_stores from reload before starting its passes to scan
3400 the insns of the function. */
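/* For instance (illustrative only; which registers are eliminable depends on
   the target): if the stack pointer is the target of a live elimination,
   a store such as (set (reg sp) (reg 1)) disables that elimination here,
   whereas (set (reg sp) (plus (reg sp) (const_int -8))) leaves it intact,
   since only a constant is being added.  */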
3401
3402 static void
3403 mark_not_eliminable (dest, x)
3404 rtx dest;
3405 rtx x;
3406 {
3407 register int i;
3408
3409 /* A SUBREG of a hard register here is just changing its mode. We should
3410 not see a SUBREG of an eliminable hard register, but check just in
3411 case. */
3412 if (GET_CODE (dest) == SUBREG)
3413 dest = SUBREG_REG (dest);
3414
3415 if (dest == hard_frame_pointer_rtx)
3416 return;
3417
3418 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3419 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3420 && (GET_CODE (x) != SET
3421 || GET_CODE (SET_SRC (x)) != PLUS
3422 || XEXP (SET_SRC (x), 0) != dest
3423 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3424 {
3425 reg_eliminate[i].can_eliminate_previous
3426 = reg_eliminate[i].can_eliminate = 0;
3427 num_eliminable--;
3428 }
3429 }
3430 \f
3431 /* Kick all pseudos out of hard register REGNO.
3432 If GLOBAL is nonzero, try to find someplace else to put them.
3433 If DUMPFILE is nonzero, log actions taken on that file.
3434
3435 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3436 because we found we can't eliminate some register. In that case, no pseudos
3437 are allowed to be in the register, even if they are only in a block that
3438 doesn't require spill registers, unlike the case when we are spilling this
3439 hard reg to produce another spill register.
3440
3441 Return nonzero if any pseudos needed to be kicked out. */
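/* A hypothetical example: if pseudo 42 had been allocated to hard reg 6 and
   reg 6 is chosen as a spill reg, pseudo 42 loses its home here;
   reg_renumber[42] becomes -1 and alter_reg either gives it a stack slot or,
   when GLOBAL is set, retry_global_alloc may find it another hard reg.  */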
3442
3443 static int
3444 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3445 register int regno;
3446 int global;
3447 FILE *dumpfile;
3448 int cant_eliminate;
3449 {
3450 enum reg_class class = REGNO_REG_CLASS (regno);
3451 int something_changed = 0;
3452 register int i;
3453
3454 SET_HARD_REG_BIT (forbidden_regs, regno);
3455
3456 if (cant_eliminate)
3457 regs_ever_live[regno] = 1;
3458
3459 /* Spill every pseudo reg that was allocated to this reg
3460 or to something that overlaps this reg. */
3461
3462 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3463 if (reg_renumber[i] >= 0
3464 && reg_renumber[i] <= regno
3465 && (reg_renumber[i]
3466 + HARD_REGNO_NREGS (reg_renumber[i],
3467 PSEUDO_REGNO_MODE (i))
3468 > regno))
3469 {
3470 /* If this register belongs solely to a basic block which needed no
3471 spilling of any class that this register is contained in,
3472 leave it be, unless we are spilling this register because
3473 it was a hard register that can't be eliminated. */
3474
3475 if (! cant_eliminate
3476 && basic_block_needs[0]
3477 && reg_basic_block[i] >= 0
3478 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3479 {
3480 enum reg_class *p;
3481
3482 for (p = reg_class_superclasses[(int) class];
3483 *p != LIM_REG_CLASSES; p++)
3484 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3485 break;
3486
3487 if (*p == LIM_REG_CLASSES)
3488 continue;
3489 }
3490
3491 /* Mark it as no longer having a hard register home. */
3492 reg_renumber[i] = -1;
3493 /* We will need to scan everything again. */
3494 something_changed = 1;
3495 if (global)
3496 retry_global_alloc (i, forbidden_regs);
3497
3498 alter_reg (i, regno);
3499 if (dumpfile)
3500 {
3501 if (reg_renumber[i] == -1)
3502 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3503 else
3504 fprintf (dumpfile, " Register %d now in %d.\n\n",
3505 i, reg_renumber[i]);
3506 }
3507 }
3508 for (i = 0; i < scratch_list_length; i++)
3509 {
3510 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3511 {
3512 if (! cant_eliminate && basic_block_needs[0]
3513 && ! basic_block_needs[(int) class][scratch_block[i]])
3514 {
3515 enum reg_class *p;
3516
3517 for (p = reg_class_superclasses[(int) class];
3518 *p != LIM_REG_CLASSES; p++)
3519 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3520 break;
3521
3522 if (*p == LIM_REG_CLASSES)
3523 continue;
3524 }
3525 PUT_CODE (scratch_list[i], SCRATCH);
3526 scratch_list[i] = 0;
3527 something_changed = 1;
3528 continue;
3529 }
3530 }
3531
3532 return something_changed;
3533 }
3534 \f
3535 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3536 Also mark any hard registers used to store user variables as
3537 forbidden from being used for spill registers. */
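/* A paradoxical subreg is one whose mode is wider than the mode of the inner
   register, e.g. (subreg:DI (reg:SI 101) 0) on a machine with 4-byte SImode;
   here it would set reg_max_ref_width[101] to 8 so that any stack slot later
   given to pseudo 101 is made wide enough.  (Illustrative modes and sizes.)  */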
3538
3539 static void
3540 scan_paradoxical_subregs (x)
3541 register rtx x;
3542 {
3543 register int i;
3544 register char *fmt;
3545 register enum rtx_code code = GET_CODE (x);
3546
3547 switch (code)
3548 {
3549 case REG:
3550 #ifdef SMALL_REGISTER_CLASSES
3551 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3552 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3553 #endif
3554 return;
3555
3556 case CONST_INT:
3557 case CONST:
3558 case SYMBOL_REF:
3559 case LABEL_REF:
3560 case CONST_DOUBLE:
3561 case CC0:
3562 case PC:
3563 case USE:
3564 case CLOBBER:
3565 return;
3566
3567 case SUBREG:
3568 if (GET_CODE (SUBREG_REG (x)) == REG
3569 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3570 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3571 = GET_MODE_SIZE (GET_MODE (x));
3572 return;
3573 }
3574
3575 fmt = GET_RTX_FORMAT (code);
3576 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3577 {
3578 if (fmt[i] == 'e')
3579 scan_paradoxical_subregs (XEXP (x, i));
3580 else if (fmt[i] == 'E')
3581 {
3582 register int j;
3583 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3584 scan_paradoxical_subregs (XVECEXP (x, i, j));
3585 }
3586 }
3587 }
3588 \f
3589 static int
3590 hard_reg_use_compare (p1, p2)
3591 struct hard_reg_n_uses *p1, *p2;
3592 {
3593 int tem = p1->uses - p2->uses;
3594 if (tem != 0) return tem;
3595 /* If regs are equally good, sort by regno,
3596 so that the results of qsort leave nothing to chance. */
3597 return p1->regno - p2->regno;
3598 }
3599
3600 /* Choose the order to consider regs for use as reload registers
3601 based on how much trouble would be caused by spilling one.
3602 Store them in order of decreasing preference in potential_reload_regs. */
3603
3604 static void
3605 order_regs_for_reload ()
3606 {
3607 register int i;
3608 register int o = 0;
3609 int large = 0;
3610
3611 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3612
3613 CLEAR_HARD_REG_SET (bad_spill_regs);
3614
3615 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3616 potential_reload_regs[i] = -1;
3617
3618 /* Count number of uses of each hard reg by pseudo regs allocated to it
3619 and then order them by decreasing use. */
3620
3621 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3622 {
3623 hard_reg_n_uses[i].uses = 0;
3624 hard_reg_n_uses[i].regno = i;
3625 }
3626
3627 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3628 {
3629 int regno = reg_renumber[i];
3630 if (regno >= 0)
3631 {
3632 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3633 while (regno < lim)
3634 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3635 }
3636 large += reg_n_refs[i];
3637 }
3638
3639 /* Now fixed registers (which cannot safely be used for reloading)
3640 get a very high use count so they will be considered least desirable.
3641 Registers used explicitly in the rtl code are almost as bad. */
3642
3643 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3644 {
3645 if (fixed_regs[i])
3646 {
3647 hard_reg_n_uses[i].uses += 2 * large + 2;
3648 SET_HARD_REG_BIT (bad_spill_regs, i);
3649 }
3650 else if (regs_explicitly_used[i])
3651 {
3652 hard_reg_n_uses[i].uses += large + 1;
3653 #ifndef SMALL_REGISTER_CLASSES
3654 /* ??? We are doing this here because of the potential that
3655 bad code may be generated if a register explicitly used in
3656 an insn was used as a spill register for that insn. But
3657 not using these as spill registers may lose on some machines.
3658 We'll have to see how this works out. */
3659 SET_HARD_REG_BIT (bad_spill_regs, i);
3660 #endif
3661 }
3662 }
3663 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3664 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3665
3666 #ifdef ELIMINABLE_REGS
3667 /* If registers other than the frame pointer are eliminable, mark them as
3668 poor choices. */
3669 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3670 {
3671 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3672 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3673 }
3674 #endif
3675
3676 /* Prefer registers not so far used, for use in temporary loading.
3677 Among them, if REG_ALLOC_ORDER is defined, use that order.
3678 Otherwise, prefer registers not preserved by calls. */
3679
3680 #ifdef REG_ALLOC_ORDER
3681 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3682 {
3683 int regno = reg_alloc_order[i];
3684
3685 if (hard_reg_n_uses[regno].uses == 0)
3686 potential_reload_regs[o++] = regno;
3687 }
3688 #else
3689 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3690 {
3691 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3692 potential_reload_regs[o++] = i;
3693 }
3694 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3695 {
3696 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3697 potential_reload_regs[o++] = i;
3698 }
3699 #endif
3700
3701 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3702 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3703
3704 /* Now add the regs that are already used,
3705 preferring those used less often. The fixed and otherwise forbidden
3706 registers will be at the end of this list. */
3707
3708 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3709 if (hard_reg_n_uses[i].uses != 0)
3710 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3711 }
3712 \f
3713 /* Used in reload_as_needed to sort the spilled regs. */
3714
3715 static int
3716 compare_spill_regs (r1, r2)
3717 short *r1, *r2;
3718 {
3719 return *r1 - *r2;
3720 }
3721
3722 /* Reload pseudo-registers into hard regs around each insn as needed.
3723 Additional register load insns are output before the insn that needs it
3724 and perhaps store insns after insns that modify the reloaded pseudo reg.
3725
3726 reg_last_reload_reg and reg_reloaded_contents keep track of
3727 which registers are already available in reload registers.
3728 We update these for the reloads that we perform,
3729 as the insns are scanned. */
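/* A sketch of the inheritance this bookkeeping allows (register numbers are
   hypothetical): if one insn reloads pseudo 200 into spill reg 2,
   reg_last_reload_reg[200] records that, and a later insn needing pseudo 200
   may reuse reg 2 instead of emitting another load, provided nothing in
   between (a label, a call clobbering the reg, or a store into either one)
   has invalidated the copy.  */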
3730
3731 static void
3732 reload_as_needed (first, live_known)
3733 rtx first;
3734 int live_known;
3735 {
3736 register rtx insn;
3737 register int i;
3738 int this_block = 0;
3739 rtx x;
3740 rtx after_call = 0;
3741
3742 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3743 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
3744 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3745 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
3746 reg_has_output_reload = (char *) alloca (max_regno);
3747 for (i = 0; i < n_spills; i++)
3748 {
3749 reg_reloaded_contents[i] = -1;
3750 reg_reloaded_insn[i] = 0;
3751 }
3752
3753 /* Reset all offsets on eliminable registers to their initial values. */
3754 #ifdef ELIMINABLE_REGS
3755 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3756 {
3757 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3758 reg_eliminate[i].initial_offset);
3759 reg_eliminate[i].previous_offset
3760 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3761 }
3762 #else
3763 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3764 reg_eliminate[0].previous_offset
3765 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3766 #endif
3767
3768 num_not_at_initial_offset = 0;
3769
3770 /* Order the spilled regs, so that allocate_reload_reg can guarantee to
3771 pack registers with group needs. */
3772 if (n_spills > 1)
3773 {
3774 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
3775 for (i = 0; i < n_spills; i++)
3776 spill_reg_order[spill_regs[i]] = i;
3777 }
3778
3779 for (insn = first; insn;)
3780 {
3781 register rtx next = NEXT_INSN (insn);
3782
3783 /* Notice when we move to a new basic block. */
3784 if (live_known && this_block + 1 < n_basic_blocks
3785 && insn == basic_block_head[this_block+1])
3786 ++this_block;
3787
3788 /* If we pass a label, copy the offsets from the label information
3789 into the current offsets of each elimination. */
3790 if (GET_CODE (insn) == CODE_LABEL)
3791 {
3792 num_not_at_initial_offset = 0;
3793 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3794 {
3795 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3796 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3797 if (reg_eliminate[i].can_eliminate
3798 && (reg_eliminate[i].offset
3799 != reg_eliminate[i].initial_offset))
3800 num_not_at_initial_offset++;
3801 }
3802 }
3803
3804 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3805 {
3806 rtx avoid_return_reg = 0;
3807 rtx oldpat = PATTERN (insn);
3808
3809 #ifdef SMALL_REGISTER_CLASSES
3810 /* Set avoid_return_reg if this is an insn
3811 that might use the value of a function call. */
3812 if (GET_CODE (insn) == CALL_INSN)
3813 {
3814 if (GET_CODE (PATTERN (insn)) == SET)
3815 after_call = SET_DEST (PATTERN (insn));
3816 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3817 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3818 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3819 else
3820 after_call = 0;
3821 }
3822 else if (after_call != 0
3823 && !(GET_CODE (PATTERN (insn)) == SET
3824 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3825 {
3826 if (reg_referenced_p (after_call, PATTERN (insn)))
3827 avoid_return_reg = after_call;
3828 after_call = 0;
3829 }
3830 #endif /* SMALL_REGISTER_CLASSES */
3831
3832 /* If this is a USE or CLOBBER of a MEM, ensure that any
3833 references to eliminable registers have been removed. */
3834
3835 if ((GET_CODE (PATTERN (insn)) == USE
3836 || GET_CODE (PATTERN (insn)) == CLOBBER)
3837 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3838 XEXP (XEXP (PATTERN (insn), 0), 0)
3839 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3840 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
3841
3842 /* If we need to do register elimination processing, do so.
3843 This might delete the insn, in which case we are done. */
3844 if (num_eliminable && GET_MODE (insn) == QImode)
3845 {
3846 eliminate_regs_in_insn (insn, 1);
3847 if (GET_CODE (insn) == NOTE)
3848 {
3849 insn = next;
3850 continue;
3851 }
3852 }
3853
3854 if (GET_MODE (insn) == VOIDmode)
3855 n_reloads = 0;
3856 /* First find the pseudo regs that must be reloaded for this insn.
3857 This info is returned in the tables reload_... (see reload.h).
3858 Also modify the body of INSN by substituting RELOAD
3859 rtx's for those pseudo regs. */
3860 else
3861 {
3862 bzero (reg_has_output_reload, max_regno);
3863 CLEAR_HARD_REG_SET (reg_is_output_reload);
3864
3865 find_reloads (insn, 1, spill_indirect_levels, live_known,
3866 spill_reg_order);
3867 }
3868
3869 if (n_reloads > 0)
3870 {
3871 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3872 rtx p;
3873 int class;
3874
3875 /* If this block has not had spilling done for a
3876 particular class and we have any non-optionals that need a
3877 spill reg in that class, abort. */
3878
3879 for (class = 0; class < N_REG_CLASSES; class++)
3880 if (basic_block_needs[class] != 0
3881 && basic_block_needs[class][this_block] == 0)
3882 for (i = 0; i < n_reloads; i++)
3883 if (class == (int) reload_reg_class[i]
3884 && reload_reg_rtx[i] == 0
3885 && ! reload_optional[i]
3886 && (reload_in[i] != 0 || reload_out[i] != 0
3887 || reload_secondary_p[i] != 0))
3888 fatal_insn ("Non-optional registers need a spill register", insn);
3889
3890 /* Now compute which reload regs to reload them into. Perhaps
3891 reusing reload regs from previous insns, or else output
3892 load insns to reload them. Maybe output store insns too.
3893 Record the choices of reload reg in reload_reg_rtx. */
3894 choose_reload_regs (insn, avoid_return_reg);
3895
3896 #ifdef SMALL_REGISTER_CLASSES
3897 /* Merge any reloads that we didn't combine for fear of
3898 increasing the number of spill registers needed but now
3899 discover can be safely merged. */
3900 merge_assigned_reloads (insn);
3901 #endif
3902
3903 /* Generate the insns to reload operands into or out of
3904 their reload regs. */
3905 emit_reload_insns (insn);
3906
3907 /* Substitute the chosen reload regs from reload_reg_rtx
3908 into the insn's body (or perhaps into the bodies of other
3909 load and store insns that we just made for reloading
3910 and that we moved the structure into). */
3911 subst_reloads ();
3912
3913 /* If this was an ASM, make sure that all the reload insns
3914 we have generated are valid. If not, give an error
3915 and delete them. */
3916
3917 if (asm_noperands (PATTERN (insn)) >= 0)
3918 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3919 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3920 && (recog_memoized (p) < 0
3921 || (insn_extract (p),
3922 ! constrain_operands (INSN_CODE (p), 1))))
3923 {
3924 error_for_asm (insn,
3925 "`asm' operand requires impossible reload");
3926 PUT_CODE (p, NOTE);
3927 NOTE_SOURCE_FILE (p) = 0;
3928 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3929 }
3930 }
3931 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3932 is no longer validly lying around to save a future reload.
3933 Note that this does not detect pseudos that were reloaded
3934 for this insn in order to be stored in it
3935 (obeying register constraints). That is correct; such reload
3936 registers ARE still valid. */
3937 note_stores (oldpat, forget_old_reloads_1);
3938
3939 /* There may have been CLOBBER insns placed after INSN. So scan
3940 between INSN and NEXT and use them to forget old reloads. */
3941 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3942 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3943 note_stores (PATTERN (x), forget_old_reloads_1);
3944
3945 #ifdef AUTO_INC_DEC
3946 /* Likewise for regs altered by auto-increment in this insn.
3947 But note that the reg-notes are not changed by reloading:
3948 they still contain the pseudo-regs, not the spill regs. */
3949 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3950 if (REG_NOTE_KIND (x) == REG_INC)
3951 {
3952 /* See if this pseudo reg was reloaded in this insn.
3953 If so, its last-reload info is still valid
3954 because it is based on this insn's reload. */
3955 for (i = 0; i < n_reloads; i++)
3956 if (reload_out[i] == XEXP (x, 0))
3957 break;
3958
3959 if (i == n_reloads)
3960 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
3961 }
3962 #endif
3963 }
3964 /* A reload reg's contents are unknown after a label. */
3965 if (GET_CODE (insn) == CODE_LABEL)
3966 for (i = 0; i < n_spills; i++)
3967 {
3968 reg_reloaded_contents[i] = -1;
3969 reg_reloaded_insn[i] = 0;
3970 }
3971
3972 /* Don't assume a reload reg is still good after a call insn
3973 if it is a call-used reg. */
3974 else if (GET_CODE (insn) == CALL_INSN)
3975 for (i = 0; i < n_spills; i++)
3976 if (call_used_regs[spill_regs[i]])
3977 {
3978 reg_reloaded_contents[i] = -1;
3979 reg_reloaded_insn[i] = 0;
3980 }
3981
3982 /* In case registers overlap, allow certain insns to invalidate
3983 particular hard registers. */
3984
3985 #ifdef INSN_CLOBBERS_REGNO_P
3986 for (i = 0 ; i < n_spills ; i++)
3987 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3988 {
3989 reg_reloaded_contents[i] = -1;
3990 reg_reloaded_insn[i] = 0;
3991 }
3992 #endif
3993
3994 insn = next;
3995
3996 #ifdef USE_C_ALLOCA
3997 alloca (0);
3998 #endif
3999 }
4000 }
4001
4002 /* Discard all record of any value reloaded from X,
4003 or reloaded in X from someplace else;
4004 unless X is an output reload reg of the current insn.
4005
4006 X may be a hard reg (the reload reg)
4007 or it may be a pseudo reg that was reloaded from. */
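/* For example (hypothetical numbers): if spill reg 5 currently holds a copy
   of pseudo 300 and the insn being scanned stores into pseudo 300, or into
   reg 5 itself other than as this insn's own output reload, the recorded
   copy is discarded here so later insns do not wrongly inherit it.  */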
4008
4009 static void
4010 forget_old_reloads_1 (x, ignored)
4011 rtx x;
4012 rtx ignored;
4013 {
4014 register int regno;
4015 int nr;
4016 int offset = 0;
4017
4018 /* note_stores does give us subregs of hard regs. */
4019 while (GET_CODE (x) == SUBREG)
4020 {
4021 offset += SUBREG_WORD (x);
4022 x = SUBREG_REG (x);
4023 }
4024
4025 if (GET_CODE (x) != REG)
4026 return;
4027
4028 regno = REGNO (x) + offset;
4029
4030 if (regno >= FIRST_PSEUDO_REGISTER)
4031 nr = 1;
4032 else
4033 {
4034 int i;
4035 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4036 /* Storing into a spilled-reg invalidates its contents.
4037 This can happen if a block-local pseudo is allocated to that reg
4038 and it wasn't spilled because this block's total need is 0.
4039 Then some insn might have an optional reload and use this reg. */
4040 for (i = 0; i < nr; i++)
4041 if (spill_reg_order[regno + i] >= 0
4042 /* But don't do this if the reg actually serves as an output
4043 reload reg in the current instruction. */
4044 && (n_reloads == 0
4045 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4046 {
4047 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4048 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4049 }
4050 }
4051
4052 /* Since value of X has changed,
4053 forget any value previously copied from it. */
4054
4055 while (nr-- > 0)
4056 /* But don't forget a copy if this is the output reload
4057 that establishes the copy's validity. */
4058 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4059 reg_last_reload_reg[regno + nr] = 0;
4060 }
4061 \f
4062 /* For each reload, the mode of the reload register. */
4063 static enum machine_mode reload_mode[MAX_RELOADS];
4064
4065 /* For each reload, the largest number of registers it will require. */
4066 static int reload_nregs[MAX_RELOADS];
4067
4068 /* Comparison function for qsort to decide which of two reloads
4069 should be handled first. *P1 and *P2 are the reload numbers. */
4070
4071 static int
4072 reload_reg_class_lower (p1, p2)
4073 short *p1, *p2;
4074 {
4075 register int r1 = *p1, r2 = *p2;
4076 register int t;
4077
4078 /* Consider required reloads before optional ones. */
4079 t = reload_optional[r1] - reload_optional[r2];
4080 if (t != 0)
4081 return t;
4082
4083 /* Count all solitary classes before non-solitary ones. */
4084 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4085 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4086 if (t != 0)
4087 return t;
4088
4089 /* Aside from solitaires, consider all multi-reg groups first. */
4090 t = reload_nregs[r2] - reload_nregs[r1];
4091 if (t != 0)
4092 return t;
4093
4094 /* Consider reloads in order of increasing reg-class number. */
4095 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4096 if (t != 0)
4097 return t;
4098
4099 /* If reloads are equally urgent, sort by reload number,
4100 so that the results of qsort leave nothing to chance. */
4101 return r1 - r2;
4102 }
4103 \f
4104 /* The following HARD_REG_SETs indicate when each hard register is
4105 used for a reload of various parts of the current insn. */
4106
4107 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4108 static HARD_REG_SET reload_reg_used;
4109 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4110 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4111 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4112 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4113 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4114 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4115 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4116 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4117 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4118 static HARD_REG_SET reload_reg_used_in_op_addr;
4119 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4120 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4121 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4122 static HARD_REG_SET reload_reg_used_in_insn;
4123 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4124 static HARD_REG_SET reload_reg_used_in_other_addr;
4125
4126 /* If reg is in use as a reload reg for any sort of reload. */
4127 static HARD_REG_SET reload_reg_used_at_all;
4128
4129 /* If reg is in use as an inherited reload. We just mark the first register
4130 in the group. */
4131 static HARD_REG_SET reload_reg_used_for_inherit;
4132
4133 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4134 TYPE. MODE is used to indicate how many consecutive regs are
4135 actually used. */
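/* For instance, marking regno 4 for a DImode reload on a machine where
   HARD_REGNO_NREGS gives 2 sets the bits for hard regs 4 and 5 in the
   appropriate set below.  (Illustrative register and mode.)  */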
4136
4137 static void
4138 mark_reload_reg_in_use (regno, opnum, type, mode)
4139 int regno;
4140 int opnum;
4141 enum reload_type type;
4142 enum machine_mode mode;
4143 {
4144 int nregs = HARD_REGNO_NREGS (regno, mode);
4145 int i;
4146
4147 for (i = regno; i < nregs + regno; i++)
4148 {
4149 switch (type)
4150 {
4151 case RELOAD_OTHER:
4152 SET_HARD_REG_BIT (reload_reg_used, i);
4153 break;
4154
4155 case RELOAD_FOR_INPUT_ADDRESS:
4156 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4157 break;
4158
4159 case RELOAD_FOR_OUTPUT_ADDRESS:
4160 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4161 break;
4162
4163 case RELOAD_FOR_OPERAND_ADDRESS:
4164 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4165 break;
4166
4167 case RELOAD_FOR_OPADDR_ADDR:
4168 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4169 break;
4170
4171 case RELOAD_FOR_OTHER_ADDRESS:
4172 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4173 break;
4174
4175 case RELOAD_FOR_INPUT:
4176 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4177 break;
4178
4179 case RELOAD_FOR_OUTPUT:
4180 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4181 break;
4182
4183 case RELOAD_FOR_INSN:
4184 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4185 break;
4186 }
4187
4188 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4189 }
4190 }
4191
4192 /* Similarly, but show REGNO is no longer in use for a reload. */
4193
4194 static void
4195 clear_reload_reg_in_use (regno, opnum, type, mode)
4196 int regno;
4197 int opnum;
4198 enum reload_type type;
4199 enum machine_mode mode;
4200 {
4201 int nregs = HARD_REGNO_NREGS (regno, mode);
4202 int i;
4203
4204 for (i = regno; i < nregs + regno; i++)
4205 {
4206 switch (type)
4207 {
4208 case RELOAD_OTHER:
4209 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4210 break;
4211
4212 case RELOAD_FOR_INPUT_ADDRESS:
4213 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4214 break;
4215
4216 case RELOAD_FOR_OUTPUT_ADDRESS:
4217 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4218 break;
4219
4220 case RELOAD_FOR_OPERAND_ADDRESS:
4221 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4222 break;
4223
4224 case RELOAD_FOR_OPADDR_ADDR:
4225 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4226 break;
4227
4228 case RELOAD_FOR_OTHER_ADDRESS:
4229 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4230 break;
4231
4232 case RELOAD_FOR_INPUT:
4233 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4234 break;
4235
4236 case RELOAD_FOR_OUTPUT:
4237 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4238 break;
4239
4240 case RELOAD_FOR_INSN:
4241 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4242 break;
4243 }
4244 }
4245 }
4246
4247 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4248 specified by OPNUM and TYPE. */
4249
4250 static int
4251 reload_reg_free_p (regno, opnum, type)
4252 int regno;
4253 int opnum;
4254 enum reload_type type;
4255 {
4256 int i;
4257
4258 /* In use for a RELOAD_OTHER means it's not available for anything except
4259 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4260 to be used only for inputs. */
4261
4262 if (type != RELOAD_FOR_OTHER_ADDRESS
4263 && TEST_HARD_REG_BIT (reload_reg_used, regno))
4264 return 0;
4265
4266 switch (type)
4267 {
4268 case RELOAD_OTHER:
4269 /* In use for anything except RELOAD_FOR_OTHER_ADDRESS means
4270 we can't use it for RELOAD_OTHER. */
4271 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4272 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4273 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4274 return 0;
4275
4276 for (i = 0; i < reload_n_operands; i++)
4277 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4278 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4279 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4280 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4281 return 0;
4282
4283 return 1;
4284
4285 case RELOAD_FOR_INPUT:
4286 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4287 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4288 return 0;
4289
4290 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4291 return 0;
4292
4293 /* If it is used for some other input, can't use it. */
4294 for (i = 0; i < reload_n_operands; i++)
4295 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4296 return 0;
4297
4298 /* If it is used in a later operand's address, can't use it. */
4299 for (i = opnum + 1; i < reload_n_operands; i++)
4300 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4301 return 0;
4302
4303 return 1;
4304
4305 case RELOAD_FOR_INPUT_ADDRESS:
4306 /* Can't use a register if it is used for an input address for this
4307 operand or used as an input in an earlier one. */
4308 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4309 return 0;
4310
4311 for (i = 0; i < opnum; i++)
4312 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4313 return 0;
4314
4315 return 1;
4316
4317 case RELOAD_FOR_OUTPUT_ADDRESS:
4318 /* Can't use a register if it is used for an output address for this
4319 operand or used as an output in this or a later operand. */
4320 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4321 return 0;
4322
4323 for (i = opnum; i < reload_n_operands; i++)
4324 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4325 return 0;
4326
4327 return 1;
4328
4329 case RELOAD_FOR_OPERAND_ADDRESS:
4330 for (i = 0; i < reload_n_operands; i++)
4331 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4332 return 0;
4333
4334 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4335 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4336
4337 case RELOAD_FOR_OPADDR_ADDR:
4338 for (i = 0; i < reload_n_operands; i++)
4339 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4340 return 0;
4341
4342 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4343
4344 case RELOAD_FOR_OUTPUT:
4345 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4346 outputs, or an operand address for this or an earlier output. */
4347 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4348 return 0;
4349
4350 for (i = 0; i < reload_n_operands; i++)
4351 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4352 return 0;
4353
4354 for (i = 0; i <= opnum; i++)
4355 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4356 return 0;
4357
4358 return 1;
4359
4360 case RELOAD_FOR_INSN:
4361 for (i = 0; i < reload_n_operands; i++)
4362 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4363 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4364 return 0;
4365
4366 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4367 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4368
4369 case RELOAD_FOR_OTHER_ADDRESS:
4370 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4371 }
4372 abort ();
4373 }
4374
4375 /* Return 1 if the value in reload reg REGNO, as used by a reload
4376 needed for the part of the insn specified by OPNUM and TYPE,
4377 is not in use for a reload in any prior part of the insn.
4378
4379 We can assume that the reload reg was already tested for availability
4380 at the time it is needed, and we should not check this again,
4381 in case the reg has already been marked in use. */
4382
4383 static int
4384 reload_reg_free_before_p (regno, opnum, type)
4385 int regno;
4386 int opnum;
4387 enum reload_type type;
4388 {
4389 int i;
4390
4391 switch (type)
4392 {
4393 case RELOAD_FOR_OTHER_ADDRESS:
4394 /* These always come first. */
4395 return 1;
4396
4397 case RELOAD_OTHER:
4398 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4399
4400 /* If this use is for part of the insn,
4401 check the reg is not in use for any prior part. It is tempting
4402 to try to do this by falling through from objects that occur
4403 later in the insn to ones that occur earlier, but that will not
4404 correctly take into account the fact that here we MUST ignore
4405 things that would prevent the register from being allocated in
4406 the first place, since we know that it was allocated. */
4407
4408 case RELOAD_FOR_OUTPUT_ADDRESS:
4409 /* Earlier reloads are for earlier outputs or their addresses,
4410 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4411 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4412 RELOAD_OTHER).  */
4413 for (i = 0; i < opnum; i++)
4414 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4415 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4416 return 0;
4417
4418 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4419 return 0;
4420
4421 for (i = 0; i < reload_n_operands; i++)
4422 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4423 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4424 return 0;
4425
4426 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4427 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4428 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4429
4430 case RELOAD_FOR_OUTPUT:
4431 /* This can't be used in the output address for this operand and
4432 anything that can't be used for it, except that we've already
4433 tested for RELOAD_FOR_INSN objects. */
4434
4435 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4436 return 0;
4437
4438 for (i = 0; i < opnum; i++)
4439 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4440 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4441 return 0;
4442
4443 for (i = 0; i < reload_n_operands; i++)
4444 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4445 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4446 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4447 return 0;
4448
4449 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4450
4451 case RELOAD_FOR_OPERAND_ADDRESS:
4452 case RELOAD_FOR_OPADDR_ADDR:
4453 case RELOAD_FOR_INSN:
4454 /* These can't conflict with inputs, or each other, so all we have to
4455 test is input addresses and the addresses of OTHER items. */
4456
4457 for (i = 0; i < reload_n_operands; i++)
4458 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4459 return 0;
4460
4461 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4462
4463 case RELOAD_FOR_INPUT:
4464 /* The only things earlier are the address for this and
4465 earlier inputs, other inputs (which we know we don't conflict
4466 with), and addresses of RELOAD_OTHER objects. */
4467
4468 for (i = 0; i <= opnum; i++)
4469 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4470 return 0;
4471
4472 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4473
4474 case RELOAD_FOR_INPUT_ADDRESS:
4475 /* Similarly, all we have to check is for use in earlier inputs'
4476 addresses. */
4477 for (i = 0; i < opnum; i++)
4478 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4479 return 0;
4480
4481 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4482 }
4483 abort ();
4484 }
4485
4486 /* Return 1 if the value in reload reg REGNO, as used by a reload
4487 needed for the part of the insn specified by OPNUM and TYPE,
4488 is still available in REGNO at the end of the insn.
4489
4490 We can assume that the reload reg was already tested for availability
4491 at the time it is needed, and we should not check this again,
4492 in case the reg has already been marked in use. */
4493
4494 static int
4495 reload_reg_reaches_end_p (regno, opnum, type)
4496 int regno;
4497 int opnum;
4498 enum reload_type type;
4499 {
4500 int i;
4501
4502 switch (type)
4503 {
4504 case RELOAD_OTHER:
4505 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4506 its value must reach the end. */
4507 return 1;
4508
4509 /* If this use is for part of the insn,
4510 its value reaches if no subsequent part uses the same register.
4511 Just like the above function, don't try to do this with lots
4512 of fallthroughs. */
4513
4514 case RELOAD_FOR_OTHER_ADDRESS:
4515 /* Here we check for everything else, since these don't conflict
4516 with anything else and everything comes later. */
4517
4518 for (i = 0; i < reload_n_operands; i++)
4519 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4520 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4521 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4522 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4523 return 0;
4524
4525 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4526 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4527 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4528
4529 case RELOAD_FOR_INPUT_ADDRESS:
4530 /* Similar, except that we check only for this and subsequent inputs
4531 and the addresses of only subsequent inputs and we do not need
4532 to check for RELOAD_OTHER objects since they are known not to
4533 conflict. */
4534
4535 for (i = opnum; i < reload_n_operands; i++)
4536 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4537 return 0;
4538
4539 for (i = opnum + 1; i < reload_n_operands; i++)
4540 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4541 return 0;
4542
4543 for (i = 0; i < reload_n_operands; i++)
4544 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4545 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4546 return 0;
4547
4548 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4549 return 0;
4550
4551 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4552 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4553
4554 case RELOAD_FOR_INPUT:
4555 /* Similar to input address, except we start at the next operand for
4556 both input and input address and we do not check for
4557 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4558 would conflict. */
4559
4560 for (i = opnum + 1; i < reload_n_operands; i++)
4561 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4562 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4563 return 0;
4564
4565 /* ... fall through ... */
4566
4567 case RELOAD_FOR_OPERAND_ADDRESS:
4568 /* Check outputs and their addresses. */
4569
4570 for (i = 0; i < reload_n_operands; i++)
4571 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4572 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4573 return 0;
4574
4575 return 1;
4576
4577 case RELOAD_FOR_OPADDR_ADDR:
4578 for (i = 0; i < reload_n_operands; i++)
4579 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4580 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4581 return 0;
4582
4583 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4584 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4585
4586 case RELOAD_FOR_INSN:
4587 /* These conflict with other outputs with RELOAD_OTHER. So
4588 we need only check for output addresses. */
4589
4590 opnum = -1;
4591
4592 /* ... fall through ... */
4593
4594 case RELOAD_FOR_OUTPUT:
4595 case RELOAD_FOR_OUTPUT_ADDRESS:
4596 /* We already know these can't conflict with a later output. So the
4597 only thing to check are later output addresses. */
4598 for (i = opnum + 1; i < reload_n_operands; i++)
4599 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4600 return 0;
4601
4602 return 1;
4603 }
4604
4605 abort ();
4606 }
4607 \f
4608 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4609 Return 0 otherwise.
4610
4611 This function uses the same algorithm as reload_reg_free_p above. */
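/* As an example of the asymmetry encoded below: a RELOAD_FOR_INPUT reload
   for operand 0 conflicts with a RELOAD_FOR_INPUT_ADDRESS reload for
   operand 1, whose address is computed after operand 0 has been loaded,
   but not with one for operand 0 itself, whose address reload finishes
   before the input is loaded.  */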
4612
4613 static int
4614 reloads_conflict (r1, r2)
4615 int r1, r2;
4616 {
4617 enum reload_type r1_type = reload_when_needed[r1];
4618 enum reload_type r2_type = reload_when_needed[r2];
4619 int r1_opnum = reload_opnum[r1];
4620 int r2_opnum = reload_opnum[r2];
4621
4622 /* RELOAD_OTHER conflicts with everything except RELOAD_FOR_OTHER_ADDRESS. */
4623
4624 if (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS)
4625 return 1;
4626
4627 /* Otherwise, check conflicts differently for each type. */
4628
4629 switch (r1_type)
4630 {
4631 case RELOAD_FOR_INPUT:
4632 return (r2_type == RELOAD_FOR_INSN
4633 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4634 || r2_type == RELOAD_FOR_OPADDR_ADDR
4635 || r2_type == RELOAD_FOR_INPUT
4636 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4637
4638 case RELOAD_FOR_INPUT_ADDRESS:
4639 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4640 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4641
4642 case RELOAD_FOR_OUTPUT_ADDRESS:
4643 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4644 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4645
4646 case RELOAD_FOR_OPERAND_ADDRESS:
4647 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4648 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4649
4650 case RELOAD_FOR_OPADDR_ADDR:
4651 return (r2_type == RELOAD_FOR_INPUT
4652 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4653
4654 case RELOAD_FOR_OUTPUT:
4655 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4656 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4657 && r2_opnum >= r1_opnum));
4658
4659 case RELOAD_FOR_INSN:
4660 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4661 || r2_type == RELOAD_FOR_INSN
4662 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4663
4664 case RELOAD_FOR_OTHER_ADDRESS:
4665 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4666
4667 case RELOAD_OTHER:
4668 return r2_type != RELOAD_FOR_OTHER_ADDRESS;
4669
4670 default:
4671 abort ();
4672 }
4673 }
4674 \f
4675 /* Vector of reload-numbers showing the order in which the reloads should
4676 be processed. */
4677 short reload_order[MAX_RELOADS];
4678
4679 /* Indexed by reload number, 1 if incoming value
4680 inherited from previous insns. */
4681 char reload_inherited[MAX_RELOADS];
4682
4683 /* For an inherited reload, this is the insn the reload was inherited from,
4684 if we know it. Otherwise, this is 0. */
4685 rtx reload_inheritance_insn[MAX_RELOADS];
4686
4687 /* If non-zero, this is a place to get the value of the reload,
4688 rather than using reload_in. */
4689 rtx reload_override_in[MAX_RELOADS];
4690
4691 /* For each reload, the index in spill_regs of the spill register used,
4692 or -1 if we did not need one of the spill registers for this reload. */
4693 int reload_spill_index[MAX_RELOADS];
4694
4695 /* Find a spill register to use as a reload register for reload R.
4696 LAST_RELOAD is non-zero if this is the last reload for the insn being
4697 processed.
4698
4699 Set reload_reg_rtx[R] to the register allocated.
4700
4701 If NOERROR is nonzero, we return 1 if successful,
4702 or 0 if we couldn't find a spill reg and we didn't change anything. */
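/* A sketch of why the round-robin starting point matters (register names are
   illustrative): with spill regs {r4, r5, r6} and last_spill_reg pointing at
   r4, the search below begins at r5, so consecutive insns tend to pick
   different spill regs and an inherited value left in r4 has a better chance
   of surviving.  */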
4703
4704 static int
4705 allocate_reload_reg (r, insn, last_reload, noerror)
4706 int r;
4707 rtx insn;
4708 int last_reload;
4709 int noerror;
4710 {
4711 int i;
4712 int pass;
4713 int count;
4714 rtx new;
4715 int regno;
4716
4717 /* If we put this reload ahead, thinking it is a group,
4718 then insist on finding a group. Otherwise we can grab a
4719 reg that some other reload needs.
4720 (That can happen when we have a 68000 DATA_OR_FP_REG
4721 which is a group of data regs or one fp reg.)
4722 We need not be so restrictive if there are no more reloads
4723 for this insn.
4724
4725 ??? Really it would be nicer to have smarter handling
4726 for that kind of reg class, where a problem like this is normal.
4727 Perhaps those classes should be avoided for reloading
4728 by use of more alternatives. */
4729
4730 int force_group = reload_nregs[r] > 1 && ! last_reload;
4731
4732 /* If we want a single register and haven't yet found one,
4733 take any reg in the right class and not in use.
4734 If we want a consecutive group, here is where we look for it.
4735
4736 We use two passes so we can first look for reload regs to
4737 reuse, which are already in use for other reloads in this insn,
4738 and only then use additional registers.
4739 I think that maximizing reuse is needed to make sure we don't
4740 run out of reload regs. Suppose we have three reloads, and
4741 reloads A and B can share regs. These need two regs.
4742 Suppose A and B are given different regs.
4743 That leaves none for C. */
4744 for (pass = 0; pass < 2; pass++)
4745 {
4746 /* I is the index in spill_regs.
4747 We advance it round-robin between insns to use all spill regs
4748 equally, so that inherited reloads have a chance
4749 of leapfrogging each other. Don't do this, however, when we have
4750 group needs and failure would be fatal; if we only have a relatively
4751 small number of spill registers, and more than one of them has
4752 group needs, then by starting in the middle, we may end up
4753 allocating the first one in such a way that we are not left with
4754 sufficient groups to handle the rest. */
4755
4756 if (noerror || ! force_group)
4757 i = last_spill_reg;
4758 else
4759 i = -1;
4760
4761 for (count = 0; count < n_spills; count++)
4762 {
4763 int class = (int) reload_reg_class[r];
4764
4765 i = (i + 1) % n_spills;
4766
4767 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4768 reload_when_needed[r])
4769 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4770 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4771 /* Look first for regs to share, then for unshared. But
4772 don't share regs used for inherited reloads; they are
4773 the ones we want to preserve. */
4774 && (pass
4775 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4776 spill_regs[i])
4777 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4778 spill_regs[i]))))
4779 {
4780 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4781 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4782 (on 68000) got us two FP regs. If NR is 1,
4783 we would reject both of them. */
4784 if (force_group)
4785 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4786 /* If we need only one reg, we have already won. */
4787 if (nr == 1)
4788 {
4789 /* But reject a single reg if we demand a group. */
4790 if (force_group)
4791 continue;
4792 break;
4793 }
4794 /* Otherwise check that as many consecutive regs as we need
4795 are available here.
4796 Also, don't use for a group registers that are
4797 needed for nongroups. */
4798 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4799 while (nr > 1)
4800 {
4801 regno = spill_regs[i] + nr - 1;
4802 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4803 && spill_reg_order[regno] >= 0
4804 && reload_reg_free_p (regno, reload_opnum[r],
4805 reload_when_needed[r])
4806 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4807 regno)))
4808 break;
4809 nr--;
4810 }
4811 if (nr == 1)
4812 break;
4813 }
4814 }
4815
4816 /* If we found something on pass 1, omit pass 2. */
4817 if (count < n_spills)
4818 break;
4819 }
4820
4821 /* We should have found a spill register by now. */
4822 if (count == n_spills)
4823 {
4824 if (noerror)
4825 return 0;
4826 goto failure;
4827 }
4828
4829 /* I is the index in SPILL_REG_RTX of the reload register we are to
4830 allocate. Get an rtx for it and find its register number. */
4831
4832 new = spill_reg_rtx[i];
4833
4834 if (new == 0 || GET_MODE (new) != reload_mode[r])
4835 spill_reg_rtx[i] = new
4836 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4837
4838 regno = true_regnum (new);
4839
4840 /* Detect when the reload reg can't hold the reload mode.
4841 This used to be one `if', but the Sequent compiler can't handle that. */
4842 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4843 {
4844 enum machine_mode test_mode = VOIDmode;
4845 if (reload_in[r])
4846 test_mode = GET_MODE (reload_in[r]);
4847 /* If reload_in[r] has VOIDmode, it means we will load it
4848 in whatever mode the reload reg has: to wit, reload_mode[r].
4849 We have already tested that for validity. */
4850 /* Aside from that, we need to test that the expressions
4851 to reload from or into have modes which are valid for this
4852 reload register. Otherwise the reload insns would be invalid. */
4853 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4854 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4855 if (! (reload_out[r] != 0
4856 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
4857 {
4858 /* The reg is OK. */
4859 last_spill_reg = i;
4860
4861 /* Mark as in use for this insn the reload regs we use
4862 for this. */
4863 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4864 reload_when_needed[r], reload_mode[r]);
4865
4866 reload_reg_rtx[r] = new;
4867 reload_spill_index[r] = i;
4868 return 1;
4869 }
4870 }
4871
4872 /* The reg is not OK. */
4873 if (noerror)
4874 return 0;
4875
4876 failure:
4877 if (asm_noperands (PATTERN (insn)) < 0)
4878 /* It's the compiler's fault. */
4879 fatal_insn ("Could not find a spill register", insn);
4880
4881 /* It's the user's fault; the operand's mode and constraint
4882 don't match. Disable this reload so we don't crash in final. */
4883 error_for_asm (insn,
4884 "`asm' operand constraint incompatible with operand size");
4885 reload_in[r] = 0;
4886 reload_out[r] = 0;
4887 reload_reg_rtx[r] = 0;
4888 reload_optional[r] = 1;
4889 reload_secondary_p[r] = 1;
4890
4891 return 1;
4892 }
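
/* An illustrative sketch (not part of reload): the round-robin walk used
   by allocate_reload_reg above, reduced to its essentials.  USABLE_P is a
   hypothetical predicate standing in for the class, mode and availability
   tests the real loop makes; the #if 0 keeps this sketch out of the build.  */
#if 0
static int
pick_spill_reg_round_robin (last, n_spills, usable_p)
     int last, n_spills;
     int (*usable_p) ();
{
  register int i = last;
  int count;

  /* Start just past the register chosen last time, so that successive
     insns spread their reloads over all the spill registers.  */
  for (count = 0; count < n_spills; count++)
    {
      i = (i + 1) % n_spills;
      if ((*usable_p) (i))
	return i;
    }
  return -1;			/* No usable spill register found.  */
}
#endif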
4893 \f
4894 /* Assign hard reg targets for the pseudo-registers we must reload
4895 into hard regs for this insn.
4896 Also output the instructions to copy them in and out of the hard regs.
4897
4898 For machines with register classes, we are responsible for
4899 finding a reload reg in the proper class. */
4900
4901 static void
4902 choose_reload_regs (insn, avoid_return_reg)
4903 rtx insn;
4904 rtx avoid_return_reg;
4905 {
4906 register int i, j;
4907 int max_group_size = 1;
4908 enum reg_class group_class = NO_REGS;
4909 int inheritance;
4910
4911 rtx save_reload_reg_rtx[MAX_RELOADS];
4912 char save_reload_inherited[MAX_RELOADS];
4913 rtx save_reload_inheritance_insn[MAX_RELOADS];
4914 rtx save_reload_override_in[MAX_RELOADS];
4915 int save_reload_spill_index[MAX_RELOADS];
4916 HARD_REG_SET save_reload_reg_used;
4917 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4918 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4919 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4920 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4921 HARD_REG_SET save_reload_reg_used_in_op_addr;
4922 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
4923 HARD_REG_SET save_reload_reg_used_in_insn;
4924 HARD_REG_SET save_reload_reg_used_in_other_addr;
4925 HARD_REG_SET save_reload_reg_used_at_all;
4926
4927 bzero (reload_inherited, MAX_RELOADS);
4928 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4929 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
4930
4931 CLEAR_HARD_REG_SET (reload_reg_used);
4932 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4933 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4934 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
4935 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4936 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
4937
4938 for (i = 0; i < reload_n_operands; i++)
4939 {
4940 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4941 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4942 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4943 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4944 }
4945
4946 #ifdef SMALL_REGISTER_CLASSES
4947 /* Don't bother with avoiding the return reg
4948 if we have no mandatory reload that could use it. */
4949 if (avoid_return_reg)
4950 {
4951 int do_avoid = 0;
4952 int regno = REGNO (avoid_return_reg);
4953 int nregs
4954 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4955 int r;
4956
4957 for (r = regno; r < regno + nregs; r++)
4958 if (spill_reg_order[r] >= 0)
4959 for (j = 0; j < n_reloads; j++)
4960 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4961 && (reload_in[j] != 0 || reload_out[j] != 0
4962 || reload_secondary_p[j])
4963 &&
4964 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4965 do_avoid = 1;
4966 if (!do_avoid)
4967 avoid_return_reg = 0;
4968 }
4969 #endif /* SMALL_REGISTER_CLASSES */
4970
4971 #if 0 /* Not needed, now that we can always retry without inheritance. */
4972 /* See if we have more mandatory reloads than spill regs.
4973 If so, then we cannot risk optimizations that could prevent
4974 reloads from sharing one spill register.
4975
4976 Since we will try finding a better register than reload_reg_rtx
4977 unless it is equal to reload_in or reload_out, count such reloads. */
4978
4979 {
4980 int tem = 0;
4981 #ifdef SMALL_REGISTER_CLASSES
4982 tem = (avoid_return_reg != 0);
4983 #endif
4984 for (j = 0; j < n_reloads; j++)
4985 if (! reload_optional[j]
4986 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4987 && (reload_reg_rtx[j] == 0
4988 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4989 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4990 tem++;
4991 if (tem > n_spills)
4992 must_reuse = 1;
4993 }
4994 #endif
4995
4996 #ifdef SMALL_REGISTER_CLASSES
4997 /* Don't use the subroutine call return reg for a reload
4998 if we are supposed to avoid it. */
4999 if (avoid_return_reg)
5000 {
5001 int regno = REGNO (avoid_return_reg);
5002 int nregs
5003 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5004 int r;
5005
5006 for (r = regno; r < regno + nregs; r++)
5007 if (spill_reg_order[r] >= 0)
5008 SET_HARD_REG_BIT (reload_reg_used, r);
5009 }
5010 #endif /* SMALL_REGISTER_CLASSES */
5011
5012 /* In order to be certain of getting the registers we need,
5013 we must sort the reloads into order of increasing register class.
5014 Then our grabbing of reload registers will parallel the process
5015 that provided the reload registers.
5016
5017 Also note whether any of the reloads wants a consecutive group of regs.
5018 If so, record the maximum size of the group desired and what
5019 register class contains all the groups needed by this insn. */
5020
5021 for (j = 0; j < n_reloads; j++)
5022 {
5023 reload_order[j] = j;
5024 reload_spill_index[j] = -1;
5025
5026 reload_mode[j]
5027 = (reload_inmode[j] == VOIDmode
5028 || (GET_MODE_SIZE (reload_outmode[j])
5029 > GET_MODE_SIZE (reload_inmode[j])))
5030 ? reload_outmode[j] : reload_inmode[j];
5031
5032 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5033
5034 if (reload_nregs[j] > 1)
5035 {
5036 max_group_size = MAX (reload_nregs[j], max_group_size);
5037 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5038 }
5039
5040 /* If we have already decided to use a certain register,
5041 don't use it in another way. */
5042 if (reload_reg_rtx[j])
5043 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5044 reload_when_needed[j], reload_mode[j]);
5045 }
5046
5047 if (n_reloads > 1)
5048 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5049
5050 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5051 sizeof reload_reg_rtx);
5052 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5053 bcopy ((char *) reload_inheritance_insn,
5054 (char *) save_reload_inheritance_insn,
5055 sizeof reload_inheritance_insn);
5056 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5057 sizeof reload_override_in);
5058 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5059 sizeof reload_spill_index);
5060 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5061 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5062 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5063 reload_reg_used_in_op_addr);
5064
5065 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5066 reload_reg_used_in_op_addr_reload);
5067
5068 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5069 reload_reg_used_in_insn);
5070 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5071 reload_reg_used_in_other_addr);
5072
5073 for (i = 0; i < reload_n_operands; i++)
5074 {
5075 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5076 reload_reg_used_in_output[i]);
5077 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5078 reload_reg_used_in_input[i]);
5079 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5080 reload_reg_used_in_input_addr[i]);
5081 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5082 reload_reg_used_in_output_addr[i]);
5083 }
5084
5085 /* If -O, try first with inheritance, then turning it off.
5086 If not -O, don't do inheritance.
5087 Using inheritance when not optimizing leads to paradoxes
5088 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5089 because one side of the comparison might be inherited. */
5090
5091 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5092 {
5093 /* Process the reloads in order of preference just found.
5094 Beyond this point, subregs can be found in reload_reg_rtx.
5095
5096 This used to look for an existing reloaded home for all
5097 of the reloads, and only then perform any new reloads.
5098 But that could lose if the reloads were done out of reg-class order
5099 because a later reload with a looser constraint might have an old
5100 home in a register needed by an earlier reload with a tighter constraint.
5101
5102 To solve this, we make two passes over the reloads, in the order
5103 described above. In the first pass we try to inherit a reload
5104 from a previous insn. If there is a later reload that needs a
5105 class that is a proper subset of the class being processed, we must
5106 also allocate a spill register during the first pass.
5107
5108 Then make a second pass over the reloads to allocate any reloads
5109 that haven't been given registers yet. */
5110
5111 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5112
5113 for (j = 0; j < n_reloads; j++)
5114 {
5115 register int r = reload_order[j];
5116
5117 /* Ignore reloads that got marked inoperative. */
5118 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5119 continue;
5120
5121 /* If find_reloads chose to use reload_in or reload_out as a reload
5122 register, we don't need to choose one.  Otherwise, try even if it found
5123 one since we might save an insn if we find the value lying around. */
5124 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5125 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5126 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5127 continue;
5128
5129 #if 0 /* No longer needed for correct operation.
5130 It might give better code, or might not; worth an experiment? */
5131 /* If this is an optional reload, we can't inherit from earlier insns
5132 until we are sure that any non-optional reloads have been allocated.
5133 The following code takes advantage of the fact that optional reloads
5134 are at the end of reload_order. */
5135 if (reload_optional[r] != 0)
5136 for (i = 0; i < j; i++)
5137 if ((reload_out[reload_order[i]] != 0
5138 || reload_in[reload_order[i]] != 0
5139 || reload_secondary_p[reload_order[i]])
5140 && ! reload_optional[reload_order[i]]
5141 && reload_reg_rtx[reload_order[i]] == 0)
5142 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5143 #endif
5144
5145 /* First see if this pseudo is already available as reloaded
5146 for a previous insn. We cannot try to inherit for reloads
5147 that are smaller than the maximum number of registers needed
5148 for groups unless the register we would allocate cannot be used
5149 for the groups.
5150
5151 We could check here to see if this is a secondary reload for
5152 an object that is already in a register of the desired class.
5153 This would avoid the need for the secondary reload register.
5154 But this is complex because we can't easily determine what
5155 objects might want to be loaded via this reload. So let a register
5156 be allocated here. In `emit_reload_insns' we suppress one of the
5157 loads in the case described above. */
5158
5159 if (inheritance)
5160 {
5161 register int regno = -1;
5162 enum machine_mode mode;
5163
5164 if (reload_in[r] == 0)
5165 ;
5166 else if (GET_CODE (reload_in[r]) == REG)
5167 {
5168 regno = REGNO (reload_in[r]);
5169 mode = GET_MODE (reload_in[r]);
5170 }
5171 else if (GET_CODE (reload_in_reg[r]) == REG)
5172 {
5173 regno = REGNO (reload_in_reg[r]);
5174 mode = GET_MODE (reload_in_reg[r]);
5175 }
5176 #if 0
5177 /* This won't work, since REGNO can be a pseudo reg number.
5178 Also, it takes much more hair to keep track of all the things
5179 that can invalidate an inherited reload of part of a pseudoreg. */
5180 else if (GET_CODE (reload_in[r]) == SUBREG
5181 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5182 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5183 #endif
5184
5185 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5186 {
5187 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5188
5189 if (reg_reloaded_contents[i] == regno
5190 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5191 >= GET_MODE_SIZE (mode))
5192 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5193 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5194 spill_regs[i])
5195 && (reload_nregs[r] == max_group_size
5196 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5197 spill_regs[i]))
5198 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5199 reload_when_needed[r])
5200 && reload_reg_free_before_p (spill_regs[i],
5201 reload_opnum[r],
5202 reload_when_needed[r]))
5203 {
5204 /* If a group is needed, verify that all the subsequent
5205 registers still have their values intact. */
5206 int nr
5207 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5208 int k;
5209
5210 for (k = 1; k < nr; k++)
5211 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5212 != regno)
5213 break;
5214
5215 if (k == nr)
5216 {
5217 int i1;
5218
5219 /* We found a register that contains the
5220 value we need. If this register is the
5221 same as an `earlyclobber' operand of the
5222 current insn, just mark it as a place to
5223 reload from since we can't use it as the
5224 reload register itself. */
5225
5226 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5227 if (reg_overlap_mentioned_for_reload_p
5228 (reg_last_reload_reg[regno],
5229 reload_earlyclobbers[i1]))
5230 break;
5231
5232 if (i1 != n_earlyclobbers
5233 /* Don't really use the inherited spill reg
5234 if we need it wider than we've got it. */
5235 || (GET_MODE_SIZE (reload_mode[r])
5236 > GET_MODE_SIZE (mode)))
5237 reload_override_in[r] = reg_last_reload_reg[regno];
5238 else
5239 {
5240 int k;
5241 /* We can use this as a reload reg. */
5242 /* Mark the register as in use for this part of
5243 the insn. */
5244 mark_reload_reg_in_use (spill_regs[i],
5245 reload_opnum[r],
5246 reload_when_needed[r],
5247 reload_mode[r]);
5248 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5249 reload_inherited[r] = 1;
5250 reload_inheritance_insn[r]
5251 = reg_reloaded_insn[i];
5252 reload_spill_index[r] = i;
5253 for (k = 0; k < nr; k++)
5254 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5255 spill_regs[i + k]);
5256 }
5257 }
5258 }
5259 }
5260 }
5261
5262 /* Here's another way to see if the value is already lying around. */
5263 if (inheritance
5264 && reload_in[r] != 0
5265 && ! reload_inherited[r]
5266 && reload_out[r] == 0
5267 && (CONSTANT_P (reload_in[r])
5268 || GET_CODE (reload_in[r]) == PLUS
5269 || GET_CODE (reload_in[r]) == REG
5270 || GET_CODE (reload_in[r]) == MEM)
5271 && (reload_nregs[r] == max_group_size
5272 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5273 {
5274 register rtx equiv
5275 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5276 -1, NULL_PTR, 0, reload_mode[r]);
5277 int regno;
5278
5279 if (equiv != 0)
5280 {
5281 if (GET_CODE (equiv) == REG)
5282 regno = REGNO (equiv);
5283 else if (GET_CODE (equiv) == SUBREG)
5284 {
5285 /* This must be a SUBREG of a hard register.
5286 Make a new REG since this might be used in an
5287 address and not all machines support SUBREGs
5288 there. */
5289 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5290 equiv = gen_rtx (REG, reload_mode[r], regno);
5291 }
5292 else
5293 abort ();
5294 }
5295
5296 /* If we found a spill reg, reject it unless it is free
5297 and of the desired class. */
5298 if (equiv != 0
5299 && ((spill_reg_order[regno] >= 0
5300 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5301 reload_when_needed[r]))
5302 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5303 regno)))
5304 equiv = 0;
5305
5306 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5307 equiv = 0;
5308
5309 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5310 equiv = 0;
5311
5312 /* We found a register that contains the value we need.
5313 If this register is the same as an `earlyclobber' operand
5314 of the current insn, just mark it as a place to reload from
5315 since we can't use it as the reload register itself. */
5316
5317 if (equiv != 0)
5318 for (i = 0; i < n_earlyclobbers; i++)
5319 if (reg_overlap_mentioned_for_reload_p (equiv,
5320 reload_earlyclobbers[i]))
5321 {
5322 reload_override_in[r] = equiv;
5323 equiv = 0;
5324 break;
5325 }
5326
5327 /* JRV: If the equiv register we have found is explicitly
5328 clobbered in the current insn, mark but don't use, as above. */
5329
5330 if (equiv != 0 && regno_clobbered_p (regno, insn))
5331 {
5332 reload_override_in[r] = equiv;
5333 equiv = 0;
5334 }
5335
5336 /* If we found an equivalent reg, say no code need be generated
5337 to load it, and use it as our reload reg. */
5338 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5339 {
5340 reload_reg_rtx[r] = equiv;
5341 reload_inherited[r] = 1;
5342 /* If it is a spill reg,
5343 mark the spill reg as in use for this insn. */
5344 i = spill_reg_order[regno];
5345 if (i >= 0)
5346 {
5347 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5348 int k;
5349 mark_reload_reg_in_use (regno, reload_opnum[r],
5350 reload_when_needed[r],
5351 reload_mode[r]);
5352 for (k = 0; k < nr; k++)
5353 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
5354 }
5355 }
5356 }
5357
5358 /* If we found a register to use already, or if this is an optional
5359 reload, we are done. */
5360 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5361 continue;
5362
5363 #if 0 /* No longer needed for correct operation. Might or might not
5364 give better code on the average. Want to experiment? */
5365
5366 /* See if there is a later reload that has a class different from our
5367 class that intersects our class or that requires fewer registers
5368 than our reload. If so, we must allocate a register to this
5369 reload now, since that reload might inherit a previous reload
5370 and take the only available register in our class. Don't do this
5371 for optional reloads since they will force all previous reloads
5372 to be allocated. Also don't do this for reloads that have been
5373 turned off. */
5374
5375 for (i = j + 1; i < n_reloads; i++)
5376 {
5377 int s = reload_order[i];
5378
5379 if ((reload_in[s] == 0 && reload_out[s] == 0
5380 && ! reload_secondary_p[s])
5381 || reload_optional[s])
5382 continue;
5383
5384 if ((reload_reg_class[s] != reload_reg_class[r]
5385 && reg_classes_intersect_p (reload_reg_class[r],
5386 reload_reg_class[s]))
5387 || reload_nregs[s] < reload_nregs[r])
5388 break;
5389 }
5390
5391 if (i == n_reloads)
5392 continue;
5393
5394 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5395 #endif
5396 }
5397
5398 /* Now allocate reload registers for anything non-optional that
5399 didn't get one yet. */
5400 for (j = 0; j < n_reloads; j++)
5401 {
5402 register int r = reload_order[j];
5403
5404 /* Ignore reloads that got marked inoperative. */
5405 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5406 continue;
5407
5408 /* Skip reloads that already have a register allocated or are
5409 optional. */
5410 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5411 continue;
5412
5413 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5414 break;
5415 }
5416
5417 /* If that loop got all the way, we have won. */
5418 if (j == n_reloads)
5419 break;
5420
5421 fail:
5422 /* Loop around and try without any inheritance. */
5423 /* First undo everything done by the failed attempt
5424 to allocate with inheritance. */
5425 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5426 sizeof reload_reg_rtx);
5427 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5428 sizeof reload_inherited);
5429 bcopy ((char *) save_reload_inheritance_insn,
5430 (char *) reload_inheritance_insn,
5431 sizeof reload_inheritance_insn);
5432 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5433 sizeof reload_override_in);
5434 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5435 sizeof reload_spill_index);
5436 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5437 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5438 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5439 save_reload_reg_used_in_op_addr);
5440 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5441 save_reload_reg_used_in_op_addr_reload);
5442 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5443 save_reload_reg_used_in_insn);
5444 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5445 save_reload_reg_used_in_other_addr);
5446
5447 for (i = 0; i < reload_n_operands; i++)
5448 {
5449 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5450 save_reload_reg_used_in_input[i]);
5451 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5452 save_reload_reg_used_in_output[i]);
5453 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5454 save_reload_reg_used_in_input_addr[i]);
5455 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5456 save_reload_reg_used_in_output_addr[i]);
5457 }
5458 }
5459
5460 /* If we thought we could inherit a reload, because it seemed that
5461 nothing else wanted the same reload register earlier in the insn,
5462 verify that assumption, now that all reloads have been assigned. */
5463
5464 for (j = 0; j < n_reloads; j++)
5465 {
5466 register int r = reload_order[j];
5467
5468 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5469 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5470 reload_opnum[r],
5471 reload_when_needed[r]))
5472 reload_inherited[r] = 0;
5473
5474 /* If we found a better place to reload from,
5475 validate it in the same fashion, if it is a reload reg. */
5476 if (reload_override_in[r]
5477 && (GET_CODE (reload_override_in[r]) == REG
5478 || GET_CODE (reload_override_in[r]) == SUBREG))
5479 {
5480 int regno = true_regnum (reload_override_in[r]);
5481 if (spill_reg_order[regno] >= 0
5482 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5483 reload_when_needed[r]))
5484 reload_override_in[r] = 0;
5485 }
5486 }
5487
5488 /* Now that reload_override_in is known valid,
5489 actually override reload_in. */
5490 for (j = 0; j < n_reloads; j++)
5491 if (reload_override_in[j])
5492 reload_in[j] = reload_override_in[j];
5493
5494 /* If this reload won't be done because it has been cancelled or is
5495 optional and not inherited, clear reload_reg_rtx so other
5496 routines (such as subst_reloads) don't get confused. */
5497 for (j = 0; j < n_reloads; j++)
5498 if (reload_reg_rtx[j] != 0
5499 && ((reload_optional[j] && ! reload_inherited[j])
5500 || (reload_in[j] == 0 && reload_out[j] == 0
5501 && ! reload_secondary_p[j])))
5502 {
5503 int regno = true_regnum (reload_reg_rtx[j]);
5504
5505 if (spill_reg_order[regno] >= 0)
5506 clear_reload_reg_in_use (regno, reload_opnum[j],
5507 reload_when_needed[j], reload_mode[j]);
5508 reload_reg_rtx[j] = 0;
5509 }
5510
5511 /* Record which pseudos and which spill regs have output reloads. */
5512 for (j = 0; j < n_reloads; j++)
5513 {
5514 register int r = reload_order[j];
5515
5516 i = reload_spill_index[r];
5517
5518 /* I is nonneg if this reload used one of the spill regs.
5519 If reload_reg_rtx[r] is 0, this is an optional reload
5520 that we opted to ignore. */
5521 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5522 && reload_reg_rtx[r] != 0)
5523 {
5524 register int nregno = REGNO (reload_out[r]);
5525 int nr = 1;
5526
5527 if (nregno < FIRST_PSEUDO_REGISTER)
5528 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5529
5530 while (--nr >= 0)
5531 reg_has_output_reload[nregno + nr] = 1;
5532
5533 if (i >= 0)
5534 {
5535 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5536 while (--nr >= 0)
5537 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5538 }
5539
5540 if (reload_when_needed[r] != RELOAD_OTHER
5541 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5542 && reload_when_needed[r] != RELOAD_FOR_INSN)
5543 abort ();
5544 }
5545 }
5546 }
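
/* An illustrative sketch (not part of reload): the overall shape of the
   save-and-retry scheme used by choose_reload_regs above.  The helpers
   named here are hypothetical; the real code saves and restores the
   reload_* arrays and HARD_REG_SETs by hand and inlines the per-reload
   allocation.  The #if 0 keeps this sketch out of the build.  */
#if 0
static int
assign_reload_regs_with_retry (insn)
     rtx insn;
{
  int inheritance;

  /* Try first with inheritance (only when optimizing); if some mandatory
     reload cannot be given a register, undo everything and retry with
     inheritance turned off.  */
  for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
    {
      save_reload_choice_state ();			/* hypothetical helper */
      if (assign_all_reload_regs (insn, inheritance))	/* hypothetical helper */
	return 1;
      restore_reload_choice_state ();			/* hypothetical helper */
    }
  return 0;
}
#endif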
5547 \f
5548 /* If SMALL_REGISTER_CLASSES is defined, we may not have merged two
5549 reloads of the same item for fear that we might not have enough reload
5550 registers. However, normally they will get the same reload register
5551 and hence actually need not be loaded twice.
5552
5553 Here we check for the most common case of this phenomenon: when we have
5554 a number of reloads for the same object, each of which was allocated
5555 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5556 reload, and is not modified in the insn itself. If we find such,
5557 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5558 This will not increase the number of spill registers needed and will
5559 prevent redundant code. */
5560
5561 #ifdef SMALL_REGISTER_CLASSES
5562
5563 static void
5564 merge_assigned_reloads (insn)
5565 rtx insn;
5566 {
5567 int i, j;
5568
5569 /* Scan all the reloads looking for ones that only load values and
5570 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5571 assigned and not modified by INSN. */
5572
5573 for (i = 0; i < n_reloads; i++)
5574 {
5575 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5576 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5577 || reg_set_p (reload_reg_rtx[i], insn))
5578 continue;
5579
5580 /* Look at all other reloads. Ensure that the only use of this
5581 reload_reg_rtx is in a reload that just loads the same value
5582 as we do. Note that any secondary reloads must be of the identical
5583 class since the values, modes, and result registers are the
5584 same, so we need not do anything with any secondary reloads. */
5585
5586 for (j = 0; j < n_reloads; j++)
5587 {
5588 if (i == j || reload_reg_rtx[j] == 0
5589 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5590 reload_reg_rtx[i]))
5591 continue;
5592
5593 /* If the reload regs aren't exactly the same (e.g., different modes)
5594 or if the values are different, we can't merge anything with this
5595 reload register. */
5596
5597 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5598 || reload_out[j] != 0 || reload_in[j] == 0
5599 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5600 break;
5601 }
5602
5603 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5604 we, in fact, found any matching reloads. */
5605
5606 if (j == n_reloads)
5607 {
5608 for (j = 0; j < n_reloads; j++)
5609 if (i != j && reload_reg_rtx[j] != 0
5610 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5611 {
5612 reload_when_needed[i] = RELOAD_OTHER;
5613 reload_in[j] = 0;
5614 transfer_replacements (i, j);
5615 }
5616
5617 /* If this is now RELOAD_OTHER, look for any reloads that load
5618 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5619 if they were for inputs, RELOAD_OTHER for outputs. Note that
5620 this test is equivalent to looking for reloads for this operand
5621 number. */
5622
5623 if (reload_when_needed[i] == RELOAD_OTHER)
5624 for (j = 0; j < n_reloads; j++)
5625 if (reload_in[j] != 0
5626 && reload_when_needed[j] != RELOAD_OTHER
5627 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5628 reload_in[i]))
5629 reload_when_needed[j]
5630 = reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
5631 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5632 }
5633 }
5634 }
5635 #endif /* SMALL_REGISTER_CLASSES */
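
/* An illustrative sketch (not part of reload): the core of the merging
   done by merge_assigned_reloads above, with the overlap tests and the
   address-reload adjustments stripped away.  The #if 0 keeps this sketch
   out of the build.  */
#if 0
static void
merge_duplicate_input_reloads ()
{
  register int i, j;

  for (i = 0; i < n_reloads; i++)
    for (j = i + 1; j < n_reloads; j++)
      if (reload_in[i] != 0 && reload_in[j] != 0
	  && reload_out[i] == 0 && reload_out[j] == 0
	  && reload_reg_rtx[i] != 0
	  && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
	  && rtx_equal_p (reload_in[i], reload_in[j]))
	{
	  /* Both reloads load the same value into the same register:
	     keep reload I, emit it once for the whole insn, and cancel
	     reload J.  */
	  reload_when_needed[i] = RELOAD_OTHER;
	  reload_in[j] = 0;
	}
}
#endif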
5636 \f
5637 /* Output insns to reload values in and out of the chosen reload regs. */
5638
5639 static void
5640 emit_reload_insns (insn)
5641 rtx insn;
5642 {
5643 register int j;
5644 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5645 rtx other_input_address_reload_insns = 0;
5646 rtx other_input_reload_insns = 0;
5647 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5648 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5649 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5650 rtx operand_reload_insns = 0;
5651 rtx other_operand_reload_insns = 0;
5652 rtx following_insn = NEXT_INSN (insn);
5653 rtx before_insn = insn;
5654 int special;
5655 /* Values to be put in spill_reg_store are put here first. */
5656 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5657
5658 for (j = 0; j < reload_n_operands; j++)
5659 input_reload_insns[j] = input_address_reload_insns[j]
5660 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5661
5662 /* Now output the instructions to copy the data into and out of the
5663 reload registers. Do these in the order that the reloads were reported,
5664 since reloads of base and index registers precede reloads of operands
5665 and the operands may need the base and index registers reloaded. */
5666
5667 for (j = 0; j < n_reloads; j++)
5668 {
5669 register rtx old;
5670 rtx oldequiv_reg = 0;
5671
5672 if (reload_spill_index[j] >= 0)
5673 new_spill_reg_store[reload_spill_index[j]] = 0;
5674
5675 old = reload_in[j];
5676 if (old != 0 && ! reload_inherited[j]
5677 && ! rtx_equal_p (reload_reg_rtx[j], old)
5678 && reload_reg_rtx[j] != 0)
5679 {
5680 register rtx reloadreg = reload_reg_rtx[j];
5681 rtx oldequiv = 0;
5682 enum machine_mode mode;
5683 rtx *where;
5684
5685 /* Determine the mode to reload in.
5686 This is very tricky because we have three to choose from.
5687 There is the mode the insn operand wants (reload_inmode[J]).
5688 There is the mode of the reload register RELOADREG.
5689 There is the intrinsic mode of the operand, which we could find
5690 by stripping some SUBREGs.
5691 It turns out that RELOADREG's mode is irrelevant:
5692 we can change that arbitrarily.
5693
5694 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5695 then the reload reg may not support QImode moves, so use SImode.
5696 If foo is in memory due to spilling a pseudo reg, this is safe,
5697 because the QImode value is in the least significant part of a
5698 slot big enough for a SImode. If foo is some other sort of
5699 memory reference, then it is impossible to reload this case,
5700 so previous passes had better make sure this never happens.
5701
5702 Then consider a one-word union which has SImode and one of its
5703 members is a float, being fetched as (SUBREG:SF union:SI).
5704 We must fetch that as SFmode because we could be loading into
5705 a float-only register. In this case OLD's mode is correct.
5706
5707 Consider an immediate integer: it has VOIDmode. Here we need
5708 to get a mode from something else.
5709
5710 In some cases, there is a fourth mode, the operand's
5711 containing mode. If the insn specifies a containing mode for
5712 this operand, it overrides all others.
5713
5714 I am not sure whether the algorithm here is always right,
5715 but it does the right things in those cases. */
5716
5717 mode = GET_MODE (old);
5718 if (mode == VOIDmode)
5719 mode = reload_inmode[j];
5720
5721 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5722 /* If we need a secondary register for this operation, see if
5723 the value is already in a register in that class. Don't
5724 do this if the secondary register will be used as a scratch
5725 register. */
5726
5727 if (reload_secondary_in_reload[j] >= 0
5728 && reload_secondary_in_icode[j] == CODE_FOR_nothing
5729 && optimize)
5730 oldequiv
5731 = find_equiv_reg (old, insn,
5732 reload_reg_class[reload_secondary_in_reload[j]],
5733 -1, NULL_PTR, 0, mode);
5734 #endif
5735
5736 /* If reloading from memory, see if there is a register
5737 that already holds the same value. If so, reload from there.
5738 We can pass 0 as the reload_reg_p argument because
5739 any other reload has either already been emitted,
5740 in which case find_equiv_reg will see the reload-insn,
5741 or has yet to be emitted, in which case it doesn't matter
5742 because we will use this equiv reg right away. */
5743
5744 if (oldequiv == 0 && optimize
5745 && (GET_CODE (old) == MEM
5746 || (GET_CODE (old) == REG
5747 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5748 && reg_renumber[REGNO (old)] < 0)))
5749 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5750 -1, NULL_PTR, 0, mode);
5751
5752 if (oldequiv)
5753 {
5754 int regno = true_regnum (oldequiv);
5755
5756 /* If OLDEQUIV is a spill register, don't use it for this
5757 if any other reload needs it at an earlier stage of this insn
5758 or at this stage. */
5759 if (spill_reg_order[regno] >= 0
5760 && (! reload_reg_free_p (regno, reload_opnum[j],
5761 reload_when_needed[j])
5762 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5763 reload_when_needed[j])))
5764 oldequiv = 0;
5765
5766 /* If OLDEQUIV is not a spill register,
5767 don't use it if any other reload wants it. */
5768 if (spill_reg_order[regno] < 0)
5769 {
5770 int k;
5771 for (k = 0; k < n_reloads; k++)
5772 if (reload_reg_rtx[k] != 0 && k != j
5773 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5774 oldequiv))
5775 {
5776 oldequiv = 0;
5777 break;
5778 }
5779 }
5780
5781 /* If it is no cheaper to copy from OLDEQUIV into the
5782 reload register than it would be to move from memory,
5783 don't use it. Likewise, if we need a secondary register
5784 or memory. */
5785
5786 if (oldequiv != 0
5787 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5788 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5789 reload_reg_class[j])
5790 >= MEMORY_MOVE_COST (mode)))
5791 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5792 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5793 mode, oldequiv)
5794 != NO_REGS)
5795 #endif
5796 #ifdef SECONDARY_MEMORY_NEEDED
5797 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5798 REGNO_REG_CLASS (regno),
5799 mode)
5800 #endif
5801 ))
5802 oldequiv = 0;
5803 }
5804
5805 if (oldequiv == 0)
5806 oldequiv = old;
5807 else if (GET_CODE (oldequiv) == REG)
5808 oldequiv_reg = oldequiv;
5809 else if (GET_CODE (oldequiv) == SUBREG)
5810 oldequiv_reg = SUBREG_REG (oldequiv);
5811
5812 /* If we are reloading from a register that was recently stored in
5813 with an output-reload, see if we can prove there was
5814 actually no need to store the old value in it. */
5815
5816 if (optimize && GET_CODE (oldequiv) == REG
5817 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5818 && spill_reg_order[REGNO (oldequiv)] >= 0
5819 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
5820 && find_reg_note (insn, REG_DEAD, reload_in[j])
5821 /* This is unsafe if operand occurs more than once in current
5822 insn. Perhaps some occurrences weren't reloaded. */
5823 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5824 delete_output_reload
5825 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5826
5827 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5828 then load RELOADREG from OLDEQUIV. Note that we cannot use
5829 gen_lowpart_common since it can do the wrong thing when
5830 RELOADREG has a multi-word mode. Note that RELOADREG
5831 must always be a REG here. */
5832
5833 if (GET_MODE (reloadreg) != mode)
5834 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5835 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5836 oldequiv = SUBREG_REG (oldequiv);
5837 if (GET_MODE (oldequiv) != VOIDmode
5838 && mode != GET_MODE (oldequiv))
5839 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5840
5841 /* Switch to the right place to emit the reload insns. */
5842 switch (reload_when_needed[j])
5843 {
5844 case RELOAD_OTHER:
5845 where = &other_input_reload_insns;
5846 break;
5847 case RELOAD_FOR_INPUT:
5848 where = &input_reload_insns[reload_opnum[j]];
5849 break;
5850 case RELOAD_FOR_INPUT_ADDRESS:
5851 where = &input_address_reload_insns[reload_opnum[j]];
5852 break;
5853 case RELOAD_FOR_OUTPUT_ADDRESS:
5854 where = &output_address_reload_insns[reload_opnum[j]];
5855 break;
5856 case RELOAD_FOR_OPERAND_ADDRESS:
5857 where = &operand_reload_insns;
5858 break;
5859 case RELOAD_FOR_OPADDR_ADDR:
5860 where = &other_operand_reload_insns;
5861 break;
5862 case RELOAD_FOR_OTHER_ADDRESS:
5863 where = &other_input_address_reload_insns;
5864 break;
5865 default:
5866 abort ();
5867 }
5868
5869 push_to_sequence (*where);
5870 special = 0;
5871
5872 /* Auto-increment addresses must be reloaded in a special way. */
5873 if (GET_CODE (oldequiv) == POST_INC
5874 || GET_CODE (oldequiv) == POST_DEC
5875 || GET_CODE (oldequiv) == PRE_INC
5876 || GET_CODE (oldequiv) == PRE_DEC)
5877 {
5878 /* We are not going to bother supporting the case where an
5879 incremented register can't be copied directly from
5880 OLDEQUIV since this seems highly unlikely. */
5881 if (reload_secondary_in_reload[j] >= 0)
5882 abort ();
5883 /* Prevent normal processing of this reload. */
5884 special = 1;
5885 /* Output a special code sequence for this case. */
5886 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5887 }
5888
5889 /* If we are reloading a pseudo-register that was set by the previous
5890 insn, see if we can get rid of that pseudo-register entirely
5891 by redirecting the previous insn into our reload register. */
5892
5893 else if (optimize && GET_CODE (old) == REG
5894 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5895 && dead_or_set_p (insn, old)
5896 /* This is unsafe if some other reload
5897 uses the same reg first. */
5898 && reload_reg_free_before_p (REGNO (reloadreg),
5899 reload_opnum[j],
5900 reload_when_needed[j]))
5901 {
5902 rtx temp = PREV_INSN (insn);
5903 while (temp && GET_CODE (temp) == NOTE)
5904 temp = PREV_INSN (temp);
5905 if (temp
5906 && GET_CODE (temp) == INSN
5907 && GET_CODE (PATTERN (temp)) == SET
5908 && SET_DEST (PATTERN (temp)) == old
5909 /* Make sure we can access insn_operand_constraint. */
5910 && asm_noperands (PATTERN (temp)) < 0
5911 /* This is unsafe if prev insn rejects our reload reg. */
5912 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5913 reloadreg)
5914 /* This is unsafe if operand occurs more than once in current
5915 insn. Perhaps some occurrences aren't reloaded. */
5916 && count_occurrences (PATTERN (insn), old) == 1
5917 /* Don't risk splitting a matching pair of operands. */
5918 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5919 {
5920 /* Store into the reload register instead of the pseudo. */
5921 SET_DEST (PATTERN (temp)) = reloadreg;
5922 /* If these are the only uses of the pseudo reg,
5923 pretend for GDB it lives in the reload reg we used. */
5924 if (reg_n_deaths[REGNO (old)] == 1
5925 && reg_n_sets[REGNO (old)] == 1)
5926 {
5927 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5928 alter_reg (REGNO (old), -1);
5929 }
5930 special = 1;
5931 }
5932 }
5933
5934 /* We can't do that, so output an insn to load RELOADREG. */
5935
5936 if (! special)
5937 {
5938 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5939 rtx second_reload_reg = 0;
5940 enum insn_code icode;
5941
5942 /* If we have a secondary reload, pick up the secondary register
5943 and icode, if any. If OLDEQUIV and OLD are different or
5944 if this is an in-out reload, recompute whether or not we
5945 still need a secondary register and what the icode should
5946 be. If we still need a secondary register and the class or
5947 icode is different, go back to reloading from OLD if using
5948 OLDEQUIV means that we got the wrong type of register. We
5949 cannot have different class or icode due to an in-out reload
5950 because we don't make such reloads when both the input and
5951 output need secondary reload registers. */
5952
5953 if (reload_secondary_in_reload[j] >= 0)
5954 {
5955 int secondary_reload = reload_secondary_in_reload[j];
5956 rtx real_oldequiv = oldequiv;
5957 rtx real_old = old;
5958
5959 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5960 and similarly for OLD.
5961 See comments in get_secondary_reload in reload.c. */
5962 if (GET_CODE (oldequiv) == REG
5963 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5964 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5965 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5966
5967 if (GET_CODE (old) == REG
5968 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5969 && reg_equiv_mem[REGNO (old)] != 0)
5970 real_old = reg_equiv_mem[REGNO (old)];
5971
5972 second_reload_reg = reload_reg_rtx[secondary_reload];
5973 icode = reload_secondary_in_icode[j];
5974
5975 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5976 || (reload_in[j] != 0 && reload_out[j] != 0))
5977 {
5978 enum reg_class new_class
5979 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5980 mode, real_oldequiv);
5981
5982 if (new_class == NO_REGS)
5983 second_reload_reg = 0;
5984 else
5985 {
5986 enum insn_code new_icode;
5987 enum machine_mode new_mode;
5988
5989 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5990 REGNO (second_reload_reg)))
5991 oldequiv = old, real_oldequiv = real_old;
5992 else
5993 {
5994 new_icode = reload_in_optab[(int) mode];
5995 if (new_icode != CODE_FOR_nothing
5996 && ((insn_operand_predicate[(int) new_icode][0]
5997 && ! ((*insn_operand_predicate[(int) new_icode][0])
5998 (reloadreg, mode)))
5999 || (insn_operand_predicate[(int) new_icode][1]
6000 && ! ((*insn_operand_predicate[(int) new_icode][1])
6001 (real_oldequiv, mode)))))
6002 new_icode = CODE_FOR_nothing;
6003
6004 if (new_icode == CODE_FOR_nothing)
6005 new_mode = mode;
6006 else
6007 new_mode = insn_operand_mode[(int) new_icode][2];
6008
6009 if (GET_MODE (second_reload_reg) != new_mode)
6010 {
6011 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6012 new_mode))
6013 oldequiv = old, real_oldequiv = real_old;
6014 else
6015 second_reload_reg
6016 = gen_rtx (REG, new_mode,
6017 REGNO (second_reload_reg));
6018 }
6019 }
6020 }
6021 }
6022
6023 /* If we still need a secondary reload register, check
6024 to see if it is being used as a scratch or intermediate
6025 register and generate code appropriately. If we need
6026 a scratch register, use REAL_OLDEQUIV since the form of
6027 the insn may depend on the actual address if it is
6028 a MEM. */
6029
6030 if (second_reload_reg)
6031 {
6032 if (icode != CODE_FOR_nothing)
6033 {
6034 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6035 second_reload_reg));
6036 special = 1;
6037 }
6038 else
6039 {
6040 /* See if we need a scratch register to load the
6041 intermediate register (a tertiary reload). */
6042 enum insn_code tertiary_icode
6043 = reload_secondary_in_icode[secondary_reload];
6044
6045 if (tertiary_icode != CODE_FOR_nothing)
6046 {
6047 rtx third_reload_reg
6048 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6049
6050 emit_insn ((GEN_FCN (tertiary_icode)
6051 (second_reload_reg, real_oldequiv,
6052 third_reload_reg)));
6053 }
6054 else
6055 gen_reload (second_reload_reg, oldequiv,
6056 reload_opnum[j],
6057 reload_when_needed[j]);
6058
6059 oldequiv = second_reload_reg;
6060 }
6061 }
6062 }
6063 #endif
6064
6065 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6066 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6067 reload_when_needed[j]);
6068
6069 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6070 /* We may have to make a REG_DEAD note for the secondary reload
6071 register in the insns we just made. Find the last insn that
6072 mentioned the register. */
6073 if (! special && second_reload_reg
6074 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6075 {
6076 rtx prev;
6077
6078 for (prev = get_last_insn (); prev;
6079 prev = PREV_INSN (prev))
6080 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6081 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6082 PATTERN (prev)))
6083 {
6084 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6085 second_reload_reg,
6086 REG_NOTES (prev));
6087 break;
6088 }
6089 }
6090 #endif
6091 }
6092
6093 /* End this sequence. */
6094 *where = get_insns ();
6095 end_sequence ();
6096 }
6097
6098 /* Add a note saying the input reload reg
6099 dies in this insn, if anyone cares. */
6100 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6101 if (old != 0
6102 && reload_reg_rtx[j] != old
6103 && reload_reg_rtx[j] != 0
6104 && reload_out[j] == 0
6105 && ! reload_inherited[j]
6106 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6107 {
6108 register rtx reloadreg = reload_reg_rtx[j];
6109
6110 #if 0
6111 /* We can't abort here because we need to support this for sched.c.
6112 It's not terrible to miss a REG_DEAD note, but we should try
6113 to figure out how to do this correctly. */
6114 /* The code below is incorrect for address-only reloads. */
6115 if (reload_when_needed[j] != RELOAD_OTHER
6116 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6117 abort ();
6118 #endif
6119
6120 /* Add a death note to this insn, for an input reload. */
6121
6122 if ((reload_when_needed[j] == RELOAD_OTHER
6123 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6124 && ! dead_or_set_p (insn, reloadreg))
6125 REG_NOTES (insn)
6126 = gen_rtx (EXPR_LIST, REG_DEAD,
6127 reloadreg, REG_NOTES (insn));
6128 }
6129
6130 /* When we inherit a reload, the last marked death of the reload reg
6131 may no longer really be a death. */
6132 if (reload_reg_rtx[j] != 0
6133 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6134 && reload_inherited[j])
6135 {
6136 /* Handle inheriting an output reload.
6137 Remove the death note from the output reload insn. */
6138 if (reload_spill_index[j] >= 0
6139 && GET_CODE (reload_in[j]) == REG
6140 && spill_reg_store[reload_spill_index[j]] != 0
6141 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6142 REG_DEAD, REGNO (reload_reg_rtx[j])))
6143 remove_death (REGNO (reload_reg_rtx[j]),
6144 spill_reg_store[reload_spill_index[j]]);
6145 /* Likewise for input reloads that were inherited. */
6146 else if (reload_spill_index[j] >= 0
6147 && GET_CODE (reload_in[j]) == REG
6148 && spill_reg_store[reload_spill_index[j]] == 0
6149 && reload_inheritance_insn[j] != 0
6150 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6151 REGNO (reload_reg_rtx[j])))
6152 remove_death (REGNO (reload_reg_rtx[j]),
6153 reload_inheritance_insn[j]);
6154 else
6155 {
6156 rtx prev;
6157
6158 /* We got this register from find_equiv_reg.
6159 Search back for its last death note and get rid of it.
6160 But don't search back too far.
6161 Don't go past a place where this reg is set,
6162 since a death note before that remains valid. */
6163 for (prev = PREV_INSN (insn);
6164 prev && GET_CODE (prev) != CODE_LABEL;
6165 prev = PREV_INSN (prev))
6166 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6167 && dead_or_set_p (prev, reload_reg_rtx[j]))
6168 {
6169 if (find_regno_note (prev, REG_DEAD,
6170 REGNO (reload_reg_rtx[j])))
6171 remove_death (REGNO (reload_reg_rtx[j]), prev);
6172 break;
6173 }
6174 }
6175 }
6176
6177 /* We might have used find_equiv_reg above to choose an alternate
6178 place from which to reload. If so, and it died, we need to remove
6179 that death and move it to one of the insns we just made. */
6180
6181 if (oldequiv_reg != 0
6182 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6183 {
6184 rtx prev, prev1;
6185
6186 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6187 prev = PREV_INSN (prev))
6188 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6189 && dead_or_set_p (prev, oldequiv_reg))
6190 {
6191 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6192 {
6193 for (prev1 = this_reload_insn;
6194 prev1; prev1 = PREV_INSN (prev1))
6195 if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
6196 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6197 PATTERN (prev1)))
6198 {
6199 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6200 oldequiv_reg,
6201 REG_NOTES (prev1));
6202 break;
6203 }
6204 remove_death (REGNO (oldequiv_reg), prev);
6205 }
6206 break;
6207 }
6208 }
6209 #endif
6210
6211 /* If we are reloading a register that was recently stored in with an
6212 output-reload, see if we can prove there was
6213 actually no need to store the old value in it. */
6214
6215 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6216 && reload_in[j] != 0
6217 && GET_CODE (reload_in[j]) == REG
6218 #if 0
6219 /* There doesn't seem to be any reason to restrict this to pseudos
6220 and doing so loses in the case where we are copying from a
6221 register of the wrong class. */
6222 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6223 #endif
6224 && spill_reg_store[reload_spill_index[j]] != 0
6225 /* This is unsafe if some other reload uses the same reg first. */
6226 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6227 reload_opnum[j], reload_when_needed[j])
6228 && dead_or_set_p (insn, reload_in[j])
6229 /* This is unsafe if operand occurs more than once in current
6230 insn. Perhaps some occurrences weren't reloaded. */
6231 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6232 delete_output_reload (insn, j,
6233 spill_reg_store[reload_spill_index[j]]);
6234
6235 /* Input-reloading is done. Now do output-reloading,
6236 storing the value from the reload-register after the main insn
6237 if reload_out[j] is nonzero.
6238
6239 ??? At some point we need to support handling output reloads of
6240 JUMP_INSNs or insns that set cc0. */
6241 old = reload_out[j];
6242 if (old != 0
6243 && reload_reg_rtx[j] != old
6244 && reload_reg_rtx[j] != 0)
6245 {
6246 register rtx reloadreg = reload_reg_rtx[j];
6247 register rtx second_reloadreg = 0;
6248 rtx note, p;
6249 enum machine_mode mode;
6250 int special = 0;
6251
6252 /* An output operand that dies right away does need a reload,
6253 but need not be copied from it. Show the new location in the
6254 REG_UNUSED note. */
6255 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6256 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6257 {
6258 XEXP (note, 0) = reload_reg_rtx[j];
6259 continue;
6260 }
6261 else if (GET_CODE (old) == SCRATCH)
6262 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6263 but we don't want to make an output reload. */
6264 continue;
6265
6266 #if 0
6267 /* Strip off of OLD any size-increasing SUBREGs such as
6268 (SUBREG:SI foo:QI 0). */
6269
6270 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6271 && (GET_MODE_SIZE (GET_MODE (old))
6272 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6273 old = SUBREG_REG (old);
6274 #endif
6275
6276 /* If this is a JUMP_INSN, we can't support output reloads yet. */
6277 if (GET_CODE (insn) == JUMP_INSN)
6278 abort ();
6279
6280 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6281
6282 /* Determine the mode to reload in.
6283 See comments above (for input reloading). */
6284
6285 mode = GET_MODE (old);
6286 if (mode == VOIDmode)
6287 {
6288 /* VOIDmode should never happen for an output. */
6289 if (asm_noperands (PATTERN (insn)) < 0)
6290 /* It's the compiler's fault. */
6291 fatal_insn ("VOIDmode on an output", insn);
6292 error_for_asm (insn, "output operand is constant in `asm'");
6293 /* Prevent crash--use something we know is valid. */
6294 mode = word_mode;
6295 old = gen_rtx (REG, mode, REGNO (reloadreg));
6296 }
6297
6298 if (GET_MODE (reloadreg) != mode)
6299 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6300
6301 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6302
6303 /* If we need two reload regs, set RELOADREG to the intermediate
6304 one, since it will be stored into OLD. We might need a secondary
6305 register only for an input reload, so check again here. */
6306
6307 if (reload_secondary_out_reload[j] >= 0)
6308 {
6309 rtx real_old = old;
6310
6311 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6312 && reg_equiv_mem[REGNO (old)] != 0)
6313 real_old = reg_equiv_mem[REGNO (old)];
6314
6315 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6316 mode, real_old)
6317 != NO_REGS))
6318 {
6319 second_reloadreg = reloadreg;
6320 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6321
6322 /* See if RELOADREG is to be used as a scratch register
6323 or as an intermediate register. */
6324 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6325 {
6326 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6327 (real_old, second_reloadreg, reloadreg)));
6328 special = 1;
6329 }
6330 else
6331 {
6332 /* See if we need both a scratch and intermediate reload
6333 register. */
6334
6335 int secondary_reload = reload_secondary_out_reload[j];
6336 enum insn_code tertiary_icode
6337 = reload_secondary_out_icode[secondary_reload];
6338
6339 if (GET_MODE (reloadreg) != mode)
6340 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6341
6342 if (tertiary_icode != CODE_FOR_nothing)
6343 {
6344 rtx third_reloadreg
6345 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6346
6347 /* Copy primary reload reg to secondary reload reg
6348 (note that these have been swapped above), then copy
6349 secondary reload reg to OLD using our insn. */
6350
6351 gen_reload (reloadreg, second_reloadreg,
6352 reload_opnum[j], reload_when_needed[j]);
6353 emit_insn ((GEN_FCN (tertiary_icode)
6354 (real_old, reloadreg, third_reloadreg)));
6355 special = 1;
6356 }
6357
6358 else
6359 /* Copy between the reload regs here and then to
6360 OUT later. */
6361
6362 gen_reload (reloadreg, second_reloadreg,
6363 reload_opnum[j], reload_when_needed[j]);
6364 }
6365 }
6366 }
6367 #endif
6368
6369 /* Output the last reload insn. */
6370 if (! special)
6371 gen_reload (old, reloadreg, reload_opnum[j],
6372 reload_when_needed[j]);
6373
6374 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6375 /* If final will look at death notes for this reg,
6376 put one on the last output-reload insn to use it. Similarly
6377 for any secondary register. */
6378 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6379 for (p = get_last_insn (); p; p = PREV_INSN (p))
6380 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6381 && reg_overlap_mentioned_for_reload_p (reloadreg,
6382 PATTERN (p)))
6383 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6384 reloadreg, REG_NOTES (p));
6385
6386 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6387 if (! special && second_reloadreg != 0
6388 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6389 for (p = get_last_insn (); p; p = PREV_INSN (p))
6390 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6391 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6392 PATTERN (p)))
6393 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6394 second_reloadreg, REG_NOTES (p));
6395 #endif
6396 #endif
6397 /* Look at all insns we emitted, just to be safe. */
6398 for (p = get_insns (); p; p = NEXT_INSN (p))
6399 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6400 {
6401 /* If this output reload doesn't come from a spill reg,
6402 clear any memory of reloaded copies of the pseudo reg.
6403 If this output reload comes from a spill reg,
6404 reg_has_output_reload will make this do nothing. */
6405 note_stores (PATTERN (p), forget_old_reloads_1);
6406
6407 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6408 && reload_spill_index[j] >= 0)
6409 new_spill_reg_store[reload_spill_index[j]] = p;
6410 }
6411
6412 output_reload_insns[reload_opnum[j]] = get_insns ();
6413 end_sequence ();
6414 }
6415 }
6416
6417 /* Now write all the insns we made for reloads in the order expected by
6418 the allocation functions. Prior to the insn being reloaded, we write
6419 the following reloads:
6420
6421 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6422
6423 RELOAD_OTHER reloads.
6424
6425 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6426 the RELOAD_FOR_INPUT reload for the operand.
6427
6428 RELOAD_FOR_OPADDR_ADDR reloads.
6429
6430 RELOAD_FOR_OPERAND_ADDRESS reloads.
6431
6432 After the insn being reloaded, we write the following:
6433
6434 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6435 the RELOAD_FOR_OUTPUT reload for that operand. */
6436
6437 emit_insns_before (other_input_address_reload_insns, before_insn);
6438 emit_insns_before (other_input_reload_insns, before_insn);
6439
6440 for (j = 0; j < reload_n_operands; j++)
6441 {
6442 emit_insns_before (input_address_reload_insns[j], before_insn);
6443 emit_insns_before (input_reload_insns[j], before_insn);
6444 }
6445
6446 emit_insns_before (other_operand_reload_insns, before_insn);
6447 emit_insns_before (operand_reload_insns, before_insn);
6448
6449 for (j = 0; j < reload_n_operands; j++)
6450 {
6451 emit_insns_before (output_address_reload_insns[j], following_insn);
6452 emit_insns_before (output_reload_insns[j], following_insn);
6453 }
6454
6455 /* Move death notes from INSN
6456 to output-operand-address and output reload insns. */
6457 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6458 {
6459 rtx insn1;
6460 /* Loop over those insns, last ones first. */
6461 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6462 insn1 = PREV_INSN (insn1))
6463 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6464 {
6465 rtx source = SET_SRC (PATTERN (insn1));
6466 rtx dest = SET_DEST (PATTERN (insn1));
6467
6468 /* The note we will examine next. */
6469 rtx reg_notes = REG_NOTES (insn);
6470 /* The place that pointed to this note. */
6471 rtx *prev_reg_note = &REG_NOTES (insn);
6472
6473 /* If the note is for something used in the source of this
6474 reload insn, or in the output address, move the note. */
6475 while (reg_notes)
6476 {
6477 rtx next_reg_notes = XEXP (reg_notes, 1);
6478 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6479 && GET_CODE (XEXP (reg_notes, 0)) == REG
6480 && ((GET_CODE (dest) != REG
6481 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6482 dest))
6483 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6484 source)))
6485 {
6486 *prev_reg_note = next_reg_notes;
6487 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6488 REG_NOTES (insn1) = reg_notes;
6489 }
6490 else
6491 prev_reg_note = &XEXP (reg_notes, 1);
6492
6493 reg_notes = next_reg_notes;
6494 }
6495 }
6496 }
6497 #endif
6498
6499 /* For all the spill regs newly reloaded in this instruction,
6500 record what they were reloaded from, so subsequent instructions
6501 can inherit the reloads.
6502
6503 Update spill_reg_store for the reloads of this insn.
6504 Copy the elements that were updated in the loop above. */
6505
6506 for (j = 0; j < n_reloads; j++)
6507 {
6508 register int r = reload_order[j];
6509 register int i = reload_spill_index[r];
6510
6511 /* I is nonneg if this reload used one of the spill regs.
6512 If reload_reg_rtx[r] is 0, this is an optional reload
6513 that we opted to ignore.
6514
6515 Also ignore reloads that don't reach the end of the insn,
6516 since we will eventually see the one that does. */
6517
6518 if (i >= 0 && reload_reg_rtx[r] != 0
6519 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6520 reload_when_needed[r]))
6521 {
6522 /* First, clear out memory of what used to be in this spill reg.
6523 If consecutive registers are used, clear them all. */
6524 int nr
6525 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6526 int k;
6527
6528 for (k = 0; k < nr; k++)
6529 {
6530 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6531 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6532 }
6533
6534 /* Maybe the spill reg contains a copy of reload_out. */
6535 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6536 {
6537 register int nregno = REGNO (reload_out[r]);
6538 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6539 : HARD_REGNO_NREGS (nregno,
6540 GET_MODE (reload_reg_rtx[r])));
6541
6542 spill_reg_store[i] = new_spill_reg_store[i];
6543 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6544
6545 /* If NREGNO is a hard register, it may occupy more than
6546 one register. If it does, say what is in the
6547 rest of the registers assuming that both registers
6548 agree on how many words the object takes. If not,
6549 invalidate the subsequent registers. */
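/* For example (a hypothetical case), a two-word hard reg reloaded through
   a two-word reload reg records, for each word, the corresponding word of
   the reload reg; if the two disagree on the number of words, the later
   words are simply invalidated. */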
6550
6551 if (nregno < FIRST_PSEUDO_REGISTER)
6552 for (k = 1; k < nnr; k++)
6553 reg_last_reload_reg[nregno + k]
6554 = (nr == nnr ? gen_rtx (REG,
6555 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6556 REGNO (reload_reg_rtx[r]) + k)
6557 : 0);
6558
6559 /* Now do the inverse operation. */
6560 for (k = 0; k < nr; k++)
6561 {
6562 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6563 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6564 : nregno + k);
6565 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6566 }
6567 }
6568
6569 /* Maybe the spill reg contains a copy of reload_in. Only do
6570 something if there will not be an output reload for
6571 the register being reloaded. */
6572 else if (reload_out[r] == 0
6573 && reload_in[r] != 0
6574 && ((GET_CODE (reload_in[r]) == REG
6575 && ! reg_has_output_reload[REGNO (reload_in[r])]
6576 || (GET_CODE (reload_in_reg[r]) == REG
6577 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
6578 {
6579 register int nregno;
6580 int nnr;
6581
6582 if (GET_CODE (reload_in[r]) == REG)
6583 nregno = REGNO (reload_in[r]);
6584 else
6585 nregno = REGNO (reload_in_reg[r]);
6586
6587 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6588 : HARD_REGNO_NREGS (nregno,
6589 GET_MODE (reload_reg_rtx[r])));
6590
6591 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6592
6593 if (nregno < FIRST_PSEUDO_REGISTER)
6594 for (k = 1; k < nnr; k++)
6595 reg_last_reload_reg[nregno + k]
6596 = (nr == nnr ? gen_rtx (REG,
6597 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6598 REGNO (reload_reg_rtx[r]) + k)
6599 : 0);
6600
6601 /* Unless we inherited this reload, show we haven't
6602 recently done a store. */
6603 if (! reload_inherited[r])
6604 spill_reg_store[i] = 0;
6605
6606 for (k = 0; k < nr; k++)
6607 {
6608 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6609 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6610 : nregno + k);
6611 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6612 = insn;
6613 }
6614 }
6615 }
6616
6617 /* The following if-statement was #if 0'd in 1.34 (or before...).
6618 It's reenabled in 1.35 because supposedly nothing else
6619 deals with this problem. */
6620
6621 /* If a register gets output-reloaded from a non-spill register,
6622 that invalidates any previous reloaded copy of it.
6623 But forget_old_reloads_1 won't get to see it, because
6624 it thinks only about the original insn. So invalidate it here. */
6625 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6626 {
6627 register int nregno = REGNO (reload_out[r]);
6628 if (nregno >= FIRST_PSEUDO_REGISTER)
6629 reg_last_reload_reg[nregno] = 0;
6630 else
6631 {
6632 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
6633
6634 while (num_regs-- > 0)
6635 reg_last_reload_reg[nregno + num_regs] = 0;
6636 }
6637 }
6638 }
6639 }
6640 \f
6641 /* Emit code to perform a reload from IN (which may be a reload register) to
6642 OUT (which may also be a reload register). IN or OUT is from operand
6643 OPNUM with reload type TYPE.
6644
6645 Returns first insn emitted. */
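/* For instance, the output-reload code above calls this as

       gen_reload (old, reloadreg, reload_opnum[j], reload_when_needed[j]);

   to copy the reload register back into the reloaded operand. */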
6646
6647 rtx
6648 gen_reload (out, in, opnum, type)
6649 rtx out;
6650 rtx in;
6651 int opnum;
6652 enum reload_type type;
6653 {
6654 rtx last = get_last_insn ();
6655 rtx tem;
6656
6657 /* If IN is a paradoxical SUBREG, remove it and try to put the
6658 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
6659 if (GET_CODE (in) == SUBREG
6660 && (GET_MODE_SIZE (GET_MODE (in))
6661 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
6662 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
6663 in = SUBREG_REG (in), out = tem;
6664 else if (GET_CODE (out) == SUBREG
6665 && (GET_MODE_SIZE (GET_MODE (out))
6666 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
6667 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
6668 out = SUBREG_REG (out), in = tem;
6669
6670 /* How to do this reload can get quite tricky. Normally, we are being
6671 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6672 register that didn't get a hard register. In that case we can just
6673 call emit_move_insn.
6674
6675 We can also be asked to reload a PLUS that adds a register or a MEM to
6676 another register, constant or MEM. This can occur during frame pointer
6677 elimination and while reloading addresses. This case is handled by
6678 trying to emit a single insn to perform the add. If it is not valid,
6679 we use a two insn sequence.
6680
6681 Finally, we could be called to handle an 'o' constraint by putting
6682 an address into a register. In that case, we first try to do this
6683 with a named pattern of "reload_load_address". If no such pattern
6684 exists, we just emit a SET insn and hope for the best (it will normally
6685 be valid on machines that use 'o').
6686
6687 This entire process is made complex both because reload will never
6688 process the insns we generate here (so we must make sure they
6689 satisfy their constraints) and because parts of IN might be being
6690 reloaded separately and replaced with spill registers.
6691 Because of this, we are, in some sense, just guessing the right approach
6692 here. The one listed above seems to work.
6693
6694 ??? At some point, this whole thing needs to be rethought. */
6695
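/* In outline (a sketch of the cases below, not emitted code):

     PLUS:             try (set OUT (plus OP0 OP1)); if that insn is not
                       recognized or fails its constraints, fall back to a
                       move followed by an add2 pattern.
     secondary memory: move IN into the secondary-memory slot, then the
                       slot into OUT.
     simple operand:   gen_move_insn (OUT, IN).
     address ('o'):    the reload_load_address pattern if it exists,
                       otherwise a bare (set OUT IN). */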
6696 if (GET_CODE (in) == PLUS
6697 && (GET_CODE (XEXP (in, 0)) == REG
6698 || GET_CODE (XEXP (in, 0)) == MEM)
6699 && (GET_CODE (XEXP (in, 1)) == REG
6700 || CONSTANT_P (XEXP (in, 1))
6701 || GET_CODE (XEXP (in, 1)) == MEM))
6702 {
6703 /* We need to compute the sum of a register or a MEM and another
6704 register, constant, or MEM, and put it into the reload
6705 register. The best possible way of doing this is if the machine
6706 has a three-operand ADD insn that accepts the required operands.
6707
6708 The simplest approach is to try to generate such an insn and see if it
6709 is recognized and matches its constraints. If so, it can be used.
6710
6711 It might be better not to actually emit the insn unless it is valid,
6712 but we need to pass the insn as an operand to `recog' and
6713 `insn_extract' and it is simpler to emit and then delete the insn if
6714 not valid than to dummy things up. */
6715
6716 rtx op0, op1, tem, insn;
6717 int code;
6718
6719 op0 = find_replacement (&XEXP (in, 0));
6720 op1 = find_replacement (&XEXP (in, 1));
6721
6722 /* Since constraint checking is strict, commutativity won't be
6723 checked, so we need to do that here to avoid spurious failure
6724 if the add instruction is two-address and the second operand
6725 of the add is the same as the reload reg, which is frequently
6726 the case. If the insn would be A = B + A, rearrange it so
6727 it will be A = A + B as constrain_operands expects. */
6728
6729 if (GET_CODE (XEXP (in, 1)) == REG
6730 && REGNO (out) == REGNO (XEXP (in, 1)))
6731 tem = op0, op0 = op1, op1 = tem;
6732
6733 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6734 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
6735
6736 insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
6737 code = recog_memoized (insn);
6738
6739 if (code >= 0)
6740 {
6741 insn_extract (insn);
6742 /* We want constrain_operands to treat this insn strictly in
6743 its validity determination, i.e., the way it would after reload
6744 has completed. */
6745 if (constrain_operands (code, 1))
6746 return insn;
6747 }
6748
6749 delete_insns_since (last);
6750
6751 /* If that failed, we must use a conservative two-insn sequence.
6752 Use a move to copy the constant, MEM, or pseudo register to the reload
6753 register since "move" will be able to handle an arbitrary operand,
6754 unlike add which can't, in general. Then add the registers.
6755
6756 If there is another way to do this for a specific machine, a
6757 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6758 we emit below. */
6759
6760 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6761 || (GET_CODE (op1) == REG
6762 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6763 tem = op0, op0 = op1, op1 = tem;
6764
6765 emit_insn (gen_move_insn (out, op0));
6766
6767 /* If OP0 and OP1 are the same, we can use OUT for OP1.
6768 This fixes a problem on the 32K where the stack pointer cannot
6769 be used as an operand of an add insn. */
6770
6771 if (rtx_equal_p (op0, op1))
6772 op1 = out;
6773
6774 insn = emit_insn (gen_add2_insn (out, op1));
6775
6776 /* If that failed, copy the address register to the reload register.
6777 Then add the constant to the reload register. */
6778
6779 code = recog_memoized (insn);
6780
6781 if (code >= 0)
6782 {
6783 insn_extract (insn);
6784 /* We want constrain_operands to treat this insn strictly in
6785 its validity determination, i.e., the way it would after reload
6786 has completed. */
6787 if (constrain_operands (code, 1))
6788 return insn;
6789 }
6790
6791 delete_insns_since (last);
6792
6793 emit_insn (gen_move_insn (out, op1));
6794 emit_insn (gen_add2_insn (out, op0));
6795 }
6796
6797 #ifdef SECONDARY_MEMORY_NEEDED
6798 /* If we need a memory location to do the move, do it that way. */
6799 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6800 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
6801 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6802 REGNO_REG_CLASS (REGNO (out)),
6803 GET_MODE (out)))
6804 {
6805 /* Get the memory to use and rewrite both registers to its mode. */
6806 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
6807
6808 if (GET_MODE (loc) != GET_MODE (out))
6809 out = gen_rtx (REG, GET_MODE (loc), REGNO (out));
6810
6811 if (GET_MODE (loc) != GET_MODE (in))
6812 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6813
6814 emit_insn (gen_move_insn (loc, in));
6815 emit_insn (gen_move_insn (out, loc));
6816 }
6817 #endif
6818
6819 /* If IN is a simple operand, use gen_move_insn. */
6820 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
6821 emit_insn (gen_move_insn (out, in));
6822
6823 #ifdef HAVE_reload_load_address
6824 else if (HAVE_reload_load_address)
6825 emit_insn (gen_reload_load_address (out, in));
6826 #endif
6827
6828 /* Otherwise, just write (set OUT IN) and hope for the best. */
6829 else
6830 emit_insn (gen_rtx (SET, VOIDmode, out, in));
6831
6832 /* Return the first insn emitted.
6833 We cannot just return get_last_insn, because there may have
6834 been multiple instructions emitted. Also note that gen_move_insn may
6835 emit more than one insn itself, so we cannot assume that there is one
6836 insn emitted per call to emit_insn. */
6837
6838 return last ? NEXT_INSN (last) : get_insns ();
6839 }
6840 \f
6841 /* Delete a previously made output-reload
6842 whose result we now believe is not needed.
6843 First we double-check.
6844
6845 INSN is the insn now being processed.
6846 OUTPUT_RELOAD_INSN is the insn of the output reload.
6847 J is the reload-number for this insn. */
6848
6849 static void
6850 delete_output_reload (insn, j, output_reload_insn)
6851 rtx insn;
6852 int j;
6853 rtx output_reload_insn;
6854 {
6855 register rtx i1;
6856
6857 /* Get the raw pseudo-register referred to. */
6858
6859 rtx reg = reload_in[j];
6860 while (GET_CODE (reg) == SUBREG)
6861 reg = SUBREG_REG (reg);
6862
6863 /* If the pseudo-reg we are reloading is no longer referenced
6864 anywhere between the store into it and here,
6865 and no jumps or labels intervene, then the value can get
6866 here through the reload reg alone.
6867 Otherwise, give up--return. */
6868 for (i1 = NEXT_INSN (output_reload_insn);
6869 i1 != insn; i1 = NEXT_INSN (i1))
6870 {
6871 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6872 return;
6873 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6874 && reg_mentioned_p (reg, PATTERN (i1)))
6875 return;
6876 }
6877
6878 if (cannot_omit_stores[REGNO (reg)])
6879 return;
6880
6881 /* If this insn will store in the pseudo again,
6882 the previous store can be removed. */
6883 if (reload_out[j] == reload_in[j])
6884 delete_insn (output_reload_insn);
6885
6886 /* See if the pseudo reg has been completely replaced
6887 with reload regs. If so, delete the store insn
6888 and forget we had a stack slot for the pseudo. */
6889 else if (reg_n_deaths[REGNO (reg)] == 1
6890 && reg_basic_block[REGNO (reg)] >= 0
6891 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
6892 {
6893 rtx i2;
6894
6895 /* We know that it was used only between here
6896 and the beginning of the current basic block.
6897 (We also know that the last use before INSN was
6898 the output reload we are thinking of deleting, but never mind that.)
6899 Search that range; see if any ref remains. */
6900 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6901 {
6902 rtx set = single_set (i2);
6903
6904 /* Uses which just store in the pseudo don't count,
6905 since if they are the only uses, they are dead. */
6906 if (set != 0 && SET_DEST (set) == reg)
6907 continue;
6908 if (GET_CODE (i2) == CODE_LABEL
6909 || GET_CODE (i2) == JUMP_INSN)
6910 break;
6911 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
6912 && reg_mentioned_p (reg, PATTERN (i2)))
6913 /* Some other ref remains;
6914 we can't do anything. */
6915 return;
6916 }
6917
6918 /* Delete the now-dead stores into this pseudo. */
6919 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6920 {
6921 rtx set = single_set (i2);
6922
6923 if (set != 0 && SET_DEST (set) == reg)
6924 delete_insn (i2);
6925 if (GET_CODE (i2) == CODE_LABEL
6926 || GET_CODE (i2) == JUMP_INSN)
6927 break;
6928 }
6929
6930 /* For the debugging info,
6931 say the pseudo lives in this reload reg. */
6932 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
6933 alter_reg (REGNO (reg), -1);
6934 }
6935 }
6936 \f
6937 /* Output reload-insns to reload VALUE into RELOADREG.
6938 VALUE is an autoincrement or autodecrement RTX whose operand
6939 is a register or memory location;
6940 so reloading involves incrementing that location.
6941
6942 INC_AMOUNT is the number to increment or decrement by (always positive).
6943 This cannot be deduced from VALUE. */
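/* When INCLOC cannot be incremented in place, the fallback emitted below
   is, schematically:

     pre-inc/dec:   RELOADREG <- INCLOC;  RELOADREG += INC;  INCLOC <- RELOADREG
     post-inc/dec:  RELOADREG <- INCLOC;  RELOADREG += INC;  INCLOC <- RELOADREG;
                    RELOADREG -= INC   (so RELOADREG keeps the original value)  */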
6944
6945 static void
6946 inc_for_reload (reloadreg, value, inc_amount)
6947 rtx reloadreg;
6948 rtx value;
6949 int inc_amount;
6950 {
6951 /* REG or MEM to be copied and incremented. */
6952 rtx incloc = XEXP (value, 0);
6953 /* Nonzero if increment after copying. */
6954 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
6955 rtx last;
6956 rtx inc;
6957 rtx add_insn;
6958 int code;
6959
6960 /* No hard register is equivalent to this register after
6961 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
6962 we could inc/dec that register as well (maybe even using it for
6963 the source), but I'm not sure it's worth worrying about. */
6964 if (GET_CODE (incloc) == REG)
6965 reg_last_reload_reg[REGNO (incloc)] = 0;
6966
6967 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
6968 inc_amount = - inc_amount;
6969
6970 inc = GEN_INT (inc_amount);
6971
6972 /* If this is post-increment, first copy the location to the reload reg. */
6973 if (post)
6974 emit_insn (gen_move_insn (reloadreg, incloc));
6975
6976 /* See if we can directly increment INCLOC. Use a method similar to that
6977 in gen_reload. */
6978
6979 last = get_last_insn ();
6980 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
6981 gen_rtx (PLUS, GET_MODE (incloc),
6982 incloc, inc)));
6983
6984 code = recog_memoized (add_insn);
6985 if (code >= 0)
6986 {
6987 insn_extract (add_insn);
6988 if (constrain_operands (code, 1))
6989 {
6990 /* If this is a pre-increment and we have incremented the value
6991 where it lives, copy the incremented value to RELOADREG to
6992 be used as an address. */
6993
6994 if (! post)
6995 emit_insn (gen_move_insn (reloadreg, incloc));
6996
6997 return;
6998 }
6999 }
7000
7001 delete_insns_since (last);
7002
7003 /* If we couldn't do the increment directly, we must increment in RELOADREG.
7004 The way we do this depends on whether this is pre- or post-increment.
7005 For pre-increment, copy INCLOC to the reload register, increment it
7006 there, then save back. */
7007
7008 if (! post)
7009 {
7010 emit_insn (gen_move_insn (reloadreg, incloc));
7011 emit_insn (gen_add2_insn (reloadreg, inc));
7012 emit_insn (gen_move_insn (incloc, reloadreg));
7013 }
7014 else
7015 {
7016 /* Postincrement.
7017 Because this might be a jump insn or a compare, and because RELOADREG
7018 may not be available after the insn in an input reload, we must do
7019 the incrementation before the insn being reloaded for.
7020
7021 We have already copied INCLOC to RELOADREG. Increment the copy in
7022 RELOADREG, save that back, then decrement RELOADREG so it has
7023 the original value. */
7024
7025 emit_insn (gen_add2_insn (reloadreg, inc));
7026 emit_insn (gen_move_insn (incloc, reloadreg));
7027 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7028 }
7029
7030 return;
7031 }
7032 \f
7033 /* Return 1 if we are certain that the constraint-string STRING allows
7034 the hard register REG. Return 0 if we can't be sure of this. */
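/* For example, assuming REG is an ordinary general hard register:
   "r,g" returns 1, since each alternative contains `r' or `g', while
   "r,m" returns 0, since the second alternative offers only memory. */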
7035
7036 static int
7037 constraint_accepts_reg_p (string, reg)
7038 char *string;
7039 rtx reg;
7040 {
7041 int value = 0;
7042 int regno = true_regnum (reg);
7043 int c;
7044
7045 /* Initialize for first alternative. */
7046 value = 0;
7047 /* Check that each alternative contains `g' or `r'. */
7048 while (1)
7049 switch (c = *string++)
7050 {
7051 case 0:
7052 /* If an alternative lacks `g' or `r', we lose. */
7053 return value;
7054 case ',':
7055 /* If an alternative lacks `g' or `r', we lose. */
7056 if (value == 0)
7057 return 0;
7058 /* Initialize for next alternative. */
7059 value = 0;
7060 break;
7061 case 'g':
7062 case 'r':
7063 /* Any general reg wins for this alternative. */
7064 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7065 value = 1;
7066 break;
7067 default:
7068 /* Any reg in specified class wins for this alternative. */
7069 {
7070 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7071
7072 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7073 value = 1;
7074 }
7075 }
7076 }
7077 \f
7078 /* Return the number of places FIND appears within X, but don't count
7079 an occurrence if some SET_DEST is FIND. */
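/* Note that matching is by rtx identity (X == FIND), not rtx_equal_p,
   so only shared rtl is counted; e.g. if the same (reg N) rtx is both
   the SET_DEST and part of the SET_SRC of a SET, only the occurrence
   in the source counts. */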
7080
7081 static int
7082 count_occurrences (x, find)
7083 register rtx x, find;
7084 {
7085 register int i, j;
7086 register enum rtx_code code;
7087 register char *format_ptr;
7088 int count;
7089
7090 if (x == find)
7091 return 1;
7092 if (x == 0)
7093 return 0;
7094
7095 code = GET_CODE (x);
7096
7097 switch (code)
7098 {
7099 case REG:
7100 case QUEUED:
7101 case CONST_INT:
7102 case CONST_DOUBLE:
7103 case SYMBOL_REF:
7104 case CODE_LABEL:
7105 case PC:
7106 case CC0:
7107 return 0;
7108
7109 case SET:
7110 if (SET_DEST (x) == find)
7111 return count_occurrences (SET_SRC (x), find);
7112 break;
7113 }
7114
7115 format_ptr = GET_RTX_FORMAT (code);
7116 count = 0;
7117
7118 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7119 {
7120 switch (*format_ptr++)
7121 {
7122 case 'e':
7123 count += count_occurrences (XEXP (x, i), find);
7124 break;
7125
7126 case 'E':
7127 if (XVEC (x, i) != NULL)
7128 {
7129 for (j = 0; j < XVECLEN (x, i); j++)
7130 count += count_occurrences (XVECEXP (x, i, j), find);
7131 }
7132 break;
7133 }
7134 }
7135 return count;
7136 }