1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include <stdio.h>
22 #include "config.h"
23 #include "rtl.h"
24 #include "obstack.h"
25 #include "insn-config.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
28 #include "flags.h"
29 #include "expr.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "reload.h"
33 #include "recog.h"
34 #include "basic-block.h"
35 #include "output.h"
36
37 /* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional reloads, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
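#if 0
/* Illustrative sketch only (hypothetical helper, not compiled and not part
   of GNU CC): the control structure described in the comment above.  The
   real work is done by `reload' below; this just isolates the fixed-point
   iteration.  */
static void
reload_iteration_sketch ()
{
  int something_changed = 1;

  while (something_changed)
    {
      something_changed = 0;

      /* Scan every insn and accumulate, per register class, the worst-case
	 reload needs of any single insn.  */

      /* If those needs are not yet met, spill more hard regs (displacing
	 the pseudos that lived in them) and set something_changed, since
	 spilling can invalidate further insns and require another pass.  */
    }
}
#endif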
69
70
71 #ifndef REGISTER_MOVE_COST
72 #define REGISTER_MOVE_COST(x, y) 2
73 #endif
74
75 #ifndef MEMORY_MOVE_COST
76 #define MEMORY_MOVE_COST(x) 4
77 #endif
78 \f
79 /* During reload_as_needed, element N contains a REG rtx for the hard reg
80 into which reg N has been reloaded (perhaps for a previous insn). */
81 static rtx *reg_last_reload_reg;
82
83 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
84 for an output reload that stores into reg N. */
85 static char *reg_has_output_reload;
86
87 /* Indicates which hard regs are reload-registers for an output reload
88 in the current insn. */
89 static HARD_REG_SET reg_is_output_reload;
90
91 /* Element N is the constant value to which pseudo reg N is equivalent,
92 or zero if pseudo reg N is not equivalent to a constant.
93 find_reloads looks at this in order to replace pseudo reg N
94 with the constant it stands for. */
95 rtx *reg_equiv_constant;
96
97 /* Element N is a memory location to which pseudo reg N is equivalent,
98 prior to any register elimination (such as frame pointer to stack
99 pointer). Depending on whether or not it is a valid address, this value
100 is transferred to either reg_equiv_address or reg_equiv_mem. */
101 rtx *reg_equiv_memory_loc;
102
103 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
104 This is used when the address is not valid as a memory address
105 (because its displacement is too big for the machine).  */
106 rtx *reg_equiv_address;
107
108 /* Element N is the memory slot to which pseudo reg N is equivalent,
109 or zero if pseudo reg N is not equivalent to a memory slot. */
110 rtx *reg_equiv_mem;
111
112 /* Widest width in which each pseudo reg is referred to (via subreg). */
113 static int *reg_max_ref_width;
114
115 /* Element N is the insn that initialized reg N from its equivalent
116 constant or memory slot. */
117 static rtx *reg_equiv_init;
118
119 /* During reload_as_needed, element N contains the last pseudo regno
120 reloaded into the Nth reload register. This vector is in parallel
121 with spill_regs. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
125
126 /* During reload_as_needed, element N contains the insn for which
127 the Nth reload register was last used. This vector is in parallel
128 with spill_regs, and its contents are significant only when
129 reg_reloaded_contents is significant. */
130 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
131
132 /* Number of spill-regs so far; number of valid elements of spill_regs. */
133 static int n_spills;
134
135 /* In parallel with spill_regs, contains REG rtx's for those regs.
136 Holds the last rtx used for any given reg, or 0 if it has never
137 been used for spilling yet. This rtx is reused, provided it has
138 the proper mode. */
139 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
140
141 /* In parallel with spill_regs, contains nonzero for a spill reg
142 that was stored after the last time it was used.
143 The precise value is the insn generated to do the store. */
144 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
145
146 /* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs. */
150 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
151
152 /* This reg set indicates registers that may not be used for retrying global
153 allocation. The registers that may not be used include all spill registers
154 and the frame pointer (if we are using one). */
155 HARD_REG_SET forbidden_regs;
156
157 /* This reg set indicates registers that are not good for spill registers.
158 They will not be used to complete groups of spill registers. This includes
159 all fixed registers, registers that may be eliminated, and, if
160 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
161
162 (spill_reg_order prevents these registers from being used to start a
163 group.) */
164 static HARD_REG_SET bad_spill_regs;
165
166 /* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `n_spills' is the number of
168 elements that are actually valid; new ones are added at the end. */
169 static short spill_regs[FIRST_PSEUDO_REGISTER];
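#if 0
/* Illustrative sketch only (hypothetical helper, not compiled and not part
   of GNU CC): the invariant tying spill_regs to its inverse map
   spill_reg_order.  A hard reg H is currently a spill reg exactly when
   spill_reg_order[H] >= 0, and then spill_regs[spill_reg_order[H]] == H.  */
static void
verify_spill_reg_order_sketch ()
{
  register int i;

  for (i = 0; i < n_spills; i++)
    if (spill_reg_order[spill_regs[i]] != i)
      abort ();
}
#endif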
170
171 /* Describes order of preference for putting regs into spill_regs.
172 Contains the numbers of all the hard regs, in order most preferred first.
173 This order is different for each function.
174 It is set up by order_regs_for_reload.
175 Empty elements at the end contain -1. */
176 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
177
178 /* 1 for a hard register that appears explicitly in the rtl
179 (for example, function value registers, special registers
180 used by insns, structure value pointer registers). */
181 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
182
183 /* Indicates if a register was counted against the need for
184 groups. 0 means it can count against max_nongroup instead. */
185 static HARD_REG_SET counted_for_groups;
186
187 /* Indicates if a register was counted against the need for
188 non-groups. 0 means it can become part of a new group.
189 During choose_reload_regs, 1 here means don't use this reg
190 as part of a group, even if it seems to be otherwise ok. */
191 static HARD_REG_SET counted_for_nongroups;
192
193 /* Indexed by pseudo reg number N; nonzero means we may not delete
194 stores into the real (memory) home of pseudo N.
195 This is set if we already substituted a memory equivalent in some uses,
196 which happens when we have to eliminate the fp from it. */
197 static char *cannot_omit_stores;
198
199 /* Nonzero if indirect addressing is supported on the machine; this means
200 that spilling (REG n) does not require reloading it into a register in
201 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
202 value indicates the level of indirect addressing supported, e.g., two
203 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
204 a hard register. */
205
206 static char spill_indirect_levels;
207
208 /* Nonzero if indirect addressing is supported when the innermost MEM is
209 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
210 which these are valid is the same as spill_indirect_levels, above. */
211
212 char indirect_symref_ok;
213
214 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
215
216 char double_reg_address_ok;
217
218 /* Record the stack slot for each spilled hard register. */
219
220 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
221
222 /* Width allocated so far for that stack slot. */
223
224 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
225
226 /* Indexed by register class and basic block number, nonzero if there is
227 any need for a spill register of that class in that basic block.
228 The pointer is 0 if we did stupid allocation and don't know
229 the structure of basic blocks. */
230
231 char *basic_block_needs[N_REG_CLASSES];
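#if 0
/* Illustrative sketch only (hypothetical helper, not compiled and not part
   of GNU CC): how basic_block_needs is consulted.  The first index is a
   register class, the second a basic block number; the class pointer is 0
   when stupid allocation was done and the block structure is unknown.  */
static int
block_needs_class_sketch (class, block)
     int class, block;
{
  return (basic_block_needs[class] != 0
	  && basic_block_needs[class][block] != 0);
}
#endif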
232
233 /* First uid used by insns created by reload in this function.
234 Used in find_equiv_reg. */
235 int reload_first_uid;
236
237 /* Flag set by local-alloc or global-alloc if anything is live in
238 a call-clobbered reg across calls. */
239
240 int caller_save_needed;
241
242 /* Set to 1 while reload_as_needed is operating.
243 Required by some machines to handle any generated moves differently. */
244
245 int reload_in_progress = 0;
246
247 /* These arrays record the insn_code of insns that may be needed to
248 perform input and output reloads of special objects. They provide a
249 place to pass a scratch register. */
250
251 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
252 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
253
254 /* This obstack is used for allocation of rtl during register elimination.
255 The allocated storage can be freed once find_reloads has processed the
256 insn. */
257
258 struct obstack reload_obstack;
259 char *reload_firstobj;
260
261 #define obstack_chunk_alloc xmalloc
262 #define obstack_chunk_free free
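#if 0
/* Illustrative sketch only (hypothetical helper, not compiled and not part
   of GNU CC): the obstack is used in mark/release fashion.  reload_firstobj,
   set in init_reload below, marks the empty state; freeing back to it
   discards all rtl allocated on the obstack since then, as is done once
   find_reloads has processed an insn.  */
static void
release_reload_rtl_sketch ()
{
  obstack_free (&reload_obstack, reload_firstobj);
}
#endif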
263
264 /* List of labels that must never be deleted. */
265 extern rtx forced_labels;
266 \f
267 /* This structure is used to record information about register eliminations.
268 Each array entry describes one possible way of eliminating a register
269 in favor of another. If there is more than one way of eliminating a
270 particular register, the most preferred should be specified first. */
271
272 static struct elim_table
273 {
274 int from; /* Register number to be eliminated. */
275 int to; /* Register number used as replacement. */
276 int initial_offset; /* Initial difference between values. */
277 int can_eliminate; /* Non-zero if this elimination can be done. */
278 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
279 insns made by reload. */
280 int offset; /* Current offset between the two regs. */
281 int max_offset; /* Maximum offset between the two regs. */
282 int previous_offset; /* Offset at end of previous insn. */
283 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
284 rtx from_rtx; /* REG rtx for the register to be eliminated.
285 We cannot simply compare the number since
286 we might then spuriously replace a hard
287 register corresponding to a pseudo
288 assigned to the reg to be eliminated. */
289 rtx to_rtx; /* REG rtx for the replacement. */
290 } reg_eliminate[] =
291
292 /* If a set of eliminable registers was specified, define the table from it.
293 Otherwise, default to the normal case of the frame pointer being
294 replaced by the stack pointer. */
295
296 #ifdef ELIMINABLE_REGS
297 ELIMINABLE_REGS;
298 #else
299 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
300 #endif
301
302 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
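#if 0
/* Illustrative sketch only, not part of this file: a target that can also
   eliminate its argument pointer might define ELIMINABLE_REGS roughly like
   this, most preferred pair first.  The exact pairs and register macros are
   the target's choice; this is just an assumed example.  */
#define ELIMINABLE_REGS					\
{{ ARG_POINTER_REGNUM, STACK_POINTER_REGNUM},		\
 { ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM},		\
 { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}}
#endif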
303
304 /* Record the number of pending eliminations that have an offset not equal
305 to their initial offset. If non-zero, we use a new copy of each
306 replacement result in any insns encountered. */
307 static int num_not_at_initial_offset;
308
309 /* Count the number of registers that we may be able to eliminate. */
310 static int num_eliminable;
311
312 /* For each label, we record the offset of each elimination. If we reach
313 a label by more than one path and an offset differs, we cannot do the
314 elimination. This information is indexed by the number of the label.
315 The first table is an array of flags that records whether we have yet
316 encountered a label and the second table is an array of arrays, one
317 entry in the latter array for each elimination. */
318
319 static char *offsets_known_at;
320 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
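#if 0
/* Illustrative sketch only (hypothetical helper, not compiled and not part
   of GNU CC): once both tables have been rebased by get_first_label_num ()
   in `reload' below, a label's offsets are indexed directly by its
   CODE_LABEL_NUMBER.  Recording the current offsets at a label looks
   roughly like this.  */
static void
record_label_offsets_sketch (label)
     rtx label;
{
  register int i;
  int num = CODE_LABEL_NUMBER (label);

  offsets_known_at[num] = 1;
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    offsets_at[num][i] = reg_eliminate[i].offset;
}
#endif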
321
322 /* Number of labels in the current function. */
323
324 static int num_labels;
325
326 struct hard_reg_n_uses { int regno; int uses; };
327 \f
328 static int possible_group_p PROTO((int, int *));
329 static void count_possible_groups PROTO((int *, enum machine_mode *,
330 int *, int));
331 static int modes_equiv_for_class_p PROTO((enum machine_mode,
332 enum machine_mode,
333 enum reg_class));
334 static void spill_failure PROTO((rtx));
335 static int new_spill_reg PROTO((int, int, int *, int *, int,
336 FILE *));
337 static void delete_dead_insn PROTO((rtx));
338 static void alter_reg PROTO((int, int));
339 static void mark_scratch_live PROTO((rtx));
340 static void set_label_offsets PROTO((rtx, rtx, int));
341 static int eliminate_regs_in_insn PROTO((rtx, int));
342 static void mark_not_eliminable PROTO((rtx, rtx));
343 static int spill_hard_reg PROTO((int, int, FILE *, int));
344 static void scan_paradoxical_subregs PROTO((rtx));
345 static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
346 struct hard_reg_n_uses *));
347 static void order_regs_for_reload PROTO((void));
348 static int compare_spill_regs PROTO((short *, short *));
349 static void reload_as_needed PROTO((rtx, int));
350 static void forget_old_reloads_1 PROTO((rtx, rtx));
351 static int reload_reg_class_lower PROTO((short *, short *));
352 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
353 enum machine_mode));
354 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
355 enum machine_mode));
356 static int reload_reg_free_p PROTO((int, int, enum reload_type));
357 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
358 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
359 static int reloads_conflict PROTO((int, int));
360 static int allocate_reload_reg PROTO((int, rtx, int, int));
361 static void choose_reload_regs PROTO((rtx, rtx));
362 static void merge_assigned_reloads PROTO((rtx));
363 static void emit_reload_insns PROTO((rtx));
364 static void delete_output_reload PROTO((rtx, int, rtx));
365 static void inc_for_reload PROTO((rtx, rtx, int));
366 static int constraint_accepts_reg_p PROTO((char *, rtx));
367 static int count_occurrences PROTO((rtx, rtx));
368 \f
369 /* Initialize the reload pass once per compilation. */
370
371 void
372 init_reload ()
373 {
374 register int i;
375
376 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
377 Set spill_indirect_levels to the number of levels such addressing is
378 permitted, zero if it is not permitted at all. */
379
380 register rtx tem
381 = gen_rtx (MEM, Pmode,
382 gen_rtx (PLUS, Pmode,
383 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
384 GEN_INT (4)));
385 spill_indirect_levels = 0;
386
387 while (memory_address_p (QImode, tem))
388 {
389 spill_indirect_levels++;
390 tem = gen_rtx (MEM, Pmode, tem);
391 }
392
393 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
394
395 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
396 indirect_symref_ok = memory_address_p (QImode, tem);
397
398 /* See if reg+reg is a valid (and offsettable) address. */
399
400 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
401 {
402 tem = gen_rtx (PLUS, Pmode,
403 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
404 gen_rtx (REG, Pmode, i));
405 /* This way, we make sure that reg+reg is an offsettable address. */
406 tem = plus_constant (tem, 4);
407
408 if (memory_address_p (QImode, tem))
409 {
410 double_reg_address_ok = 1;
411 break;
412 }
413 }
414
415 /* Initialize obstack for our rtl allocation. */
416 gcc_obstack_init (&reload_obstack);
417 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
418 }
419
420 /* Main entry point for the reload pass.
421
422 FIRST is the first insn of the function being compiled.
423
424 GLOBAL nonzero means we were called from global_alloc
425 and should attempt to reallocate any pseudoregs that we
426 displace from hard regs we will use for reloads.
427 If GLOBAL is zero, we do not have enough information to do that,
428 so any pseudo reg that is spilled must go to the stack.
429
430 DUMPFILE is the global-reg debugging dump file stream, or 0.
431 If it is nonzero, messages are written to it to describe
432 which registers are seized as reload regs, which pseudo regs
433 are spilled from them, and where the pseudo regs are reallocated to.
434
435 Return value is nonzero if reload failed
436 and we must not do any more for this function. */
437
438 int
439 reload (first, global, dumpfile)
440 rtx first;
441 int global;
442 FILE *dumpfile;
443 {
444 register int class;
445 register int i, j, k;
446 register rtx insn;
447 register struct elim_table *ep;
448
449 int something_changed;
450 int something_needs_reloads;
451 int something_needs_elimination;
452 int new_basic_block_needs;
453 enum reg_class caller_save_spill_class = NO_REGS;
454 int caller_save_group_size = 1;
455
456 /* Nonzero means we couldn't get enough spill regs. */
457 int failure = 0;
458
459 /* The basic block number currently being processed for INSN. */
460 int this_block;
461
462 /* Make sure even insns with volatile mem refs are recognizable. */
463 init_recog ();
464
465 /* Enable find_equiv_reg to distinguish insns made by reload. */
466 reload_first_uid = get_max_uid ();
467
468 for (i = 0; i < N_REG_CLASSES; i++)
469 basic_block_needs[i] = 0;
470
471 #ifdef SECONDARY_MEMORY_NEEDED
472 /* Initialize the secondary memory table. */
473 clear_secondary_mem ();
474 #endif
475
476 /* Remember which hard regs appear explicitly
477 before we merge into `regs_ever_live' the ones in which
478 pseudo regs have been allocated. */
479 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
480
481 /* We don't have a stack slot for any spill reg yet. */
482 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
483 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
484
485 /* Initialize the save area information for caller-save, in case some
486 are needed. */
487 init_save_areas ();
488
489 /* Compute which hard registers are now in use
490 as homes for pseudo registers.
491 This is done here rather than (eg) in global_alloc
492 because this point is reached even if not optimizing. */
493
494 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
495 mark_home_live (i);
496
497 for (i = 0; i < scratch_list_length; i++)
498 if (scratch_list[i])
499 mark_scratch_live (scratch_list[i]);
500
501 /* Make sure that the last insn in the chain
502 is not something that needs reloading. */
503 emit_note (NULL_PTR, NOTE_INSN_DELETED);
504
505 /* Find all the pseudo registers that didn't get hard regs
506 but do have known equivalent constants or memory slots.
507 These include parameters (known equivalent to parameter slots)
508 and cse'd or loop-moved constant memory addresses.
509
510 Record constant equivalents in reg_equiv_constant
511 so they will be substituted by find_reloads.
512 Record memory equivalents in reg_mem_equiv so they can
513 be substituted eventually by altering the REG-rtx's. */
514
515 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
516 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
517 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
518 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
519 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
520 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
521 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
522 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
523 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
524 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
525 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
526 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
527 cannot_omit_stores = (char *) alloca (max_regno);
528 bzero (cannot_omit_stores, max_regno);
529
530 #ifdef SMALL_REGISTER_CLASSES
531 CLEAR_HARD_REG_SET (forbidden_regs);
532 #endif
533
534 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
535 Also find all paradoxical subregs and find largest such for each pseudo.
536 On machines with small register classes, record hard registers that
537 are used for user variables. These can never be used for spills. */
538
539 for (insn = first; insn; insn = NEXT_INSN (insn))
540 {
541 rtx set = single_set (insn);
542
543 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
544 {
545 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
546 if (note
547 #ifdef LEGITIMATE_PIC_OPERAND_P
548 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
549 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
550 #endif
551 )
552 {
553 rtx x = XEXP (note, 0);
554 i = REGNO (SET_DEST (set));
555 if (i > LAST_VIRTUAL_REGISTER)
556 {
557 if (GET_CODE (x) == MEM)
558 reg_equiv_memory_loc[i] = x;
559 else if (CONSTANT_P (x))
560 {
561 if (LEGITIMATE_CONSTANT_P (x))
562 reg_equiv_constant[i] = x;
563 else
564 reg_equiv_memory_loc[i]
565 = force_const_mem (GET_MODE (SET_DEST (set)), x);
566 }
567 else
568 continue;
569
570 /* If this register is being made equivalent to a MEM
571 and the MEM is not SET_SRC, the equivalencing insn
572 is one with the MEM as a SET_DEST and it occurs later.
573 So don't mark this insn now. */
574 if (GET_CODE (x) != MEM
575 || rtx_equal_p (SET_SRC (set), x))
576 reg_equiv_init[i] = insn;
577 }
578 }
579 }
580
581 /* If this insn is setting a MEM from a register equivalent to it,
582 this is the equivalencing insn. */
583 else if (set && GET_CODE (SET_DEST (set)) == MEM
584 && GET_CODE (SET_SRC (set)) == REG
585 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
586 && rtx_equal_p (SET_DEST (set),
587 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
588 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
589
590 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
591 scan_paradoxical_subregs (PATTERN (insn));
592 }
593
594 /* Does this function require a frame pointer? */
595
596 frame_pointer_needed = (! flag_omit_frame_pointer
597 #ifdef EXIT_IGNORE_STACK
598 /* ?? If EXIT_IGNORE_STACK is set, we will not save
599 and restore sp for alloca. So we can't eliminate
600 the frame pointer in that case. At some point,
601 we should improve this by emitting the
602 sp-adjusting insns for this case. */
603 || (current_function_calls_alloca
604 && EXIT_IGNORE_STACK)
605 #endif
606 || FRAME_POINTER_REQUIRED);
607
608 num_eliminable = 0;
609
610 /* Initialize the table of registers to eliminate. The way we do this
611 depends on how the eliminable registers were defined. */
612 #ifdef ELIMINABLE_REGS
613 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
614 {
615 ep->can_eliminate = ep->can_eliminate_previous
616 = (CAN_ELIMINATE (ep->from, ep->to)
617 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
618 }
619 #else
620 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
621 = ! frame_pointer_needed;
622 #endif
623
624 /* Count the number of eliminable registers and build the FROM and TO
625 REG rtx's. Note that code in gen_rtx will cause, e.g.,
626 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
627 We depend on this. */
628 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
629 {
630 num_eliminable += ep->can_eliminate;
631 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
632 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
633 }
634
635 num_labels = max_label_num () - get_first_label_num ();
636
637 /* Allocate the tables used to store offset information at labels. */
638 offsets_known_at = (char *) alloca (num_labels);
639 offsets_at
640 = (int (*)[NUM_ELIMINABLE_REGS])
641 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
642
643 offsets_known_at -= get_first_label_num ();
644 offsets_at -= get_first_label_num ();
645
646 /* Alter each pseudo-reg rtx to contain its hard reg number.
647 Assign stack slots to the pseudos that lack hard regs or equivalents.
648 Do not touch virtual registers. */
649
650 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
651 alter_reg (i, -1);
652
653 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
654 because the stack size may be a part of the offset computation for
655 register elimination. */
656 assign_stack_local (BLKmode, 0, 0);
657
658 /* If we have some registers we think can be eliminated, scan all insns to
659 see if there is an insn that sets one of these registers to something
660 other than itself plus a constant. If so, the register cannot be
661 eliminated. Doing this scan here eliminates an extra pass through the
662 main reload loop in the most common case where register elimination
663 cannot be done. */
664 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
665 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
666 || GET_CODE (insn) == CALL_INSN)
667 note_stores (PATTERN (insn), mark_not_eliminable);
668
669 #ifndef REGISTER_CONSTRAINTS
670 /* If all the pseudo regs have hard regs,
671 except for those that are never referenced,
672 we know that no reloads are needed. */
673 /* But that is not true if there are register constraints, since
674 in that case some pseudos might be in the wrong kind of hard reg. */
675
676 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
677 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
678 break;
679
680 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
681 return;
682 #endif
683
684 /* Compute the order of preference for hard registers to spill.
685 Store them by decreasing preference in potential_reload_regs. */
686
687 order_regs_for_reload ();
688
689 /* So far, no hard regs have been spilled. */
690 n_spills = 0;
691 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
692 spill_reg_order[i] = -1;
693
694 /* On most machines, we can't use any register explicitly used in the
695 rtl as a spill register. But on some, we have to. Those will have
696 taken care to keep the life of hard regs as short as possible. */
697
698 #ifndef SMALL_REGISTER_CLASSES
699 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
700 #endif
701
702 /* Spill any hard regs that we know we can't eliminate. */
703 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
704 if (! ep->can_eliminate)
705 spill_hard_reg (ep->from, global, dumpfile, 1);
706
707 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
708 if (frame_pointer_needed)
709 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
710 #endif
711
712 if (global)
713 for (i = 0; i < N_REG_CLASSES; i++)
714 {
715 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
716 bzero (basic_block_needs[i], n_basic_blocks);
717 }
718
719 /* From now on, we need to emit any moves without making new pseudos. */
720 reload_in_progress = 1;
721
722 /* This loop scans the entire function each go-round
723 and repeats until one repetition spills no additional hard regs. */
724
725 /* This flag is set when a pseudo reg is spilled,
726 to require another pass. Note that getting an additional reload
727 reg does not necessarily imply any pseudo reg was spilled;
728 sometimes we find a reload reg that no pseudo reg was allocated in. */
729 something_changed = 1;
730 /* This flag is set if there are any insns that require reloading. */
731 something_needs_reloads = 0;
732 /* This flag is set if there are any insns that require register
733 eliminations. */
734 something_needs_elimination = 0;
735 while (something_changed)
736 {
737 rtx after_call = 0;
738
739 /* For each class, number of reload regs needed in that class.
740 This is the maximum over all insns of the needs in that class
741 of the individual insn. */
742 int max_needs[N_REG_CLASSES];
743 /* For each class, size of group of consecutive regs
744 that is needed for the reloads of this class. */
745 int group_size[N_REG_CLASSES];
746 /* For each class, max number of consecutive groups needed.
747 (Each group contains group_size[CLASS] consecutive registers.) */
748 int max_groups[N_REG_CLASSES];
749 /* For each class, max number needed of regs that don't belong
750 to any of the groups. */
751 int max_nongroups[N_REG_CLASSES];
752 /* For each class, the machine mode which requires consecutive
753 groups of regs of that class.
754 If two different modes ever require groups of one class,
755 they must be the same size and equally restrictive for that class,
756 otherwise we can't handle the complexity. */
757 enum machine_mode group_mode[N_REG_CLASSES];
758 /* Record the insn where each maximum need is first found. */
759 rtx max_needs_insn[N_REG_CLASSES];
760 rtx max_groups_insn[N_REG_CLASSES];
761 rtx max_nongroups_insn[N_REG_CLASSES];
762 rtx x;
763 int starting_frame_size = get_frame_size ();
764 int previous_frame_pointer_needed = frame_pointer_needed;
765 static char *reg_class_names[] = REG_CLASS_NAMES;
766
767 something_changed = 0;
768 bzero ((char *) max_needs, sizeof max_needs);
769 bzero ((char *) max_groups, sizeof max_groups);
770 bzero ((char *) max_nongroups, sizeof max_nongroups);
771 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
772 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
773 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
774 bzero ((char *) group_size, sizeof group_size);
775 for (i = 0; i < N_REG_CLASSES; i++)
776 group_mode[i] = VOIDmode;
777
778 /* Keep track of which basic blocks need reloads.  */
779 this_block = 0;
780
781 /* Remember whether any element of basic_block_needs
782 changes from 0 to 1 in this pass. */
783 new_basic_block_needs = 0;
784
785 /* Reset all offsets on eliminable registers to their initial values. */
786 #ifdef ELIMINABLE_REGS
787 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
788 {
789 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
790 ep->previous_offset = ep->offset
791 = ep->max_offset = ep->initial_offset;
792 }
793 #else
794 #ifdef INITIAL_FRAME_POINTER_OFFSET
795 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
796 #else
797 if (!FRAME_POINTER_REQUIRED)
798 abort ();
799 reg_eliminate[0].initial_offset = 0;
800 #endif
801 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
802 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
803 #endif
804
805 num_not_at_initial_offset = 0;
806
807 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
808
809 /* Set a known offset for each forced label to be at the initial offset
810 of each elimination. We do this because we assume that all
811 computed jumps occur from a location where each elimination is
812 at its initial offset. */
813
814 for (x = forced_labels; x; x = XEXP (x, 1))
815 if (XEXP (x, 0))
816 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
817
818 /* For each pseudo register that has an equivalent location defined,
819 try to eliminate any eliminable registers (such as the frame pointer)
820 assuming initial offsets for the replacement register, which
821 is the normal case.
822
823 If the resulting location is directly addressable, substitute
824 the MEM we just got directly for the old REG.
825
826 If it is not addressable but is a constant or the sum of a hard reg
827 and constant, it is probably not addressable because the constant is
828 out of range, in that case record the address; we will generate
829 hairy code to compute the address in a register each time it is
830 needed. Similarly if it is a hard register, but one that is not
831 valid as an address register.
832
833 If the location is not addressable, but does not have one of the
834 above forms, assign a stack slot. We have to do this to avoid the
835 potential of producing lots of reloads if, e.g., a location involves
836 a pseudo that didn't get a hard register and has an equivalent memory
837 location that also involves a pseudo that didn't get a hard register.
838
839 Perhaps at some point we will improve reload_when_needed handling
840 so this problem goes away. But that's very hairy. */
841
842 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
843 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
844 {
845 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
846
847 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
848 XEXP (x, 0)))
849 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
850 else if (CONSTANT_P (XEXP (x, 0))
851 || (GET_CODE (XEXP (x, 0)) == REG
852 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
853 || (GET_CODE (XEXP (x, 0)) == PLUS
854 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
855 && (REGNO (XEXP (XEXP (x, 0), 0))
856 < FIRST_PSEUDO_REGISTER)
857 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
858 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
859 else
860 {
861 /* Make a new stack slot. Then indicate that something
862 changed so we go back and recompute offsets for
863 eliminable registers because the allocation of memory
864 below might change some offset. reg_equiv_{mem,address}
865 will be set up for this pseudo on the next pass around
866 the loop. */
867 reg_equiv_memory_loc[i] = 0;
868 reg_equiv_init[i] = 0;
869 alter_reg (i, -1);
870 something_changed = 1;
871 }
872 }
873
874 /* If we allocated another pseudo to the stack, redo elimination
875 bookkeeping. */
876 if (something_changed)
877 continue;
878
879 /* If caller-saves needs a group, initialize the group to include
880 the size and mode required for caller-saves. */
881
882 if (caller_save_group_size > 1)
883 {
884 group_mode[(int) caller_save_spill_class] = Pmode;
885 group_size[(int) caller_save_spill_class] = caller_save_group_size;
886 }
887
888 /* Compute the maximum number of additional registers needed by any
889 instruction.  Collect information separately for each class of regs. */
890
891 for (insn = first; insn; insn = NEXT_INSN (insn))
892 {
893 if (global && this_block + 1 < n_basic_blocks
894 && insn == basic_block_head[this_block+1])
895 ++this_block;
896
897 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
898 might include REG_LABEL), we need to see what effects this
899 has on the known offsets at labels. */
900
901 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
902 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
903 && REG_NOTES (insn) != 0))
904 set_label_offsets (insn, insn, 0);
905
906 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
907 {
908 /* Nonzero means don't use a reload reg that overlaps
909 the place where a function value can be returned. */
910 rtx avoid_return_reg = 0;
911
912 rtx old_body = PATTERN (insn);
913 int old_code = INSN_CODE (insn);
914 rtx old_notes = REG_NOTES (insn);
915 int did_elimination = 0;
916
917 /* To compute the number of reload registers of each class
918 needed for an insn, we must simulate what choose_reload_regs
919 can do. We do this by splitting an insn into an "input" and
920 an "output" part. RELOAD_OTHER reloads are used in both.
921 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
922 which must be live over the entire input section of reloads,
923 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
924 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
925 inputs.
926
927 The registers needed for output are RELOAD_OTHER and
928 RELOAD_FOR_OUTPUT, which are live for the entire output
929 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
930 reloads for each operand.
931
932 The total number of registers needed is the maximum of the
933 inputs and outputs. */
934
935 struct needs
936 {
937 /* [0] is normal, [1] is nongroup. */
938 int regs[2][N_REG_CLASSES];
939 int groups[N_REG_CLASSES];
940 };
941
942 /* Each `struct needs' corresponds to one RELOAD_... type. */
943 struct {
944 struct needs other;
945 struct needs input;
946 struct needs output;
947 struct needs insn;
948 struct needs other_addr;
949 struct needs op_addr;
950 struct needs op_addr_reload;
951 struct needs in_addr[MAX_RECOG_OPERANDS];
952 struct needs out_addr[MAX_RECOG_OPERANDS];
953 } insn_needs;
954
955 /* If needed, eliminate any eliminable registers. */
956 if (num_eliminable)
957 did_elimination = eliminate_regs_in_insn (insn, 0);
958
959 #ifdef SMALL_REGISTER_CLASSES
960 /* Set avoid_return_reg if this is an insn
961 that might use the value of a function call. */
962 if (GET_CODE (insn) == CALL_INSN)
963 {
964 if (GET_CODE (PATTERN (insn)) == SET)
965 after_call = SET_DEST (PATTERN (insn));
966 else if (GET_CODE (PATTERN (insn)) == PARALLEL
967 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
968 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
969 else
970 after_call = 0;
971 }
972 else if (after_call != 0
973 && !(GET_CODE (PATTERN (insn)) == SET
974 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
975 {
976 if (reg_referenced_p (after_call, PATTERN (insn)))
977 avoid_return_reg = after_call;
978 after_call = 0;
979 }
980 #endif /* SMALL_REGISTER_CLASSES */
981
982 /* Analyze the instruction. */
983 find_reloads (insn, 0, spill_indirect_levels, global,
984 spill_reg_order);
985
986 /* Remember for later shortcuts which insns had any reloads or
987 register eliminations.
988
989 One might think that it would be worthwhile to mark insns
990 that need register replacements but not reloads, but this is
991 not safe because find_reloads may do some manipulation of
992 the insn (such as swapping commutative operands), which would
993 be lost when we restore the old pattern after register
994 replacement. So the actions of find_reloads must be redone in
995 subsequent passes or in reload_as_needed.
996
997 However, it is safe to mark insns that need reloads
998 but not register replacement. */
999
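	    /* Remember, in the insn's mode, what we found: QImode if the
	       insn needed register eliminations, HImode if it needed
	       reloads, otherwise VOIDmode (an existing DImode marking
	       is kept).  */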
1000 PUT_MODE (insn, (did_elimination ? QImode
1001 : n_reloads ? HImode
1002 : GET_MODE (insn) == DImode ? DImode
1003 : VOIDmode));
1004
1005 /* Discard any register replacements done. */
1006 if (did_elimination)
1007 {
1008 obstack_free (&reload_obstack, reload_firstobj);
1009 PATTERN (insn) = old_body;
1010 INSN_CODE (insn) = old_code;
1011 REG_NOTES (insn) = old_notes;
1012 something_needs_elimination = 1;
1013 }
1014
1015 /* If this insn has no reloads, we need not do anything except
1016 in the case of a CALL_INSN when we have caller-saves and
1017 caller-save needs reloads. */
1018
1019 if (n_reloads == 0
1020 && ! (GET_CODE (insn) == CALL_INSN
1021 && caller_save_spill_class != NO_REGS))
1022 continue;
1023
1024 something_needs_reloads = 1;
1025 bzero ((char *) &insn_needs, sizeof insn_needs);
1026
1027 /* Count each reload once in every class
1028 containing the reload's own class. */
1029
1030 for (i = 0; i < n_reloads; i++)
1031 {
1032 register enum reg_class *p;
1033 enum reg_class class = reload_reg_class[i];
1034 int size;
1035 enum machine_mode mode;
1036 int nongroup_need;
1037 struct needs *this_needs;
1038
1039 /* Don't count the dummy reloads, for which one of the
1040 regs mentioned in the insn can be used for reloading.
1041 Don't count optional reloads.
1042 Don't count reloads that got combined with others. */
1043 if (reload_reg_rtx[i] != 0
1044 || reload_optional[i] != 0
1045 || (reload_out[i] == 0 && reload_in[i] == 0
1046 && ! reload_secondary_p[i]))
1047 continue;
1048
1049 /* Show that a reload register of this class is needed
1050 in this basic block. We do not use insn_needs and
1051 insn_groups because they are overly conservative for
1052 this purpose. */
1053 if (global && ! basic_block_needs[(int) class][this_block])
1054 {
1055 basic_block_needs[(int) class][this_block] = 1;
1056 new_basic_block_needs = 1;
1057 }
1058
1059
1060 mode = reload_inmode[i];
1061 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1062 mode = reload_outmode[i];
1063 size = CLASS_MAX_NREGS (class, mode);
1064
1065 /* If this class doesn't want a group, determine if we have
1066 a nongroup need or a regular need. We have a nongroup
1067 need if this reload conflicts with a group reload whose
1068 class intersects with this reload's class. */
1069
1070 nongroup_need = 0;
1071 if (size == 1)
1072 for (j = 0; j < n_reloads; j++)
1073 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1074 (GET_MODE_SIZE (reload_outmode[j])
1075 > GET_MODE_SIZE (reload_inmode[j]))
1076 ? reload_outmode[j]
1077 : reload_inmode[j])
1078 > 1)
1079 && (!reload_optional[j])
1080 && (reload_in[j] != 0 || reload_out[j] != 0
1081 || reload_secondary_p[j])
1082 && reloads_conflict (i, j)
1083 && reg_classes_intersect_p (class,
1084 reload_reg_class[j]))
1085 {
1086 nongroup_need = 1;
1087 break;
1088 }
1089
1090 /* Decide which time-of-use to count this reload for. */
1091 switch (reload_when_needed[i])
1092 {
1093 case RELOAD_OTHER:
1094 this_needs = &insn_needs.other;
1095 break;
1096 case RELOAD_FOR_INPUT:
1097 this_needs = &insn_needs.input;
1098 break;
1099 case RELOAD_FOR_OUTPUT:
1100 this_needs = &insn_needs.output;
1101 break;
1102 case RELOAD_FOR_INSN:
1103 this_needs = &insn_needs.insn;
1104 break;
1105 case RELOAD_FOR_OTHER_ADDRESS:
1106 this_needs = &insn_needs.other_addr;
1107 break;
1108 case RELOAD_FOR_INPUT_ADDRESS:
1109 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1110 break;
1111 case RELOAD_FOR_OUTPUT_ADDRESS:
1112 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1113 break;
1114 case RELOAD_FOR_OPERAND_ADDRESS:
1115 this_needs = &insn_needs.op_addr;
1116 break;
1117 case RELOAD_FOR_OPADDR_ADDR:
1118 this_needs = &insn_needs.op_addr_reload;
1119 break;
1120 }
1121
1122 if (size > 1)
1123 {
1124 enum machine_mode other_mode, allocate_mode;
1125
1126 /* Count number of groups needed separately from
1127 number of individual regs needed. */
1128 this_needs->groups[(int) class]++;
1129 p = reg_class_superclasses[(int) class];
1130 while (*p != LIM_REG_CLASSES)
1131 this_needs->groups[(int) *p++]++;
1132
1133 /* Record size and mode of a group of this class. */
1134 /* If more than one size group is needed,
1135 make all groups the largest needed size. */
1136 if (group_size[(int) class] < size)
1137 {
1138 other_mode = group_mode[(int) class];
1139 allocate_mode = mode;
1140
1141 group_size[(int) class] = size;
1142 group_mode[(int) class] = mode;
1143 }
1144 else
1145 {
1146 other_mode = mode;
1147 allocate_mode = group_mode[(int) class];
1148 }
1149
1150 /* Crash if two dissimilar machine modes both need
1151 groups of consecutive regs of the same class. */
1152
1153 if (other_mode != VOIDmode && other_mode != allocate_mode
1154 && ! modes_equiv_for_class_p (allocate_mode,
1155 other_mode, class))
1156 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1157 insn);
1158 }
1159 else if (size == 1)
1160 {
1161 this_needs->regs[nongroup_need][(int) class] += 1;
1162 p = reg_class_superclasses[(int) class];
1163 while (*p != LIM_REG_CLASSES)
1164 this_needs->regs[nongroup_need][(int) *p++] += 1;
1165 }
1166 else
1167 abort ();
1168 }
1169
1170 /* All reloads have been counted for this insn;
1171 now merge the various times of use.
1172 This sets insn_needs, etc., to the maximum total number
1173 of registers needed at any point in this insn. */
1174
1175 for (i = 0; i < N_REG_CLASSES; i++)
1176 {
1177 int in_max, out_max;
1178
1179 /* Compute normal and nongroup needs. */
1180 for (j = 0; j <= 1; j++)
1181 {
1182 for (in_max = 0, out_max = 0, k = 0;
1183 k < reload_n_operands; k++)
1184 {
1185 in_max
1186 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1187 out_max
1188 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1189 }
1190
1191 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1192 and operand addresses but not things used to reload
1193 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1194 don't conflict with things needed to reload inputs or
1195 outputs. */
1196
1197 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1198 insn_needs.op_addr_reload.regs[j][i]),
1199 in_max);
1200
1201 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1202
1203 insn_needs.input.regs[j][i]
1204 = MAX (insn_needs.input.regs[j][i]
1205 + insn_needs.op_addr.regs[j][i]
1206 + insn_needs.insn.regs[j][i],
1207 in_max + insn_needs.input.regs[j][i]);
1208
1209 insn_needs.output.regs[j][i] += out_max;
1210 insn_needs.other.regs[j][i]
1211 += MAX (MAX (insn_needs.input.regs[j][i],
1212 insn_needs.output.regs[j][i]),
1213 insn_needs.other_addr.regs[j][i]);
1214
1215 }
1216
1217 /* Now compute group needs. */
1218 for (in_max = 0, out_max = 0, j = 0;
1219 j < reload_n_operands; j++)
1220 {
1221 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1222 out_max
1223 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1224 }
1225
1226 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1227 insn_needs.op_addr_reload.groups[i]),
1228 in_max);
1229 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1230
1231 insn_needs.input.groups[i]
1232 = MAX (insn_needs.input.groups[i]
1233 + insn_needs.op_addr.groups[i]
1234 + insn_needs.insn.groups[i],
1235 in_max + insn_needs.input.groups[i]);
1236
1237 insn_needs.output.groups[i] += out_max;
1238 insn_needs.other.groups[i]
1239 += MAX (MAX (insn_needs.input.groups[i],
1240 insn_needs.output.groups[i]),
1241 insn_needs.other_addr.groups[i]);
1242 }
1243
1244 /* If this is a CALL_INSN and caller-saves will need
1245 a spill register, act as if the spill register is
1246 needed for this insn. However, the spill register
1247 can be used by any reload of this insn, so we only
1248 need do something if no need for that class has
1249 been recorded.
1250
1251 The assumption that every CALL_INSN will trigger a
1252 caller-save is highly conservative; however, the number
1253 of cases where caller-saves will need a spill register but
1254 a block containing a CALL_INSN won't need a spill register
1255 of that class should be quite rare.
1256
1257 If a group is needed, the size and mode of the group will
1258 have been set up at the beginning of this loop. */
1259
1260 if (GET_CODE (insn) == CALL_INSN
1261 && caller_save_spill_class != NO_REGS)
1262 {
1263 /* See if this register would conflict with any reload
1264 that needs a group. */
1265 int nongroup_need = 0;
1266 int *caller_save_needs;
1267
1268 for (j = 0; j < n_reloads; j++)
1269 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1270 (GET_MODE_SIZE (reload_outmode[j])
1271 > GET_MODE_SIZE (reload_inmode[j]))
1272 ? reload_outmode[j]
1273 : reload_inmode[j])
1274 > 1)
1275 && reg_classes_intersect_p (caller_save_spill_class,
1276 reload_reg_class[j]))
1277 {
1278 nongroup_need = 1;
1279 break;
1280 }
1281
1282 caller_save_needs
1283 = (caller_save_group_size > 1
1284 ? insn_needs.other.groups
1285 : insn_needs.other.regs[nongroup_need]);
1286
1287 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1288 {
1289 register enum reg_class *p
1290 = reg_class_superclasses[(int) caller_save_spill_class];
1291
1292 caller_save_needs[(int) caller_save_spill_class]++;
1293
1294 while (*p != LIM_REG_CLASSES)
1295 caller_save_needs[(int) *p++] += 1;
1296 }
1297
1298 /* Show that this basic block will need a register of
1299 this class. */
1300
1301 if (global
1302 && ! (basic_block_needs[(int) caller_save_spill_class]
1303 [this_block]))
1304 {
1305 basic_block_needs[(int) caller_save_spill_class]
1306 [this_block] = 1;
1307 new_basic_block_needs = 1;
1308 }
1309 }
1310
1311 #ifdef SMALL_REGISTER_CLASSES
1312 /* If this insn stores the value of a function call,
1313 and that value is in a register that has been spilled,
1314 and if the insn needs a reload in a class
1315 that might use that register as the reload register,
1316 then add an extra need in that class.
1317 This makes sure we have a register available that does
1318 not overlap the return value. */
1319
1320 if (avoid_return_reg)
1321 {
1322 int regno = REGNO (avoid_return_reg);
1323 int nregs
1324 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1325 int r;
1326 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1327
1328 /* First compute the "basic needs", which counts a
1329 need only in the smallest class in which it
1330 is required. */
1331
1332 bcopy ((char *) insn_needs.other.regs[0],
1333 (char *) basic_needs, sizeof basic_needs);
1334 bcopy ((char *) insn_needs.other.groups,
1335 (char *) basic_groups, sizeof basic_groups);
1336
1337 for (i = 0; i < N_REG_CLASSES; i++)
1338 {
1339 enum reg_class *p;
1340
1341 if (basic_needs[i] >= 0)
1342 for (p = reg_class_superclasses[i];
1343 *p != LIM_REG_CLASSES; p++)
1344 basic_needs[(int) *p] -= basic_needs[i];
1345
1346 if (basic_groups[i] >= 0)
1347 for (p = reg_class_superclasses[i];
1348 *p != LIM_REG_CLASSES; p++)
1349 basic_groups[(int) *p] -= basic_groups[i];
1350 }
1351
1352 /* Now count extra regs if there might be a conflict with
1353 the return value register. */
1354
1355 for (r = regno; r < regno + nregs; r++)
1356 if (spill_reg_order[r] >= 0)
1357 for (i = 0; i < N_REG_CLASSES; i++)
1358 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1359 {
1360 if (basic_needs[i] > 0)
1361 {
1362 enum reg_class *p;
1363
1364 insn_needs.other.regs[0][i]++;
1365 p = reg_class_superclasses[i];
1366 while (*p != LIM_REG_CLASSES)
1367 insn_needs.other.regs[0][(int) *p++]++;
1368 }
1369 if (basic_groups[i] > 0)
1370 {
1371 enum reg_class *p;
1372
1373 insn_needs.other.groups[i]++;
1374 p = reg_class_superclasses[i];
1375 while (*p != LIM_REG_CLASSES)
1376 insn_needs.other.groups[(int) *p++]++;
1377 }
1378 }
1379 }
1380 #endif /* SMALL_REGISTER_CLASSES */
1381
1382 /* For each class, collect maximum need of any insn. */
1383
1384 for (i = 0; i < N_REG_CLASSES; i++)
1385 {
1386 if (max_needs[i] < insn_needs.other.regs[0][i])
1387 {
1388 max_needs[i] = insn_needs.other.regs[0][i];
1389 max_needs_insn[i] = insn;
1390 }
1391 if (max_groups[i] < insn_needs.other.groups[i])
1392 {
1393 max_groups[i] = insn_needs.other.groups[i];
1394 max_groups_insn[i] = insn;
1395 }
1396 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1397 {
1398 max_nongroups[i] = insn_needs.other.regs[1][i];
1399 max_nongroups_insn[i] = insn;
1400 }
1401 }
1402 }
1403 /* Note that there is a continue statement above. */
1404 }
1405
1406 /* If we allocated any new memory locations, make another pass
1407 since it might have changed elimination offsets. */
1408 if (starting_frame_size != get_frame_size ())
1409 something_changed = 1;
1410
1411 if (dumpfile)
1412 for (i = 0; i < N_REG_CLASSES; i++)
1413 {
1414 if (max_needs[i] > 0)
1415 fprintf (dumpfile,
1416 ";; Need %d reg%s of class %s (for insn %d).\n",
1417 max_needs[i], max_needs[i] == 1 ? "" : "s",
1418 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1419 if (max_nongroups[i] > 0)
1420 fprintf (dumpfile,
1421 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1422 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1423 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1424 if (max_groups[i] > 0)
1425 fprintf (dumpfile,
1426 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1427 max_groups[i], max_groups[i] == 1 ? "" : "s",
1428 mode_name[(int) group_mode[i]],
1429 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1430 }
1431
1432 /* If we have caller-saves, set up the save areas and see if caller-save
1433 will need a spill register. */
1434
1435 if (caller_save_needed
1436 && ! setup_save_areas (&something_changed)
1437 && caller_save_spill_class == NO_REGS)
1438 {
1439 /* The class we will need depends on whether the machine
1440 supports the sum of two registers for an address; see
1441 find_address_reloads for details. */
1442
1443 caller_save_spill_class
1444 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1445 caller_save_group_size
1446 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1447 something_changed = 1;
1448 }
1449
1450 /* See if anything that happened changes which eliminations are valid.
1451 For example, on the Sparc, whether or not the frame pointer can
1452 be eliminated can depend on what registers have been used. We need
1453 not check some conditions again (such as flag_omit_frame_pointer)
1454 since they can't have changed. */
1455
1456 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1457 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1458 #ifdef ELIMINABLE_REGS
1459 || ! CAN_ELIMINATE (ep->from, ep->to)
1460 #endif
1461 )
1462 ep->can_eliminate = 0;
1463
1464 /* Look for the case where we have discovered that we can't replace
1465 register A with register B and that means that we will now be
1466 trying to replace register A with register C. This means we can
1467 no longer replace register C with register B and we need to disable
1468 such an elimination, if it exists. This occurs often with A == ap,
1469 B == sp, and C == fp. */
1470
1471 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1472 {
1473 struct elim_table *op;
1474 register int new_to = -1;
1475
1476 if (! ep->can_eliminate && ep->can_eliminate_previous)
1477 {
1478 /* Find the current elimination for ep->from, if there is a
1479 new one. */
1480 for (op = reg_eliminate;
1481 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1482 if (op->from == ep->from && op->can_eliminate)
1483 {
1484 new_to = op->to;
1485 break;
1486 }
1487
1488 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1489 disable it. */
1490 for (op = reg_eliminate;
1491 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1492 if (op->from == new_to && op->to == ep->to)
1493 op->can_eliminate = 0;
1494 }
1495 }
1496
1497 /* See if any registers that we thought we could eliminate the previous
1498 time are no longer eliminable. If so, something has changed and we
1499 must spill the register. Also, recompute the number of eliminable
1500 registers and see if the frame pointer is needed; it is if there is
1501 no elimination of the frame pointer that we can perform. */
1502
1503 frame_pointer_needed = 1;
1504 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1505 {
1506 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1507 && ep->to != HARD_FRAME_POINTER_REGNUM)
1508 frame_pointer_needed = 0;
1509
1510 if (! ep->can_eliminate && ep->can_eliminate_previous)
1511 {
1512 ep->can_eliminate_previous = 0;
1513 spill_hard_reg (ep->from, global, dumpfile, 1);
1514 something_changed = 1;
1515 num_eliminable--;
1516 }
1517 }
1518
1519 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1520 /* If we didn't need a frame pointer last time, but we do now, spill
1521 the hard frame pointer. */
1522 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1523 {
1524 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1525 something_changed = 1;
1526 }
1527 #endif
1528
1529 /* If all needs are met, we win. */
1530
1531 for (i = 0; i < N_REG_CLASSES; i++)
1532 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1533 break;
1534 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1535 break;
1536
1537 /* Not all needs are met; must spill some hard regs. */
1538
1539 /* Put all registers spilled so far back in potential_reload_regs, but
1540 put them at the front, since we've already spilled most of the
1541 pseudos in them (we might have left some pseudos unspilled if they
1542 were in a block that didn't need any spill registers of a conflicting
1543 class). We used to try to mark off the need for those registers,
1544 but doing so properly is very complex and reallocating them is the
1545 simpler approach. First, "pack" potential_reload_regs by pushing
1546 any nonnegative entries towards the end. That will leave room
1547 for the registers we already spilled.
1548
1549 Also, undo the marking of the spill registers from the last time
1550 around in FORBIDDEN_REGS since we will probably be allocating
1551 them again below.
1552
1553 ??? It is theoretically possible that we might end up not using one
1554 of our previously-spilled registers in this allocation, even though
1555 they are at the head of the list. It's not clear what to do about
1556 this, but it was no better before, when we marked off the needs met
1557 by the previously-spilled registers. With the current code, globals
1558 can be allocated into these registers, but locals cannot. */
1559
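/* Illustrative example: suppose FIRST_PSEUDO_REGISTER were 8,
   potential_reload_regs were { 4, -1, 6, -1, 1, -1, -1, -1 } and
   n_spills were 3.  The first loop below slides the nonnegative
   entries toward the end, and the second loop then overwrites the
   first three slots with spill_regs[0..2], un-forbidding each one.  */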
1560 if (n_spills)
1561 {
1562 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1563 if (potential_reload_regs[i] != -1)
1564 potential_reload_regs[j--] = potential_reload_regs[i];
1565
1566 for (i = 0; i < n_spills; i++)
1567 {
1568 potential_reload_regs[i] = spill_regs[i];
1569 spill_reg_order[spill_regs[i]] = -1;
1570 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1571 }
1572
1573 n_spills = 0;
1574 }
1575
1576 /* Now find more reload regs to satisfy the remaining need
1577 Do it by ascending class number, since otherwise a reg
1578 might be spilled for a big class and might fail to count
1579 for a smaller class even though it belongs to that class.
1580
1581 Count spilled regs in `spills', and add entries to
1582 `spill_regs' and `spill_reg_order'.
1583
1584 ??? Note there is a problem here.
1585 When there is a need for a group in a high-numbered class,
1586 and also need for non-group regs that come from a lower class,
1587 the non-group regs are chosen first. If there aren't many regs,
1588 they might leave no room for a group.
1589
1590 This was happening on the 386. To fix it, we added the code
1591 that calls possible_group_p, so that the lower class won't
1592 break up the last possible group.
1593
1594 Really fixing the problem would require changes above
1595 in counting the regs already spilled, and in choose_reload_regs.
1596 It might be hard to avoid introducing bugs there. */
1597
1598 CLEAR_HARD_REG_SET (counted_for_groups);
1599 CLEAR_HARD_REG_SET (counted_for_nongroups);
1600
1601 for (class = 0; class < N_REG_CLASSES; class++)
1602 {
1603 /* First get the groups of registers.
1604 If we got single registers first, we might fragment
1605 possible groups. */
1606 while (max_groups[class] > 0)
1607 {
1608 /* If any single spilled regs happen to form groups,
1609 count them now. Maybe we don't really need
1610 to spill another group. */
1611 count_possible_groups (group_size, group_mode, max_groups,
1612 class);
1613
1614 if (max_groups[class] <= 0)
1615 break;
1616
1617 /* Groups of size 2 (the only groups used on most machines)
1618 are treated specially. */
1619 if (group_size[class] == 2)
1620 {
1621 /* First, look for a register that will complete a group. */
1622 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1623 {
1624 int other;
1625
1626 j = potential_reload_regs[i];
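/* The test below accepts J when J is itself usable as a spill reg
   of CLASS and an adjacent reg (J-1 or J+1) is already a spill reg
   of CLASS that is not claimed for another group or for a non-group
   need, so that the pair can hold group_mode[class].  */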
1627 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1628 &&
1629 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1630 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1631 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1632 && HARD_REGNO_MODE_OK (other, group_mode[class])
1633 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1634 other)
1635 /* We don't want one part of another group.
1636 We could get "two groups" that overlap! */
1637 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1638 ||
1639 (j < FIRST_PSEUDO_REGISTER - 1
1640 && (other = j + 1, spill_reg_order[other] >= 0)
1641 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1642 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1643 && HARD_REGNO_MODE_OK (j, group_mode[class])
1644 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1645 other)
1646 && ! TEST_HARD_REG_BIT (counted_for_groups,
1647 other))))
1648 {
1649 register enum reg_class *p;
1650
1651 /* We have found one that will complete a group,
1652 so count off one group as provided. */
1653 max_groups[class]--;
1654 p = reg_class_superclasses[class];
1655 while (*p != LIM_REG_CLASSES)
1656 max_groups[(int) *p++]--;
1657
1658 /* Indicate both these regs are part of a group. */
1659 SET_HARD_REG_BIT (counted_for_groups, j);
1660 SET_HARD_REG_BIT (counted_for_groups, other);
1661 break;
1662 }
1663 }
1664 /* We can't complete a group, so start one. */
1665 #ifdef SMALL_REGISTER_CLASSES
1666 /* Look for a pair neither of which is explicitly used. */
1667 if (i == FIRST_PSEUDO_REGISTER)
1668 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1669 {
1670 int k;
1671 j = potential_reload_regs[i];
1672 /* Verify that J+1 is a potential reload reg. */
1673 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1674 if (potential_reload_regs[k] == j + 1)
1675 break;
1676 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1677 && k < FIRST_PSEUDO_REGISTER
1678 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1679 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1680 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1681 && HARD_REGNO_MODE_OK (j, group_mode[class])
1682 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1683 j + 1)
1684 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1685 /* Reject J at this stage
1686 if J+1 was explicitly used. */
1687 && ! regs_explicitly_used[j + 1])
1688 break;
1689 }
1690 #endif
1691 /* Now try any group at all
1692 whose registers are not in bad_spill_regs. */
1693 if (i == FIRST_PSEUDO_REGISTER)
1694 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1695 {
1696 int k;
1697 j = potential_reload_regs[i];
1698 /* Verify that J+1 is a potential reload reg. */
1699 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1700 if (potential_reload_regs[k] == j + 1)
1701 break;
1702 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1703 && k < FIRST_PSEUDO_REGISTER
1704 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1705 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1706 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1707 && HARD_REGNO_MODE_OK (j, group_mode[class])
1708 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1709 j + 1)
1710 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1711 break;
1712 }
1713
1714 /* I should be the index in potential_reload_regs
1715 of the new reload reg we have found. */
1716
1717 if (i >= FIRST_PSEUDO_REGISTER)
1718 {
1719 /* There are no groups left to spill. */
1720 spill_failure (max_groups_insn[class]);
1721 failure = 1;
1722 goto failed;
1723 }
1724 else
1725 something_changed
1726 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1727 global, dumpfile);
1728 }
1729 else
1730 {
1731 /* For groups of more than 2 registers,
1732 look for a sufficient sequence of unspilled registers,
1733 and spill them all at once. */
1734 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1735 {
1736 int k;
1737
1738 j = potential_reload_regs[i];
1739 if (j >= 0
1740 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1741 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1742 {
1743 /* Check each reg in the sequence. */
1744 for (k = 0; k < group_size[class]; k++)
1745 if (! (spill_reg_order[j + k] < 0
1746 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1747 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1748 break;
1749 /* We got a full sequence, so spill them all. */
1750 if (k == group_size[class])
1751 {
1752 register enum reg_class *p;
1753 for (k = 0; k < group_size[class]; k++)
1754 {
1755 int idx;
1756 SET_HARD_REG_BIT (counted_for_groups, j + k);
1757 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1758 if (potential_reload_regs[idx] == j + k)
1759 break;
1760 something_changed
1761 |= new_spill_reg (idx, class,
1762 max_needs, NULL_PTR,
1763 global, dumpfile);
1764 }
1765
1766 /* We have found one that will complete a group,
1767 so count off one group as provided. */
1768 max_groups[class]--;
1769 p = reg_class_superclasses[class];
1770 while (*p != LIM_REG_CLASSES)
1771 max_groups[(int) *p++]--;
1772
1773 break;
1774 }
1775 }
1776 }
1777 /* We couldn't find any registers for this reload.
1778 Avoid going into an infinite loop. */
1779 if (i >= FIRST_PSEUDO_REGISTER)
1780 {
1781 /* There are no groups left. */
1782 spill_failure (max_groups_insn[class]);
1783 failure = 1;
1784 goto failed;
1785 }
1786 }
1787 }
1788
1789 /* Now similarly satisfy all need for single registers. */
1790
1791 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1792 {
1793 #ifdef SMALL_REGISTER_CLASSES
1794 /* This should be right for all machines, but only the 386
1795 is known to need it, so this conditional plays safe.
1796 ??? For 2.5, try making this unconditional. */
1797 /* If we spilled enough regs, but they weren't counted
1798 against the non-group need, see if we can count them now.
1799 If so, we can avoid some actual spilling. */
1800 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1801 for (i = 0; i < n_spills; i++)
1802 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1803 spill_regs[i])
1804 && !TEST_HARD_REG_BIT (counted_for_groups,
1805 spill_regs[i])
1806 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1807 spill_regs[i])
1808 && max_nongroups[class] > 0)
1809 {
1810 register enum reg_class *p;
1811
1812 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1813 max_nongroups[class]--;
1814 p = reg_class_superclasses[class];
1815 while (*p != LIM_REG_CLASSES)
1816 max_nongroups[(int) *p++]--;
1817 }
1818 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1819 break;
1820 #endif
1821
1822 /* Consider the potential reload regs that aren't
1823 yet in use as reload regs, in order of preference.
1824 Find the most preferred one that's in this class. */
1825
1826 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1827 if (potential_reload_regs[i] >= 0
1828 && TEST_HARD_REG_BIT (reg_class_contents[class],
1829 potential_reload_regs[i])
1830 /* If this reg will not be available for groups,
1831 pick one that does not foreclose possible groups.
1832 This is a kludge, and not very general,
1833 but it should be sufficient to make the 386 work,
1834 and the problem should not occur on machines with
1835 more registers. */
1836 && (max_nongroups[class] == 0
1837 || possible_group_p (potential_reload_regs[i], max_groups)))
1838 break;
1839
1840 /* If we couldn't get a register, try to get one even if we
1841 might foreclose possible groups. This may cause problems
1842 later, but that's better than aborting now, since it is
1843 possible that we will, in fact, be able to form the needed
1844 group even with this allocation. */
1845
1846 if (i >= FIRST_PSEUDO_REGISTER
1847 && (asm_noperands (max_needs[class] > 0
1848 ? max_needs_insn[class]
1849 : max_nongroups_insn[class])
1850 < 0))
1851 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1852 if (potential_reload_regs[i] >= 0
1853 && TEST_HARD_REG_BIT (reg_class_contents[class],
1854 potential_reload_regs[i]))
1855 break;
1856
1857 /* I should be the index in potential_reload_regs
1858 of the new reload reg we have found. */
1859
1860 if (i >= FIRST_PSEUDO_REGISTER)
1861 {
1862 /* There are no possible registers left to spill. */
1863 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1864 : max_nongroups_insn[class]);
1865 failure = 1;
1866 goto failed;
1867 }
1868 else
1869 something_changed
1870 |= new_spill_reg (i, class, max_needs, max_nongroups,
1871 global, dumpfile);
1872 }
1873 }
1874 }
1875
1876 /* If global-alloc was run, notify it of any register eliminations we have
1877 done. */
1878 if (global)
1879 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1880 if (ep->can_eliminate)
1881 mark_elimination (ep->from, ep->to);
1882
1883 /* Insert code to save and restore call-clobbered hard regs
1884 around calls. Tell it what mode to use so that we will process
1885 those insns in reload_as_needed if we have to. */
1886
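/* The mode passed here merely records which condition held: QImode if
   register eliminations are still pending, HImode if a spill class is
   needed for caller-save addresses, VOIDmode otherwise.  It is what
   lets reload_as_needed know whether these insns need more work.  */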
1887 if (caller_save_needed)
1888 save_call_clobbered_regs (num_eliminable ? QImode
1889 : caller_save_spill_class != NO_REGS ? HImode
1890 : VOIDmode);
1891
1892 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1893 If that insn didn't set the register (i.e., it copied the register to
1894 memory), just delete that insn instead of the equivalencing insn plus
1895 anything now dead. If we call delete_dead_insn on that insn, we may
1896 delete the insn that actually sets the register if the register dies
1897 there and that is incorrect. */
1898
1899 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1900 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1901 && GET_CODE (reg_equiv_init[i]) != NOTE)
1902 {
1903 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1904 delete_dead_insn (reg_equiv_init[i]);
1905 else
1906 {
1907 PUT_CODE (reg_equiv_init[i], NOTE);
1908 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1909 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1910 }
1911 }
1912
1913 /* Use the reload registers where necessary
1914 by generating move instructions to move the must-be-register
1915 values into or out of the reload registers. */
1916
1917 if (something_needs_reloads || something_needs_elimination
1918 || (caller_save_needed && num_eliminable)
1919 || caller_save_spill_class != NO_REGS)
1920 reload_as_needed (first, global);
1921
1922 /* If we were able to eliminate the frame pointer, show that it is no
1923 longer live at the start of any basic block. If it is live by
1924 virtue of being in a pseudo, that pseudo will be marked live
1925 and hence the frame pointer will be known to be live via that
1926 pseudo. */
1927
1928 if (! frame_pointer_needed)
1929 for (i = 0; i < n_basic_blocks; i++)
1930 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1931 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1932 % REGSET_ELT_BITS));
1933
1934 /* Come here (with failure set nonzero) if we can't get enough spill regs
1935 and we decide not to abort about it. */
1936 failed:
1937
1938 reload_in_progress = 0;
1939
1940 /* Now eliminate all pseudo regs by modifying them into
1941 their equivalent memory references.
1942 The REG-rtx's for the pseudos are modified in place,
1943 so all insns that used to refer to them now refer to memory.
1944
1945 For a reg that has a reg_equiv_address, all those insns
1946 were changed by reloading so that no insns refer to it any longer;
1947 but the DECL_RTL of a variable decl may refer to it,
1948 and if so this causes the debugging info to mention the variable. */
1949
1950 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1951 {
1952 rtx addr = 0;
1953 int in_struct = 0;
1954 if (reg_equiv_mem[i])
1955 {
1956 addr = XEXP (reg_equiv_mem[i], 0);
1957 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1958 }
1959 if (reg_equiv_address[i])
1960 addr = reg_equiv_address[i];
1961 if (addr)
1962 {
1963 if (reg_renumber[i] < 0)
1964 {
1965 rtx reg = regno_reg_rtx[i];
1966 XEXP (reg, 0) = addr;
1967 REG_USERVAR_P (reg) = 0;
1968 MEM_IN_STRUCT_P (reg) = in_struct;
1969 PUT_CODE (reg, MEM);
1970 }
1971 else if (reg_equiv_mem[i])
1972 XEXP (reg_equiv_mem[i], 0) = addr;
1973 }
1974 }
1975
1976 #ifdef PRESERVE_DEATH_INFO_REGNO_P
1977 /* Make a pass over all the insns and remove death notes for things that
1978 are no longer registers or no longer die in the insn (e.g., an input
1979 and output pseudo being tied). */
1980
1981 for (insn = first; insn; insn = NEXT_INSN (insn))
1982 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1983 {
1984 rtx note, next;
1985
1986 for (note = REG_NOTES (insn); note; note = next)
1987 {
1988 next = XEXP (note, 1);
1989 if (REG_NOTE_KIND (note) == REG_DEAD
1990 && (GET_CODE (XEXP (note, 0)) != REG
1991 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1992 remove_note (insn, note);
1993 }
1994 }
1995 #endif
1996
1997 /* Indicate that we no longer have known memory locations or constants. */
1998 reg_equiv_constant = 0;
1999 reg_equiv_memory_loc = 0;
2000
2001 if (scratch_list)
2002 free (scratch_list);
2003 scratch_list = 0;
2004 if (scratch_block)
2005 free (scratch_block);
2006 scratch_block = 0;
2007
2008 return failure;
2009 }
2010 \f
2011 /* Nonzero if, after spilling reg REGNO for non-groups,
2012 it will still be possible to find a group if we still need one. */
2013
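/* For example, if regs 3 and 4 are the only remaining consecutive pair
   usable by the class that still needs a group, then spilling reg 3
   for a non-group need would make that group impossible, and
   possible_group_p (3, max_groups) returns 0.  */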
2014 static int
2015 possible_group_p (regno, max_groups)
2016 int regno;
2017 int *max_groups;
2018 {
2019 int i;
2020 int class = (int) NO_REGS;
2021
2022 for (i = 0; i < (int) N_REG_CLASSES; i++)
2023 if (max_groups[i] > 0)
2024 {
2025 class = i;
2026 break;
2027 }
2028
2029 if (class == (int) NO_REGS)
2030 return 1;
2031
2032 /* Consider each pair of consecutive registers. */
2033 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2034 {
2035 /* Ignore pairs that include reg REGNO. */
2036 if (i == regno || i + 1 == regno)
2037 continue;
2038
2039 /* Ignore pairs that are outside the class that needs the group.
2040 ??? Here we fail to handle the case where two different classes
2041 independently need groups. But this never happens with our
2042 current machine descriptions. */
2043 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2044 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2045 continue;
2046
2047 /* A pair of consecutive regs we can still spill does the trick. */
2048 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2049 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2050 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2051 return 1;
2052
2053 /* A pair of one already spilled and one we can spill does it
2054 provided the one already spilled is not otherwise reserved. */
2055 if (spill_reg_order[i] < 0
2056 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2057 && spill_reg_order[i + 1] >= 0
2058 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2059 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2060 return 1;
2061 if (spill_reg_order[i + 1] < 0
2062 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2063 && spill_reg_order[i] >= 0
2064 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2065 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2066 return 1;
2067 }
2068
2069 return 0;
2070 }
2071 \f
2072 /* Count any groups of CLASS that can be formed from the registers recently
2073 spilled. */
2074
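/* For example, if regs 6 and 7 were spilled individually, are not
   reserved in counted_for_nongroups, both belong to CLASS, reg 6 can
   hold group_mode[class], and CLASS still needs a group of two, the
   scan below counts the pair as that group and marks both registers
   in counted_for_groups.  */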
2075 static void
2076 count_possible_groups (group_size, group_mode, max_groups, class)
2077 int *group_size;
2078 enum machine_mode *group_mode;
2079 int *max_groups;
2080 int class;
2081 {
2082 HARD_REG_SET new;
2083 int i, j;
2084
2085 /* Now find all consecutive groups of spilled registers
2086 and mark each group off against the need for such groups.
2087 But don't count them against ordinary need, yet. */
2088
2089 if (group_size[class] == 0)
2090 return;
2091
2092 CLEAR_HARD_REG_SET (new);
2093
2094 /* Make a mask of all the regs that are spill regs in class I. */
2095 for (i = 0; i < n_spills; i++)
2096 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2097 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2098 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2099 SET_HARD_REG_BIT (new, spill_regs[i]);
2100
2101 /* Find each consecutive group of them. */
2102 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2103 if (TEST_HARD_REG_BIT (new, i)
2104 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2105 && HARD_REGNO_MODE_OK (i, group_mode[class]))
2106 {
2107 for (j = 1; j < group_size[class]; j++)
2108 if (! TEST_HARD_REG_BIT (new, i + j))
2109 break;
2110
2111 if (j == group_size[class])
2112 {
2113 /* We found a group. Mark it off against this class's need for
2114 groups, and against each superclass too. */
2115 register enum reg_class *p;
2116
2117 max_groups[class]--;
2118 p = reg_class_superclasses[class];
2119 while (*p != LIM_REG_CLASSES)
2120 max_groups[(int) *p++]--;
2121
2122 /* Don't count these registers again. */
2123 for (j = 0; j < group_size[class]; j++)
2124 SET_HARD_REG_BIT (counted_for_groups, i + j);
2125 }
2126
2127 /* Skip to the last reg in this group. When i is incremented above,
2128 it will then point to the first reg of the next possible group. */
2129 i += j - 1;
2130 }
2131 }
2132 \f
2133 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2134 another mode that needs to be reloaded for the same register class CLASS.
2135 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2136 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2137
2138 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2139 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2140 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2141 causes unnecessary failures on machines requiring alignment of register
2142 groups when the two modes are different sizes, because the larger mode has
2143 more strict alignment rules than the smaller mode. */
2144
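/* For instance, modes_equiv_for_class_p (DImode, SImode, CLASS)
   returns 1 when every register of CLASS that can hold DImode can
   also hold SImode; one register allowing DImode but not SImode is
   enough to make it return 0.  */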
2145 static int
2146 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2147 enum machine_mode allocate_mode, other_mode;
2148 enum reg_class class;
2149 {
2150 register int regno;
2151 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2152 {
2153 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2154 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2155 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2156 return 0;
2157 }
2158 return 1;
2159 }
2160
2161 /* Handle the failure to find a register to spill.
2162 INSN should be one of the insns which needed this particular spill reg. */
2163
2164 static void
2165 spill_failure (insn)
2166 rtx insn;
2167 {
2168 if (asm_noperands (PATTERN (insn)) >= 0)
2169 error_for_asm (insn, "`asm' needs too many reloads");
2170 else
2171 fatal_insn ("Unable to find a register to spill.", insn);
2172 }
2173
2174 /* Add a new register to the tables of available spill-registers
2175 (as well as spilling all pseudos allocated to the register).
2176 I is the index of this register in potential_reload_regs.
2177 CLASS is the regclass whose need is being satisfied.
2178 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2179 so that this register can count off against them.
2180 MAX_NONGROUPS is 0 if this register is part of a group.
2181 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2182
2183 static int
2184 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2185 int i;
2186 int class;
2187 int *max_needs;
2188 int *max_nongroups;
2189 int global;
2190 FILE *dumpfile;
2191 {
2192 register enum reg_class *p;
2193 int val;
2194 int regno = potential_reload_regs[i];
2195
2196 if (i >= FIRST_PSEUDO_REGISTER)
2197 abort (); /* Caller failed to find any register. */
2198
2199 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2200 fatal ("fixed or forbidden register was spilled.\n\
2201 This may be due to a compiler bug or to impossible asm\n\
2202 statements or clauses.");
2203
2204 /* Make reg REGNO an additional reload reg. */
2205
2206 potential_reload_regs[i] = -1;
2207 spill_regs[n_spills] = regno;
2208 spill_reg_order[regno] = n_spills;
2209 if (dumpfile)
2210 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2211
2212 /* Clear off the needs we just satisfied. */
2213
2214 max_needs[class]--;
2215 p = reg_class_superclasses[class];
2216 while (*p != LIM_REG_CLASSES)
2217 max_needs[(int) *p++]--;
2218
2219 if (max_nongroups && max_nongroups[class] > 0)
2220 {
2221 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2222 max_nongroups[class]--;
2223 p = reg_class_superclasses[class];
2224 while (*p != LIM_REG_CLASSES)
2225 max_nongroups[(int) *p++]--;
2226 }
2227
2228 /* Spill every pseudo reg that was allocated to this reg
2229 or to something that overlaps this reg. */
2230
2231 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2232
2233 /* If there are some registers still to eliminate and this register
2234 wasn't ever used before, additional stack space may have to be
2235 allocated to store this register. Thus, we may have changed the offset
2236 between the stack and frame pointers, so mark that something has changed.
2237 (If new pseudos were spilled, thus requiring more space, VAL would have
2238 been set non-zero by the call to spill_hard_reg above since additional
2239 reloads may be needed in that case.)
2240
2241 One might think that we need only set VAL to 1 if this is a call-used
2242 register. However, the set of registers that must be saved by the
2243 prologue is not identical to the call-used set. For example, the
2244 register used by the call insn for the return PC is a call-used register,
2245 but must be saved by the prologue. */
2246 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2247 val = 1;
2248
2249 regs_ever_live[spill_regs[n_spills]] = 1;
2250 n_spills++;
2251
2252 return val;
2253 }
2254 \f
2255 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2256 data that is dead in INSN. */
2257
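/* For example, if INSN is (set (reg 117) (reg 100)), reg 100 dies in
   INSN, and the previous real insn is (set (reg 100) (mem ...)), that
   load is deleted as well, recursively, before INSN itself is turned
   into a NOTE_INSN_DELETED note.  */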
2258 static void
2259 delete_dead_insn (insn)
2260 rtx insn;
2261 {
2262 rtx prev = prev_real_insn (insn);
2263 rtx prev_dest;
2264
2265 /* If the previous insn sets a register that dies in our insn, delete it
2266 too. */
2267 if (prev && GET_CODE (PATTERN (prev)) == SET
2268 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2269 && reg_mentioned_p (prev_dest, PATTERN (insn))
2270 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2271 delete_dead_insn (prev);
2272
2273 PUT_CODE (insn, NOTE);
2274 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2275 NOTE_SOURCE_FILE (insn) = 0;
2276 }
2277
2278 /* Modify the home of pseudo-reg I.
2279 The new home is present in reg_renumber[I].
2280
2281 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2282 or it may be -1, meaning there is none or it is not relevant.
2283 This is used so that all pseudos spilled from a given hard reg
2284 can share one stack slot. */
2285
2286 static void
2287 alter_reg (i, from_reg)
2288 register int i;
2289 int from_reg;
2290 {
2291 /* When outputting an inline function, this can happen
2292 for a reg that isn't actually used. */
2293 if (regno_reg_rtx[i] == 0)
2294 return;
2295
2296 /* If the reg got changed to a MEM at rtl-generation time,
2297 ignore it. */
2298 if (GET_CODE (regno_reg_rtx[i]) != REG)
2299 return;
2300
2301 /* Modify the reg-rtx to contain the new hard reg
2302 number or else to contain its pseudo reg number. */
2303 REGNO (regno_reg_rtx[i])
2304 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2305
2306 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2307 allocate a stack slot for it. */
2308
2309 if (reg_renumber[i] < 0
2310 && reg_n_refs[i] > 0
2311 && reg_equiv_constant[i] == 0
2312 && reg_equiv_memory_loc[i] == 0)
2313 {
2314 register rtx x;
2315 int inherent_size = PSEUDO_REGNO_BYTES (i);
2316 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2317 int adjust = 0;
2318
2319 /* Each pseudo reg has an inherent size which comes from its own mode,
2320 and a total size which provides room for paradoxical subregs
2321 which refer to the pseudo reg in wider modes.
2322
2323 We can use a slot already allocated if it provides both
2324 enough inherent space and enough total space.
2325 Otherwise, we allocate a new slot, making sure that it has no less
2326 inherent space, and no less total space, than the previous slot. */
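/* Concrete case: an SImode pseudo that is also referenced through a
   paradoxical DImode subreg has inherent_size 4 but total_size 8
   (from reg_max_ref_width), so any slot it reuses must provide at
   least 8 bytes of total space.  */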
2327 if (from_reg == -1)
2328 {
2329 /* No known place to spill from => no slot to reuse. */
2330 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2331 #if BYTES_BIG_ENDIAN
2332 /* Cancel the big-endian correction done in assign_stack_local.
2333 Get the address of the beginning of the slot.
2334 This is so we can do a big-endian correction unconditionally
2335 below. */
2336 adjust = inherent_size - total_size;
2337 #endif
2338 }
2339 /* Reuse a stack slot if possible. */
2340 else if (spill_stack_slot[from_reg] != 0
2341 && spill_stack_slot_width[from_reg] >= total_size
2342 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2343 >= inherent_size))
2344 x = spill_stack_slot[from_reg];
2345 /* Allocate a bigger slot. */
2346 else
2347 {
2348 /* Compute maximum size needed, both for inherent size
2349 and for total size. */
2350 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2351 if (spill_stack_slot[from_reg])
2352 {
2353 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2354 > inherent_size)
2355 mode = GET_MODE (spill_stack_slot[from_reg]);
2356 if (spill_stack_slot_width[from_reg] > total_size)
2357 total_size = spill_stack_slot_width[from_reg];
2358 }
2359 /* Make a slot with that size. */
2360 x = assign_stack_local (mode, total_size, -1);
2361 #if BYTES_BIG_ENDIAN
2362 /* Cancel the big-endian correction done in assign_stack_local.
2363 Get the address of the beginning of the slot.
2364 This is so we can do a big-endian correction unconditionally
2365 below. */
2366 adjust = GET_MODE_SIZE (mode) - total_size;
2367 #endif
2368 spill_stack_slot[from_reg] = x;
2369 spill_stack_slot_width[from_reg] = total_size;
2370 }
2371
2372 #if BYTES_BIG_ENDIAN
2373 /* On a big endian machine, the "address" of the slot
2374 is the address of the low part that fits its inherent mode. */
2375 if (inherent_size < total_size)
2376 adjust += (total_size - inherent_size);
2377 #endif /* BYTES_BIG_ENDIAN */
2378
2379 /* If we have any adjustment to make, or if the stack slot is the
2380 wrong mode, make a new stack slot. */
2381 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2382 {
2383 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2384 plus_constant (XEXP (x, 0), adjust));
2385 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2386 }
2387
2388 /* Save the stack slot for later. */
2389 reg_equiv_memory_loc[i] = x;
2390 }
2391 }
2392
2393 /* Mark the slots in regs_ever_live for the hard regs
2394 used by pseudo-reg number REGNO. */
2395
2396 void
2397 mark_home_live (regno)
2398 int regno;
2399 {
2400 register int i, lim;
2401 i = reg_renumber[regno];
2402 if (i < 0)
2403 return;
2404 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2405 while (i < lim)
2406 regs_ever_live[i++] = 1;
2407 }
2408
2409 /* Mark the registers used in SCRATCH as being live. */
2410
2411 static void
2412 mark_scratch_live (scratch)
2413 rtx scratch;
2414 {
2415 register int i;
2416 int regno = REGNO (scratch);
2417 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2418
2419 for (i = regno; i < lim; i++)
2420 regs_ever_live[i] = 1;
2421 }
2422 \f
2423 /* This function handles the tracking of elimination offsets around branches.
2424
2425 X is a piece of RTL being scanned.
2426
2427 INSN is the insn that it came from, if any.
2428
2429 INITIAL_P is non-zero if we are to set the offset to be the initial
2430 offset and zero if we are setting the offset of the label to be the
2431 current offset. */
2432
2433 static void
2434 set_label_offsets (x, insn, initial_p)
2435 rtx x;
2436 rtx insn;
2437 int initial_p;
2438 {
2439 enum rtx_code code = GET_CODE (x);
2440 rtx tem;
2441 int i;
2442 struct elim_table *p;
2443
2444 switch (code)
2445 {
2446 case LABEL_REF:
2447 if (LABEL_REF_NONLOCAL_P (x))
2448 return;
2449
2450 x = XEXP (x, 0);
2451
2452 /* ... fall through ... */
2453
2454 case CODE_LABEL:
2455 /* If we know nothing about this label, set the desired offsets. Note
2456 that this sets the offset at a label to be the offset before a label
2457 if we don't know anything about the label. This is not correct for
2458 the label after a BARRIER, but is the best guess we can make. If
2459 we guessed wrong, we will suppress an elimination that might have
2460 been possible had we been able to guess correctly. */
2461
2462 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2463 {
2464 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2465 offsets_at[CODE_LABEL_NUMBER (x)][i]
2466 = (initial_p ? reg_eliminate[i].initial_offset
2467 : reg_eliminate[i].offset);
2468 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2469 }
2470
2471 /* Otherwise, if this is the definition of a label and it is
2472 preceded by a BARRIER, set our offsets to the known offset of
2473 that label. */
2474
2475 else if (x == insn
2476 && (tem = prev_nonnote_insn (insn)) != 0
2477 && GET_CODE (tem) == BARRIER)
2478 {
2479 num_not_at_initial_offset = 0;
2480 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2481 {
2482 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2483 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2484 if (reg_eliminate[i].can_eliminate
2485 && (reg_eliminate[i].offset
2486 != reg_eliminate[i].initial_offset))
2487 num_not_at_initial_offset++;
2488 }
2489 }
2490
2491 else
2492 /* If neither of the above cases is true, compare each offset
2493 with those previously recorded and suppress any eliminations
2494 where the offsets disagree. */
2495
2496 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2497 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2498 != (initial_p ? reg_eliminate[i].initial_offset
2499 : reg_eliminate[i].offset))
2500 reg_eliminate[i].can_eliminate = 0;
2501
2502 return;
2503
2504 case JUMP_INSN:
2505 set_label_offsets (PATTERN (insn), insn, initial_p);
2506
2507 /* ... fall through ... */
2508
2509 case INSN:
2510 case CALL_INSN:
2511 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2512 and hence must have all eliminations at their initial offsets. */
2513 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2514 if (REG_NOTE_KIND (tem) == REG_LABEL)
2515 set_label_offsets (XEXP (tem, 0), insn, 1);
2516 return;
2517
2518 case ADDR_VEC:
2519 case ADDR_DIFF_VEC:
2520 /* Each of the labels in the address vector must be at their initial
2521 offsets. We want the first field for ADDR_VEC and the second
2522 field for ADDR_DIFF_VEC. */
2523
2524 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2525 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2526 insn, initial_p);
2527 return;
2528
2529 case SET:
2530 /* We only care about setting PC. If the source is not RETURN,
2531 IF_THEN_ELSE, or a label, disable any eliminations not at
2532 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2533 isn't one of those possibilities. For branches to a label,
2534 call ourselves recursively.
2535
2536 Note that this can disable elimination unnecessarily when we have
2537 a non-local goto since it will look like a non-constant jump to
2538 someplace in the current function. This isn't a significant
2539 problem since such jumps will normally be when all elimination
2540 pairs are back to their initial offsets. */
2541
2542 if (SET_DEST (x) != pc_rtx)
2543 return;
2544
2545 switch (GET_CODE (SET_SRC (x)))
2546 {
2547 case PC:
2548 case RETURN:
2549 return;
2550
2551 case LABEL_REF:
2552 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2553 return;
2554
2555 case IF_THEN_ELSE:
2556 tem = XEXP (SET_SRC (x), 1);
2557 if (GET_CODE (tem) == LABEL_REF)
2558 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2559 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2560 break;
2561
2562 tem = XEXP (SET_SRC (x), 2);
2563 if (GET_CODE (tem) == LABEL_REF)
2564 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2565 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2566 break;
2567 return;
2568 }
2569
2570 /* If we reach here, all eliminations must be at their initial
2571 offset because we are doing a jump to a variable address. */
2572 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2573 if (p->offset != p->initial_offset)
2574 p->can_eliminate = 0;
2575 }
2576 }
2577 \f
2578 /* Used for communication between the next two functions to properly share
2579 the vector for an ASM_OPERANDS. */
2580
2581 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2582
2583 /* Scan X and replace any eliminable registers (such as fp) with a
2584 replacement (such as sp), plus an offset.
2585
2586 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2587 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2588 MEM, we are allowed to replace a sum of a register and the constant zero
2589 with the register, which we cannot do outside a MEM. In addition, we need
2590 to record the fact that a register is referenced outside a MEM.
2591
2592 If INSN is an insn, it is the insn containing X. If we replace a REG
2593 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2594 CLOBBER of the pseudo after INSN so find_equiv_regs will know
2595 that the REG is being modified.
2596
2597 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2598 That's used when we eliminate in expressions stored in notes.
2599 This means, do not set ref_outside_mem even if the reference
2600 is outside of MEMs.
2601
2602 If we see a modification to a register we know about, take the
2603 appropriate action (see case SET, below).
2604
2605 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2606 replacements done assuming all offsets are at their initial values. If
2607 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2608 encounter, return the actual location so that find_reloads will do
2609 the proper thing. */
2610
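/* For example, with the fp -> sp elimination at a previous offset of
   16, (mem:SI (plus (reg fp) (const_int 8))) becomes
   (mem:SI (plus (reg sp) (const_int 24))): the elimination offset is
   folded into the displacement by the PLUS case below.  */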
2611 rtx
2612 eliminate_regs (x, mem_mode, insn)
2613 rtx x;
2614 enum machine_mode mem_mode;
2615 rtx insn;
2616 {
2617 enum rtx_code code = GET_CODE (x);
2618 struct elim_table *ep;
2619 int regno;
2620 rtx new;
2621 int i, j;
2622 char *fmt;
2623 int copied = 0;
2624
2625 switch (code)
2626 {
2627 case CONST_INT:
2628 case CONST_DOUBLE:
2629 case CONST:
2630 case SYMBOL_REF:
2631 case CODE_LABEL:
2632 case PC:
2633 case CC0:
2634 case ASM_INPUT:
2635 case ADDR_VEC:
2636 case ADDR_DIFF_VEC:
2637 case RETURN:
2638 return x;
2639
2640 case REG:
2641 regno = REGNO (x);
2642
2643 /* First handle the case where we encounter a bare register that
2644 is eliminable. Replace it with a PLUS. */
2645 if (regno < FIRST_PSEUDO_REGISTER)
2646 {
2647 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2648 ep++)
2649 if (ep->from_rtx == x && ep->can_eliminate)
2650 {
2651 if (! mem_mode
2652 /* Refs inside notes don't count for this purpose. */
2653 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2654 || GET_CODE (insn) == INSN_LIST)))
2655 ep->ref_outside_mem = 1;
2656 return plus_constant (ep->to_rtx, ep->previous_offset);
2657 }
2658
2659 }
2660 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2661 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2662 {
2663 /* In this case, find_reloads would attempt to either use an
2664 incorrect address (if something is not at its initial offset)
2665 or substitute a replaced address into an insn (which loses
2666 if the offset is changed by some later action). So we simply
2667 return the replaced stack slot (assuming it is changed by
2668 elimination) and ignore the fact that this is actually a
2669 reference to the pseudo. Ensure we make a copy of the
2670 address in case it is shared. */
2671 new = eliminate_regs (reg_equiv_memory_loc[regno],
2672 mem_mode, insn);
2673 if (new != reg_equiv_memory_loc[regno])
2674 {
2675 cannot_omit_stores[regno] = 1;
2676 return copy_rtx (new);
2677 }
2678 }
2679 return x;
2680
2681 case PLUS:
2682 /* If this is the sum of an eliminable register and a constant, rework
2683 the sum. */
2684 if (GET_CODE (XEXP (x, 0)) == REG
2685 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2686 && CONSTANT_P (XEXP (x, 1)))
2687 {
2688 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2689 ep++)
2690 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2691 {
2692 if (! mem_mode
2693 /* Refs inside notes don't count for this purpose. */
2694 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2695 || GET_CODE (insn) == INSN_LIST)))
2696 ep->ref_outside_mem = 1;
2697
2698 /* The only time we want to replace a PLUS with a REG (this
2699 occurs when the constant operand of the PLUS is the negative
2700 of the offset) is when we are inside a MEM. We won't want
2701 to do so at other times because that would change the
2702 structure of the insn in a way that reload can't handle.
2703 We special-case the commonest situation in
2704 eliminate_regs_in_insn, so just replace a PLUS with a
2705 PLUS here, unless inside a MEM. */
2706 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2707 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2708 return ep->to_rtx;
2709 else
2710 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2711 plus_constant (XEXP (x, 1),
2712 ep->previous_offset));
2713 }
2714
2715 /* If the register is not eliminable, we are done since the other
2716 operand is a constant. */
2717 return x;
2718 }
2719
2720 /* If this is part of an address, we want to bring any constant to the
2721 outermost PLUS. We will do this by doing register replacement in
2722 our operands and seeing if a constant shows up in one of them.
2723
2724 We assume here this is part of an address (or a "load address" insn)
2725 since an eliminable register is not likely to appear in any other
2726 context.
2727
2728 If we have (plus (eliminable) (reg)), we want to produce
2729 (plus (plus (replacement) (reg)) (const)). If this was part of a
2730 normal add insn, (plus (replacement) (reg)) will be pushed as a
2731 reload. This is the desired action. */
2732
2733 {
2734 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2735 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2736
2737 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2738 {
2739 /* If one side is a PLUS and the other side is a pseudo that
2740 didn't get a hard register but has a reg_equiv_constant,
2741 we must replace the constant here since it may no longer
2742 be in the position of any operand. */
2743 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2744 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2745 && reg_renumber[REGNO (new1)] < 0
2746 && reg_equiv_constant != 0
2747 && reg_equiv_constant[REGNO (new1)] != 0)
2748 new1 = reg_equiv_constant[REGNO (new1)];
2749 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2750 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2751 && reg_renumber[REGNO (new0)] < 0
2752 && reg_equiv_constant[REGNO (new0)] != 0)
2753 new0 = reg_equiv_constant[REGNO (new0)];
2754
2755 new = form_sum (new0, new1);
2756
2757 /* As above, if we are not inside a MEM we do not want to
2758 turn a PLUS into something else. We might try to do so here
2759 for an addition of 0 if we aren't optimizing. */
2760 if (! mem_mode && GET_CODE (new) != PLUS)
2761 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2762 else
2763 return new;
2764 }
2765 }
2766 return x;
2767
2768 case MULT:
2769 /* If this is the product of an eliminable register and a
2770 constant, apply the distribute law and move the constant out
2771 so that we have (plus (mult ..) ..). This is needed in order
2772 to keep load-address insns valid. This case is pathalogical.
2773 We ignore the possibility of overflow here. */
2774 if (GET_CODE (XEXP (x, 0)) == REG
2775 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2776 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2777 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2778 ep++)
2779 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2780 {
2781 if (! mem_mode
2782 /* Refs inside notes don't count for this purpose. */
2783 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2784 || GET_CODE (insn) == INSN_LIST)))
2785 ep->ref_outside_mem = 1;
2786
2787 return
2788 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2789 ep->previous_offset * INTVAL (XEXP (x, 1)));
2790 }
2791
2792 /* ... fall through ... */
2793
2794 case CALL:
2795 case COMPARE:
2796 case MINUS:
2797 case DIV: case UDIV:
2798 case MOD: case UMOD:
2799 case AND: case IOR: case XOR:
2800 case ROTATERT: case ROTATE:
2801 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2802 case NE: case EQ:
2803 case GE: case GT: case GEU: case GTU:
2804 case LE: case LT: case LEU: case LTU:
2805 {
2806 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2807 rtx new1
2808 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2809
2810 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2811 return gen_rtx (code, GET_MODE (x), new0, new1);
2812 }
2813 return x;
2814
2815 case EXPR_LIST:
2816 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2817 if (XEXP (x, 0))
2818 {
2819 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2820 if (new != XEXP (x, 0))
2821 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2822 }
2823
2824 /* ... fall through ... */
2825
2826 case INSN_LIST:
2827 /* Now do eliminations in the rest of the chain. If this was
2828 an EXPR_LIST, this might result in allocating more memory than is
2829 strictly needed, but it simplifies the code. */
2830 if (XEXP (x, 1))
2831 {
2832 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2833 if (new != XEXP (x, 1))
2834 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2835 }
2836 return x;
2837
2838 case PRE_INC:
2839 case POST_INC:
2840 case PRE_DEC:
2841 case POST_DEC:
2842 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2843 if (ep->to_rtx == XEXP (x, 0))
2844 {
2845 int size = GET_MODE_SIZE (mem_mode);
2846
2847 /* If more bytes than MEM_MODE are pushed, account for them. */
2848 #ifdef PUSH_ROUNDING
2849 if (ep->to_rtx == stack_pointer_rtx)
2850 size = PUSH_ROUNDING (size);
2851 #endif
2852 if (code == PRE_DEC || code == POST_DEC)
2853 ep->offset += size;
2854 else
2855 ep->offset -= size;
2856 }
2857
2858 /* Fall through to generic unary operation case. */
2859 case USE:
2860 case STRICT_LOW_PART:
2861 case NEG: case NOT:
2862 case SIGN_EXTEND: case ZERO_EXTEND:
2863 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2864 case FLOAT: case FIX:
2865 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2866 case ABS:
2867 case SQRT:
2868 case FFS:
2869 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2870 if (new != XEXP (x, 0))
2871 return gen_rtx (code, GET_MODE (x), new);
2872 return x;
2873
2874 case SUBREG:
2875 /* Similar to above processing, but preserve SUBREG_WORD.
2876 Convert (subreg (mem)) to (mem) if not paradoxical.
2877 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2878 pseudo didn't get a hard reg, we must replace this with the
2879 eliminated version of the memory location because push_reloads
2880 may do the replacement in certain circumstances. */
2881 if (GET_CODE (SUBREG_REG (x)) == REG
2882 && (GET_MODE_SIZE (GET_MODE (x))
2883 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2884 && reg_equiv_memory_loc != 0
2885 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2886 {
2887 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2888 mem_mode, insn);
2889
2890 /* If we didn't change anything, we must retain the pseudo. */
2891 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2892 new = SUBREG_REG (x);
2893 else
2894 {
2895 /* Otherwise, ensure NEW isn't shared in case we have to reload
2896 it. */
2897 new = copy_rtx (new);
2898
2899 /* In this case, we must show that the pseudo is used in this
2900 insn so that delete_output_reload will do the right thing. */
2901 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2902 && GET_CODE (insn) != INSN_LIST)
2903 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
2904 insn);
2905 }
2906 }
2907 else
2908 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2909
2910 if (new != XEXP (x, 0))
2911 {
2912 if (GET_CODE (new) == MEM
2913 && (GET_MODE_SIZE (GET_MODE (x))
2914 <= GET_MODE_SIZE (GET_MODE (new)))
2915 #ifdef LOAD_EXTEND_OP
2916 /* On these machines we will be reloading what is
2917 inside the SUBREG if it originally was a pseudo and
2918 the inner and outer modes are both a word or
2919 smaller. So leave the SUBREG then. */
2920 && ! (GET_CODE (SUBREG_REG (x)) == REG
2921 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2922 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD
2923 && (GET_MODE_SIZE (GET_MODE (x))
2924 > GET_MODE_SIZE (GET_MODE (new)))
2925 && INTEGRAL_MODE_P (GET_MODE (new))
2926 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
2927 #endif
2928 )
2929 {
2930 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2931 enum machine_mode mode = GET_MODE (x);
2932
2933 #if BYTES_BIG_ENDIAN
2934 offset += (MIN (UNITS_PER_WORD,
2935 GET_MODE_SIZE (GET_MODE (new)))
2936 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2937 #endif
2938
2939 PUT_MODE (new, mode);
2940 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2941 return new;
2942 }
2943 else
2944 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2945 }
2946
2947 return x;
2948
2949 case CLOBBER:
2950 /* If clobbering a register that is the replacement register for an
2951 elimination we still think can be performed, note that it cannot
2952 be performed. Otherwise, we need not be concerned about it. */
2953 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2954 if (ep->to_rtx == XEXP (x, 0))
2955 ep->can_eliminate = 0;
2956
2957 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2958 if (new != XEXP (x, 0))
2959 return gen_rtx (code, GET_MODE (x), new);
2960 return x;
2961
2962 case ASM_OPERANDS:
2963 {
2964 rtx *temp_vec;
2965 /* Properly handle sharing input and constraint vectors. */
2966 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2967 {
2968 /* When we come to a new vector not seen before,
2969 scan all its elements; keep the old vector if none
2970 of them changes; otherwise, make a copy. */
2971 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2972 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2973 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2974 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
2975 mem_mode, insn);
2976
2977 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2978 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2979 break;
2980
2981 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2982 new_asm_operands_vec = old_asm_operands_vec;
2983 else
2984 new_asm_operands_vec
2985 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2986 }
2987
2988 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2989 if (new_asm_operands_vec == old_asm_operands_vec)
2990 return x;
2991
2992 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2993 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2994 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2995 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2996 ASM_OPERANDS_SOURCE_FILE (x),
2997 ASM_OPERANDS_SOURCE_LINE (x));
2998 new->volatil = x->volatil;
2999 return new;
3000 }
3001
3002 case SET:
3003 /* Check for setting a register that we know about. */
3004 if (GET_CODE (SET_DEST (x)) == REG)
3005 {
3006 /* See if this is setting the replacement register for an
3007 elimination.
3008
3009 If DEST is the hard frame pointer, we do nothing because we
3010 assume that all assignments to the frame pointer are for
3011 non-local gotos and are being done at a time when they are valid
3012 and do not disturb anything else. Some machines want to
3013 eliminate a fake argument pointer (or even a fake frame pointer)
3014 with either the real frame or the stack pointer. Assignments to
3015 the hard frame pointer must not prevent this elimination. */
3016
3017 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3018 ep++)
3019 if (ep->to_rtx == SET_DEST (x)
3020 && SET_DEST (x) != hard_frame_pointer_rtx)
3021 {
3022 /* If it is being incremented, adjust the offset. Otherwise,
3023 this elimination can't be done. */
3024 rtx src = SET_SRC (x);
3025
3026 if (GET_CODE (src) == PLUS
3027 && XEXP (src, 0) == SET_DEST (x)
3028 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3029 ep->offset -= INTVAL (XEXP (src, 1));
3030 else
3031 ep->can_eliminate = 0;
3032 }
3033
3034 /* Now check to see if we are assigning to a register that can be
3035 eliminated. If so, it must be as part of a PARALLEL, since we
3036 will not have been called if this is a single SET. So indicate
3037 that we can no longer eliminate this reg. */
3038 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3039 ep++)
3040 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3041 ep->can_eliminate = 0;
3042 }
3043
3044 /* Now avoid the loop below in this common case. */
3045 {
3046 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3047 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3048
3049 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3050 write a CLOBBER insn. */
3051 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3052 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3053 && GET_CODE (insn) != INSN_LIST)
3054 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3055
3056 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3057 return gen_rtx (SET, VOIDmode, new0, new1);
3058 }
3059
3060 return x;
3061
3062 case MEM:
3063 /* Our only special processing is to pass the mode of the MEM to our
3064 recursive call and copy the flags. While we are here, handle this
3065 case more efficiently. */
3066 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3067 if (new != XEXP (x, 0))
3068 {
3069 new = gen_rtx (MEM, GET_MODE (x), new);
3070 new->volatil = x->volatil;
3071 new->unchanging = x->unchanging;
3072 new->in_struct = x->in_struct;
3073 return new;
3074 }
3075 else
3076 return x;
3077 }
3078
3079 /* Process each of our operands recursively. If any have changed, make a
3080 copy of the rtx. */
3081 fmt = GET_RTX_FORMAT (code);
3082 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3083 {
3084 if (*fmt == 'e')
3085 {
3086 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3087 if (new != XEXP (x, i) && ! copied)
3088 {
3089 rtx new_x = rtx_alloc (code);
3090 bcopy ((char *) x, (char *) new_x,
3091 (sizeof (*new_x) - sizeof (new_x->fld)
3092 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3093 x = new_x;
3094 copied = 1;
3095 }
3096 XEXP (x, i) = new;
3097 }
3098 else if (*fmt == 'E')
3099 {
3100 int copied_vec = 0;
3101 for (j = 0; j < XVECLEN (x, i); j++)
3102 {
3103 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3104 if (new != XVECEXP (x, i, j) && ! copied_vec)
3105 {
3106 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3107 &XVECEXP (x, i, 0));
3108 if (! copied)
3109 {
3110 rtx new_x = rtx_alloc (code);
3111 bcopy ((char *) x, (char *) new_x,
3112 (sizeof (*new_x) - sizeof (new_x->fld)
3113 + (sizeof (new_x->fld[0])
3114 * GET_RTX_LENGTH (code))));
3115 x = new_x;
3116 copied = 1;
3117 }
3118 XVEC (x, i) = new_v;
3119 copied_vec = 1;
3120 }
3121 XVECEXP (x, i, j) = new;
3122 }
3123 }
3124 }
3125
3126 return x;
3127 }
3128 \f
3129 /* Scan INSN and eliminate all eliminable registers in it.
3130
3131 If REPLACE is nonzero, do the replacement destructively. Also
3132 delete the insn as dead if it is setting an eliminable register.
3133
3134 If REPLACE is zero, do all our allocations in reload_obstack.
3135
3136 If no eliminations were done and this insn doesn't require any elimination
3137 processing (these are not identical conditions: it might be updating sp,
3138 but not referencing fp; this needs to be seen during reload_as_needed so
3139 that the offset between fp and sp can be taken into consideration), zero
3140 is returned. Otherwise, 1 is returned. */
3141
3142 static int
3143 eliminate_regs_in_insn (insn, replace)
3144 rtx insn;
3145 int replace;
3146 {
3147 rtx old_body = PATTERN (insn);
3148 rtx old_set = single_set (insn);
3149 rtx new_body;
3150 int val = 0;
3151 struct elim_table *ep;
3152
3153 if (! replace)
3154 push_obstacks (&reload_obstack, &reload_obstack);
3155
3156 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3157 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3158 {
3159 /* Check for setting an eliminable register. */
3160 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3161 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3162 {
3163 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3164 /* If this is setting the frame pointer register to the
3165 hardware frame pointer register and this is an elimination
3166 that will be done (tested above), this insn is really
3167 adjusting the frame pointer downward to compensate for
3168 the adjustment done before a nonlocal goto. */
3169 if (ep->from == FRAME_POINTER_REGNUM
3170 && ep->to == HARD_FRAME_POINTER_REGNUM)
3171 {
3172 rtx src = SET_SRC (old_set);
3173 int offset, ok = 0;
3174
3175 if (src == ep->to_rtx)
3176 offset = 0, ok = 1;
3177 else if (GET_CODE (src) == PLUS
3178 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3179 offset = INTVAL (XEXP (src, 0)), ok = 1;
3180
3181 if (ok)
3182 {
3183 if (replace)
3184 {
3185 rtx src
3186 = plus_constant (ep->to_rtx, offset - ep->offset);
3187
3188 /* First see if this insn remains valid when we
3189 make the change. If not, keep the INSN_CODE
3190 the same and let reload fix it up. */
3191 validate_change (insn, &SET_SRC (old_set), src, 1);
3192 validate_change (insn, &SET_DEST (old_set),
3193 ep->to_rtx, 1);
3194 if (! apply_change_group ())
3195 {
3196 SET_SRC (old_set) = src;
3197 SET_DEST (old_set) = ep->to_rtx;
3198 }
3199 }
3200
3201 val = 1;
3202 goto done;
3203 }
3204 }
3205 #endif
3206
3207 /* In this case this insn isn't serving a useful purpose. We
3208 will delete it in reload_as_needed once we know that this
3209 elimination is, in fact, being done.
3210
3211 If REPLACE isn't set, we can't delete this insn, but needn't
3212 process it since it won't be used unless something changes. */
3213 if (replace)
3214 delete_dead_insn (insn);
3215 val = 1;
3216 goto done;
3217 }
3218
3219 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3220 in the insn is the negative of the offset in FROM. Substitute
3221 (set (reg) (reg to)) for the insn and change its code.
3222
3223 We have to do this here, rather than in eliminate_regs, so that we can
3224 change the insn code. */
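/* For example (illustrative only): if the frame-pointer-to-stack-pointer
elimination currently has offset 16, an insn such as
(set (reg 117) (plus (reg fp) (const_int -16)))
is rewritten below into the simpler (set (reg 117) (reg sp)),
since the frame pointer equals the stack pointer plus 16 at this point. */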
3225
3226 if (GET_CODE (SET_SRC (old_set)) == PLUS
3227 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3228 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3229 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3230 ep++)
3231 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3232 && ep->can_eliminate)
3233 {
3234 /* We must stop at the first elimination that will be used.
3235 If this one would replace the PLUS with a REG, do it
3236 now. Otherwise, quit the loop and let eliminate_regs
3237 do its normal replacement. */
3238 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3239 {
3240 /* We assume here that we don't need a PARALLEL of
3241 any CLOBBERs for this assignment. There's not
3242 much we can do if we do need it. */
3243 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3244 SET_DEST (old_set), ep->to_rtx);
3245 INSN_CODE (insn) = -1;
3246 val = 1;
3247 goto done;
3248 }
3249
3250 break;
3251 }
3252 }
3253
3254 old_asm_operands_vec = 0;
3255
3256 /* Replace the body of this insn with a substituted form. If we changed
3257 something, return non-zero.
3258
3259 If we are replacing a body that was a (set X (plus Y Z)), try to
3260 re-recognize the insn. We do this in case we had a simple addition
3261 but now can do this as a load-address. This saves an insn in this
3262 common case. */
3263
3264 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3265 if (new_body != old_body)
3266 {
3267 /* If we aren't replacing things permanently and we changed something,
3268 make another copy to ensure that all the RTL is new. Otherwise
3269 things can go wrong if find_reloads swaps commutative operands
3270 and one is inside RTL that has been copied while the other is not. */
3271
3272 /* Don't copy an asm_operands because (1) there's no need and (2)
3273 copy_rtx can't do it properly when there are multiple outputs. */
3274 if (! replace && asm_noperands (old_body) < 0)
3275 new_body = copy_rtx (new_body);
3276
3277 /* If we had a move insn but now we don't, rerecognize it. This will
3278 cause spurious re-recognition if the old move had a PARALLEL since
3279 the new one still will, but we can't call single_set without
3280 having put NEW_BODY into the insn and the re-recognition won't
3281 hurt in this rare case. */
3282 if (old_set != 0
3283 && ((GET_CODE (SET_SRC (old_set)) == REG
3284 && (GET_CODE (new_body) != SET
3285 || GET_CODE (SET_SRC (new_body)) != REG))
3286 /* If this was a load from or store to memory, compare
3287 the MEM in recog_operand to the one in the insn. If they
3288 are not equal, then rerecognize the insn. */
3289 || (old_set != 0
3290 && ((GET_CODE (SET_SRC (old_set)) == MEM
3291 && SET_SRC (old_set) != recog_operand[1])
3292 || (GET_CODE (SET_DEST (old_set)) == MEM
3293 && SET_DEST (old_set) != recog_operand[0])))
3294 /* If this was an add insn before, rerecognize. */
3295 || GET_CODE (SET_SRC (old_set)) == PLUS))
3296 {
3297 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3298 /* If recognition fails, store the new body anyway.
3299 It's normal to have recognition failures here
3300 due to bizarre memory addresses; reloading will fix them. */
3301 PATTERN (insn) = new_body;
3302 }
3303 else
3304 PATTERN (insn) = new_body;
3305
3306 val = 1;
3307 }
3308
3309 /* Loop through all elimination pairs. See if any have changed and
3310 recalculate the number not at initial offset.
3311
3312 Compute the maximum offset (minimum offset if the stack does not
3313 grow downward) for each elimination pair.
3314
3315 We also detect cases where register elimination cannot be done,
3316 namely, if a register would be both changed and referenced outside a MEM
3317 in the resulting insn since such an insn is often undefined and, even if
3318 not, we cannot know what meaning will be given to it. Note that it is
3319 valid to have a register used in an address in an insn that changes it
3320 (presumably with a pre- or post-increment or decrement).
3321
3322 If anything changes, return nonzero. */
3323
3324 num_not_at_initial_offset = 0;
3325 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3326 {
3327 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3328 ep->can_eliminate = 0;
3329
3330 ep->ref_outside_mem = 0;
3331
3332 if (ep->previous_offset != ep->offset)
3333 val = 1;
3334
3335 ep->previous_offset = ep->offset;
3336 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3337 num_not_at_initial_offset++;
3338
3339 #ifdef STACK_GROWS_DOWNWARD
3340 ep->max_offset = MAX (ep->max_offset, ep->offset);
3341 #else
3342 ep->max_offset = MIN (ep->max_offset, ep->offset);
3343 #endif
3344 }
3345
3346 done:
3347 /* If we changed something, perform elimination in REG_NOTES. This is
3348 needed even when REPLACE is zero because a REG_DEAD note might refer
3349 to a register that we eliminate and could cause a different number
3350 of spill registers to be needed in the final reload pass than in
3351 the pre-passes. */
3352 if (val && REG_NOTES (insn) != 0)
3353 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3354
3355 if (! replace)
3356 pop_obstacks ();
3357
3358 return val;
3359 }
3360
3361 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3362 replacement we currently believe is valid, mark it as not eliminable if X
3363 modifies DEST in any way other than by adding a constant integer to it.
3364
3365 If DEST is the hard frame pointer, we do nothing because we assume that
3366 all assignments to the hard frame pointer are nonlocal gotos and are being
3367 done at a time when they are valid and do not disturb anything else.
3368 Some machines want to eliminate a fake argument pointer with either the
3369 frame or stack pointer. Assignments to the hard frame pointer must not
3370 prevent this elimination.
3371
3372 Called via note_stores from reload before starting its passes to scan
3373 the insns of the function. */
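/* For instance (illustrative): a plain stack adjustment such as
(set (reg sp) (plus (reg sp) (const_int 32))) leaves an elimination
whose target is the stack pointer usable, whereas setting the stack
pointer from anything else, e.g. (set (reg sp) (reg 3)), or clobbering
it, turns off every elimination whose to_rtx is the stack pointer. */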
3374
3375 static void
3376 mark_not_eliminable (dest, x)
3377 rtx dest;
3378 rtx x;
3379 {
3380 register int i;
3381
3382 /* A SUBREG of a hard register here is just changing its mode. We should
3383 not see a SUBREG of an eliminable hard register, but check just in
3384 case. */
3385 if (GET_CODE (dest) == SUBREG)
3386 dest = SUBREG_REG (dest);
3387
3388 if (dest == hard_frame_pointer_rtx)
3389 return;
3390
3391 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3392 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3393 && (GET_CODE (x) != SET
3394 || GET_CODE (SET_SRC (x)) != PLUS
3395 || XEXP (SET_SRC (x), 0) != dest
3396 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3397 {
3398 reg_eliminate[i].can_eliminate_previous
3399 = reg_eliminate[i].can_eliminate = 0;
3400 num_eliminable--;
3401 }
3402 }
3403 \f
3404 /* Kick all pseudos out of hard register REGNO.
3405 If GLOBAL is nonzero, try to find someplace else to put them.
3406 If DUMPFILE is nonzero, log actions taken on that file.
3407
3408 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3409 because we found we can't eliminate some register. In that case, no pseudos
3410 are allowed to be in the register, even if they are only in a block that
3411 doesn't require spill registers, unlike the case when we are spilling this
3412 hard reg to produce another spill register.
3413
3414 Return nonzero if any pseudos needed to be kicked out. */
3415
3416 static int
3417 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3418 register int regno;
3419 int global;
3420 FILE *dumpfile;
3421 int cant_eliminate;
3422 {
3423 enum reg_class class = REGNO_REG_CLASS (regno);
3424 int something_changed = 0;
3425 register int i;
3426
3427 SET_HARD_REG_BIT (forbidden_regs, regno);
3428
3429 if (cant_eliminate)
3430 regs_ever_live[regno] = 1;
3431
3432 /* Spill every pseudo reg that was allocated to this reg
3433 or to something that overlaps this reg. */
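/* (A pseudo "overlaps" REGNO when it spans several hard regs: e.g. a
DImode pseudo allocated to hard reg 2 on a 32-bit target also occupies
hard reg 3, so spilling hard reg 3 must kick that pseudo out too.
The HARD_REGNO_NREGS test below catches such cases.) */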
3434
3435 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3436 if (reg_renumber[i] >= 0
3437 && reg_renumber[i] <= regno
3438 && (reg_renumber[i]
3439 + HARD_REGNO_NREGS (reg_renumber[i],
3440 PSEUDO_REGNO_MODE (i))
3441 > regno))
3442 {
3443 /* If this register belongs solely to a basic block which needed no
3444 spilling of any class that this register is contained in,
3445 leave it be, unless we are spilling this register because
3446 it was a hard register that can't be eliminated. */
3447
3448 if (! cant_eliminate
3449 && basic_block_needs[0]
3450 && reg_basic_block[i] >= 0
3451 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3452 {
3453 enum reg_class *p;
3454
3455 for (p = reg_class_superclasses[(int) class];
3456 *p != LIM_REG_CLASSES; p++)
3457 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3458 break;
3459
3460 if (*p == LIM_REG_CLASSES)
3461 continue;
3462 }
3463
3464 /* Mark it as no longer having a hard register home. */
3465 reg_renumber[i] = -1;
3466 /* We will need to scan everything again. */
3467 something_changed = 1;
3468 if (global)
3469 retry_global_alloc (i, forbidden_regs);
3470
3471 alter_reg (i, regno);
3472 if (dumpfile)
3473 {
3474 if (reg_renumber[i] == -1)
3475 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3476 else
3477 fprintf (dumpfile, " Register %d now in %d.\n\n",
3478 i, reg_renumber[i]);
3479 }
3480 }
3481 for (i = 0; i < scratch_list_length; i++)
3482 {
3483 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3484 {
3485 if (! cant_eliminate && basic_block_needs[0]
3486 && ! basic_block_needs[(int) class][scratch_block[i]])
3487 {
3488 enum reg_class *p;
3489
3490 for (p = reg_class_superclasses[(int) class];
3491 *p != LIM_REG_CLASSES; p++)
3492 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3493 break;
3494
3495 if (*p == LIM_REG_CLASSES)
3496 continue;
3497 }
3498 PUT_CODE (scratch_list[i], SCRATCH);
3499 scratch_list[i] = 0;
3500 something_changed = 1;
3501 continue;
3502 }
3503 }
3504
3505 return something_changed;
3506 }
3507 \f
3508 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3509 Also mark any hard registers used to store user variables as
3510 forbidden from being used for spill registers. */
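/* (A paradoxical subreg refers to a reg in a mode wider than the reg's
own mode, e.g. (subreg:SI (reg:HI 100) 0) on a 32-bit target; recording
the wider access size in reg_max_ref_width lets a later spill of pseudo
100 get a stack slot big enough for the SImode reference.) */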
3511
3512 static void
3513 scan_paradoxical_subregs (x)
3514 register rtx x;
3515 {
3516 register int i;
3517 register char *fmt;
3518 register enum rtx_code code = GET_CODE (x);
3519
3520 switch (code)
3521 {
3522 case REG:
3523 #ifdef SMALL_REGISTER_CLASSES
3524 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3525 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3526 #endif
3527 return;
3528
3529 case CONST_INT:
3530 case CONST:
3531 case SYMBOL_REF:
3532 case LABEL_REF:
3533 case CONST_DOUBLE:
3534 case CC0:
3535 case PC:
3536 case USE:
3537 case CLOBBER:
3538 return;
3539
3540 case SUBREG:
3541 if (GET_CODE (SUBREG_REG (x)) == REG
3542 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3543 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3544 = GET_MODE_SIZE (GET_MODE (x));
3545 return;
3546 }
3547
3548 fmt = GET_RTX_FORMAT (code);
3549 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3550 {
3551 if (fmt[i] == 'e')
3552 scan_paradoxical_subregs (XEXP (x, i));
3553 else if (fmt[i] == 'E')
3554 {
3555 register int j;
3556 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3557 scan_paradoxical_subregs (XVECEXP (x, i, j));
3558 }
3559 }
3560 }
3561 \f
3562 static int
3563 hard_reg_use_compare (p1, p2)
3564 struct hard_reg_n_uses *p1, *p2;
3565 {
3566 int tem = p1->uses - p2->uses;
3567 if (tem != 0) return tem;
3568 /* If regs are equally good, sort by regno,
3569 so that the results of qsort leave nothing to chance. */
3570 return p1->regno - p2->regno;
3571 }
3572
3573 /* Choose the order to consider regs for use as reload registers
3574 based on how much trouble would be caused by spilling one.
3575 Store them in order of decreasing preference in potential_reload_regs. */
3576
3577 static void
3578 order_regs_for_reload ()
3579 {
3580 register int i;
3581 register int o = 0;
3582 int large = 0;
3583
3584 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3585
3586 CLEAR_HARD_REG_SET (bad_spill_regs);
3587
3588 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3589 potential_reload_regs[i] = -1;
3590
3591 /* Count number of uses of each hard reg by pseudo regs allocated to it
3592 and then order them by decreasing use. */
3593
3594 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3595 {
3596 hard_reg_n_uses[i].uses = 0;
3597 hard_reg_n_uses[i].regno = i;
3598 }
3599
3600 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3601 {
3602 int regno = reg_renumber[i];
3603 if (regno >= 0)
3604 {
3605 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3606 while (regno < lim)
3607 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3608 }
3609 large += reg_n_refs[i];
3610 }
3611
3612 /* Now fixed registers (which cannot safely be used for reloading)
3613 get a very high use count so they will be considered least desirable.
3614 Registers used explicitly in the rtl code are almost as bad. */
3615
3616 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3617 {
3618 if (fixed_regs[i])
3619 {
3620 hard_reg_n_uses[i].uses += 2 * large + 2;
3621 SET_HARD_REG_BIT (bad_spill_regs, i);
3622 }
3623 else if (regs_explicitly_used[i])
3624 {
3625 hard_reg_n_uses[i].uses += large + 1;
3626 #ifndef SMALL_REGISTER_CLASSES
3627 /* ??? We are doing this here because of the potential that
3628 bad code may be generated if a register explicitly used in
3629 an insn was used as a spill register for that insn. But
3630 not using these as spill registers may lose on some machines.
3631 We'll have to see how this works out. */
3632 SET_HARD_REG_BIT (bad_spill_regs, i);
3633 #endif
3634 }
3635 }
3636 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3637 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3638
3639 #ifdef ELIMINABLE_REGS
3640 /* If registers other than the frame pointer are eliminable, mark them as
3641 poor choices. */
3642 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3643 {
3644 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3645 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3646 }
3647 #endif
3648
3649 /* Prefer registers not so far used, for use in temporary loading.
3650 Among them, if REG_ALLOC_ORDER is defined, use that order.
3651 Otherwise, prefer registers not preserved by calls. */
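/* (So on a hypothetical machine whose call-clobbered regs are r0-r3 and
whose call-saved regs are r4-r7, and with no REG_ALLOC_ORDER, the unused
call-clobbered regs come first, then the unused call-saved regs, and the
regs already holding pseudos follow, ordered by increasing use count.) */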
3652
3653 #ifdef REG_ALLOC_ORDER
3654 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3655 {
3656 int regno = reg_alloc_order[i];
3657
3658 if (hard_reg_n_uses[regno].uses == 0)
3659 potential_reload_regs[o++] = regno;
3660 }
3661 #else
3662 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3663 {
3664 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3665 potential_reload_regs[o++] = i;
3666 }
3667 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3668 {
3669 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3670 potential_reload_regs[o++] = i;
3671 }
3672 #endif
3673
3674 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3675 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3676
3677 /* Now add the regs that are already used,
3678 preferring those used less often. The fixed and otherwise forbidden
3679 registers will be at the end of this list. */
3680
3681 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3682 if (hard_reg_n_uses[i].uses != 0)
3683 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3684 }
3685 \f
3686 /* Used in reload_as_needed to sort the spilled regs. */
3687
3688 static int
3689 compare_spill_regs (r1, r2)
3690 short *r1, *r2;
3691 {
3692 return *r1 - *r2;
3693 }
3694
3695 /* Reload pseudo-registers into hard regs around each insn as needed.
3696 Additional register load insns are output before the insn that needs it
3697 and perhaps store insns after insns that modify the reloaded pseudo reg.
3698
3699 reg_last_reload_reg and reg_reloaded_contents keep track of
3700 which registers are already available in reload registers.
3701 We update these for the reloads that we perform,
3702 as the insns are scanned. */
3703
3704 static void
3705 reload_as_needed (first, live_known)
3706 rtx first;
3707 int live_known;
3708 {
3709 register rtx insn;
3710 register int i;
3711 int this_block = 0;
3712 rtx x;
3713 rtx after_call = 0;
3714
3715 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3716 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
3717 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3718 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
3719 reg_has_output_reload = (char *) alloca (max_regno);
3720 for (i = 0; i < n_spills; i++)
3721 {
3722 reg_reloaded_contents[i] = -1;
3723 reg_reloaded_insn[i] = 0;
3724 }
3725
3726 /* Reset all offsets on eliminable registers to their initial values. */
3727 #ifdef ELIMINABLE_REGS
3728 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3729 {
3730 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3731 reg_eliminate[i].initial_offset);
3732 reg_eliminate[i].previous_offset
3733 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3734 }
3735 #else
3736 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3737 reg_eliminate[0].previous_offset
3738 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3739 #endif
3740
3741 num_not_at_initial_offset = 0;
3742
3743 /* Order the spilled regs, so that allocate_reload_reg can guarantee to
3744 pack registers with group needs. */
3745 if (n_spills > 1)
3746 {
3747 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
3748 for (i = 0; i < n_spills; i++)
3749 spill_reg_order[spill_regs[i]] = i;
3750 }
3751
3752 for (insn = first; insn;)
3753 {
3754 register rtx next = NEXT_INSN (insn);
3755
3756 /* Notice when we move to a new basic block. */
3757 if (live_known && this_block + 1 < n_basic_blocks
3758 && insn == basic_block_head[this_block+1])
3759 ++this_block;
3760
3761 /* If we pass a label, copy the offsets from the label information
3762 into the current offsets of each elimination. */
3763 if (GET_CODE (insn) == CODE_LABEL)
3764 {
3765 num_not_at_initial_offset = 0;
3766 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3767 {
3768 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3769 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3770 if (reg_eliminate[i].can_eliminate
3771 && (reg_eliminate[i].offset
3772 != reg_eliminate[i].initial_offset))
3773 num_not_at_initial_offset++;
3774 }
3775 }
3776
3777 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3778 {
3779 rtx avoid_return_reg = 0;
3780
3781 #ifdef SMALL_REGISTER_CLASSES
3782 /* Set avoid_return_reg if this is an insn
3783 that might use the value of a function call. */
3784 if (GET_CODE (insn) == CALL_INSN)
3785 {
3786 if (GET_CODE (PATTERN (insn)) == SET)
3787 after_call = SET_DEST (PATTERN (insn));
3788 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3789 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3790 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3791 else
3792 after_call = 0;
3793 }
3794 else if (after_call != 0
3795 && !(GET_CODE (PATTERN (insn)) == SET
3796 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3797 {
3798 if (reg_referenced_p (after_call, PATTERN (insn)))
3799 avoid_return_reg = after_call;
3800 after_call = 0;
3801 }
3802 #endif /* SMALL_REGISTER_CLASSES */
3803
3804 /* If this is a USE or CLOBBER of a MEM, ensure that any
3805 references to eliminable registers have been removed. */
3806
3807 if ((GET_CODE (PATTERN (insn)) == USE
3808 || GET_CODE (PATTERN (insn)) == CLOBBER)
3809 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3810 XEXP (XEXP (PATTERN (insn), 0), 0)
3811 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3812 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
3813
3814 /* If we need to do register elimination processing, do so.
3815 This might delete the insn, in which case we are done. */
3816 if (num_eliminable && GET_MODE (insn) == QImode)
3817 {
3818 eliminate_regs_in_insn (insn, 1);
3819 if (GET_CODE (insn) == NOTE)
3820 {
3821 insn = next;
3822 continue;
3823 }
3824 }
3825
3826 if (GET_MODE (insn) == VOIDmode)
3827 n_reloads = 0;
3828 /* First find the pseudo regs that must be reloaded for this insn.
3829 This info is returned in the tables reload_... (see reload.h).
3830 Also modify the body of INSN by substituting RELOAD
3831 rtx's for those pseudo regs. */
3832 else
3833 {
3834 bzero (reg_has_output_reload, max_regno);
3835 CLEAR_HARD_REG_SET (reg_is_output_reload);
3836
3837 find_reloads (insn, 1, spill_indirect_levels, live_known,
3838 spill_reg_order);
3839 }
3840
3841 if (n_reloads > 0)
3842 {
3843 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3844 rtx p;
3845 int class;
3846
3847 /* If this block has not had spilling done for a
3848 particular class and we have any non-optionals that need a
3849 spill reg in that class, it is a fatal error. */
3850
3851 for (class = 0; class < N_REG_CLASSES; class++)
3852 if (basic_block_needs[class] != 0
3853 && basic_block_needs[class][this_block] == 0)
3854 for (i = 0; i < n_reloads; i++)
3855 if (class == (int) reload_reg_class[i]
3856 && reload_reg_rtx[i] == 0
3857 && ! reload_optional[i]
3858 && (reload_in[i] != 0 || reload_out[i] != 0
3859 || reload_secondary_p[i] != 0))
3860 fatal_insn ("Non-optional registers need a spill register", insn);
3861
3862 /* Now compute which reload regs to reload them into. Perhaps
3863 reusing reload regs from previous insns, or else output
3864 load insns to reload them. Maybe output store insns too.
3865 Record the choices of reload reg in reload_reg_rtx. */
3866 choose_reload_regs (insn, avoid_return_reg);
3867
3868 #ifdef SMALL_REGISTER_CLASSES
3869 /* Merge any reloads that we didn't combine for fear of
3870 increasing the number of spill registers needed but now
3871 discover can be safely merged. */
3872 merge_assigned_reloads (insn);
3873 #endif
3874
3875 /* Generate the insns to reload operands into or out of
3876 their reload regs. */
3877 emit_reload_insns (insn);
3878
3879 /* Substitute the chosen reload regs from reload_reg_rtx
3880 into the insn's body (or perhaps into the bodies of other
3881 load and store insns that we just made for reloading
3882 and that we moved the structure into). */
3883 subst_reloads ();
3884
3885 /* If this was an ASM, make sure that all the reload insns
3886 we have generated are valid. If not, give an error
3887 and delete them. */
3888
3889 if (asm_noperands (PATTERN (insn)) >= 0)
3890 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3891 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3892 && (recog_memoized (p) < 0
3893 || (insn_extract (p),
3894 ! constrain_operands (INSN_CODE (p), 1))))
3895 {
3896 error_for_asm (insn,
3897 "`asm' operand requires impossible reload");
3898 PUT_CODE (p, NOTE);
3899 NOTE_SOURCE_FILE (p) = 0;
3900 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3901 }
3902 }
3903 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3904 is no longer validly lying around to save a future reload.
3905 Note that this does not detect pseudos that were reloaded
3906 for this insn in order to be stored into
3907 (obeying register constraints). That is correct; such reload
3908 registers ARE still valid. */
3909 note_stores (PATTERN (insn), forget_old_reloads_1);
3910
3911 /* There may have been CLOBBER insns placed after INSN. So scan
3912 between INSN and NEXT and use them to forget old reloads. */
3913 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3914 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3915 note_stores (PATTERN (x), forget_old_reloads_1);
3916
3917 #ifdef AUTO_INC_DEC
3918 /* Likewise for regs altered by auto-increment in this insn.
3919 But note that the reg-notes are not changed by reloading:
3920 they still contain the pseudo-regs, not the spill regs. */
3921 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3922 if (REG_NOTE_KIND (x) == REG_INC)
3923 {
3924 /* See if this pseudo reg was reloaded in this insn.
3925 If so, its last-reload info is still valid
3926 because it is based on this insn's reload. */
3927 for (i = 0; i < n_reloads; i++)
3928 if (reload_out[i] == XEXP (x, 0))
3929 break;
3930
3931 if (i == n_reloads)
3932 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
3933 }
3934 #endif
3935 }
3936 /* A reload reg's contents are unknown after a label. */
3937 if (GET_CODE (insn) == CODE_LABEL)
3938 for (i = 0; i < n_spills; i++)
3939 {
3940 reg_reloaded_contents[i] = -1;
3941 reg_reloaded_insn[i] = 0;
3942 }
3943
3944 /* Don't assume a reload reg is still good after a call insn
3945 if it is a call-used reg. */
3946 else if (GET_CODE (insn) == CALL_INSN)
3947 for (i = 0; i < n_spills; i++)
3948 if (call_used_regs[spill_regs[i]])
3949 {
3950 reg_reloaded_contents[i] = -1;
3951 reg_reloaded_insn[i] = 0;
3952 }
3953
3954 /* In case registers overlap, allow certain insns to invalidate
3955 particular hard registers. */
3956
3957 #ifdef INSN_CLOBBERS_REGNO_P
3958 for (i = 0 ; i < n_spills ; i++)
3959 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3960 {
3961 reg_reloaded_contents[i] = -1;
3962 reg_reloaded_insn[i] = 0;
3963 }
3964 #endif
3965
3966 insn = next;
3967
3968 #ifdef USE_C_ALLOCA
3969 alloca (0);
3970 #endif
3971 }
3972 }
3973
3974 /* Discard all record of any value reloaded from X,
3975 or reloaded in X from someplace else;
3976 unless X is an output reload reg of the current insn.
3977
3978 X may be a hard reg (the reload reg)
3979 or it may be a pseudo reg that was reloaded from. */
3980
3981 static void
3982 forget_old_reloads_1 (x, ignored)
3983 rtx x;
3984 rtx ignored;
3985 {
3986 register int regno;
3987 int nr;
3988 int offset = 0;
3989
3990 /* note_stores does give us subregs of hard regs. */
3991 while (GET_CODE (x) == SUBREG)
3992 {
3993 offset += SUBREG_WORD (x);
3994 x = SUBREG_REG (x);
3995 }
3996
3997 if (GET_CODE (x) != REG)
3998 return;
3999
4000 regno = REGNO (x) + offset;
4001
4002 if (regno >= FIRST_PSEUDO_REGISTER)
4003 nr = 1;
4004 else
4005 {
4006 int i;
4007 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4008 /* Storing into a spilled-reg invalidates its contents.
4009 This can happen if a block-local pseudo is allocated to that reg
4010 and it wasn't spilled because this block's total need is 0.
4011 Then some insn might have an optional reload and use this reg. */
4012 for (i = 0; i < nr; i++)
4013 if (spill_reg_order[regno + i] >= 0
4014 /* But don't do this if the reg actually serves as an output
4015 reload reg in the current instruction. */
4016 && (n_reloads == 0
4017 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4018 {
4019 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4020 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4021 }
4022 }
4023
4024 /* Since value of X has changed,
4025 forget any value previously copied from it. */
4026
4027 while (nr-- > 0)
4028 /* But don't forget a copy if this is the output reload
4029 that establishes the copy's validity. */
4030 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4031 reg_last_reload_reg[regno + nr] = 0;
4032 }
4033 \f
4034 /* For each reload, the mode of the reload register. */
4035 static enum machine_mode reload_mode[MAX_RELOADS];
4036
4037 /* For each reload, the largest number of registers it will require. */
4038 static int reload_nregs[MAX_RELOADS];
4039
4040 /* Comparison function for qsort to decide which of two reloads
4041 should be handled first. *P1 and *P2 are the reload numbers. */
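/* (For example, a required reload whose class contains exactly one hard
reg sorts ahead of a required multi-register group reload, which in turn
sorts ahead of any optional reload; remaining ties are broken by class
number and finally by reload number, so the order is deterministic.) */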
4042
4043 static int
4044 reload_reg_class_lower (p1, p2)
4045 short *p1, *p2;
4046 {
4047 register int r1 = *p1, r2 = *p2;
4048 register int t;
4049
4050 /* Consider required reloads before optional ones. */
4051 t = reload_optional[r1] - reload_optional[r2];
4052 if (t != 0)
4053 return t;
4054
4055 /* Count all solitary classes before non-solitary ones. */
4056 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4057 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4058 if (t != 0)
4059 return t;
4060
4061 /* Aside from solitaires, consider all multi-reg groups first. */
4062 t = reload_nregs[r2] - reload_nregs[r1];
4063 if (t != 0)
4064 return t;
4065
4066 /* Consider reloads in order of increasing reg-class number. */
4067 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4068 if (t != 0)
4069 return t;
4070
4071 /* If reloads are equally urgent, sort by reload number,
4072 so that the results of qsort leave nothing to chance. */
4073 return r1 - r2;
4074 }
4075 \f
4076 /* The following HARD_REG_SETs indicate when each hard register is
4077 used for a reload of various parts of the current insn. */
4078
4079 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4080 static HARD_REG_SET reload_reg_used;
4081 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4082 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4083 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4084 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4085 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4086 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4087 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4088 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4089 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4090 static HARD_REG_SET reload_reg_used_in_op_addr;
4091 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4092 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4093 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4094 static HARD_REG_SET reload_reg_used_in_insn;
4095 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4096 static HARD_REG_SET reload_reg_used_in_other_addr;
4097
4098 /* If reg is in use as a reload reg for any sort of reload. */
4099 static HARD_REG_SET reload_reg_used_at_all;
4100
4101 /* If reg is in use as an inherited reload. We just mark the first register
4102 in the group. */
4103 static HARD_REG_SET reload_reg_used_for_inherit;
4104
4105 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4106 TYPE. MODE is used to indicate how many consecutive regs are
4107 actually used. */
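/* (For example, marking regno 4 for a DImode reload on a target where
HARD_REGNO_NREGS (4, DImode) is 2 sets bits 4 and 5 in the set chosen
by TYPE, and marks both in reload_reg_used_at_all.) */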
4108
4109 static void
4110 mark_reload_reg_in_use (regno, opnum, type, mode)
4111 int regno;
4112 int opnum;
4113 enum reload_type type;
4114 enum machine_mode mode;
4115 {
4116 int nregs = HARD_REGNO_NREGS (regno, mode);
4117 int i;
4118
4119 for (i = regno; i < nregs + regno; i++)
4120 {
4121 switch (type)
4122 {
4123 case RELOAD_OTHER:
4124 SET_HARD_REG_BIT (reload_reg_used, i);
4125 break;
4126
4127 case RELOAD_FOR_INPUT_ADDRESS:
4128 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4129 break;
4130
4131 case RELOAD_FOR_OUTPUT_ADDRESS:
4132 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4133 break;
4134
4135 case RELOAD_FOR_OPERAND_ADDRESS:
4136 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4137 break;
4138
4139 case RELOAD_FOR_OPADDR_ADDR:
4140 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4141 break;
4142
4143 case RELOAD_FOR_OTHER_ADDRESS:
4144 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4145 break;
4146
4147 case RELOAD_FOR_INPUT:
4148 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4149 break;
4150
4151 case RELOAD_FOR_OUTPUT:
4152 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4153 break;
4154
4155 case RELOAD_FOR_INSN:
4156 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4157 break;
4158 }
4159
4160 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4161 }
4162 }
4163
4164 /* Similarly, but show REGNO is no longer in use for a reload. */
4165
4166 static void
4167 clear_reload_reg_in_use (regno, opnum, type, mode)
4168 int regno;
4169 int opnum;
4170 enum reload_type type;
4171 enum machine_mode mode;
4172 {
4173 int nregs = HARD_REGNO_NREGS (regno, mode);
4174 int i;
4175
4176 for (i = regno; i < nregs + regno; i++)
4177 {
4178 switch (type)
4179 {
4180 case RELOAD_OTHER:
4181 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4182 break;
4183
4184 case RELOAD_FOR_INPUT_ADDRESS:
4185 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4186 break;
4187
4188 case RELOAD_FOR_OUTPUT_ADDRESS:
4189 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4190 break;
4191
4192 case RELOAD_FOR_OPERAND_ADDRESS:
4193 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4194 break;
4195
4196 case RELOAD_FOR_OPADDR_ADDR:
4197 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4198 break;
4199
4200 case RELOAD_FOR_OTHER_ADDRESS:
4201 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4202 break;
4203
4204 case RELOAD_FOR_INPUT:
4205 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4206 break;
4207
4208 case RELOAD_FOR_OUTPUT:
4209 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4210 break;
4211
4212 case RELOAD_FOR_INSN:
4213 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4214 break;
4215 }
4216 }
4217 }
4218
4219 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4220 specified by OPNUM and TYPE. */
4221
4222 static int
4223 reload_reg_free_p (regno, opnum, type)
4224 int regno;
4225 int opnum;
4226 enum reload_type type;
4227 {
4228 int i;
4229
4230 /* In use for a RELOAD_OTHER means it's not available for anything except
4231 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4232 to be used only for inputs. */
4233
4234 if (type != RELOAD_FOR_OTHER_ADDRESS
4235 && TEST_HARD_REG_BIT (reload_reg_used, regno))
4236 return 0;
4237
4238 switch (type)
4239 {
4240 case RELOAD_OTHER:
4241 /* In use for anything except RELOAD_FOR_OTHER_ADDRESS means
4242 we can't use it for RELOAD_OTHER. */
4243 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4244 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4245 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4246 return 0;
4247
4248 for (i = 0; i < reload_n_operands; i++)
4249 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4250 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4251 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4252 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4253 return 0;
4254
4255 return 1;
4256
4257 case RELOAD_FOR_INPUT:
4258 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4259 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4260 return 0;
4261
4262 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4263 return 0;
4264
4265 /* If it is used for some other input, can't use it. */
4266 for (i = 0; i < reload_n_operands; i++)
4267 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4268 return 0;
4269
4270 /* If it is used in a later operand's address, can't use it. */
4271 for (i = opnum + 1; i < reload_n_operands; i++)
4272 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4273 return 0;
4274
4275 return 1;
4276
4277 case RELOAD_FOR_INPUT_ADDRESS:
4278 /* Can't use a register if it is used for an input address for this
4279 operand or used as an input in an earlier one. */
4280 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4281 return 0;
4282
4283 for (i = 0; i < opnum; i++)
4284 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4285 return 0;
4286
4287 return 1;
4288
4289 case RELOAD_FOR_OUTPUT_ADDRESS:
4290 /* Can't use a register if it is used for an output address for this
4291 operand or used as an output in this or a later operand. */
4292 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4293 return 0;
4294
4295 for (i = opnum; i < reload_n_operands; i++)
4296 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4297 return 0;
4298
4299 return 1;
4300
4301 case RELOAD_FOR_OPERAND_ADDRESS:
4302 for (i = 0; i < reload_n_operands; i++)
4303 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4304 return 0;
4305
4306 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4307 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4308
4309 case RELOAD_FOR_OPADDR_ADDR:
4310 for (i = 0; i < reload_n_operands; i++)
4311 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4312 return 0;
4313
4314 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4315
4316 case RELOAD_FOR_OUTPUT:
4317 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4318 outputs, or an operand address for this or an earlier output. */
4319 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4320 return 0;
4321
4322 for (i = 0; i < reload_n_operands; i++)
4323 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4324 return 0;
4325
4326 for (i = 0; i <= opnum; i++)
4327 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4328 return 0;
4329
4330 return 1;
4331
4332 case RELOAD_FOR_INSN:
4333 for (i = 0; i < reload_n_operands; i++)
4334 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4335 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4336 return 0;
4337
4338 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4339 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4340
4341 case RELOAD_FOR_OTHER_ADDRESS:
4342 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4343 }
4344 abort ();
4345 }
4346
4347 /* Return 1 if the value in reload reg REGNO, as used by a reload
4348 needed for the part of the insn specified by OPNUM and TYPE,
4349 is not in use for a reload in any prior part of the insn.
4350
4351 We can assume that the reload reg was already tested for availability
4352 at the time it is needed, and we should not check this again,
4353 in case the reg has already been marked in use. */
4354
4355 static int
4356 reload_reg_free_before_p (regno, opnum, type)
4357 int regno;
4358 int opnum;
4359 enum reload_type type;
4360 {
4361 int i;
4362
4363 switch (type)
4364 {
4365 case RELOAD_FOR_OTHER_ADDRESS:
4366 /* These always come first. */
4367 return 1;
4368
4369 case RELOAD_OTHER:
4370 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4371
4372 /* If this use is for part of the insn,
4373 check the reg is not in use for any prior part. It is tempting
4374 to try to do this by falling through from objects that occur
4375 later in the insn to ones that occur earlier, but that will not
4376 correctly take into account the fact that here we MUST ignore
4377 things that would prevent the register from being allocated in
4378 the first place, since we know that it was allocated. */
4379
4380 case RELOAD_FOR_OUTPUT_ADDRESS:
4381 /* Earlier reloads are for earlier outputs or their addresses,
4382 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4383 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4384 RELOAD_OTHER). */
4385 for (i = 0; i < opnum; i++)
4386 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4387 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4388 return 0;
4389
4390 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4391 return 0;
4392
4393 for (i = 0; i < reload_n_operands; i++)
4394 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4395 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4396 return 0;
4397
4398 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4399 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4400 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4401
4402 case RELOAD_FOR_OUTPUT:
4403 /* This can't be used in the output address for this operand and
4404 anything that can't be used for it, except that we've already
4405 tested for RELOAD_FOR_INSN objects. */
4406
4407 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4408 return 0;
4409
4410 for (i = 0; i < opnum; i++)
4411 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4412 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4413 return 0;
4414
4415 for (i = 0; i < reload_n_operands; i++)
4416 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4417 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4418 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4419 return 0;
4420
4421 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4422
4423 case RELOAD_FOR_OPERAND_ADDRESS:
4424 case RELOAD_FOR_OPADDR_ADDR:
4425 case RELOAD_FOR_INSN:
4426 /* These can't conflict with inputs, or each other, so all we have to
4427 test is input addresses and the addresses of OTHER items. */
4428
4429 for (i = 0; i < reload_n_operands; i++)
4430 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4431 return 0;
4432
4433 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4434
4435 case RELOAD_FOR_INPUT:
4436 /* The only things earlier are the address for this and
4437 earlier inputs, other inputs (which we know we don't conflict
4438 with), and addresses of RELOAD_OTHER objects. */
4439
4440 for (i = 0; i <= opnum; i++)
4441 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4442 return 0;
4443
4444 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4445
4446 case RELOAD_FOR_INPUT_ADDRESS:
4447 /* Similarly, all we have to check is for use in earlier inputs'
4448 addresses. */
4449 for (i = 0; i < opnum; i++)
4450 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4451 return 0;
4452
4453 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4454 }
4455 abort ();
4456 }
4457
4458 /* Return 1 if the value in reload reg REGNO, as used by a reload
4459 needed for the part of the insn specified by OPNUM and TYPE,
4460 is still available in REGNO at the end of the insn.
4461
4462 We can assume that the reload reg was already tested for availability
4463 at the time it is needed, and we should not check this again,
4464 in case the reg has already been marked in use. */
4465
4466 static int
4467 reload_reg_reaches_end_p (regno, opnum, type)
4468 int regno;
4469 int opnum;
4470 enum reload_type type;
4471 {
4472 int i;
4473
4474 switch (type)
4475 {
4476 case RELOAD_OTHER:
4477 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4478 its value must reach the end. */
4479 return 1;
4480
4481 /* If this use is for part of the insn,
4482 its value reaches the end if no subsequent part uses the same register.
4483 Just like the above function, don't try to do this with lots
4484 of fallthroughs. */
4485
4486 case RELOAD_FOR_OTHER_ADDRESS:
4487 /* Here we check for everything else, since these don't conflict
4488 with anything else and everything comes later. */
4489
4490 for (i = 0; i < reload_n_operands; i++)
4491 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4492 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4493 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4494 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4495 return 0;
4496
4497 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4498 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4499 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4500
4501 case RELOAD_FOR_INPUT_ADDRESS:
4502 /* Similar, except that we check only for this and subsequent inputs
4503 and the address of only subsequent inputs and we do not need
4504 to check for RELOAD_OTHER objects since they are known not to
4505 conflict. */
4506
4507 for (i = opnum; i < reload_n_operands; i++)
4508 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4509 return 0;
4510
4511 for (i = opnum + 1; i < reload_n_operands; i++)
4512 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4513 return 0;
4514
4515 for (i = 0; i < reload_n_operands; i++)
4516 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4517 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4518 return 0;
4519
4520 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4521 return 0;
4522
4523 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4524 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4525
4526 case RELOAD_FOR_INPUT:
4527 /* Similar to input address, except we start at the next operand for
4528 both input and input address and we do not check for
4529 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4530 would conflict. */
4531
4532 for (i = opnum + 1; i < reload_n_operands; i++)
4533 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4534 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4535 return 0;
4536
4537 /* ... fall through ... */
4538
4539 case RELOAD_FOR_OPERAND_ADDRESS:
4540 /* Check outputs and their addresses. */
4541
4542 for (i = 0; i < reload_n_operands; i++)
4543 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4544 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4545 return 0;
4546
4547 return 1;
4548
4549 case RELOAD_FOR_OPADDR_ADDR:
4550 for (i = 0; i < reload_n_operands; i++)
4551 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4552 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4553 return 0;
4554
4555 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4556 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4557
4558 case RELOAD_FOR_INSN:
4559 /* These conflict with other outputs with RELOAD_OTHER. So
4560 we need only check for output addresses. */
4561
4562 opnum = -1;
4563
4564 /* ... fall through ... */
4565
4566 case RELOAD_FOR_OUTPUT:
4567 case RELOAD_FOR_OUTPUT_ADDRESS:
4568 /* We already know these can't conflict with a later output. So the
4569 only thing to check are later output addresses. */
4570 for (i = opnum + 1; i < reload_n_operands; i++)
4571 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4572 return 0;
4573
4574 return 1;
4575 }
4576
4577 abort ();
4578 }
4579 \f
4580 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4581 Return 0 otherwise.
4582
4583 This function uses the same algorithm as reload_reg_free_p above. */
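/* (For example, a RELOAD_FOR_INPUT reload for operand 0 conflicts with a
RELOAD_FOR_INPUT_ADDRESS reload for operand 1: that address is computed
after operand 0's input has been loaded, while operand 0's reload reg
must stay live until the insn itself. It does not conflict with the
RELOAD_FOR_INPUT_ADDRESS reload for operand 0, which is finished before
the input itself is loaded.) */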
4584
4585 static int
4586 reloads_conflict (r1, r2)
4587 int r1, r2;
4588 {
4589 enum reload_type r1_type = reload_when_needed[r1];
4590 enum reload_type r2_type = reload_when_needed[r2];
4591 int r1_opnum = reload_opnum[r1];
4592 int r2_opnum = reload_opnum[r2];
4593
4594 /* RELOAD_OTHER conflicts with everything except RELOAD_FOR_OTHER_ADDRESS. */
4595
4596 if (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS)
4597 return 1;
4598
4599 /* Otherwise, check conflicts differently for each type. */
4600
4601 switch (r1_type)
4602 {
4603 case RELOAD_FOR_INPUT:
4604 return (r2_type == RELOAD_FOR_INSN
4605 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4606 || r2_type == RELOAD_FOR_OPADDR_ADDR
4607 || r2_type == RELOAD_FOR_INPUT
4608 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4609
4610 case RELOAD_FOR_INPUT_ADDRESS:
4611 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4612 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4613
4614 case RELOAD_FOR_OUTPUT_ADDRESS:
4615 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4616 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4617
4618 case RELOAD_FOR_OPERAND_ADDRESS:
4619 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4620 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4621
4622 case RELOAD_FOR_OPADDR_ADDR:
4623 return (r2_type == RELOAD_FOR_INPUT
4624 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4625
4626 case RELOAD_FOR_OUTPUT:
4627 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4628 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4629 && r2_opnum >= r1_opnum));
4630
4631 case RELOAD_FOR_INSN:
4632 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4633 || r2_type == RELOAD_FOR_INSN
4634 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4635
4636 case RELOAD_FOR_OTHER_ADDRESS:
4637 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4638
4639 case RELOAD_OTHER:
4640 return r2_type != RELOAD_FOR_OTHER_ADDRESS;
4641
4642 default:
4643 abort ();
4644 }
4645 }
4646 \f
4647 /* Vector of reload-numbers showing the order in which the reloads should
4648 be processed. */
4649 short reload_order[MAX_RELOADS];
4650
4651 /* Indexed by reload number, 1 if incoming value
4652 inherited from previous insns. */
4653 char reload_inherited[MAX_RELOADS];
4654
4655 /* For an inherited reload, this is the insn the reload was inherited from,
4656 if we know it. Otherwise, this is 0. */
4657 rtx reload_inheritance_insn[MAX_RELOADS];
4658
4659 /* If non-zero, this is a place to get the value of the reload,
4660 rather than using reload_in. */
4661 rtx reload_override_in[MAX_RELOADS];
4662
4663 /* For each reload, the index in spill_regs of the spill register used,
4664 or -1 if we did not need one of the spill registers for this reload. */
4665 int reload_spill_index[MAX_RELOADS];
4666
4667 /* Index of last register assigned as a spill register. We allocate in
4668 a round-robin fashion. */
4669
4670 static int last_spill_reg = 0;
4671
4672 /* Find a spill register to use as a reload register for reload R.
4673 LAST_RELOAD is non-zero if this is the last reload for the insn being
4674 processed.
4675
4676 Set reload_reg_rtx[R] to the register allocated.
4677
4678 If NOERROR is nonzero, we return 1 if successful,
4679 or 0 if we couldn't find a spill reg and we didn't change anything. */
4680
4681 static int
4682 allocate_reload_reg (r, insn, last_reload, noerror)
4683 int r;
4684 rtx insn;
4685 int last_reload;
4686 int noerror;
4687 {
4688 int i;
4689 int pass;
4690 int count;
4691 rtx new;
4692 int regno;
4693
4694 /* If we put this reload ahead, thinking it is a group,
4695 then insist on finding a group. Otherwise we can grab a
4696 reg that some other reload needs.
4697 (That can happen when we have a 68000 DATA_OR_FP_REG
4698 which is a group of data regs or one fp reg.)
4699 We need not be so restrictive if there are no more reloads
4700 for this insn.
4701
4702 ??? Really it would be nicer to have smarter handling
4703 for that kind of reg class, where a problem like this is normal.
4704 Perhaps those classes should be avoided for reloading
4705 by use of more alternatives. */
4706
4707 int force_group = reload_nregs[r] > 1 && ! last_reload;
4708
4709 /* If we want a single register and haven't yet found one,
4710 take any reg in the right class and not in use.
4711 If we want a consecutive group, here is where we look for it.
4712
4713 We use two passes so we can first look for reload regs to
4714 reuse, which are already in use for other reloads in this insn,
4715 and only then use additional registers.
4716 I think that maximizing reuse is needed to make sure we don't
4717 run out of reload regs. Suppose we have three reloads, and
4718 reloads A and B can share regs. These need two regs.
4719 Suppose A and B are given different regs.
4720 That leaves none for C. */
4721 for (pass = 0; pass < 2; pass++)
4722 {
4723 /* I is the index in spill_regs.
4724 We advance it round-robin between insns to use all spill regs
4725 equally, so that inherited reloads have a chance
4726 of leapfrogging each other. Don't do this, however, when we have
4727 group needs and failure would be fatal; if we only have a relatively
4728 small number of spill registers, and more than one of them has
4729 group needs, then by starting in the middle, we may end up
4730 allocating the first one in such a way that we are not left with
4731 sufficient groups to handle the rest. */
4732
4733 if (noerror || ! force_group)
4734 i = last_spill_reg;
4735 else
4736 i = -1;
4737
4738 for (count = 0; count < n_spills; count++)
4739 {
4740 int class = (int) reload_reg_class[r];
4741
4742 i = (i + 1) % n_spills;
4743
4744 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4745 reload_when_needed[r])
4746 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4747 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4748 /* Look first for regs to share, then for unshared. But
4749 don't share regs used for inherited reloads; they are
4750 the ones we want to preserve. */
4751 && (pass
4752 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4753 spill_regs[i])
4754 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4755 spill_regs[i]))))
4756 {
4757 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4758 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4759 (on 68000) got us two FP regs. If NR is 1,
4760 we would reject both of them. */
4761 if (force_group)
4762 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4763 /* If we need only one reg, we have already won. */
4764 if (nr == 1)
4765 {
4766 /* But reject a single reg if we demand a group. */
4767 if (force_group)
4768 continue;
4769 break;
4770 }
4771 /* Otherwise check that as many consecutive regs as we need
4772 are available here.
4773 Also, don't use for a group registers that are
4774 needed for nongroups. */
4775 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4776 while (nr > 1)
4777 {
4778 regno = spill_regs[i] + nr - 1;
4779 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4780 && spill_reg_order[regno] >= 0
4781 && reload_reg_free_p (regno, reload_opnum[r],
4782 reload_when_needed[r])
4783 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4784 regno)))
4785 break;
4786 nr--;
4787 }
4788 if (nr == 1)
4789 break;
4790 }
4791 }
4792
4793 /* If we found something on pass 1, omit pass 2. */
4794 if (count < n_spills)
4795 break;
4796 }
4797
4798 /* We should have found a spill register by now. */
4799 if (count == n_spills)
4800 {
4801 if (noerror)
4802 return 0;
4803 goto failure;
4804 }
4805
4806 /* I is the index in SPILL_REG_RTX of the reload register we are to
4807 allocate. Get an rtx for it and find its register number. */
4808
4809 new = spill_reg_rtx[i];
4810
4811 if (new == 0 || GET_MODE (new) != reload_mode[r])
4812 spill_reg_rtx[i] = new
4813 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4814
4815 regno = true_regnum (new);
4816
4817 /* Detect when the reload reg can't hold the reload mode.
4818 This used to be one `if', but the Sequent compiler can't handle that. */
4819 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4820 {
4821 enum machine_mode test_mode = VOIDmode;
4822 if (reload_in[r])
4823 test_mode = GET_MODE (reload_in[r]);
4824 /* If reload_in[r] has VOIDmode, it means we will load it
4825 in whatever mode the reload reg has: to wit, reload_mode[r].
4826 We have already tested that for validity. */
4827 /* Aside from that, we need to test that the expressions
4828 to reload from or into have modes which are valid for this
4829 reload register. Otherwise the reload insns would be invalid. */
4830 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4831 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4832 if (! (reload_out[r] != 0
4833 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
4834 {
4835 /* The reg is OK. */
4836 last_spill_reg = i;
4837
4838 /* Mark as in use for this insn the reload regs we use
4839 for this. */
4840 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4841 reload_when_needed[r], reload_mode[r]);
4842
4843 reload_reg_rtx[r] = new;
4844 reload_spill_index[r] = i;
4845 return 1;
4846 }
4847 }
4848
4849 /* The reg is not OK. */
4850 if (noerror)
4851 return 0;
4852
4853 failure:
4854 if (asm_noperands (PATTERN (insn)) < 0)
4855 /* It's the compiler's fault. */
4856 fatal_insn ("Could not find a spill register", insn);
4857
4858 /* It's the user's fault; the operand's mode and constraint
4859 don't match. Disable this reload so we don't crash in final. */
4860 error_for_asm (insn,
4861 "`asm' operand constraint incompatible with operand size");
4862 reload_in[r] = 0;
4863 reload_out[r] = 0;
4864 reload_reg_rtx[r] = 0;
4865 reload_optional[r] = 1;
4866 reload_secondary_p[r] = 1;
4867
4868 return 1;
4869 }
4870 \f
4871 /* Assign hard reg targets for the pseudo-registers we must reload
4872 into hard regs for this insn.
4873 Also output the instructions to copy them in and out of the hard regs.
4874
4875 For machines with register classes, we are responsible for
4876 finding a reload reg in the proper class. */
4877
4878 static void
4879 choose_reload_regs (insn, avoid_return_reg)
4880 rtx insn;
4881 rtx avoid_return_reg;
4882 {
4883 register int i, j;
4884 int max_group_size = 1;
4885 enum reg_class group_class = NO_REGS;
4886 int inheritance;
4887
4888 rtx save_reload_reg_rtx[MAX_RELOADS];
4889 char save_reload_inherited[MAX_RELOADS];
4890 rtx save_reload_inheritance_insn[MAX_RELOADS];
4891 rtx save_reload_override_in[MAX_RELOADS];
4892 int save_reload_spill_index[MAX_RELOADS];
4893 HARD_REG_SET save_reload_reg_used;
4894 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4895 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4896 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4897 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4898 HARD_REG_SET save_reload_reg_used_in_op_addr;
4899 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
4900 HARD_REG_SET save_reload_reg_used_in_insn;
4901 HARD_REG_SET save_reload_reg_used_in_other_addr;
4902 HARD_REG_SET save_reload_reg_used_at_all;
4903
4904 bzero (reload_inherited, MAX_RELOADS);
4905 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4906 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
4907
4908 CLEAR_HARD_REG_SET (reload_reg_used);
4909 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4910 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4911 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
4912 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4913 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
4914
4915 for (i = 0; i < reload_n_operands; i++)
4916 {
4917 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4918 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4919 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4920 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4921 }
4922
4923 #ifdef SMALL_REGISTER_CLASSES
4924 /* Don't bother with avoiding the return reg
4925 if we have no mandatory reload that could use it. */
4926 if (avoid_return_reg)
4927 {
4928 int do_avoid = 0;
4929 int regno = REGNO (avoid_return_reg);
4930 int nregs
4931 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4932 int r;
4933
4934 for (r = regno; r < regno + nregs; r++)
4935 if (spill_reg_order[r] >= 0)
4936 for (j = 0; j < n_reloads; j++)
4937 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4938 && (reload_in[j] != 0 || reload_out[j] != 0
4939 || reload_secondary_p[j])
4940 &&
4941 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4942 do_avoid = 1;
4943 if (!do_avoid)
4944 avoid_return_reg = 0;
4945 }
4946 #endif /* SMALL_REGISTER_CLASSES */
4947
4948 #if 0 /* Not needed, now that we can always retry without inheritance. */
4949 /* See if we have more mandatory reloads than spill regs.
4950 If so, then we cannot risk optimizations that could prevent
4951 reloads from sharing one spill register.
4952
4953 Since we will try finding a better register than reload_reg_rtx
4954 unless it is equal to reload_in or reload_out, count such reloads. */
4955
4956 {
4957 int tem = 0;
4958 #ifdef SMALL_REGISTER_CLASSES
4959 int tem = (avoid_return_reg != 0);
4960 #endif
4961 for (j = 0; j < n_reloads; j++)
4962 if (! reload_optional[j]
4963 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4964 && (reload_reg_rtx[j] == 0
4965 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4966 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4967 tem++;
4968 if (tem > n_spills)
4969 must_reuse = 1;
4970 }
4971 #endif
4972
4973 #ifdef SMALL_REGISTER_CLASSES
4974 /* Don't use the subroutine call return reg for a reload
4975 if we are supposed to avoid it. */
4976 if (avoid_return_reg)
4977 {
4978 int regno = REGNO (avoid_return_reg);
4979 int nregs
4980 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4981 int r;
4982
4983 for (r = regno; r < regno + nregs; r++)
4984 if (spill_reg_order[r] >= 0)
4985 SET_HARD_REG_BIT (reload_reg_used, r);
4986 }
4987 #endif /* SMALL_REGISTER_CLASSES */
4988
4989 /* In order to be certain of getting the registers we need,
4990 we must sort the reloads into order of increasing register class.
4991 Then our grabbing of reload registers will parallel the process
4992 that provided the reload registers.
4993
4994 Also note whether any of the reloads wants a consecutive group of regs.
4995 If so, record the maximum size of the group desired and what
4996 register class contains all the groups needed by this insn. */
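/* A minimal, hypothetical example of why the ordering matters: if one
   reload wants class DATA_REGS = {d0} and another wants
   GENERAL_REGS = {d0, a0}, handling the DATA_REGS reload first lets it
   take d0 and leaves a0 for the looser reload; the opposite order could
   give d0 to the GENERAL_REGS reload and leave nothing acceptable for
   the tighter one.  Class names and registers are illustrative only.  */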
4997
4998 for (j = 0; j < n_reloads; j++)
4999 {
5000 reload_order[j] = j;
5001 reload_spill_index[j] = -1;
5002
5003 reload_mode[j]
5004 = (reload_inmode[j] == VOIDmode
5005 || (GET_MODE_SIZE (reload_outmode[j])
5006 > GET_MODE_SIZE (reload_inmode[j])))
5007 ? reload_outmode[j] : reload_inmode[j];
5008
5009 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5010
5011 if (reload_nregs[j] > 1)
5012 {
5013 max_group_size = MAX (reload_nregs[j], max_group_size);
5014 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5015 }
5016
5017 /* If we have already decided to use a certain register,
5018 don't use it in another way. */
5019 if (reload_reg_rtx[j])
5020 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5021 reload_when_needed[j], reload_mode[j]);
5022 }
5023
5024 if (n_reloads > 1)
5025 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5026
5027 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5028 sizeof reload_reg_rtx);
5029 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5030 bcopy ((char *) reload_inheritance_insn,
5031 (char *) save_reload_inheritance_insn,
5032 sizeof reload_inheritance_insn);
5033 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5034 sizeof reload_override_in);
5035 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5036 sizeof reload_spill_index);
5037 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5038 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5039 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5040 reload_reg_used_in_op_addr);
5041
5042 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5043 reload_reg_used_in_op_addr_reload);
5044
5045 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5046 reload_reg_used_in_insn);
5047 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5048 reload_reg_used_in_other_addr);
5049
5050 for (i = 0; i < reload_n_operands; i++)
5051 {
5052 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5053 reload_reg_used_in_output[i]);
5054 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5055 reload_reg_used_in_input[i]);
5056 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5057 reload_reg_used_in_input_addr[i]);
5058 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5059 reload_reg_used_in_output_addr[i]);
5060 }
5061
5062 /* If -O, try first with inheritance, then turning it off.
5063 If not -O, don't do inheritance.
5064 Using inheritance when not optimizing leads to paradoxes
5065 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5066 because one side of the comparison might be inherited. */
5067
5068 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5069 {
5070 /* Process the reloads in order of preference just found.
5071 Beyond this point, subregs can be found in reload_reg_rtx.
5072
5073 This used to look for an existing reloaded home for all
5074 of the reloads, and only then perform any new reloads.
5075 But that could lose if the reloads were done out of reg-class order
5076 because a later reload with a looser constraint might have an old
5077 home in a register needed by an earlier reload with a tighter constraint.
5078
5079 To solve this, we make two passes over the reloads, in the order
5080 described above. In the first pass we try to inherit a reload
5081 from a previous insn. If there is a later reload that needs a
5082 class that is a proper subset of the class being processed, we must
5083 also allocate a spill register during the first pass.
5084
5085 Then make a second pass over the reloads to allocate any reloads
5086 that haven't been given registers yet. */
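/* Rough picture of the two passes, with made-up reloads: the loop just
   below walks the reloads in reload_order and, on this pass, only fills
   in a register when one can be inherited from a previous insn; the
   second loop further down then calls allocate_reload_reg for whatever
   non-optional reloads are still unassigned.  */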
5087
5088 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5089
5090 for (j = 0; j < n_reloads; j++)
5091 {
5092 register int r = reload_order[j];
5093
5094 /* Ignore reloads that got marked inoperative. */
5095 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5096 continue;
5097
5098 /* If find_reloads chose to use reload_in or reload_out as a reload
5099 register, we don't need to choose one. Otherwise, try even if it found
5100 one since we might save an insn if we find the value lying around. */
5101 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5102 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5103 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5104 continue;
5105
5106 #if 0 /* No longer needed for correct operation.
5107 It might give better code, or might not; worth an experiment? */
5108 /* If this is an optional reload, we can't inherit from earlier insns
5109 until we are sure that any non-optional reloads have been allocated.
5110 The following code takes advantage of the fact that optional reloads
5111 are at the end of reload_order. */
5112 if (reload_optional[r] != 0)
5113 for (i = 0; i < j; i++)
5114 if ((reload_out[reload_order[i]] != 0
5115 || reload_in[reload_order[i]] != 0
5116 || reload_secondary_p[reload_order[i]])
5117 && ! reload_optional[reload_order[i]]
5118 && reload_reg_rtx[reload_order[i]] == 0)
5119 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5120 #endif
5121
5122 /* First see if this pseudo is already available as reloaded
5123 for a previous insn. We cannot try to inherit for reloads
5124 that are smaller than the maximum number of registers needed
5125 for groups unless the register we would allocate cannot be used
5126 for the groups.
5127
5128 We could check here to see if this is a secondary reload for
5129 an object that is already in a register of the desired class.
5130 This would avoid the need for the secondary reload register.
5131 But this is complex because we can't easily determine what
5132 objects might want to be loaded via this reload. So let a register
5133 be allocated here. In `emit_reload_insns' we suppress one of the
5134 loads in the case described above. */
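/* Concrete (made-up) example of the inheritance test below: if the
   previous insn reloaded pseudo 103 into hard reg r4, then
   reg_last_reload_reg[103] is r4 and reg_reloaded_contents for r4's
   spill-reg slot is 103.  When this insn also wants pseudo 103 in a
   class containing r4, and r4 is still free at the point where this
   reload is needed, we can reuse r4 and skip emitting a new load.  */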
5135
5136 if (inheritance)
5137 {
5138 register int regno = -1;
5139 enum machine_mode mode;
5140
5141 if (reload_in[r] == 0)
5142 ;
5143 else if (GET_CODE (reload_in[r]) == REG)
5144 {
5145 regno = REGNO (reload_in[r]);
5146 mode = GET_MODE (reload_in[r]);
5147 }
5148 else if (GET_CODE (reload_in_reg[r]) == REG)
5149 {
5150 regno = REGNO (reload_in_reg[r]);
5151 mode = GET_MODE (reload_in_reg[r]);
5152 }
5153 #if 0
5154 /* This won't work, since REGNO can be a pseudo reg number.
5155 Also, it takes much more hair to keep track of all the things
5156 that can invalidate an inherited reload of part of a pseudoreg. */
5157 else if (GET_CODE (reload_in[r]) == SUBREG
5158 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5159 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5160 #endif
5161
5162 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5163 {
5164 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5165
5166 if (reg_reloaded_contents[i] == regno
5167 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5168 >= GET_MODE_SIZE (mode))
5169 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5170 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5171 spill_regs[i])
5172 && (reload_nregs[r] == max_group_size
5173 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5174 spill_regs[i]))
5175 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5176 reload_when_needed[r])
5177 && reload_reg_free_before_p (spill_regs[i],
5178 reload_opnum[r],
5179 reload_when_needed[r]))
5180 {
5181 /* If a group is needed, verify that all the subsequent
5182 registers still have their values intact. */
5183 int nr
5184 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5185 int k;
5186
5187 for (k = 1; k < nr; k++)
5188 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5189 != regno)
5190 break;
5191
5192 if (k == nr)
5193 {
5194 int i1;
5195
5196 /* We found a register that contains the
5197 value we need. If this register is the
5198 same as an `earlyclobber' operand of the
5199 current insn, just mark it as a place to
5200 reload from since we can't use it as the
5201 reload register itself. */
5202
5203 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5204 if (reg_overlap_mentioned_for_reload_p
5205 (reg_last_reload_reg[regno],
5206 reload_earlyclobbers[i1]))
5207 break;
5208
5209 if (i1 != n_earlyclobbers
5210 /* Don't really use the inherited spill reg
5211 if we need it wider than we've got it. */
5212 || (GET_MODE_SIZE (reload_mode[r])
5213 > GET_MODE_SIZE (mode)))
5214 reload_override_in[r] = reg_last_reload_reg[regno];
5215 else
5216 {
5217 int k;
5218 /* We can use this as a reload reg. */
5219 /* Mark the register as in use for this part of
5220 the insn. */
5221 mark_reload_reg_in_use (spill_regs[i],
5222 reload_opnum[r],
5223 reload_when_needed[r],
5224 reload_mode[r]);
5225 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5226 reload_inherited[r] = 1;
5227 reload_inheritance_insn[r]
5228 = reg_reloaded_insn[i];
5229 reload_spill_index[r] = i;
5230 for (k = 0; k < nr; k++)
5231 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5232 spill_regs[i + k]);
5233 }
5234 }
5235 }
5236 }
5237 }
5238
5239 /* Here's another way to see if the value is already lying around. */
5240 if (inheritance
5241 && reload_in[r] != 0
5242 && ! reload_inherited[r]
5243 && reload_out[r] == 0
5244 && (CONSTANT_P (reload_in[r])
5245 || GET_CODE (reload_in[r]) == PLUS
5246 || GET_CODE (reload_in[r]) == REG
5247 || GET_CODE (reload_in[r]) == MEM)
5248 && (reload_nregs[r] == max_group_size
5249 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5250 {
5251 register rtx equiv
5252 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5253 -1, NULL_PTR, 0, reload_mode[r]);
5254 int regno;
5255
5256 if (equiv != 0)
5257 {
5258 if (GET_CODE (equiv) == REG)
5259 regno = REGNO (equiv);
5260 else if (GET_CODE (equiv) == SUBREG)
5261 {
5262 /* This must be a SUBREG of a hard register.
5263 Make a new REG since this might be used in an
5264 address and not all machines support SUBREGs
5265 there. */
5266 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5267 equiv = gen_rtx (REG, reload_mode[r], regno);
5268 }
5269 else
5270 abort ();
5271 }
5272
5273 /* If we found a spill reg, reject it unless it is free
5274 and of the desired class. */
5275 if (equiv != 0
5276 && ((spill_reg_order[regno] >= 0
5277 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5278 reload_when_needed[r]))
5279 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5280 regno)))
5281 equiv = 0;
5282
5283 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5284 equiv = 0;
5285
5286 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5287 equiv = 0;
5288
5289 /* We found a register that contains the value we need.
5290 If this register is the same as an `earlyclobber' operand
5291 of the current insn, just mark it as a place to reload from
5292 since we can't use it as the reload register itself. */
5293
5294 if (equiv != 0)
5295 for (i = 0; i < n_earlyclobbers; i++)
5296 if (reg_overlap_mentioned_for_reload_p (equiv,
5297 reload_earlyclobbers[i]))
5298 {
5299 reload_override_in[r] = equiv;
5300 equiv = 0;
5301 break;
5302 }
5303
5304 /* JRV: If the equiv register we have found is explicitly
5305 clobbered in the current insn, mark but don't use, as above. */
5306
5307 if (equiv != 0 && regno_clobbered_p (regno, insn))
5308 {
5309 reload_override_in[r] = equiv;
5310 equiv = 0;
5311 }
5312
5313 /* If we found an equivalent reg, say no code need be generated
5314 to load it, and use it as our reload reg. */
5315 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5316 {
5317 reload_reg_rtx[r] = equiv;
5318 reload_inherited[r] = 1;
5319 /* If it is a spill reg,
5320 mark the spill reg as in use for this insn. */
5321 i = spill_reg_order[regno];
5322 if (i >= 0)
5323 {
5324 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5325 int k;
5326 mark_reload_reg_in_use (regno, reload_opnum[r],
5327 reload_when_needed[r],
5328 reload_mode[r]);
5329 for (k = 0; k < nr; k++)
5330 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
5331 }
5332 }
5333 }
5334
5335 /* If we found a register to use already, or if this is an optional
5336 reload, we are done. */
5337 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5338 continue;
5339
5340 #if 0 /* No longer needed for correct operation. Might or might not
5341 give better code on the average. Want to experiment? */
5342
5343 /* See if there is a later reload that has a class different from our
5344 class that intersects our class or that requires fewer registers
5345 than our reload. If so, we must allocate a register to this
5346 reload now, since that reload might inherit a previous reload
5347 and take the only available register in our class. Don't do this
5348 for optional reloads since they will force all previous reloads
5349 to be allocated. Also don't do this for reloads that have been
5350 turned off. */
5351
5352 for (i = j + 1; i < n_reloads; i++)
5353 {
5354 int s = reload_order[i];
5355
5356 if ((reload_in[s] == 0 && reload_out[s] == 0
5357 && ! reload_secondary_p[s])
5358 || reload_optional[s])
5359 continue;
5360
5361 if ((reload_reg_class[s] != reload_reg_class[r]
5362 && reg_classes_intersect_p (reload_reg_class[r],
5363 reload_reg_class[s]))
5364 || reload_nregs[s] < reload_nregs[r])
5365 break;
5366 }
5367
5368 if (i == n_reloads)
5369 continue;
5370
5371 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5372 #endif
5373 }
5374
5375 /* Now allocate reload registers for anything non-optional that
5376 didn't get one yet. */
5377 for (j = 0; j < n_reloads; j++)
5378 {
5379 register int r = reload_order[j];
5380
5381 /* Ignore reloads that got marked inoperative. */
5382 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5383 continue;
5384
5385 /* Skip reloads that already have a register allocated or are
5386 optional. */
5387 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5388 continue;
5389
5390 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5391 break;
5392 }
5393
5394 /* If that loop got all the way, we have won. */
5395 if (j == n_reloads)
5396 break;
5397
5398 fail:
5399 /* Loop around and try without any inheritance. */
5400 /* First undo everything done by the failed attempt
5401 to allocate with inheritance. */
5402 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5403 sizeof reload_reg_rtx);
5404 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5405 sizeof reload_inherited);
5406 bcopy ((char *) save_reload_inheritance_insn,
5407 (char *) reload_inheritance_insn,
5408 sizeof reload_inheritance_insn);
5409 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5410 sizeof reload_override_in);
5411 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5412 sizeof reload_spill_index);
5413 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5414 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5415 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5416 save_reload_reg_used_in_op_addr);
5417 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5418 save_reload_reg_used_in_op_addr_reload);
5419 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5420 save_reload_reg_used_in_insn);
5421 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5422 save_reload_reg_used_in_other_addr);
5423
5424 for (i = 0; i < reload_n_operands; i++)
5425 {
5426 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5427 save_reload_reg_used_in_input[i]);
5428 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5429 save_reload_reg_used_in_output[i]);
5430 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5431 save_reload_reg_used_in_input_addr[i]);
5432 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5433 save_reload_reg_used_in_output_addr[i]);
5434 }
5435 }
5436
5437 /* If we thought we could inherit a reload, because it seemed that
5438 nothing else wanted the same reload register earlier in the insn,
5439 verify that assumption, now that all reloads have been assigned. */
5440
5441 for (j = 0; j < n_reloads; j++)
5442 {
5443 register int r = reload_order[j];
5444
5445 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5446 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5447 reload_opnum[r],
5448 reload_when_needed[r]))
5449 reload_inherited[r] = 0;
5450
5451 /* If we found a better place to reload from,
5452 validate it in the same fashion, if it is a reload reg. */
5453 if (reload_override_in[r]
5454 && (GET_CODE (reload_override_in[r]) == REG
5455 || GET_CODE (reload_override_in[r]) == SUBREG))
5456 {
5457 int regno = true_regnum (reload_override_in[r]);
5458 if (spill_reg_order[regno] >= 0
5459 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5460 reload_when_needed[r]))
5461 reload_override_in[r] = 0;
5462 }
5463 }
5464
5465 /* Now that reload_override_in is known valid,
5466 actually override reload_in. */
5467 for (j = 0; j < n_reloads; j++)
5468 if (reload_override_in[j])
5469 reload_in[j] = reload_override_in[j];
5470
5471 /* If this reload won't be done because it has been cancelled or is
5472 optional and not inherited, clear reload_reg_rtx so other
5473 routines (such as subst_reloads) don't get confused. */
5474 for (j = 0; j < n_reloads; j++)
5475 if (reload_reg_rtx[j] != 0
5476 && ((reload_optional[j] && ! reload_inherited[j])
5477 || (reload_in[j] == 0 && reload_out[j] == 0
5478 && ! reload_secondary_p[j])))
5479 {
5480 int regno = true_regnum (reload_reg_rtx[j]);
5481
5482 if (spill_reg_order[regno] >= 0)
5483 clear_reload_reg_in_use (regno, reload_opnum[j],
5484 reload_when_needed[j], reload_mode[j]);
5485 reload_reg_rtx[j] = 0;
5486 }
5487
5488 /* Record which pseudos and which spill regs have output reloads. */
5489 for (j = 0; j < n_reloads; j++)
5490 {
5491 register int r = reload_order[j];
5492
5493 i = reload_spill_index[r];
5494
5495 /* I is nonneg if this reload used one of the spill regs.
5496 If reload_reg_rtx[r] is 0, this is an optional reload
5497 that we opted to ignore. */
5498 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5499 && reload_reg_rtx[r] != 0)
5500 {
5501 register int nregno = REGNO (reload_out[r]);
5502 int nr = 1;
5503
5504 if (nregno < FIRST_PSEUDO_REGISTER)
5505 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5506
5507 while (--nr >= 0)
5508 reg_has_output_reload[nregno + nr] = 1;
5509
5510 if (i >= 0)
5511 {
5512 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5513 while (--nr >= 0)
5514 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5515 }
5516
5517 if (reload_when_needed[r] != RELOAD_OTHER
5518 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5519 && reload_when_needed[r] != RELOAD_FOR_INSN)
5520 abort ();
5521 }
5522 }
5523 }
5524 \f
5525 /* If SMALL_REGISTER_CLASSES is defined, we may not have merged two
5526 reloads of the same item for fear that we might not have enough reload
5527 registers. However, normally they will get the same reload register
5528 and hence actually need not be loaded twice.
5529
5530 Here we check for the most common case of this phenomenon: when we have
5531 a number of reloads for the same object, each of which were allocated
5532 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5533 reload, and is not modified in the insn itself. If we find such,
5534 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5535 This will not increase the number of spill registers needed and will
5536 prevent redundant code. */
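/* Hypothetical case this catches: the same pseudo is reloaded once as
   RELOAD_FOR_INPUT_ADDRESS for operand 0 and again as
   RELOAD_FOR_INPUT_ADDRESS for operand 1, and both reloads were assigned
   the same reload register.  Merging them into a single RELOAD_OTHER
   reload emits one load instead of two.  */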
5537
5538 #ifdef SMALL_REGISTER_CLASSES
5539
5540 static void
5541 merge_assigned_reloads (insn)
5542 rtx insn;
5543 {
5544 int i, j;
5545
5546 /* Scan all the reloads looking for ones that only load values and
5547 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5548 assigned and not modified by INSN. */
5549
5550 for (i = 0; i < n_reloads; i++)
5551 {
5552 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5553 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5554 || reg_set_p (reload_reg_rtx[i], insn))
5555 continue;
5556
5557 /* Look at all other reloads. Ensure that the only use of this
5558 reload_reg_rtx is in a reload that just loads the same value
5559 as we do. Note that any secondary reloads must be of the identical
5560 class since the values, modes, and result registers are the
5561 same, so we need not do anything with any secondary reloads. */
5562
5563 for (j = 0; j < n_reloads; j++)
5564 {
5565 if (i == j || reload_reg_rtx[j] == 0
5566 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5567 reload_reg_rtx[i]))
5568 continue;
5569
5570 /* If the reload regs aren't exactly the same (e.g., different modes)
5571 or if the values are different, we can't merge anything with this
5572 reload register. */
5573
5574 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5575 || reload_out[j] != 0 || reload_in[j] == 0
5576 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5577 break;
5578 }
5579
5580 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5581 we, in fact, found any matching reloads. */
5582
5583 if (j == n_reloads)
5584 {
5585 for (j = 0; j < n_reloads; j++)
5586 if (i != j && reload_reg_rtx[j] != 0
5587 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5588 {
5589 reload_when_needed[i] = RELOAD_OTHER;
5590 reload_in[j] = 0;
5591 transfer_replacements (i, j);
5592 }
5593
5594 /* If this is now RELOAD_OTHER, look for any reloads that load
5595 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5596 if they were for inputs, RELOAD_OTHER for outputs. Note that
5597 this test is equivalent to looking for reloads for this operand
5598 number. */
5599
5600 if (reload_when_needed[i] == RELOAD_OTHER)
5601 for (j = 0; j < n_reloads; j++)
5602 if (reload_in[j] != 0
5603 && reload_when_needed[j] != RELOAD_OTHER
5604 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5605 reload_in[i]))
5606 reload_when_needed[j]
5607 = reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
5608 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5609 }
5610 }
5611 }
5612 #endif /* SMALL_REGISTER_CLASSES */
5613 \f
5614 /* Output insns to reload values in and out of the chosen reload regs. */
5615
5616 static void
5617 emit_reload_insns (insn)
5618 rtx insn;
5619 {
5620 register int j;
5621 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5622 rtx other_input_address_reload_insns = 0;
5623 rtx other_input_reload_insns = 0;
5624 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5625 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5626 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5627 rtx operand_reload_insns = 0;
5628 rtx other_operand_reload_insns = 0;
5629 rtx following_insn = NEXT_INSN (insn);
5630 rtx before_insn = insn;
5631 int special;
5632 /* Values to be put in spill_reg_store are put here first. */
5633 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5634
5635 for (j = 0; j < reload_n_operands; j++)
5636 input_reload_insns[j] = input_address_reload_insns[j]
5637 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5638
5639 /* Now output the instructions to copy the data into and out of the
5640 reload registers. Do these in the order that the reloads were reported,
5641 since reloads of base and index registers precede reloads of operands
5642 and the operands may need the base and index registers reloaded. */
5643
5644 for (j = 0; j < n_reloads; j++)
5645 {
5646 register rtx old;
5647 rtx oldequiv_reg = 0;
5648
5649 if (reload_spill_index[j] >= 0)
5650 new_spill_reg_store[reload_spill_index[j]] = 0;
5651
5652 old = reload_in[j];
5653 if (old != 0 && ! reload_inherited[j]
5654 && ! rtx_equal_p (reload_reg_rtx[j], old)
5655 && reload_reg_rtx[j] != 0)
5656 {
5657 register rtx reloadreg = reload_reg_rtx[j];
5658 rtx oldequiv = 0;
5659 enum machine_mode mode;
5660 rtx *where;
5661
5662 /* Determine the mode to reload in.
5663 This is very tricky because we have three to choose from.
5664 There is the mode the insn operand wants (reload_inmode[J]).
5665 There is the mode of the reload register RELOADREG.
5666 There is the intrinsic mode of the operand, which we could find
5667 by stripping some SUBREGs.
5668 It turns out that RELOADREG's mode is irrelevant:
5669 we can change that arbitrarily.
5670
5671 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5672 then the reload reg may not support QImode moves, so use SImode.
5673 If foo is in memory due to spilling a pseudo reg, this is safe,
5674 because the QImode value is in the least significant part of a
5675 slot big enough for a SImode. If foo is some other sort of
5676 memory reference, then it is impossible to reload this case,
5677 so previous passes had better make sure this never happens.
5678
5679 Then consider a one-word union which has SImode and one of its
5680 members is a float, being fetched as (SUBREG:SF union:SI).
5681 We must fetch that as SFmode because we could be loading into
5682 a float-only register. In this case OLD's mode is correct.
5683
5684 Consider an immediate integer: it has VOIDmode. Here we need
5685 to get a mode from something else.
5686
5687 In some cases, there is a fourth mode, the operand's
5688 containing mode. If the insn specifies a containing mode for
5689 this operand, it overrides all others.
5690
5691 I am not sure whether the algorithm here is always right,
5692 but it does the right things in those cases. */
5693
5694 mode = GET_MODE (old);
5695 if (mode == VOIDmode)
5696 mode = reload_inmode[j];
5697
5698 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5699 /* If we need a secondary register for this operation, see if
5700 the value is already in a register in that class. Don't
5701 do this if the secondary register will be used as a scratch
5702 register. */
5703
5704 if (reload_secondary_in_reload[j] >= 0
5705 && reload_secondary_in_icode[j] == CODE_FOR_nothing
5706 && optimize)
5707 oldequiv
5708 = find_equiv_reg (old, insn,
5709 reload_reg_class[reload_secondary_in_reload[j]],
5710 -1, NULL_PTR, 0, mode);
5711 #endif
5712
5713 /* If reloading from memory, see if there is a register
5714 that already holds the same value. If so, reload from there.
5715 We can pass 0 as the reload_reg_p argument because
5716 any other reload has either already been emitted,
5717 in which case find_equiv_reg will see the reload-insn,
5718 or has yet to be emitted, in which case it doesn't matter
5719 because we will use this equiv reg right away. */
5720
5721 if (oldequiv == 0 && optimize
5722 && (GET_CODE (old) == MEM
5723 || (GET_CODE (old) == REG
5724 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5725 && reg_renumber[REGNO (old)] < 0)))
5726 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5727 -1, NULL_PTR, 0, mode);
5728
5729 if (oldequiv)
5730 {
5731 int regno = true_regnum (oldequiv);
5732
5733 /* If OLDEQUIV is a spill register, don't use it for this
5734 if any other reload needs it at an earlier stage of this insn
5735 or at this stage. */
5736 if (spill_reg_order[regno] >= 0
5737 && (! reload_reg_free_p (regno, reload_opnum[j],
5738 reload_when_needed[j])
5739 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5740 reload_when_needed[j])))
5741 oldequiv = 0;
5742
5743 /* If OLDEQUIV is not a spill register,
5744 don't use it if any other reload wants it. */
5745 if (spill_reg_order[regno] < 0)
5746 {
5747 int k;
5748 for (k = 0; k < n_reloads; k++)
5749 if (reload_reg_rtx[k] != 0 && k != j
5750 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5751 oldequiv))
5752 {
5753 oldequiv = 0;
5754 break;
5755 }
5756 }
5757
5758 /* If it is no cheaper to copy from OLDEQUIV into the
5759 reload register than it would be to move from memory,
5760 don't use it. Likewise, if we need a secondary register
5761 or memory. */
5762
5763 if (oldequiv != 0
5764 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5765 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5766 reload_reg_class[j])
5767 >= MEMORY_MOVE_COST (mode)))
5768 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5769 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5770 mode, oldequiv)
5771 != NO_REGS)
5772 #endif
5773 #ifdef SECONDARY_MEMORY_NEEDED
5774 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5775 REGNO_REG_CLASS (regno),
5776 mode)
5777 #endif
5778 ))
5779 oldequiv = 0;
5780 }
5781
5782 if (oldequiv == 0)
5783 oldequiv = old;
5784 else if (GET_CODE (oldequiv) == REG)
5785 oldequiv_reg = oldequiv;
5786 else if (GET_CODE (oldequiv) == SUBREG)
5787 oldequiv_reg = SUBREG_REG (oldequiv);
5788
5789 /* If we are reloading from a register that was recently stored in
5790 with an output-reload, see if we can prove there was
5791 actually no need to store the old value in it. */
5792
5793 if (optimize && GET_CODE (oldequiv) == REG
5794 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5795 && spill_reg_order[REGNO (oldequiv)] >= 0
5796 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
5797 && find_reg_note (insn, REG_DEAD, reload_in[j])
5798 /* This is unsafe if operand occurs more than once in current
5799 insn. Perhaps some occurrences weren't reloaded. */
5800 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5801 delete_output_reload
5802 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5803
5804 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5805 then load RELOADREG from OLDEQUIV. Note that we cannot use
5806 gen_lowpart_common since it can do the wrong thing when
5807 RELOADREG has a multi-word mode. Note that RELOADREG
5808 must always be a REG here. */
5809
5810 if (GET_MODE (reloadreg) != mode)
5811 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5812 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5813 oldequiv = SUBREG_REG (oldequiv);
5814 if (GET_MODE (oldequiv) != VOIDmode
5815 && mode != GET_MODE (oldequiv))
5816 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5817
5818 /* Switch to the right place to emit the reload insns. */
5819 switch (reload_when_needed[j])
5820 {
5821 case RELOAD_OTHER:
5822 where = &other_input_reload_insns;
5823 break;
5824 case RELOAD_FOR_INPUT:
5825 where = &input_reload_insns[reload_opnum[j]];
5826 break;
5827 case RELOAD_FOR_INPUT_ADDRESS:
5828 where = &input_address_reload_insns[reload_opnum[j]];
5829 break;
5830 case RELOAD_FOR_OUTPUT_ADDRESS:
5831 where = &output_address_reload_insns[reload_opnum[j]];
5832 break;
5833 case RELOAD_FOR_OPERAND_ADDRESS:
5834 where = &operand_reload_insns;
5835 break;
5836 case RELOAD_FOR_OPADDR_ADDR:
5837 where = &other_operand_reload_insns;
5838 break;
5839 case RELOAD_FOR_OTHER_ADDRESS:
5840 where = &other_input_address_reload_insns;
5841 break;
5842 default:
5843 abort ();
5844 }
5845
5846 push_to_sequence (*where);
5847 special = 0;
5848
5849 /* Auto-increment addresses must be reloaded in a special way. */
5850 if (GET_CODE (oldequiv) == POST_INC
5851 || GET_CODE (oldequiv) == POST_DEC
5852 || GET_CODE (oldequiv) == PRE_INC
5853 || GET_CODE (oldequiv) == PRE_DEC)
5854 {
5855 /* We are not going to bother supporting the case where an
5856 incremented register can't be copied directly from
5857 OLDEQUIV since this seems highly unlikely. */
5858 if (reload_secondary_in_reload[j] >= 0)
5859 abort ();
5860 /* Prevent normal processing of this reload. */
5861 special = 1;
5862 /* Output a special code sequence for this case. */
5863 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5864 }
5865
5866 /* If we are reloading a pseudo-register that was set by the previous
5867 insn, see if we can get rid of that pseudo-register entirely
5868 by redirecting the previous insn into our reload register. */
5869
5870 else if (optimize && GET_CODE (old) == REG
5871 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5872 && dead_or_set_p (insn, old)
5873 /* This is unsafe if some other reload
5874 uses the same reg first. */
5875 && reload_reg_free_before_p (REGNO (reloadreg),
5876 reload_opnum[j],
5877 reload_when_needed[j]))
5878 {
5879 rtx temp = PREV_INSN (insn);
5880 while (temp && GET_CODE (temp) == NOTE)
5881 temp = PREV_INSN (temp);
5882 if (temp
5883 && GET_CODE (temp) == INSN
5884 && GET_CODE (PATTERN (temp)) == SET
5885 && SET_DEST (PATTERN (temp)) == old
5886 /* Make sure we can access insn_operand_constraint. */
5887 && asm_noperands (PATTERN (temp)) < 0
5888 /* This is unsafe if prev insn rejects our reload reg. */
5889 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5890 reloadreg)
5891 /* This is unsafe if operand occurs more than once in current
5892 insn. Perhaps some occurrences aren't reloaded. */
5893 && count_occurrences (PATTERN (insn), old) == 1
5894 /* Don't risk splitting a matching pair of operands. */
5895 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5896 {
5897 /* Store into the reload register instead of the pseudo. */
5898 SET_DEST (PATTERN (temp)) = reloadreg;
5899 /* If these are the only uses of the pseudo reg,
5900 pretend for GDB it lives in the reload reg we used. */
5901 if (reg_n_deaths[REGNO (old)] == 1
5902 && reg_n_sets[REGNO (old)] == 1)
5903 {
5904 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5905 alter_reg (REGNO (old), -1);
5906 }
5907 special = 1;
5908 }
5909 }
5910
5911 /* We can't do that, so output an insn to load RELOADREG. */
5912
5913 if (! special)
5914 {
5915 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5916 rtx second_reload_reg = 0;
5917 enum insn_code icode;
5918
5919 /* If we have a secondary reload, pick up the secondary register
5920 and icode, if any. If OLDEQUIV and OLD are different or
5921 if this is an in-out reload, recompute whether or not we
5922 still need a secondary register and what the icode should
5923 be. If we still need a secondary register and the class or
5924 icode is different, go back to reloading from OLD if using
5925 OLDEQUIV means that we got the wrong type of register. We
5926 cannot have different class or icode due to an in-out reload
5927 because we don't make such reloads when both the input and
5928 output need secondary reload registers. */
5929
5930 if (reload_secondary_in_reload[j] >= 0)
5931 {
5932 int secondary_reload = reload_secondary_in_reload[j];
5933 rtx real_oldequiv = oldequiv;
5934 rtx real_old = old;
5935
5936 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5937 and similarly for OLD.
5938 See comments in get_secondary_reload in reload.c. */
5939 if (GET_CODE (oldequiv) == REG
5940 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5941 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5942 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5943
5944 if (GET_CODE (old) == REG
5945 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5946 && reg_equiv_mem[REGNO (old)] != 0)
5947 real_old = reg_equiv_mem[REGNO (old)];
5948
5949 second_reload_reg = reload_reg_rtx[secondary_reload];
5950 icode = reload_secondary_in_icode[j];
5951
5952 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5953 || (reload_in[j] != 0 && reload_out[j] != 0))
5954 {
5955 enum reg_class new_class
5956 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5957 mode, real_oldequiv);
5958
5959 if (new_class == NO_REGS)
5960 second_reload_reg = 0;
5961 else
5962 {
5963 enum insn_code new_icode;
5964 enum machine_mode new_mode;
5965
5966 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5967 REGNO (second_reload_reg)))
5968 oldequiv = old, real_oldequiv = real_old;
5969 else
5970 {
5971 new_icode = reload_in_optab[(int) mode];
5972 if (new_icode != CODE_FOR_nothing
5973 && ((insn_operand_predicate[(int) new_icode][0]
5974 && ! ((*insn_operand_predicate[(int) new_icode][0])
5975 (reloadreg, mode)))
5976 || (insn_operand_predicate[(int) new_icode][1]
5977 && ! ((*insn_operand_predicate[(int) new_icode][1])
5978 (real_oldequiv, mode)))))
5979 new_icode = CODE_FOR_nothing;
5980
5981 if (new_icode == CODE_FOR_nothing)
5982 new_mode = mode;
5983 else
5984 new_mode = insn_operand_mode[(int) new_icode][2];
5985
5986 if (GET_MODE (second_reload_reg) != new_mode)
5987 {
5988 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5989 new_mode))
5990 oldequiv = old, real_oldequiv = real_old;
5991 else
5992 second_reload_reg
5993 = gen_rtx (REG, new_mode,
5994 REGNO (second_reload_reg));
5995 }
5996 }
5997 }
5998 }
5999
6000 /* If we still need a secondary reload register, check
6001 to see if it is being used as a scratch or intermediate
6002 register and generate code appropriately. If we need
6003 a scratch register, use REAL_OLDEQUIV since the form of
6004 the insn may depend on the actual address if it is
6005 a MEM. */
6006
6007 if (second_reload_reg)
6008 {
6009 if (icode != CODE_FOR_nothing)
6010 {
6011 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6012 second_reload_reg));
6013 special = 1;
6014 }
6015 else
6016 {
6017 /* See if we need a scratch register to load the
6018 intermediate register (a tertiary reload). */
6019 enum insn_code tertiary_icode
6020 = reload_secondary_in_icode[secondary_reload];
6021
6022 if (tertiary_icode != CODE_FOR_nothing)
6023 {
6024 rtx third_reload_reg
6025 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6026
6027 emit_insn ((GEN_FCN (tertiary_icode)
6028 (second_reload_reg, real_oldequiv,
6029 third_reload_reg)));
6030 }
6031 else
6032 gen_reload (second_reload_reg, oldequiv,
6033 reload_opnum[j],
6034 reload_when_needed[j]);
6035
6036 oldequiv = second_reload_reg;
6037 }
6038 }
6039 }
6040 #endif
6041
6042 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6043 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6044 reload_when_needed[j]);
6045
6046 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6047 /* We may have to make a REG_DEAD note for the secondary reload
6048 register in the insns we just made. Find the last insn that
6049 mentioned the register. */
6050 if (! special && second_reload_reg
6051 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6052 {
6053 rtx prev;
6054
6055 for (prev = get_last_insn (); prev;
6056 prev = PREV_INSN (prev))
6057 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6058 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6059 PATTERN (prev)))
6060 {
6061 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6062 second_reload_reg,
6063 REG_NOTES (prev));
6064 break;
6065 }
6066 }
6067 #endif
6068 }
6069
6070 /* End this sequence. */
6071 *where = get_insns ();
6072 end_sequence ();
6073 }
6074
6075 /* Add a note saying the input reload reg
6076 dies in this insn, if anyone cares. */
6077 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6078 if (old != 0
6079 && reload_reg_rtx[j] != old
6080 && reload_reg_rtx[j] != 0
6081 && reload_out[j] == 0
6082 && ! reload_inherited[j]
6083 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6084 {
6085 register rtx reloadreg = reload_reg_rtx[j];
6086
6087 #if 0
6088 /* We can't abort here because we need to support this for sched.c.
6089 It's not terrible to miss a REG_DEAD note, but we should try
6090 to figure out how to do this correctly. */
6091 /* The code below is incorrect for address-only reloads. */
6092 if (reload_when_needed[j] != RELOAD_OTHER
6093 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6094 abort ();
6095 #endif
6096
6097 /* Add a death note to this insn, for an input reload. */
6098
6099 if ((reload_when_needed[j] == RELOAD_OTHER
6100 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6101 && ! dead_or_set_p (insn, reloadreg))
6102 REG_NOTES (insn)
6103 = gen_rtx (EXPR_LIST, REG_DEAD,
6104 reloadreg, REG_NOTES (insn));
6105 }
6106
6107 /* When we inherit a reload, the last marked death of the reload reg
6108 may no longer really be a death. */
6109 if (reload_reg_rtx[j] != 0
6110 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6111 && reload_inherited[j])
6112 {
6113 /* Handle inheriting an output reload.
6114 Remove the death note from the output reload insn. */
6115 if (reload_spill_index[j] >= 0
6116 && GET_CODE (reload_in[j]) == REG
6117 && spill_reg_store[reload_spill_index[j]] != 0
6118 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6119 REG_DEAD, REGNO (reload_reg_rtx[j])))
6120 remove_death (REGNO (reload_reg_rtx[j]),
6121 spill_reg_store[reload_spill_index[j]]);
6122 /* Likewise for input reloads that were inherited. */
6123 else if (reload_spill_index[j] >= 0
6124 && GET_CODE (reload_in[j]) == REG
6125 && spill_reg_store[reload_spill_index[j]] == 0
6126 && reload_inheritance_insn[j] != 0
6127 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6128 REGNO (reload_reg_rtx[j])))
6129 remove_death (REGNO (reload_reg_rtx[j]),
6130 reload_inheritance_insn[j]);
6131 else
6132 {
6133 rtx prev;
6134
6135 /* We got this register from find_equiv_reg.
6136 Search back for its last death note and get rid of it.
6137 But don't search back too far.
6138 Don't go past a place where this reg is set,
6139 since a death note before that remains valid. */
6140 for (prev = PREV_INSN (insn);
6141 prev && GET_CODE (prev) != CODE_LABEL;
6142 prev = PREV_INSN (prev))
6143 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6144 && dead_or_set_p (prev, reload_reg_rtx[j]))
6145 {
6146 if (find_regno_note (prev, REG_DEAD,
6147 REGNO (reload_reg_rtx[j])))
6148 remove_death (REGNO (reload_reg_rtx[j]), prev);
6149 break;
6150 }
6151 }
6152 }
6153
6154 /* We might have used find_equiv_reg above to choose an alternate
6155 place from which to reload. If so, and it died, we need to remove
6156 that death and move it to one of the insns we just made. */
6157
6158 if (oldequiv_reg != 0
6159 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6160 {
6161 rtx prev, prev1;
6162
6163 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6164 prev = PREV_INSN (prev))
6165 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6166 && dead_or_set_p (prev, oldequiv_reg))
6167 {
6168 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6169 {
6170 for (prev1 = this_reload_insn;
6171 prev1; prev1 = PREV_INSN (prev1))
6172 if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
6173 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6174 PATTERN (prev1)))
6175 {
6176 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6177 oldequiv_reg,
6178 REG_NOTES (prev1));
6179 break;
6180 }
6181 remove_death (REGNO (oldequiv_reg), prev);
6182 }
6183 break;
6184 }
6185 }
6186 #endif
6187
6188 /* If we are reloading a register that was recently stored in with an
6189 output-reload, see if we can prove there was
6190 actually no need to store the old value in it. */
6191
6192 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6193 && reload_in[j] != 0
6194 && GET_CODE (reload_in[j]) == REG
6195 #if 0
6196 /* There doesn't seem to be any reason to restrict this to pseudos
6197 and doing so loses in the case where we are copying from a
6198 register of the wrong class. */
6199 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6200 #endif
6201 && spill_reg_store[reload_spill_index[j]] != 0
6202 /* This is unsafe if some other reload uses the same reg first. */
6203 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6204 reload_opnum[j], reload_when_needed[j])
6205 && dead_or_set_p (insn, reload_in[j])
6206 /* This is unsafe if operand occurs more than once in current
6207 insn. Perhaps some occurrences weren't reloaded. */
6208 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6209 delete_output_reload (insn, j,
6210 spill_reg_store[reload_spill_index[j]]);
6211
6212 /* Input-reloading is done. Now do output-reloading,
6213 storing the value from the reload-register after the main insn
6214 if reload_out[j] is nonzero.
6215
6216 ??? At some point we need to support handling output reloads of
6217 JUMP_INSNs or insns that set cc0. */
6218 old = reload_out[j];
6219 if (old != 0
6220 && reload_reg_rtx[j] != old
6221 && reload_reg_rtx[j] != 0)
6222 {
6223 register rtx reloadreg = reload_reg_rtx[j];
6224 register rtx second_reloadreg = 0;
6225 rtx note, p;
6226 enum machine_mode mode;
6227 int special = 0;
6228
6229 /* An output operand that dies right away does need a reload,
6230 but need not be copied from it. Show the new location in the
6231 REG_UNUSED note. */
6232 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6233 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6234 {
6235 XEXP (note, 0) = reload_reg_rtx[j];
6236 continue;
6237 }
6238 else if (GET_CODE (old) == SCRATCH)
6239 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6240 but we don't want to make an output reload. */
6241 continue;
6242
6243 #if 0
6244 /* Strip off of OLD any size-increasing SUBREGs such as
6245 (SUBREG:SI foo:QI 0). */
6246
6247 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6248 && (GET_MODE_SIZE (GET_MODE (old))
6249 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6250 old = SUBREG_REG (old);
6251 #endif
6252
6253 /* If this is a JUMP_INSN, we can't support output reloads yet. */
6254 if (GET_CODE (insn) == JUMP_INSN)
6255 abort ();
6256
6257 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6258
6259 /* Determine the mode to reload in.
6260 See comments above (for input reloading). */
6261
6262 mode = GET_MODE (old);
6263 if (mode == VOIDmode)
6264 {
6265 /* VOIDmode should never happen for an output. */
6266 if (asm_noperands (PATTERN (insn)) < 0)
6267 /* It's the compiler's fault. */
6268 fatal_insn ("VOIDmode on an output", insn);
6269 error_for_asm (insn, "output operand is constant in `asm'");
6270 /* Prevent crash--use something we know is valid. */
6271 mode = word_mode;
6272 old = gen_rtx (REG, mode, REGNO (reloadreg));
6273 }
6274
6275 if (GET_MODE (reloadreg) != mode)
6276 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6277
6278 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6279
6280 /* If we need two reload regs, set RELOADREG to the intermediate
6281 one, since it will be stored into OLD. We might need a secondary
6282 register only for an input reload, so check again here. */
6283
6284 if (reload_secondary_out_reload[j] >= 0)
6285 {
6286 rtx real_old = old;
6287
6288 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6289 && reg_equiv_mem[REGNO (old)] != 0)
6290 real_old = reg_equiv_mem[REGNO (old)];
6291
6292 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6293 mode, real_old)
6294 != NO_REGS))
6295 {
6296 second_reloadreg = reloadreg;
6297 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6298
6299 /* See if RELOADREG is to be used as a scratch register
6300 or as an intermediate register. */
6301 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6302 {
6303 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6304 (real_old, second_reloadreg, reloadreg)));
6305 special = 1;
6306 }
6307 else
6308 {
6309 /* See if we need both a scratch and intermediate reload
6310 register. */
6311
6312 int secondary_reload = reload_secondary_out_reload[j];
6313 enum insn_code tertiary_icode
6314 = reload_secondary_out_icode[secondary_reload];
6315
6316 if (GET_MODE (reloadreg) != mode)
6317 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6318
6319 if (tertiary_icode != CODE_FOR_nothing)
6320 {
6321 rtx third_reloadreg
6322 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6323
6324 /* Copy primary reload reg to secondary reload reg
6325 (note that these have been swapped above), then copy
6326 secondary reload reg to OLD using our insn. */
6327
6328 gen_reload (reloadreg, second_reloadreg,
6329 reload_opnum[j], reload_when_needed[j]);
6330 emit_insn ((GEN_FCN (tertiary_icode)
6331 (real_old, reloadreg, third_reloadreg)));
6332 special = 1;
6333 }
6334
6335 else
6336 /* Copy between the reload regs here and then to
6337 OUT later. */
6338
6339 gen_reload (reloadreg, second_reloadreg,
6340 reload_opnum[j], reload_when_needed[j]);
6341 }
6342 }
6343 }
6344 #endif
6345
6346 /* Output the last reload insn. */
6347 if (! special)
6348 gen_reload (old, reloadreg, reload_opnum[j],
6349 reload_when_needed[j]);
6350
6351 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6352 /* If final will look at death notes for this reg,
6353 put one on the last output-reload insn to use it. Similarly
6354 for any secondary register. */
6355 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6356 for (p = get_last_insn (); p; p = PREV_INSN (p))
6357 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6358 && reg_overlap_mentioned_for_reload_p (reloadreg,
6359 PATTERN (p)))
6360 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6361 reloadreg, REG_NOTES (p));
6362
6363 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6364 if (! special
6365 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6366 for (p = get_last_insn (); p; p = PREV_INSN (p))
6367 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6368 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6369 PATTERN (p)))
6370 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6371 second_reloadreg, REG_NOTES (p));
6372 #endif
6373 #endif
6374 /* Look at all insns we emitted, just to be safe. */
6375 for (p = get_insns (); p; p = NEXT_INSN (p))
6376 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6377 {
6378 /* If this output reload doesn't come from a spill reg,
6379 clear any memory of reloaded copies of the pseudo reg.
6380 If this output reload comes from a spill reg,
6381 reg_has_output_reload will make this do nothing. */
6382 note_stores (PATTERN (p), forget_old_reloads_1);
6383
6384 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6385 && reload_spill_index[j] >= 0)
6386 new_spill_reg_store[reload_spill_index[j]] = p;
6387 }
6388
6389 output_reload_insns[reload_opnum[j]] = get_insns ();
6390 end_sequence ();
6391 }
6392 }
6393
6394 /* Now write all the insns we made for reloads in the order expected by
6395 the allocation functions. Prior to the insn being reloaded, we write
6396 the following reloads:
6397
6398 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6399
6400 RELOAD_OTHER reloads.
6401
6402 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6403 the RELOAD_FOR_INPUT reload for the operand.
6404
6405 RELOAD_FOR_OPADDR_ADDR reloads.
6406
6407 RELOAD_FOR_OPERAND_ADDRESS reloads.
6408
6409 After the insn being reloaded, we write the following:
6410
6411 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6412 the RELOAD_FOR_OUTPUT reload for that operand. */
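/* As an illustration (operand numbers made up): for an insn with one
   input operand and one output operand, the emitted order is roughly
   RELOAD_FOR_OTHER_ADDRESS, then RELOAD_OTHER, then the input-address
   and input reloads for operand 0, then RELOAD_FOR_OPADDR_ADDR and
   RELOAD_FOR_OPERAND_ADDRESS reloads, the reloaded insn itself, and
   finally the output-address and output reloads for operand 1.  */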
6413
6414 emit_insns_before (other_input_address_reload_insns, before_insn);
6415 emit_insns_before (other_input_reload_insns, before_insn);
6416
6417 for (j = 0; j < reload_n_operands; j++)
6418 {
6419 emit_insns_before (input_address_reload_insns[j], before_insn);
6420 emit_insns_before (input_reload_insns[j], before_insn);
6421 }
6422
6423 emit_insns_before (other_operand_reload_insns, before_insn);
6424 emit_insns_before (operand_reload_insns, before_insn);
6425
6426 for (j = 0; j < reload_n_operands; j++)
6427 {
6428 emit_insns_before (output_address_reload_insns[j], following_insn);
6429 emit_insns_before (output_reload_insns[j], following_insn);
6430 }
6431
6432 /* Move death notes from INSN
6433 to output-operand-address and output reload insns. */
6434 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6435 {
6436 rtx insn1;
6437 /* Loop over those insns, last ones first. */
6438 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6439 insn1 = PREV_INSN (insn1))
6440 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6441 {
6442 rtx source = SET_SRC (PATTERN (insn1));
6443 rtx dest = SET_DEST (PATTERN (insn1));
6444
6445 /* The note we will examine next. */
6446 rtx reg_notes = REG_NOTES (insn);
6447 /* The place that pointed to this note. */
6448 rtx *prev_reg_note = &REG_NOTES (insn);
6449
6450 /* If the note is for something used in the source of this
6451 reload insn, or in the output address, move the note. */
6452 while (reg_notes)
6453 {
6454 rtx next_reg_notes = XEXP (reg_notes, 1);
6455 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6456 && GET_CODE (XEXP (reg_notes, 0)) == REG
6457 && ((GET_CODE (dest) != REG
6458 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6459 dest))
6460 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6461 source)))
6462 {
6463 *prev_reg_note = next_reg_notes;
6464 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6465 REG_NOTES (insn1) = reg_notes;
6466 }
6467 else
6468 prev_reg_note = &XEXP (reg_notes, 1);
6469
6470 reg_notes = next_reg_notes;
6471 }
6472 }
6473 }
6474 #endif
6475
6476 /* For all the spill regs newly reloaded in this instruction,
6477 record what they were reloaded from, so subsequent instructions
6478 can inherit the reloads.
6479
6480 Update spill_reg_store for the reloads of this insn.
6481 Copy the elements that were updated in the loop above. */
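/* A purely illustrative example (register numbers are hypothetical): if
   this insn reloaded pseudo 42 using spill reg 3, the loop below sets
   reg_last_reload_reg[42] to the REG rtx for hard reg 3 and records 42 in
   the reg_reloaded_contents entry for that spill reg, so that a later insn
   needing pseudo 42 can inherit the value already sitting in hard reg 3
   instead of loading it again.  */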
6482
6483 for (j = 0; j < n_reloads; j++)
6484 {
6485 register int r = reload_order[j];
6486 register int i = reload_spill_index[r];
6487
6488 /* I is nonneg if this reload used one of the spill regs.
6489 If reload_reg_rtx[r] is 0, this is an optional reload
6490 that we opted to ignore.
6491
6492 Also ignore reloads that don't reach the end of the insn,
6493 since we will eventually see the one that does. */
6494
6495 if (i >= 0 && reload_reg_rtx[r] != 0
6496 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6497 reload_when_needed[r]))
6498 {
6499 /* First, clear out memory of what used to be in this spill reg.
6500 If consecutive registers are used, clear them all. */
6501 int nr
6502 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6503 int k;
6504
6505 for (k = 0; k < nr; k++)
6506 {
6507 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6508 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6509 }
6510
6511 /* Maybe the spill reg contains a copy of reload_out. */
6512 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6513 {
6514 register int nregno = REGNO (reload_out[r]);
6515 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6516 : HARD_REGNO_NREGS (nregno,
6517 GET_MODE (reload_reg_rtx[r])));
6518
6519 spill_reg_store[i] = new_spill_reg_store[i];
6520 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6521
6522 /* If NREGNO is a hard register, it may occupy more than
6523 one register. If it does, record what is in the rest of
6524 those registers, assuming that the reload register and
6525 NREGNO cover the same number of hard registers. If not,
6526 invalidate the subsequent registers. */
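/* Hypothetical illustration: if NREGNO is hard reg 4 holding a
   two-register value and the reload register is the two-register block
   starting at hard reg 8, we set reg_last_reload_reg[4] to the reload
   reg itself and reg_last_reload_reg[5] to a REG rtx for hard reg 9;
   if the two blocks did not cover the same number of registers, the
   entry for hard reg 5 would be zeroed instead.  */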
6527
6528 if (nregno < FIRST_PSEUDO_REGISTER)
6529 for (k = 1; k < nnr; k++)
6530 reg_last_reload_reg[nregno + k]
6531 = (nr == nnr ? gen_rtx (REG,
6532 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6533 REGNO (reload_reg_rtx[r]) + k)
6534 : 0);
6535
6536 /* Now do the inverse: record, for each spill reg used, which reg it now contains. */
6537 for (k = 0; k < nr; k++)
6538 {
6539 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6540 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6541 : nregno + k);
6542 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6543 }
6544 }
6545
6546 /* Maybe the spill reg contains a copy of reload_in. Only do
6547 something if there will not be an output reload for
6548 the register being reloaded. */
6549 else if (reload_out[r] == 0
6550 && reload_in[r] != 0
6551 && ((GET_CODE (reload_in[r]) == REG
6552 && ! reg_has_output_reload[REGNO (reload_in[r])])
6553 || (GET_CODE (reload_in_reg[r]) == REG
6554 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
6555 {
6556 register int nregno;
6557 int nnr;
6558
6559 if (GET_CODE (reload_in[r]) == REG)
6560 nregno = REGNO (reload_in[r]);
6561 else
6562 nregno = REGNO (reload_in_reg[r]);
6563
6564 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6565 : HARD_REGNO_NREGS (nregno,
6566 GET_MODE (reload_reg_rtx[r])));
6567
6568 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6569
6570 if (nregno < FIRST_PSEUDO_REGISTER)
6571 for (k = 1; k < nnr; k++)
6572 reg_last_reload_reg[nregno + k]
6573 = (nr == nnr ? gen_rtx (REG,
6574 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6575 REGNO (reload_reg_rtx[r]) + k)
6576 : 0);
6577
6578 /* Unless we inherited this reload, show we haven't
6579 recently done a store. */
6580 if (! reload_inherited[r])
6581 spill_reg_store[i] = 0;
6582
6583 for (k = 0; k < nr; k++)
6584 {
6585 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6586 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6587 : nregno + k);
6588 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6589 = insn;
6590 }
6591 }
6592 }
6593
6594 /* The following if-statement was #if 0'd in 1.34 (or before...).
6595 It's reenabled in 1.35 because supposedly nothing else
6596 deals with this problem. */
6597
6598 /* If a register gets output-reloaded from a non-spill register,
6599 that invalidates any previous reloaded copy of it.
6600 But forget_old_reloads_1 won't get to see it, because
6601 it thinks only about the original insn. So invalidate it here. */
6602 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6603 {
6604 register int nregno = REGNO (reload_out[r]);
6605 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
6606
6607 while (num_regs-- > 0)
6608 reg_last_reload_reg[nregno + num_regs] = 0;
6609 }
6610 }
6611 }
6612 \f
6613 /* Emit code to perform a reload from IN (which may be a reload register) to
6614 OUT (which may also be a reload register). IN or OUT is from operand
6615 OPNUM with reload type TYPE.
6616
6617 Returns first insn emitted. */
6618
6619 rtx
6620 gen_reload (out, in, opnum, type)
6621 rtx out;
6622 rtx in;
6623 int opnum;
6624 enum reload_type type;
6625 {
6626 rtx last = get_last_insn ();
6627
6628 /* How to do this reload can get quite tricky. Normally, we are being
6629 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6630 register that didn't get a hard register. In that case we can just
6631 call emit_move_insn.
6632
6633 We can also be asked to reload a PLUS that adds a register or a MEM to
6634 another register, constant or MEM. This can occur during frame pointer
6635 elimination and while reloading addresses. This case is handled by
6636 trying to emit a single insn to perform the add. If it is not valid,
6637 we use a two insn sequence.
6638
6639 Finally, we could be called to handle an 'o' constraint by putting
6640 an address into a register. In that case, we first try to do this
6641 with a named pattern of "reload_load_address". If no such pattern
6642 exists, we just emit a SET insn and hope for the best (it will normally
6643 be valid on machines that use 'o').
6644
6645 This entire process is complicated by two facts: reload will never
6646 process the insns we generate here, so we must ensure that they
6647 satisfy their constraints; and parts of IN might be being reloaded
6648 separately and replaced with spill registers. Because of this, we
6649 are, in some sense, just guessing the right approach here. The
6650 approach described above seems to work.
6651
6652 ??? At some point, this whole thing needs to be rethought. */
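/* An illustrative sketch only (the register and offset are hypothetical):
   if IN is (plus:SI (reg:SI fp) (const_int 8)), we first emit

       (set (reg:SI reloadreg) (plus:SI (reg:SI fp) (const_int 8)))

   and keep it if recog_memoized and constrain_operands accept it.
   Otherwise we delete it and fall back to the two-insn sequence

       (set (reg:SI reloadreg) (const_int 8))
       followed by an add of fp into reloadreg via gen_add2_insn,

   and if even that add fails its constraints, to copying fp into
   reloadreg and then adding the constant, as coded below.  */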
6653
6654 if (GET_CODE (in) == PLUS
6655 && (GET_CODE (XEXP (in, 0)) == REG
6656 || GET_CODE (XEXP (in, 0)) == MEM)
6657 && (GET_CODE (XEXP (in, 1)) == REG
6658 || CONSTANT_P (XEXP (in, 1))
6659 || GET_CODE (XEXP (in, 1)) == MEM))
6660 {
6661 /* We need to compute the sum of a register or a MEM and another
6662 register, constant, or MEM, and put it into the reload
6663 register. The best possible way of doing this is if the machine
6664 has a three-operand ADD insn that accepts the required operands.
6665
6666 The simplest approach is to try to generate such an insn and see if it
6667 is recognized and matches its constraints. If so, it can be used.
6668
6669 It might be better not to actually emit the insn unless it is valid,
6670 but we need to pass the insn as an operand to `recog' and
6671 `insn_extract' and it is simpler to emit and then delete the insn if
6672 not valid than to dummy things up. */
6673
6674 rtx op0, op1, tem, insn;
6675 int code;
6676
6677 op0 = find_replacement (&XEXP (in, 0));
6678 op1 = find_replacement (&XEXP (in, 1));
6679
6680 /* Since constraint checking is strict, commutativity won't be
6681 checked, so we need to do that here to avoid spurious failure
6682 if the add instruction is two-address and the second operand
6683 of the add is the same as the reload reg, which is frequently
6684 the case. If the insn would be A = B + A, rearrange it so
6685 it will be A = A + B as constrain_operands expects. */
6686
6687 if (GET_CODE (XEXP (in, 1)) == REG
6688 && REGNO (out) == REGNO (XEXP (in, 1)))
6689 tem = op0, op0 = op1, op1 = tem;
6690
6691 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6692 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
6693
6694 insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
6695 code = recog_memoized (insn);
6696
6697 if (code >= 0)
6698 {
6699 insn_extract (insn);
6700 /* We want constrain_operands to treat this insn strictly in
6701 its validity determination, i.e., the way it would after reload
6702 has completed. */
6703 if (constrain_operands (code, 1))
6704 return insn;
6705 }
6706
6707 delete_insns_since (last);
6708
6709 /* If that failed, we must use a conservative two-insn sequence.
6710 Use a move to copy the constant, MEM, or pseudo register to the
6711 reload register, since "move" can handle an arbitrary operand,
6712 unlike add, which in general cannot. Then add the registers.
6713
6714 If there is another way to do this for a specific machine, a
6715 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6716 we emit below. */
6717
6718 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6719 || (GET_CODE (op1) == REG
6720 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6721 tem = op0, op0 = op1, op1 = tem;
6722
6723 emit_insn (gen_move_insn (out, op0));
6724
6725 /* If OP0 and OP1 are the same, we can use OUT for OP1.
6726 This fixes a problem on the 32K where the stack pointer cannot
6727 be used as an operand of an add insn. */
6728
6729 if (rtx_equal_p (op0, op1))
6730 op1 = out;
6731
6732 insn = emit_insn (gen_add2_insn (out, op1));
6733
6734 /* If that failed, copy the address register to the reload register.
6735 Then add the constant to the reload register. */
6736
6737 code = recog_memoized (insn);
6738
6739 if (code >= 0)
6740 {
6741 insn_extract (insn);
6742 /* We want constrain_operands to treat this insn strictly in
6743 its validity determination, i.e., the way it would after reload
6744 has completed. */
6745 if (constrain_operands (code, 1))
6746 return insn;
6747 }
6748
6749 delete_insns_since (last);
6750
6751 emit_insn (gen_move_insn (out, op1));
6752 emit_insn (gen_add2_insn (out, op0));
6753 }
6754
6755 #ifdef SECONDARY_MEMORY_NEEDED
6756 /* If we need a memory location to do the move, do it that way. */
6757 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6758 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
6759 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6760 REGNO_REG_CLASS (REGNO (out)),
6761 GET_MODE (out)))
6762 {
6763 /* Get the memory to use and rewrite both registers to its mode. */
6764 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
6765
6766 if (GET_MODE (loc) != GET_MODE (out))
6767 out = gen_rtx (REG, GET_MODE (loc), REGNO (out));
6768
6769 if (GET_MODE (loc) != GET_MODE (in))
6770 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6771
6772 emit_insn (gen_move_insn (loc, in));
6773 emit_insn (gen_move_insn (out, loc));
6774 }
6775 #endif
6776
6777 /* If IN is a simple operand, use gen_move_insn. */
6778 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
6779 emit_insn (gen_move_insn (out, in));
6780
6781 #ifdef HAVE_reload_load_address
6782 else if (HAVE_reload_load_address)
6783 emit_insn (gen_reload_load_address (out, in));
6784 #endif
6785
6786 /* Otherwise, just write (set OUT IN) and hope for the best. */
6787 else
6788 emit_insn (gen_rtx (SET, VOIDmode, out, in));
6789
6790 /* Return the first insn emitted.
6791 We cannot just return get_last_insn, because there may have
6792 been multiple instructions emitted. Also note that gen_move_insn may
6793 emit more than one insn itself, so we cannot assume that there is one
6794 insn emitted per call to emit_insn. */
6795
6796 return last ? NEXT_INSN (last) : get_insns ();
6797 }
6798 \f
6799 /* Delete a previously made output-reload
6800 whose result we now believe is not needed.
6801 First we double-check.
6802
6803 INSN is the insn now being processed.
6804 OUTPUT_RELOAD_INSN is the insn of the output reload.
6805 J is the reload-number for this insn. */
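/* Sketch of the situation handled here: an earlier output reload stored
   reload reg R back into pseudo P, P is not referenced again between that
   store and INSN, and INSN can get P's value from R directly; then the
   store into P is dead and may be deleted, and if P is not referenced
   anywhere else in its basic block its stack slot can be dropped as well.
   (R and P are hypothetical names, not variables of this function.)  */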
6806
6807 static void
6808 delete_output_reload (insn, j, output_reload_insn)
6809 rtx insn;
6810 int j;
6811 rtx output_reload_insn;
6812 {
6813 register rtx i1;
6814
6815 /* Get the raw pseudo-register referred to. */
6816
6817 rtx reg = reload_in[j];
6818 while (GET_CODE (reg) == SUBREG)
6819 reg = SUBREG_REG (reg);
6820
6821 /* If the pseudo-reg we are reloading is no longer referenced
6822 anywhere between the store into it and here,
6823 and no jumps or labels intervene, then the value can get
6824 here through the reload reg alone.
6825 Otherwise, give up--return. */
6826 for (i1 = NEXT_INSN (output_reload_insn);
6827 i1 != insn; i1 = NEXT_INSN (i1))
6828 {
6829 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6830 return;
6831 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6832 && reg_mentioned_p (reg, PATTERN (i1)))
6833 return;
6834 }
6835
6836 if (cannot_omit_stores[REGNO (reg)])
6837 return;
6838
6839 /* If this insn will store in the pseudo again,
6840 the previous store can be removed. */
6841 if (reload_out[j] == reload_in[j])
6842 delete_insn (output_reload_insn);
6843
6844 /* See if the pseudo reg has been completely replaced
6845 with reload regs. If so, delete the store insn
6846 and forget we had a stack slot for the pseudo. */
6847 else if (reg_n_deaths[REGNO (reg)] == 1
6848 && reg_basic_block[REGNO (reg)] >= 0
6849 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
6850 {
6851 rtx i2;
6852
6853 /* We know that it was used only between here
6854 and the beginning of the current basic block.
6855 (We also know that the last use before INSN was
6856 the output reload we are thinking of deleting, but never mind that.)
6857 Search that range; see if any ref remains. */
6858 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6859 {
6860 rtx set = single_set (i2);
6861
6862 /* Uses which just store in the pseudo don't count,
6863 since if they are the only uses, they are dead. */
6864 if (set != 0 && SET_DEST (set) == reg)
6865 continue;
6866 if (GET_CODE (i2) == CODE_LABEL
6867 || GET_CODE (i2) == JUMP_INSN)
6868 break;
6869 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
6870 && reg_mentioned_p (reg, PATTERN (i2)))
6871 /* Some other ref remains;
6872 we can't do anything. */
6873 return;
6874 }
6875
6876 /* Delete the now-dead stores into this pseudo. */
6877 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6878 {
6879 rtx set = single_set (i2);
6880
6881 if (set != 0 && SET_DEST (set) == reg)
6882 delete_insn (i2);
6883 if (GET_CODE (i2) == CODE_LABEL
6884 || GET_CODE (i2) == JUMP_INSN)
6885 break;
6886 }
6887
6888 /* For the debugging info,
6889 say the pseudo lives in this reload reg. */
6890 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
6891 alter_reg (REGNO (reg), -1);
6892 }
6893 }
6894 \f
6895 /* Output reload-insns to reload VALUE into RELOADREG.
6896 VALUE is an autoincrement or autodecrement RTX whose operand
6897 is a register or memory location;
6898 so reloading involves incrementing that location.
6899
6900 INC_AMOUNT is the number to increment or decrement by (always positive).
6901 This cannot be deduced from VALUE. */
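/* A hypothetical example: if VALUE is (post_inc:SI (reg:SI sp)) and
   INC_AMOUNT is 4, we first copy sp into RELOADREG and then try the
   single insn

       (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int 4)))

   to do the increment in place; if the target does not recognize it,
   the fallback sequences below do the arithmetic in RELOADREG and copy
   the result back.  */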
6902
6903 static void
6904 inc_for_reload (reloadreg, value, inc_amount)
6905 rtx reloadreg;
6906 rtx value;
6907 int inc_amount;
6908 {
6909 /* REG or MEM to be copied and incremented. */
6910 rtx incloc = XEXP (value, 0);
6911 /* Nonzero if increment after copying. */
6912 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
6913 rtx last;
6914 rtx inc;
6915 rtx add_insn;
6916 int code;
6917
6918 /* No hard register is equivalent to this register after
6919 the inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
6920 we could inc/dec that register as well (maybe even using it for
6921 the source), but I'm not sure it's worth worrying about. */
6922 if (GET_CODE (incloc) == REG)
6923 reg_last_reload_reg[REGNO (incloc)] = 0;
6924
6925 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
6926 inc_amount = - inc_amount;
6927
6928 inc = GEN_INT (inc_amount);
6929
6930 /* If this is post-increment, first copy the location to the reload reg. */
6931 if (post)
6932 emit_insn (gen_move_insn (reloadreg, incloc));
6933
6934 /* See if we can directly increment INCLOC. Use a method similar to that
6935 in gen_reload. */
6936
6937 last = get_last_insn ();
6938 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
6939 gen_rtx (PLUS, GET_MODE (incloc),
6940 incloc, inc)));
6941
6942 code = recog_memoized (add_insn);
6943 if (code >= 0)
6944 {
6945 insn_extract (add_insn);
6946 if (constrain_operands (code, 1))
6947 {
6948 /* If this is a pre-increment and we have incremented the value
6949 where it lives, copy the incremented value to RELOADREG to
6950 be used as an address. */
6951
6952 if (! post)
6953 emit_insn (gen_move_insn (reloadreg, incloc));
6954
6955 return;
6956 }
6957 }
6958
6959 delete_insns_since (last);
6960
6961 /* If we couldn't do the increment directly, we must increment in RELOADREG.
6962 The way we do this depends on whether this is pre- or post-increment.
6963 For pre-increment, copy INCLOC to the reload register, increment it
6964 there, then save it back. */
6965
6966 if (! post)
6967 {
6968 emit_insn (gen_move_insn (reloadreg, incloc));
6969 emit_insn (gen_add2_insn (reloadreg, inc));
6970 emit_insn (gen_move_insn (incloc, reloadreg));
6971 }
6972 else
6973 {
6974 /* Postincrement.
6975 Because this might be a jump insn or a compare, and because RELOADREG
6976 may not be available after the insn in an input reload, we must do
6977 the incrementation before the insn being reloaded for.
6978
6979 We have already copied INCLOC to RELOADREG. Increment the copy in
6980 RELOADREG, save that back, then decrement RELOADREG so it has
6981 the original value. */
6982
6983 emit_insn (gen_add2_insn (reloadreg, inc));
6984 emit_insn (gen_move_insn (incloc, reloadreg));
6985 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
6986 }
6987
6988 return;
6989 }
6990 \f
6991 /* Return 1 if we are certain that the constraint-string STRING allows
6992 the hard register REG. Return 0 if we can't be sure of this. */
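/* For instance (hypothetical constraint strings), with STRING "r,g" and
   REG a general hard register every alternative accepts the register, so
   we return 1; with STRING "r,m" the second alternative contains neither
   `g' nor `r', so we return 0 even though the first alternative would
   accept it.  */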
6993
6994 static int
6995 constraint_accepts_reg_p (string, reg)
6996 char *string;
6997 rtx reg;
6998 {
6999 int value;
7000 int regno = true_regnum (reg);
7001 int c;
7002
7003 /* Initialize for first alternative. */
7004 value = 0;
7005 /* Check that each alternative contains `g' or `r'. */
7006 while (1)
7007 switch (c = *string++)
7008 {
7009 case 0:
7010 /* End of string: if the last alternative lacked `g' or `r', we lose. */
7011 return value;
7012 case ',':
7013 /* If an alternative lacks `g' or `r', we lose. */
7014 if (value == 0)
7015 return 0;
7016 /* Initialize for next alternative. */
7017 value = 0;
7018 break;
7019 case 'g':
7020 case 'r':
7021 /* Any general reg wins for this alternative. */
7022 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7023 value = 1;
7024 break;
7025 default:
7026 /* Any reg in specified class wins for this alternative. */
7027 {
7028 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7029
7030 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7031 value = 1;
7032 }
7033 }
7034 }
7035 \f
7036 /* Return the number of places FIND appears within X, but don't count
7037 an occurrence if some SET_DEST is FIND. */
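/* E.g. (illustrative rtl, assuming the usual sharing of the pseudo's REG
   rtx so that comparison by pointer works): counting (reg 3) in

       (set (reg 3) (plus (reg 3) (reg 4)))

   yields 1, because the SET_DEST occurrence is skipped and only the use
   inside the SET_SRC is counted.  */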
7038
7039 static int
7040 count_occurrences (x, find)
7041 register rtx x, find;
7042 {
7043 register int i, j;
7044 register enum rtx_code code;
7045 register char *format_ptr;
7046 int count;
7047
7048 if (x == find)
7049 return 1;
7050 if (x == 0)
7051 return 0;
7052
7053 code = GET_CODE (x);
7054
7055 switch (code)
7056 {
7057 case REG:
7058 case QUEUED:
7059 case CONST_INT:
7060 case CONST_DOUBLE:
7061 case SYMBOL_REF:
7062 case CODE_LABEL:
7063 case PC:
7064 case CC0:
7065 return 0;
7066
7067 case SET:
7068 if (SET_DEST (x) == find)
7069 return count_occurrences (SET_SRC (x), find);
7070 break;
7071 }
7072
7073 format_ptr = GET_RTX_FORMAT (code);
7074 count = 0;
7075
7076 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7077 {
7078 switch (*format_ptr++)
7079 {
7080 case 'e':
7081 count += count_occurrences (XEXP (x, i), find);
7082 break;
7083
7084 case 'E':
7085 if (XVEC (x, i) != NULL)
7086 {
7087 for (j = 0; j < XVECLEN (x, i); j++)
7088 count += count_occurrences (XVECEXP (x, i, j), find);
7089 }
7090 break;
7091 }
7092 }
7093 return count;
7094 }