Add better error messages than abort to reload1, and print the insn involved.
[gcc.git] / gcc / reload1.c
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include <stdio.h>
22 #include "config.h"
23 #include "rtl.h"
24 #include "obstack.h"
25 #include "insn-config.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
28 #include "flags.h"
29 #include "expr.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "reload.h"
33 #include "recog.h"
34 #include "basic-block.h"
35 #include "output.h"
36
37 /* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
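/* For illustration only (a sketch, not taken from any particular target):
   suppose pseudo 103 failed to get a hard register and lives in a stack
   slot at fp-4, where `fp' stands for the frame pointer's hard register.
   An insn such as

	(set (reg:SI 103) (plus:SI (reg:SI 103) (const_int 1)))

   might then be fixed up by reload into

	(set (reg:SI 0) (mem:SI (plus:SI (reg:SI fp) (const_int -4))))
	(set (reg:SI 0) (plus:SI (reg:SI 0) (const_int 1)))
	(set (mem:SI (plus:SI (reg:SI fp) (const_int -4))) (reg:SI 0))

   where (reg:SI 0) is the reload register chosen for this insn.  */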
69
70
71 #ifndef REGISTER_MOVE_COST
72 #define REGISTER_MOVE_COST(x, y) 2
73 #endif
74
75 #ifndef MEMORY_MOVE_COST
76 #define MEMORY_MOVE_COST(x) 4
77 #endif
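/* Illustrative only: a target's machine description header may override
   these defaults with its own costs, for example (hypothetical values)

	#define REGISTER_MOVE_COST(CLASS1, CLASS2) \
	  ((CLASS1) == (CLASS2) ? 2 : 4)
	#define MEMORY_MOVE_COST(MODE) 6

   The fallbacks above are used only for whichever macro the target does
   not define itself.  */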
78 \f
79 /* During reload_as_needed, element N contains a REG rtx for the hard reg
80 into which reg N has been reloaded (perhaps for a previous insn). */
81 static rtx *reg_last_reload_reg;
82
83 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
84 for an output reload that stores into reg N. */
85 static char *reg_has_output_reload;
86
87 /* Indicates which hard regs are reload-registers for an output reload
88 in the current insn. */
89 static HARD_REG_SET reg_is_output_reload;
90
91 /* Element N is the constant value to which pseudo reg N is equivalent,
92 or zero if pseudo reg N is not equivalent to a constant.
93 find_reloads looks at this in order to replace pseudo reg N
94 with the constant it stands for. */
95 rtx *reg_equiv_constant;
96
97 /* Element N is a memory location to which pseudo reg N is equivalent,
98 prior to any register elimination (such as frame pointer to stack
99 pointer). Depending on whether or not it is a valid address, this value
100 is transferred to either reg_equiv_address or reg_equiv_mem. */
101 rtx *reg_equiv_memory_loc;
102
103 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
104 This is used when the address is not valid as a memory address
105 (because its displacement is too big for the machine.) */
106 rtx *reg_equiv_address;
107
108 /* Element N is the memory slot to which pseudo reg N is equivalent,
109 or zero if pseudo reg N is not equivalent to a memory slot. */
110 rtx *reg_equiv_mem;
111
112 /* Widest width in which each pseudo reg is referred to (via subreg). */
113 static int *reg_max_ref_width;
114
115 /* Element N is the insn that initialized reg N from its equivalent
116 constant or memory slot. */
117 static rtx *reg_equiv_init;
118
119 /* During reload_as_needed, element N contains the last pseudo regno
120 reloaded into the Nth reload register. This vector is in parallel
121 with spill_regs. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
125
126 /* During reload_as_needed, element N contains the insn for which
127 the Nth reload register was last used. This vector is in parallel
128 with spill_regs, and its contents are significant only when
129 reg_reloaded_contents is significant. */
130 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
131
132 /* Number of spill-regs so far; number of valid elements of spill_regs. */
133 static int n_spills;
134
135 /* In parallel with spill_regs, contains REG rtx's for those regs.
136 Holds the last rtx used for any given reg, or 0 if it has never
137 been used for spilling yet. This rtx is reused, provided it has
138 the proper mode. */
139 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
140
141 /* In parallel with spill_regs, contains nonzero for a spill reg
142 that was stored after the last time it was used.
143 The precise value is the insn generated to do the store. */
144 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
145
146 /* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs. */
150 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
151
152 /* This reg set indicates registers that may not be used for retrying global
153 allocation. The registers that may not be used include all spill registers
154 and the frame pointer (if we are using one). */
155 HARD_REG_SET forbidden_regs;
156
157 /* This reg set indicates registers that are not good for spill registers.
158 They will not be used to complete groups of spill registers. This includes
159 all fixed registers, registers that may be eliminated, and, if
160 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
161
162 (spill_reg_order prevents these registers from being used to start a
163 group.) */
164 static HARD_REG_SET bad_spill_regs;
165
166 /* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `spills' is the number of
168 elements that are actually valid; new ones are added at the end. */
169 static short spill_regs[FIRST_PSEUDO_REGISTER];
170
171 /* Describes order of preference for putting regs into spill_regs.
172 Contains the numbers of all the hard regs, in order most preferred first.
173 This order is different for each function.
174 It is set up by order_regs_for_reload.
175 Empty elements at the end contain -1. */
176 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
177
178 /* 1 for a hard register that appears explicitly in the rtl
179 (for example, function value registers, special registers
180 used by insns, structure value pointer registers). */
181 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
182
183 /* Indicates if a register was counted against the need for
184 groups. 0 means it can count against max_nongroup instead. */
185 static HARD_REG_SET counted_for_groups;
186
187 /* Indicates if a register was counted against the need for
188 non-groups. 0 means it can become part of a new group.
189 During choose_reload_regs, 1 here means don't use this reg
190 as part of a group, even if it seems to be otherwise ok. */
191 static HARD_REG_SET counted_for_nongroups;
192
193 /* Indexed by pseudo reg number N,
194 says may not delete stores into the real (memory) home of pseudo N.
195 This is set if we already substituted a memory equivalent in some uses,
196 which happens when we have to eliminate the fp from it. */
197 static char *cannot_omit_stores;
198
199 /* Nonzero if indirect addressing is supported on the machine; this means
200 that spilling (REG n) does not require reloading it into a register in
201 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
202 value indicates the level of indirect addressing supported, e.g., two
203 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
204 a hard register. */
205
206 static char spill_indirect_levels;
207
208 /* Nonzero if indirect addressing is supported when the innermost MEM is
209 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
210 which these are valid is the same as spill_indirect_levels, above. */
211
212 char indirect_symref_ok;
213
214 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
215
216 char double_reg_address_ok;
217
218 /* Record the stack slot for each spilled hard register. */
219
220 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
221
222 /* Width allocated so far for that stack slot. */
223
224 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
225
226 /* Indexed by register class and basic block number, nonzero if there is
227 any need for a spill register of that class in that basic block.
228 The pointer is 0 if we did stupid allocation and don't know
229 the structure of basic blocks. */
230
231 char *basic_block_needs[N_REG_CLASSES];
232
233 /* First uid used by insns created by reload in this function.
234 Used in find_equiv_reg. */
235 int reload_first_uid;
236
237 /* Flag set by local-alloc or global-alloc if anything is live in
238 a call-clobbered reg across calls. */
239
240 int caller_save_needed;
241
242 /* Set to 1 while reload_as_needed is operating.
243 Required by some machines to handle any generated moves differently. */
244
245 int reload_in_progress = 0;
246
247 /* These arrays record the insn_code of insns that may be needed to
248 perform input and output reloads of special objects. They provide a
249 place to pass a scratch register. */
250
251 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
252 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
253
254 /* This obstack is used for allocation of rtl during register elimination.
255 The allocated storage can be freed once find_reloads has processed the
256 insn. */
257
258 struct obstack reload_obstack;
259 char *reload_firstobj;
260
261 #define obstack_chunk_alloc xmalloc
262 #define obstack_chunk_free free
263
264 /* List of labels that must never be deleted. */
265 extern rtx forced_labels;
266 \f
267 /* This structure is used to record information about register eliminations.
268 Each array entry describes one possible way of eliminating a register
269 in favor of another. If there is more than one way of eliminating a
270 particular register, the most preferred should be specified first. */
271
272 static struct elim_table
273 {
274 int from; /* Register number to be eliminated. */
275 int to; /* Register number used as replacement. */
276 int initial_offset; /* Initial difference between values. */
277 int can_eliminate; /* Non-zero if this elimination can be done. */
278 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
279 insns made by reload. */
280 int offset; /* Current offset between the two regs. */
281 int max_offset; /* Maximum offset between the two regs. */
282 int previous_offset; /* Offset at end of previous insn. */
283 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
284 rtx from_rtx; /* REG rtx for the register to be eliminated.
285 We cannot simply compare the number since
286 we might then spuriously replace a hard
287 register corresponding to a pseudo
288 assigned to the reg to be eliminated. */
289 rtx to_rtx; /* REG rtx for the replacement. */
290 } reg_eliminate[] =
291
292 /* If a set of eliminable registers was specified, define the table from it.
293 Otherwise, default to the normal case of the frame pointer being
294 replaced by the stack pointer. */
295
296 #ifdef ELIMINABLE_REGS
297 ELIMINABLE_REGS;
298 #else
299 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
300 #endif
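/* Illustrative only (typical of many ports, not part of this file):
   a target that can eliminate both its argument pointer and its frame
   pointer might define

	#define ELIMINABLE_REGS					\
	{{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },	\
	 { ARG_POINTER_REGNUM,   FRAME_POINTER_REGNUM },	\
	 { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}

   listing the most preferred elimination for each register first, as the
   comment above requires.  */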
301
302 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
303
304 /* Record the number of pending eliminations that have an offset not equal
305 to their initial offset. If non-zero, we use a new copy of each
306 replacement result in any insns encountered. */
307 static int num_not_at_initial_offset;
308
309 /* Count the number of registers that we may be able to eliminate. */
310 static int num_eliminable;
311
312 /* For each label, we record the offset of each elimination. If we reach
313 a label by more than one path and an offset differs, we cannot do the
314 elimination. This information is indexed by the number of the label.
315 The first table is an array of flags that records whether we have yet
316 encountered a label and the second table is an array of arrays, one
317 entry in the latter array for each elimination. */
318
319 static char *offsets_known_at;
320 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
321
322 /* Number of labels in the current function. */
323
324 static int num_labels;
325
326 struct hard_reg_n_uses { int regno; int uses; };
327 \f
328 static int possible_group_p PROTO((int, int *));
329 static void count_possible_groups PROTO((int *, enum machine_mode *,
330 int *, int));
331 static int modes_equiv_for_class_p PROTO((enum machine_mode,
332 enum machine_mode,
333 enum reg_class));
334 static void spill_failure PROTO((rtx));
335 static int new_spill_reg PROTO((int, int, int *, int *, int,
336 FILE *));
337 static void delete_dead_insn PROTO((rtx));
338 static void alter_reg PROTO((int, int));
339 static void mark_scratch_live PROTO((rtx));
340 static void set_label_offsets PROTO((rtx, rtx, int));
341 static int eliminate_regs_in_insn PROTO((rtx, int));
342 static void mark_not_eliminable PROTO((rtx, rtx));
343 static int spill_hard_reg PROTO((int, int, FILE *, int));
344 static void scan_paradoxical_subregs PROTO((rtx));
345 static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
346 struct hard_reg_n_uses *));
347 static void order_regs_for_reload PROTO((void));
348 static int compare_spill_regs PROTO((short *, short *));
349 static void reload_as_needed PROTO((rtx, int));
350 static void forget_old_reloads_1 PROTO((rtx, rtx));
351 static int reload_reg_class_lower PROTO((short *, short *));
352 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
353 enum machine_mode));
354 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
355 enum machine_mode));
356 static int reload_reg_free_p PROTO((int, int, enum reload_type));
357 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
358 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
359 static int reloads_conflict PROTO((int, int));
360 static int allocate_reload_reg PROTO((int, rtx, int, int));
361 static void choose_reload_regs PROTO((rtx, rtx));
362 static void merge_assigned_reloads PROTO((rtx));
363 static void emit_reload_insns PROTO((rtx));
364 static void delete_output_reload PROTO((rtx, int, rtx));
365 static void inc_for_reload PROTO((rtx, rtx, int));
366 static int constraint_accepts_reg_p PROTO((char *, rtx));
367 static int count_occurrences PROTO((rtx, rtx));
368 \f
369 /* Initialize the reload pass once per compilation. */
370
371 void
372 init_reload ()
373 {
374 register int i;
375
376 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
377 Set spill_indirect_levels to the number of levels such addressing is
378 permitted, zero if it is not permitted at all. */
379
380 register rtx tem
381 = gen_rtx (MEM, Pmode,
382 gen_rtx (PLUS, Pmode,
383 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
384 GEN_INT (4)));
385 spill_indirect_levels = 0;
386
387 while (memory_address_p (QImode, tem))
388 {
389 spill_indirect_levels++;
390 tem = gen_rtx (MEM, Pmode, tem);
391 }
392
393 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
394
395 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
396 indirect_symref_ok = memory_address_p (QImode, tem);
397
398 /* See if reg+reg is a valid (and offsettable) address. */
399
400 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
401 {
402 tem = gen_rtx (PLUS, Pmode,
403 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
404 gen_rtx (REG, Pmode, i));
405 /* This way, we make sure that reg+reg is an offsettable address. */
406 tem = plus_constant (tem, 4);
407
408 if (memory_address_p (QImode, tem))
409 {
410 double_reg_address_ok = 1;
411 break;
412 }
413 }
414
415 /* Initialize obstack for our rtl allocation. */
416 gcc_obstack_init (&reload_obstack);
417 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
418 }
419
420 /* Main entry point for the reload pass.
421
422 FIRST is the first insn of the function being compiled.
423
424 GLOBAL nonzero means we were called from global_alloc
425 and should attempt to reallocate any pseudoregs that we
426 displace from hard regs we will use for reloads.
427 If GLOBAL is zero, we do not have enough information to do that,
428 so any pseudo reg that is spilled must go to the stack.
429
430 DUMPFILE is the global-reg debugging dump file stream, or 0.
431 If it is nonzero, messages are written to it to describe
432 which registers are seized as reload regs, which pseudo regs
433 are spilled from them, and where the pseudo regs are reallocated to.
434
435 Return value is nonzero if reload failed
436 and we must not do any more for this function. */
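/* A minimal usage sketch (illustrative only, not copied from the actual
   callers):

	failure = reload (get_insns (), global, dumpfile);
	if (failure)
	  ... abandon further work on this function ...

   where GLOBAL is 1 only if global allocation information is available,
   as described above.  */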
437
438 int
439 reload (first, global, dumpfile)
440 rtx first;
441 int global;
442 FILE *dumpfile;
443 {
444 register int class;
445 register int i, j, k;
446 register rtx insn;
447 register struct elim_table *ep;
448
449 int something_changed;
450 int something_needs_reloads;
451 int something_needs_elimination;
452 int new_basic_block_needs;
453 enum reg_class caller_save_spill_class = NO_REGS;
454 int caller_save_group_size = 1;
455
456 /* Nonzero means we couldn't get enough spill regs. */
457 int failure = 0;
458
459 /* The basic block number currently being processed for INSN. */
460 int this_block;
461
462 /* Make sure even insns with volatile mem refs are recognizable. */
463 init_recog ();
464
465 /* Enable find_equiv_reg to distinguish insns made by reload. */
466 reload_first_uid = get_max_uid ();
467
468 for (i = 0; i < N_REG_CLASSES; i++)
469 basic_block_needs[i] = 0;
470
471 #ifdef SECONDARY_MEMORY_NEEDED
472 /* Initialize the secondary memory table. */
473 clear_secondary_mem ();
474 #endif
475
476 /* Remember which hard regs appear explicitly
477 before we merge into `regs_ever_live' the ones in which
478 pseudo regs have been allocated. */
479 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
480
481 /* We don't have a stack slot for any spill reg yet. */
482 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
483 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
484
485 /* Initialize the save area information for caller-save, in case some
486 are needed. */
487 init_save_areas ();
488
489 /* Compute which hard registers are now in use
490 as homes for pseudo registers.
491 This is done here rather than (eg) in global_alloc
492 because this point is reached even if not optimizing. */
493
494 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
495 mark_home_live (i);
496
497 for (i = 0; i < scratch_list_length; i++)
498 if (scratch_list[i])
499 mark_scratch_live (scratch_list[i]);
500
501 /* Make sure that the last insn in the chain
502 is not something that needs reloading. */
503 emit_note (NULL_PTR, NOTE_INSN_DELETED);
504
505 /* Find all the pseudo registers that didn't get hard regs
506 but do have known equivalent constants or memory slots.
507 These include parameters (known equivalent to parameter slots)
508 and cse'd or loop-moved constant memory addresses.
509
510 Record constant equivalents in reg_equiv_constant
511 so they will be substituted by find_reloads.
512 Record memory equivalents in reg_mem_equiv so they can
513 be substituted eventually by altering the REG-rtx's. */
514
515 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
516 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
517 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
518 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
519 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
520 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
521 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
522 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
523 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
524 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
525 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
526 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
527 cannot_omit_stores = (char *) alloca (max_regno);
528 bzero (cannot_omit_stores, max_regno);
529
530 #ifdef SMALL_REGISTER_CLASSES
531 CLEAR_HARD_REG_SET (forbidden_regs);
532 #endif
533
534 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
535 Also find all paradoxical subregs and find largest such for each pseudo.
536 On machines with small register classes, record hard registers that
537 are used for user variables. These can never be used for spills. */
538
539 for (insn = first; insn; insn = NEXT_INSN (insn))
540 {
541 rtx set = single_set (insn);
542
543 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
544 {
545 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
546 if (note
547 #ifdef LEGITIMATE_PIC_OPERAND_P
548 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
549 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
550 #endif
551 )
552 {
553 rtx x = XEXP (note, 0);
554 i = REGNO (SET_DEST (set));
555 if (i > LAST_VIRTUAL_REGISTER)
556 {
557 if (GET_CODE (x) == MEM)
558 reg_equiv_memory_loc[i] = x;
559 else if (CONSTANT_P (x))
560 {
561 if (LEGITIMATE_CONSTANT_P (x))
562 reg_equiv_constant[i] = x;
563 else
564 reg_equiv_memory_loc[i]
565 = force_const_mem (GET_MODE (SET_DEST (set)), x);
566 }
567 else
568 continue;
569
570 /* If this register is being made equivalent to a MEM
571 and the MEM is not SET_SRC, the equivalencing insn
572 is one with the MEM as a SET_DEST and it occurs later.
573 So don't mark this insn now. */
574 if (GET_CODE (x) != MEM
575 || rtx_equal_p (SET_SRC (set), x))
576 reg_equiv_init[i] = insn;
577 }
578 }
579 }
580
581 /* If this insn is setting a MEM from a register equivalent to it,
582 this is the equivalencing insn. */
583 else if (set && GET_CODE (SET_DEST (set)) == MEM
584 && GET_CODE (SET_SRC (set)) == REG
585 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
586 && rtx_equal_p (SET_DEST (set),
587 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
588 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
589
590 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
591 scan_paradoxical_subregs (PATTERN (insn));
592 }
593
594 /* Does this function require a frame pointer? */
595
596 frame_pointer_needed = (! flag_omit_frame_pointer
597 #ifdef EXIT_IGNORE_STACK
598 /* ?? If EXIT_IGNORE_STACK is set, we will not save
599 and restore sp for alloca. So we can't eliminate
600 the frame pointer in that case. At some point,
601 we should improve this by emitting the
602 sp-adjusting insns for this case. */
603 || (current_function_calls_alloca
604 && EXIT_IGNORE_STACK)
605 #endif
606 || FRAME_POINTER_REQUIRED);
607
608 num_eliminable = 0;
609
610 /* Initialize the table of registers to eliminate. The way we do this
611 depends on how the eliminable registers were defined. */
612 #ifdef ELIMINABLE_REGS
613 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
614 {
615 ep->can_eliminate = ep->can_eliminate_previous
616 = (CAN_ELIMINATE (ep->from, ep->to)
617 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
618 }
619 #else
620 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
621 = ! frame_pointer_needed;
622 #endif
623
624 /* Count the number of eliminable registers and build the FROM and TO
625 REG rtx's. Note that code in gen_rtx will cause, e.g.,
626 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
627 We depend on this. */
628 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
629 {
630 num_eliminable += ep->can_eliminate;
631 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
632 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
633 }
634
635 num_labels = max_label_num () - get_first_label_num ();
636
637 /* Allocate the tables used to store offset information at labels. */
638 offsets_known_at = (char *) alloca (num_labels);
639 offsets_at
640 = (int (*)[NUM_ELIMINABLE_REGS])
641 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
642
643 offsets_known_at -= get_first_label_num ();
644 offsets_at -= get_first_label_num ();
645
646 /* Alter each pseudo-reg rtx to contain its hard reg number.
647 Assign stack slots to the pseudos that lack hard regs or equivalents.
648 Do not touch virtual registers. */
649
650 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
651 alter_reg (i, -1);
652
653 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
654 because the stack size may be a part of the offset computation for
655 register elimination. */
656 assign_stack_local (BLKmode, 0, 0);
657
658 /* If we have some registers we think can be eliminated, scan all insns to
659 see if there is an insn that sets one of these registers to something
660 other than itself plus a constant. If so, the register cannot be
661 eliminated. Doing this scan here eliminates an extra pass through the
662 main reload loop in the most common case where register elimination
663 cannot be done. */
664 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
665 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
666 || GET_CODE (insn) == CALL_INSN)
667 note_stores (PATTERN (insn), mark_not_eliminable);
668
669 #ifndef REGISTER_CONSTRAINTS
670 /* If all the pseudo regs have hard regs,
671 except for those that are never referenced,
672 we know that no reloads are needed. */
673 /* But that is not true if there are register constraints, since
674 in that case some pseudos might be in the wrong kind of hard reg. */
675
676 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
677 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
678 break;
679
680 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
681 return;
682 #endif
683
684 /* Compute the order of preference for hard registers to spill.
685 Store them by decreasing preference in potential_reload_regs. */
686
687 order_regs_for_reload ();
688
689 /* So far, no hard regs have been spilled. */
690 n_spills = 0;
691 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
692 spill_reg_order[i] = -1;
693
694 /* On most machines, we can't use any register explicitly used in the
695 rtl as a spill register. But on some, we have to. Those will have
696 taken care to keep the life of hard regs as short as possible. */
697
698 #ifndef SMALL_REGISTER_CLASSES
699 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
700 #endif
701
702 /* Spill any hard regs that we know we can't eliminate. */
703 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
704 if (! ep->can_eliminate)
705 spill_hard_reg (ep->from, global, dumpfile, 1);
706
707 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
708 if (frame_pointer_needed)
709 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
710 #endif
711
712 if (global)
713 for (i = 0; i < N_REG_CLASSES; i++)
714 {
715 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
716 bzero (basic_block_needs[i], n_basic_blocks);
717 }
718
719 /* From now on, we need to emit any moves without making new pseudos. */
720 reload_in_progress = 1;
721
722 /* This loop scans the entire function each go-round
723 and repeats until one repetition spills no additional hard regs. */
724
725 /* This flag is set when a pseudo reg is spilled,
726 to require another pass. Note that getting an additional reload
727 reg does not necessarily imply any pseudo reg was spilled;
728 sometimes we find a reload reg that no pseudo reg was allocated in. */
729 something_changed = 1;
730 /* This flag is set if there are any insns that require reloading. */
731 something_needs_reloads = 0;
732 /* This flag is set if there are any insns that require register
733 eliminations. */
734 something_needs_elimination = 0;
735 while (something_changed)
736 {
737 rtx after_call = 0;
738
739 /* For each class, number of reload regs needed in that class.
740 This is the maximum over all insns of the needs in that class
741 of the individual insn. */
742 int max_needs[N_REG_CLASSES];
743 /* For each class, size of group of consecutive regs
744 that is needed for the reloads of this class. */
745 int group_size[N_REG_CLASSES];
746 /* For each class, max number of consecutive groups needed.
747 (Each group contains group_size[CLASS] consecutive registers.) */
748 int max_groups[N_REG_CLASSES];
749 /* For each class, max number needed of regs that don't belong
750 to any of the groups. */
751 int max_nongroups[N_REG_CLASSES];
752 /* For each class, the machine mode which requires consecutive
753 groups of regs of that class.
754 If two different modes ever require groups of one class,
755 they must be the same size and equally restrictive for that class,
756 otherwise we can't handle the complexity. */
757 enum machine_mode group_mode[N_REG_CLASSES];
758 /* Record the insn where each maximum need is first found. */
759 rtx max_needs_insn[N_REG_CLASSES];
760 rtx max_groups_insn[N_REG_CLASSES];
761 rtx max_nongroups_insn[N_REG_CLASSES];
762 rtx x;
763 int starting_frame_size = get_frame_size ();
764 int previous_frame_pointer_needed = frame_pointer_needed;
765 static char *reg_class_names[] = REG_CLASS_NAMES;
766
767 something_changed = 0;
768 bzero ((char *) max_needs, sizeof max_needs);
769 bzero ((char *) max_groups, sizeof max_groups);
770 bzero ((char *) max_nongroups, sizeof max_nongroups);
771 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
772 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
773 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
774 bzero ((char *) group_size, sizeof group_size);
775 for (i = 0; i < N_REG_CLASSES; i++)
776 group_mode[i] = VOIDmode;
777
778 /* Keep track of which basic blocks are needing the reloads. */
779 this_block = 0;
780
781 /* Remember whether any element of basic_block_needs
782 changes from 0 to 1 in this pass. */
783 new_basic_block_needs = 0;
784
785 /* Reset all offsets on eliminable registers to their initial values. */
786 #ifdef ELIMINABLE_REGS
787 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
788 {
789 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
790 ep->previous_offset = ep->offset
791 = ep->max_offset = ep->initial_offset;
792 }
793 #else
794 #ifdef INITIAL_FRAME_POINTER_OFFSET
795 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
796 #else
797 if (!FRAME_POINTER_REQUIRED)
798 abort ();
799 reg_eliminate[0].initial_offset = 0;
800 #endif
801 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
802 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
803 #endif
804
805 num_not_at_initial_offset = 0;
806
807 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
808
809 /* Set a known offset for each forced label to be at the initial offset
810 of each elimination. We do this because we assume that all
811 computed jumps occur from a location where each elimination is
812 at its initial offset. */
813
814 for (x = forced_labels; x; x = XEXP (x, 1))
815 if (XEXP (x, 0))
816 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
817
818 /* For each pseudo register that has an equivalent location defined,
819 try to eliminate any eliminable registers (such as the frame pointer)
820 assuming initial offsets for the replacement register, which
821 is the normal case.
822
823 If the resulting location is directly addressable, substitute
824 the MEM we just got directly for the old REG.
825
826 If it is not addressable but is a constant or the sum of a hard reg
827 and constant, it is probably not addressable because the constant is
828 out of range, in that case record the address; we will generate
829 hairy code to compute the address in a register each time it is
830 needed. Similarly if it is a hard register, but one that is not
831 valid as an address register.
832
833 If the location is not addressable, but does not have one of the
834 above forms, assign a stack slot. We have to do this to avoid the
835 potential of producing lots of reloads if, e.g., a location involves
836 a pseudo that didn't get a hard register and has an equivalent memory
837 location that also involves a pseudo that didn't get a hard register.
838
839 Perhaps at some point we will improve reload_when_needed handling
840 so this problem goes away. But that's very hairy. */
841
842 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
843 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
844 {
845 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
846
847 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
848 XEXP (x, 0)))
849 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
850 else if (CONSTANT_P (XEXP (x, 0))
851 || (GET_CODE (XEXP (x, 0)) == REG
852 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
853 || (GET_CODE (XEXP (x, 0)) == PLUS
854 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
855 && (REGNO (XEXP (XEXP (x, 0), 0))
856 < FIRST_PSEUDO_REGISTER)
857 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
858 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
859 else
860 {
861 /* Make a new stack slot. Then indicate that something
862 changed so we go back and recompute offsets for
863 eliminable registers because the allocation of memory
864 below might change some offset. reg_equiv_{mem,address}
865 will be set up for this pseudo on the next pass around
866 the loop. */
867 reg_equiv_memory_loc[i] = 0;
868 reg_equiv_init[i] = 0;
869 alter_reg (i, -1);
870 something_changed = 1;
871 }
872 }
873
874 /* If we allocated another pseudo to the stack, redo elimination
875 bookkeeping. */
876 if (something_changed)
877 continue;
878
879 /* If caller-saves needs a group, initialize the group to include
880 the size and mode required for caller-saves. */
881
882 if (caller_save_group_size > 1)
883 {
884 group_mode[(int) caller_save_spill_class] = Pmode;
885 group_size[(int) caller_save_spill_class] = caller_save_group_size;
886 }
887
888 /* Compute the most additional registers needed by any instruction.
889 Collect information separately for each class of regs. */
890
891 for (insn = first; insn; insn = NEXT_INSN (insn))
892 {
893 if (global && this_block + 1 < n_basic_blocks
894 && insn == basic_block_head[this_block+1])
895 ++this_block;
896
897 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
898 might include REG_LABEL), we need to see what effects this
899 has on the known offsets at labels. */
900
901 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
902 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
903 && REG_NOTES (insn) != 0))
904 set_label_offsets (insn, insn, 0);
905
906 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
907 {
908 /* Nonzero means don't use a reload reg that overlaps
909 the place where a function value can be returned. */
910 rtx avoid_return_reg = 0;
911
912 rtx old_body = PATTERN (insn);
913 int old_code = INSN_CODE (insn);
914 rtx old_notes = REG_NOTES (insn);
915 int did_elimination = 0;
916
917 /* To compute the number of reload registers of each class
918 needed for an insn, we must simulate what choose_reload_regs
919 can do. We do this by splitting an insn into an "input" and
920 an "output" part. RELOAD_OTHER reloads are used in both.
921 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
922 which must be live over the entire input section of reloads,
923 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
924 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
925 inputs.
926
927 The registers needed for output are RELOAD_OTHER and
928 RELOAD_FOR_OUTPUT, which are live for the entire output
929 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
930 reloads for each operand.
931
932 The total number of registers needed is the maximum of the
933 inputs and outputs. */
934
935 struct needs
936 {
937 /* [0] is normal, [1] is nongroup. */
938 int regs[2][N_REG_CLASSES];
939 int groups[N_REG_CLASSES];
940 };
941
942 /* Each `struct needs' corresponds to one RELOAD_... type. */
943 struct {
944 struct needs other;
945 struct needs input;
946 struct needs output;
947 struct needs insn;
948 struct needs other_addr;
949 struct needs op_addr;
950 struct needs op_addr_reload;
951 struct needs in_addr[MAX_RECOG_OPERANDS];
952 struct needs out_addr[MAX_RECOG_OPERANDS];
953 } insn_needs;
954
955 /* If needed, eliminate any eliminable registers. */
956 if (num_eliminable)
957 did_elimination = eliminate_regs_in_insn (insn, 0);
958
959 #ifdef SMALL_REGISTER_CLASSES
960 /* Set avoid_return_reg if this is an insn
961 that might use the value of a function call. */
962 if (GET_CODE (insn) == CALL_INSN)
963 {
964 if (GET_CODE (PATTERN (insn)) == SET)
965 after_call = SET_DEST (PATTERN (insn));
966 else if (GET_CODE (PATTERN (insn)) == PARALLEL
967 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
968 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
969 else
970 after_call = 0;
971 }
972 else if (after_call != 0
973 && !(GET_CODE (PATTERN (insn)) == SET
974 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
975 {
976 if (reg_referenced_p (after_call, PATTERN (insn)))
977 avoid_return_reg = after_call;
978 after_call = 0;
979 }
980 #endif /* SMALL_REGISTER_CLASSES */
981
982 /* Analyze the instruction. */
983 find_reloads (insn, 0, spill_indirect_levels, global,
984 spill_reg_order);
985
986 /* Remember for later shortcuts which insns had any reloads or
987 register eliminations.
988
989 One might think that it would be worthwhile to mark insns
990 that need register replacements but not reloads, but this is
991 not safe because find_reloads may do some manipulation of
992 the insn (such as swapping commutative operands), which would
993 be lost when we restore the old pattern after register
994 replacement. So the actions of find_reloads must be redone in
995 subsequent passes or in reload_as_needed.
996
997 However, it is safe to mark insns that need reloads
998 but not register replacement. */
999
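/* The insn's machine mode field is reused here as a cheap memo:
   QImode marks an insn in which register elimination was needed,
   HImode marks one that needs reloads, an existing DImode marking
   is left alone, and VOIDmode means neither applies.  Later passes
   over the insns test GET_MODE (insn) to see what must be redone.  */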
1000 PUT_MODE (insn, (did_elimination ? QImode
1001 : n_reloads ? HImode
1002 : GET_MODE (insn) == DImode ? DImode
1003 : VOIDmode));
1004
1005 /* Discard any register replacements done. */
1006 if (did_elimination)
1007 {
1008 obstack_free (&reload_obstack, reload_firstobj);
1009 PATTERN (insn) = old_body;
1010 INSN_CODE (insn) = old_code;
1011 REG_NOTES (insn) = old_notes;
1012 something_needs_elimination = 1;
1013 }
1014
1015 /* If this insn has no reloads, we need not do anything except
1016 in the case of a CALL_INSN when we have caller-saves and
1017 caller-save needs reloads. */
1018
1019 if (n_reloads == 0
1020 && ! (GET_CODE (insn) == CALL_INSN
1021 && caller_save_spill_class != NO_REGS))
1022 continue;
1023
1024 something_needs_reloads = 1;
1025 bzero ((char *) &insn_needs, sizeof insn_needs);
1026
1027 /* Count each reload once in every class
1028 containing the reload's own class. */
1029
1030 for (i = 0; i < n_reloads; i++)
1031 {
1032 register enum reg_class *p;
1033 enum reg_class class = reload_reg_class[i];
1034 int size;
1035 enum machine_mode mode;
1036 int nongroup_need;
1037 struct needs *this_needs;
1038
1039 /* Don't count the dummy reloads, for which one of the
1040 regs mentioned in the insn can be used for reloading.
1041 Don't count optional reloads.
1042 Don't count reloads that got combined with others. */
1043 if (reload_reg_rtx[i] != 0
1044 || reload_optional[i] != 0
1045 || (reload_out[i] == 0 && reload_in[i] == 0
1046 && ! reload_secondary_p[i]))
1047 continue;
1048
1049 /* Show that a reload register of this class is needed
1050 in this basic block. We do not use insn_needs and
1051 insn_groups because they are overly conservative for
1052 this purpose. */
1053 if (global && ! basic_block_needs[(int) class][this_block])
1054 {
1055 basic_block_needs[(int) class][this_block] = 1;
1056 new_basic_block_needs = 1;
1057 }
1058
1059
1060 mode = reload_inmode[i];
1061 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1062 mode = reload_outmode[i];
1063 size = CLASS_MAX_NREGS (class, mode);
1064
1065 /* If this class doesn't want a group, determine if we have
1066 a nongroup need or a regular need. We have a nongroup
1067 need if this reload conflicts with a group reload whose
1068 class intersects with this reload's class. */
1069
1070 nongroup_need = 0;
1071 if (size == 1)
1072 for (j = 0; j < n_reloads; j++)
1073 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1074 (GET_MODE_SIZE (reload_outmode[j])
1075 > GET_MODE_SIZE (reload_inmode[j]))
1076 ? reload_outmode[j]
1077 : reload_inmode[j])
1078 > 1)
1079 && (!reload_optional[j])
1080 && (reload_in[j] != 0 || reload_out[j] != 0
1081 || reload_secondary_p[j])
1082 && reloads_conflict (i, j)
1083 && reg_classes_intersect_p (class,
1084 reload_reg_class[j]))
1085 {
1086 nongroup_need = 1;
1087 break;
1088 }
1089
1090 /* Decide which time-of-use to count this reload for. */
1091 switch (reload_when_needed[i])
1092 {
1093 case RELOAD_OTHER:
1094 this_needs = &insn_needs.other;
1095 break;
1096 case RELOAD_FOR_INPUT:
1097 this_needs = &insn_needs.input;
1098 break;
1099 case RELOAD_FOR_OUTPUT:
1100 this_needs = &insn_needs.output;
1101 break;
1102 case RELOAD_FOR_INSN:
1103 this_needs = &insn_needs.insn;
1104 break;
1105 case RELOAD_FOR_OTHER_ADDRESS:
1106 this_needs = &insn_needs.other_addr;
1107 break;
1108 case RELOAD_FOR_INPUT_ADDRESS:
1109 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1110 break;
1111 case RELOAD_FOR_OUTPUT_ADDRESS:
1112 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1113 break;
1114 case RELOAD_FOR_OPERAND_ADDRESS:
1115 this_needs = &insn_needs.op_addr;
1116 break;
1117 case RELOAD_FOR_OPADDR_ADDR:
1118 this_needs = &insn_needs.op_addr_reload;
1119 break;
1120 }
1121
1122 if (size > 1)
1123 {
1124 enum machine_mode other_mode, allocate_mode;
1125
1126 /* Count number of groups needed separately from
1127 number of individual regs needed. */
1128 this_needs->groups[(int) class]++;
1129 p = reg_class_superclasses[(int) class];
1130 while (*p != LIM_REG_CLASSES)
1131 this_needs->groups[(int) *p++]++;
1132
1133 /* Record size and mode of a group of this class. */
1134 /* If more than one size group is needed,
1135 make all groups the largest needed size. */
1136 if (group_size[(int) class] < size)
1137 {
1138 other_mode = group_mode[(int) class];
1139 allocate_mode = mode;
1140
1141 group_size[(int) class] = size;
1142 group_mode[(int) class] = mode;
1143 }
1144 else
1145 {
1146 other_mode = mode;
1147 allocate_mode = group_mode[(int) class];
1148 }
1149
1150 /* Crash if two dissimilar machine modes both need
1151 groups of consecutive regs of the same class. */
1152
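/* fatal_insn prints the offending insn before exiting, which gives a
   far more useful diagnostic than the bare abort it replaces.  */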
1153 if (other_mode != VOIDmode && other_mode != allocate_mode
1154 && ! modes_equiv_for_class_p (allocate_mode,
1155 other_mode, class))
1156 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1157 insn);
1158 }
1159 else if (size == 1)
1160 {
1161 this_needs->regs[nongroup_need][(int) class] += 1;
1162 p = reg_class_superclasses[(int) class];
1163 while (*p != LIM_REG_CLASSES)
1164 this_needs->regs[nongroup_need][(int) *p++] += 1;
1165 }
1166 else
1167 abort ();
1168 }
1169
1170 /* All reloads have been counted for this insn;
1171 now merge the various times of use.
1172 This sets insn_needs, etc., to the maximum total number
1173 of registers needed at any point in this insn. */
1174
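/* In outline, for each class I and for both the normal (j == 0) and
   nongroup (j == 1) counts, the loop below computes

	input  = max (input + op_addr + insn,
		      input + max (op_addr, op_addr_reload,
				   max over k of in_addr[k]))
	output = output + max (insn, max over k of out_addr[k])
	other += max (input, output, other_addr)

   and then repeats the same shape of computation for the group counts.  */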
1175 for (i = 0; i < N_REG_CLASSES; i++)
1176 {
1177 int in_max, out_max;
1178
1179 /* Compute normal and nongroup needs. */
1180 for (j = 0; j <= 1; j++)
1181 {
1182 for (in_max = 0, out_max = 0, k = 0;
1183 k < reload_n_operands; k++)
1184 {
1185 in_max
1186 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1187 out_max
1188 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1189 }
1190
1191 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1192 and operand addresses but not things used to reload
1193 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1194 don't conflict with things needed to reload inputs or
1195 outputs. */
1196
1197 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1198 insn_needs.op_addr_reload.regs[j][i]),
1199 in_max);
1200
1201 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1202
1203 insn_needs.input.regs[j][i]
1204 = MAX (insn_needs.input.regs[j][i]
1205 + insn_needs.op_addr.regs[j][i]
1206 + insn_needs.insn.regs[j][i],
1207 in_max + insn_needs.input.regs[j][i]);
1208
1209 insn_needs.output.regs[j][i] += out_max;
1210 insn_needs.other.regs[j][i]
1211 += MAX (MAX (insn_needs.input.regs[j][i],
1212 insn_needs.output.regs[j][i]),
1213 insn_needs.other_addr.regs[j][i]);
1214
1215 }
1216
1217 /* Now compute group needs. */
1218 for (in_max = 0, out_max = 0, j = 0;
1219 j < reload_n_operands; j++)
1220 {
1221 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1222 out_max
1223 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1224 }
1225
1226 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1227 insn_needs.op_addr_reload.groups[i]),
1228 in_max);
1229 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1230
1231 insn_needs.input.groups[i]
1232 = MAX (insn_needs.input.groups[i]
1233 + insn_needs.op_addr.groups[i]
1234 + insn_needs.insn.groups[i],
1235 in_max + insn_needs.input.groups[i]);
1236
1237 insn_needs.output.groups[i] += out_max;
1238 insn_needs.other.groups[i]
1239 += MAX (MAX (insn_needs.input.groups[i],
1240 insn_needs.output.groups[i]),
1241 insn_needs.other_addr.groups[i]);
1242 }
1243
1244 /* If this is a CALL_INSN and caller-saves will need
1245 a spill register, act as if the spill register is
1246 needed for this insn. However, the spill register
1247 can be used by any reload of this insn, so we only
1248 need do something if no need for that class has
1249 been recorded.
1250
1251 The assumption that every CALL_INSN will trigger a
1252 caller-save is highly conservative, however, the number
1253 of cases where caller-saves will need a spill register but
1254 a block containing a CALL_INSN won't need a spill register
1255 of that class should be quite rare.
1256
1257 If a group is needed, the size and mode of the group will
1258 have been set up at the beginning of this loop. */
1259
1260 if (GET_CODE (insn) == CALL_INSN
1261 && caller_save_spill_class != NO_REGS)
1262 {
1263 /* See if this register would conflict with any reload
1264 that needs a group. */
1265 int nongroup_need = 0;
1266 int *caller_save_needs;
1267
1268 for (j = 0; j < n_reloads; j++)
1269 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1270 (GET_MODE_SIZE (reload_outmode[j])
1271 > GET_MODE_SIZE (reload_inmode[j]))
1272 ? reload_outmode[j]
1273 : reload_inmode[j])
1274 > 1)
1275 && reg_classes_intersect_p (caller_save_spill_class,
1276 reload_reg_class[j]))
1277 {
1278 nongroup_need = 1;
1279 break;
1280 }
1281
1282 caller_save_needs
1283 = (caller_save_group_size > 1
1284 ? insn_needs.other.groups
1285 : insn_needs.other.regs[nongroup_need]);
1286
1287 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1288 {
1289 register enum reg_class *p
1290 = reg_class_superclasses[(int) caller_save_spill_class];
1291
1292 caller_save_needs[(int) caller_save_spill_class]++;
1293
1294 while (*p != LIM_REG_CLASSES)
1295 caller_save_needs[(int) *p++] += 1;
1296 }
1297
1298 /* Show that this basic block will need a register of
1299 this class. */
1300
1301 if (global
1302 && ! (basic_block_needs[(int) caller_save_spill_class]
1303 [this_block]))
1304 {
1305 basic_block_needs[(int) caller_save_spill_class]
1306 [this_block] = 1;
1307 new_basic_block_needs = 1;
1308 }
1309 }
1310
1311 #ifdef SMALL_REGISTER_CLASSES
1312 /* If this insn stores the value of a function call,
1313 and that value is in a register that has been spilled,
1314 and if the insn needs a reload in a class
1315 that might use that register as the reload register,
1316 then add an extra need in that class.
1317 This makes sure we have a register available that does
1318 not overlap the return value. */
1319
1320 if (avoid_return_reg)
1321 {
1322 int regno = REGNO (avoid_return_reg);
1323 int nregs
1324 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1325 int r;
1326 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1327
1328 /* First compute the "basic needs", which counts a
1329 need only in the smallest class in which it
1330 is required. */
1331
1332 bcopy (insn_needs.other.regs[0], basic_needs,
1333 sizeof basic_needs);
1334 bcopy (insn_needs.other.groups, basic_groups,
1335 sizeof basic_groups);
1336
1337 for (i = 0; i < N_REG_CLASSES; i++)
1338 {
1339 enum reg_class *p;
1340
1341 if (basic_needs[i] >= 0)
1342 for (p = reg_class_superclasses[i];
1343 *p != LIM_REG_CLASSES; p++)
1344 basic_needs[(int) *p] -= basic_needs[i];
1345
1346 if (basic_groups[i] >= 0)
1347 for (p = reg_class_superclasses[i];
1348 *p != LIM_REG_CLASSES; p++)
1349 basic_groups[(int) *p] -= basic_groups[i];
1350 }
1351
1352 /* Now count extra regs if there might be a conflict with
1353 the return value register.
1354
1355 ??? This is not quite correct because we don't properly
1356 handle the case of groups, but if we end up doing
1357 something wrong, it either will end up not mattering or
1358 we will abort elsewhere. */
1359
1360 for (r = regno; r < regno + nregs; r++)
1361 if (spill_reg_order[r] >= 0)
1362 for (i = 0; i < N_REG_CLASSES; i++)
1363 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1364 {
1365 if (basic_needs[i] > 0 || basic_groups[i] > 0)
1366 {
1367 enum reg_class *p;
1368
1369 insn_needs.other.regs[0][i]++;
1370 p = reg_class_superclasses[i];
1371 while (*p != LIM_REG_CLASSES)
1372 insn_needs.other.regs[0][(int) *p++]++;
1373 }
1374 }
1375 }
1376 #endif /* SMALL_REGISTER_CLASSES */
1377
1378 /* For each class, collect maximum need of any insn. */
1379
1380 for (i = 0; i < N_REG_CLASSES; i++)
1381 {
1382 if (max_needs[i] < insn_needs.other.regs[0][i])
1383 {
1384 max_needs[i] = insn_needs.other.regs[0][i];
1385 max_needs_insn[i] = insn;
1386 }
1387 if (max_groups[i] < insn_needs.other.groups[i])
1388 {
1389 max_groups[i] = insn_needs.other.groups[i];
1390 max_groups_insn[i] = insn;
1391 }
1392 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1393 {
1394 max_nongroups[i] = insn_needs.other.regs[1][i];
1395 max_nongroups_insn[i] = insn;
1396 }
1397 }
1398 }
1399 /* Note that there is a continue statement above. */
1400 }
1401
1402 /* If we allocated any new memory locations, make another pass
1403 since it might have changed elimination offsets. */
1404 if (starting_frame_size != get_frame_size ())
1405 something_changed = 1;
1406
1407 if (dumpfile)
1408 for (i = 0; i < N_REG_CLASSES; i++)
1409 {
1410 if (max_needs[i] > 0)
1411 fprintf (dumpfile,
1412 ";; Need %d reg%s of class %s (for insn %d).\n",
1413 max_needs[i], max_needs[i] == 1 ? "" : "s",
1414 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1415 if (max_nongroups[i] > 0)
1416 fprintf (dumpfile,
1417 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1418 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1419 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1420 if (max_groups[i] > 0)
1421 fprintf (dumpfile,
1422 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1423 max_groups[i], max_groups[i] == 1 ? "" : "s",
1424 mode_name[(int) group_mode[i]],
1425 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1426 }
1427
1428 /* If we have caller-saves, set up the save areas and see if caller-save
1429 will need a spill register. */
1430
1431 if (caller_save_needed
1432 && ! setup_save_areas (&something_changed)
1433 && caller_save_spill_class == NO_REGS)
1434 {
1435 /* The class we will need depends on whether the machine
1436 supports the sum of two registers for an address; see
1437 find_address_reloads for details. */
1438
1439 caller_save_spill_class
1440 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1441 caller_save_group_size
1442 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1443 something_changed = 1;
1444 }
1445
1446 /* See if anything that happened changes which eliminations are valid.
1447 For example, on the Sparc, whether or not the frame pointer can
1448 be eliminated can depend on what registers have been used. We need
1449 not check some conditions again (such as flag_omit_frame_pointer)
1450 since they can't have changed. */
1451
1452 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1453 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1454 #ifdef ELIMINABLE_REGS
1455 || ! CAN_ELIMINATE (ep->from, ep->to)
1456 #endif
1457 )
1458 ep->can_eliminate = 0;
1459
1460 /* Look for the case where we have discovered that we can't replace
1461 register A with register B and that means that we will now be
1462 trying to replace register A with register C. This means we can
1463 no longer replace register C with register B and we need to disable
1464 such an elimination, if it exists. This occurs often with A == ap,
1465 B == sp, and C == fp. */
1466
1467 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1468 {
1469 struct elim_table *op;
1470 register int new_to = -1;
1471
1472 if (! ep->can_eliminate && ep->can_eliminate_previous)
1473 {
1474 /* Find the current elimination for ep->from, if there is a
1475 new one. */
1476 for (op = reg_eliminate;
1477 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1478 if (op->from == ep->from && op->can_eliminate)
1479 {
1480 new_to = op->to;
1481 break;
1482 }
1483
1484 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1485 disable it. */
1486 for (op = reg_eliminate;
1487 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1488 if (op->from == new_to && op->to == ep->to)
1489 op->can_eliminate = 0;
1490 }
1491 }
1492
1493 /* See if any registers that we thought we could eliminate the previous
1494 time are no longer eliminable. If so, something has changed and we
1495 must spill the register. Also, recompute the number of eliminable
1496 registers and see if the frame pointer is needed; it is if there is
1497 no elimination of the frame pointer that we can perform. */
1498
1499 frame_pointer_needed = 1;
1500 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1501 {
1502 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1503 && ep->to != HARD_FRAME_POINTER_REGNUM)
1504 frame_pointer_needed = 0;
1505
1506 if (! ep->can_eliminate && ep->can_eliminate_previous)
1507 {
1508 ep->can_eliminate_previous = 0;
1509 spill_hard_reg (ep->from, global, dumpfile, 1);
1510 something_changed = 1;
1511 num_eliminable--;
1512 }
1513 }
1514
1515 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1516 /* If we didn't need a frame pointer last time, but we do now, spill
1517 the hard frame pointer. */
1518 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1519 {
1520 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1521 something_changed = 1;
1522 }
1523 #endif
1524
1525 /* If all needs are met, we win. */
1526
1527 for (i = 0; i < N_REG_CLASSES; i++)
1528 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1529 break;
1530 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1531 break;
1532
1533 /* Not all needs are met; must spill some hard regs. */
1534
1535 /* Put all registers spilled so far back in potential_reload_regs, but
1536 put them at the front, since we've already spilled most of the
1537 pseudos in them (we might have left some pseudos unspilled if they
1538 were in a block that didn't need any spill registers of a conflicting
1539 class). We used to try to mark off the need for those registers,
1540 but doing so properly is very complex and reallocating them is the
1541 simpler approach. First, "pack" potential_reload_regs by pushing
1542 any nonnegative entries towards the end. That will leave room
1543 for the registers we already spilled.
1544
1545 Also, undo the marking of the spill registers from the last time
1546 around in FORBIDDEN_REGS since we will probably be allocating
1547 them again below.
1548
1549 ??? It is theoretically possible that we might end up not using one
1550 of our previously-spilled registers in this allocation, even though
1551 they are at the head of the list. It's not clear what to do about
1552 this, but it was no better before, when we marked off the needs met
1553 by the previously-spilled registers. With the current code, globals
1554 can be allocated into these registers, but locals cannot. */
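/* As a worked illustration of the packing below (register numbers are
   made up and assume FIRST_PSEUDO_REGISTER == 8): suppose regs 2 and 5
   were spilled last time, so spill_regs is { 2, 5 } and
       potential_reload_regs = { 4, -1, 6, -1, 7, 3, 0, 1 }.
   Packing the nonnegative entries toward the end gives
       { 4, -1, 4, 6, 7, 3, 0, 1 }
   (the first two slots are now dead), and re-adding the old spill regs
   at the front yields
       { 2, 5, 4, 6, 7, 3, 0, 1 }.  */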
1555
1556 if (n_spills)
1557 {
1558 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1559 if (potential_reload_regs[i] != -1)
1560 potential_reload_regs[j--] = potential_reload_regs[i];
1561
1562 for (i = 0; i < n_spills; i++)
1563 {
1564 potential_reload_regs[i] = spill_regs[i];
1565 spill_reg_order[spill_regs[i]] = -1;
1566 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1567 }
1568
1569 n_spills = 0;
1570 }
1571
1572 /* Now find more reload regs to satisfy the remaining need.
1573 Do it by ascending class number, since otherwise a reg
1574 might be spilled for a big class and might fail to count
1575 for a smaller class even though it belongs to that class.
1576
1577 Count spilled regs in `spills', and add entries to
1578 `spill_regs' and `spill_reg_order'.
1579
1580 ??? Note there is a problem here.
1581 When there is a need for a group in a high-numbered class,
1582 and also need for non-group regs that come from a lower class,
1583 the non-group regs are chosen first. If there aren't many regs,
1584 they might leave no room for a group.
1585
1586 This was happening on the 386. To fix it, we added the code
1587 that calls possible_group_p, so that the lower class won't
1588 break up the last possible group.
1589
1590 Really fixing the problem would require changes above
1591 in counting the regs already spilled, and in choose_reload_regs.
1592 It might be hard to avoid introducing bugs there. */
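/* A hypothetical illustration of the ordering problem described above
   (classes and register numbers are made up): suppose a low-numbered
   class containing regs { 0, 1, 2 } needs one non-group reg, and a
   higher-numbered class needs a group of two that, because of
   bad_spill_regs and mode restrictions, can only be formed from regs 0
   and 1.  The non-group need is handled first; without the
   possible_group_p check it might take reg 0 or 1 and make the group
   impossible, whereas with the check reg 2 is taken instead and the
   pair 0/1 survives for the group.  */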
1593
1594 CLEAR_HARD_REG_SET (counted_for_groups);
1595 CLEAR_HARD_REG_SET (counted_for_nongroups);
1596
1597 for (class = 0; class < N_REG_CLASSES; class++)
1598 {
1599 /* First get the groups of registers.
1600 If we got single registers first, we might fragment
1601 possible groups. */
1602 while (max_groups[class] > 0)
1603 {
1604 /* If any single spilled regs happen to form groups,
1605 count them now. Maybe we don't really need
1606 to spill another group. */
1607 count_possible_groups (group_size, group_mode, max_groups,
1608 class);
1609
1610 if (max_groups[class] <= 0)
1611 break;
1612
1613 /* Groups of size 2 (the only groups used on most machines)
1614 are treated specially. */
1615 if (group_size[class] == 2)
1616 {
1617 /* First, look for a register that will complete a group. */
1618 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1619 {
1620 int other;
1621
1622 j = potential_reload_regs[i];
1623 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1624 &&
1625 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1626 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1627 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1628 && HARD_REGNO_MODE_OK (other, group_mode[class])
1629 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1630 other)
1631 /* We don't want one part of another group.
1632 We could get "two groups" that overlap! */
1633 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1634 ||
1635 (j < FIRST_PSEUDO_REGISTER - 1
1636 && (other = j + 1, spill_reg_order[other] >= 0)
1637 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1638 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1639 && HARD_REGNO_MODE_OK (j, group_mode[class])
1640 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1641 other)
1642 && ! TEST_HARD_REG_BIT (counted_for_groups,
1643 other))))
1644 {
1645 register enum reg_class *p;
1646
1647 /* We have found one that will complete a group,
1648 so count off one group as provided. */
1649 max_groups[class]--;
1650 p = reg_class_superclasses[class];
1651 while (*p != LIM_REG_CLASSES)
1652 max_groups[(int) *p++]--;
1653
1654 /* Indicate both these regs are part of a group. */
1655 SET_HARD_REG_BIT (counted_for_groups, j);
1656 SET_HARD_REG_BIT (counted_for_groups, other);
1657 break;
1658 }
1659 }
1660 /* We can't complete a group, so start one. */
1661 #ifdef SMALL_REGISTER_CLASSES
1662 /* Look for a pair neither of which is explicitly used. */
1663 if (i == FIRST_PSEUDO_REGISTER)
1664 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1665 {
1666 int k;
1667 j = potential_reload_regs[i];
1668 /* Verify that J+1 is a potential reload reg. */
1669 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1670 if (potential_reload_regs[k] == j + 1)
1671 break;
1672 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1673 && k < FIRST_PSEUDO_REGISTER
1674 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1675 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1676 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1677 && HARD_REGNO_MODE_OK (j, group_mode[class])
1678 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1679 j + 1)
1680 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1681 /* Reject J at this stage
1682 if J+1 was explicitly used. */
1683 && ! regs_explicitly_used[j + 1])
1684 break;
1685 }
1686 #endif
1687 /* Now try any group at all
1688 whose registers are not in bad_spill_regs. */
1689 if (i == FIRST_PSEUDO_REGISTER)
1690 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1691 {
1692 int k;
1693 j = potential_reload_regs[i];
1694 /* Verify that J+1 is a potential reload reg. */
1695 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1696 if (potential_reload_regs[k] == j + 1)
1697 break;
1698 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1699 && k < FIRST_PSEUDO_REGISTER
1700 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1701 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1702 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1703 && HARD_REGNO_MODE_OK (j, group_mode[class])
1704 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1705 j + 1)
1706 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1707 break;
1708 }
1709
1710 /* I should be the index in potential_reload_regs
1711 of the new reload reg we have found. */
1712
1713 if (i >= FIRST_PSEUDO_REGISTER)
1714 {
1715 /* There are no groups left to spill. */
1716 spill_failure (max_groups_insn[class]);
1717 failure = 1;
1718 goto failed;
1719 }
1720 else
1721 something_changed
1722 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1723 global, dumpfile);
1724 }
1725 else
1726 {
1727 /* For groups of more than 2 registers,
1728 look for a sufficient sequence of unspilled registers,
1729 and spill them all at once. */
1730 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1731 {
1732 int k;
1733
1734 j = potential_reload_regs[i];
1735 if (j >= 0
1736 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1737 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1738 {
1739 /* Check each reg in the sequence. */
1740 for (k = 0; k < group_size[class]; k++)
1741 if (! (spill_reg_order[j + k] < 0
1742 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1743 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1744 break;
1745 /* We got a full sequence, so spill them all. */
1746 if (k == group_size[class])
1747 {
1748 register enum reg_class *p;
1749 for (k = 0; k < group_size[class]; k++)
1750 {
1751 int idx;
1752 SET_HARD_REG_BIT (counted_for_groups, j + k);
1753 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1754 if (potential_reload_regs[idx] == j + k)
1755 break;
1756 something_changed
1757 |= new_spill_reg (idx, class,
1758 max_needs, NULL_PTR,
1759 global, dumpfile);
1760 }
1761
1762 /* We have found one that will complete a group,
1763 so count off one group as provided. */
1764 max_groups[class]--;
1765 p = reg_class_superclasses[class];
1766 while (*p != LIM_REG_CLASSES)
1767 max_groups[(int) *p++]--;
1768
1769 break;
1770 }
1771 }
1772 }
1773 /* We couldn't find any registers for this reload.
1774 Avoid going into an infinite loop. */
1775 if (i >= FIRST_PSEUDO_REGISTER)
1776 {
1777 /* There are no groups left. */
1778 spill_failure (max_groups_insn[class]);
1779 failure = 1;
1780 goto failed;
1781 }
1782 }
1783 }
1784
1785 /* Now similarly satisfy all need for single registers. */
1786
1787 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1788 {
1789 #ifdef SMALL_REGISTER_CLASSES
1790 /* This should be right for all machines, but only the 386
1791 is known to need it, so this conditional plays safe.
1792 ??? For 2.5, try making this unconditional. */
1793 /* If we spilled enough regs, but they weren't counted
1794 against the non-group need, see if we can count them now.
1795 If so, we can avoid some actual spilling. */
1796 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1797 for (i = 0; i < n_spills; i++)
1798 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1799 spill_regs[i])
1800 && !TEST_HARD_REG_BIT (counted_for_groups,
1801 spill_regs[i])
1802 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1803 spill_regs[i])
1804 && max_nongroups[class] > 0)
1805 {
1806 register enum reg_class *p;
1807
1808 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1809 max_nongroups[class]--;
1810 p = reg_class_superclasses[class];
1811 while (*p != LIM_REG_CLASSES)
1812 max_nongroups[(int) *p++]--;
1813 }
1814 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1815 break;
1816 #endif
1817
1818 /* Consider the potential reload regs that aren't
1819 yet in use as reload regs, in order of preference.
1820 Find the most preferred one that's in this class. */
1821
1822 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1823 if (potential_reload_regs[i] >= 0
1824 && TEST_HARD_REG_BIT (reg_class_contents[class],
1825 potential_reload_regs[i])
1826 /* If this reg will not be available for groups,
1827 pick one that does not foreclose possible groups.
1828 This is a kludge, and not very general,
1829 but it should be sufficient to make the 386 work,
1830 and the problem should not occur on machines with
1831 more registers. */
1832 && (max_nongroups[class] == 0
1833 || possible_group_p (potential_reload_regs[i], max_groups)))
1834 break;
1835
1836 /* If we couldn't get a register, try to get one even if we
1837 might foreclose possible groups. This may cause problems
1838 later, but that's better than aborting now, since it is
1839 possible that we will, in fact, be able to form the needed
1840 group even with this allocation. */
1841
1842 if (i >= FIRST_PSEUDO_REGISTER
1843 && (asm_noperands (max_needs[class] > 0
1844 ? max_needs_insn[class]
1845 : max_nongroups_insn[class])
1846 < 0))
1847 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1848 if (potential_reload_regs[i] >= 0
1849 && TEST_HARD_REG_BIT (reg_class_contents[class],
1850 potential_reload_regs[i]))
1851 break;
1852
1853 /* I should be the index in potential_reload_regs
1854 of the new reload reg we have found. */
1855
1856 if (i >= FIRST_PSEUDO_REGISTER)
1857 {
1858 /* There are no possible registers left to spill. */
1859 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1860 : max_nongroups_insn[class]);
1861 failure = 1;
1862 goto failed;
1863 }
1864 else
1865 something_changed
1866 |= new_spill_reg (i, class, max_needs, max_nongroups,
1867 global, dumpfile);
1868 }
1869 }
1870 }
1871
1872 /* If global-alloc was run, notify it of any register eliminations we have
1873 done. */
1874 if (global)
1875 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1876 if (ep->can_eliminate)
1877 mark_elimination (ep->from, ep->to);
1878
1879 /* Insert code to save and restore call-clobbered hard regs
1880 around calls. Tell it what mode to use so that we will process
1881 those insns in reload_as_needed if we have to. */
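/* Note that, as far as this caller is concerned, the mode passed below
   appears to act only as a three-way flag for save_call_clobbered_regs:
   QImode when register eliminations are pending, HImode when a spill
   class was set up for caller-save address reloads, VOIDmode otherwise.
   (This is a reading of the call as written, not a separately
   documented interface.)  */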
1882
1883 if (caller_save_needed)
1884 save_call_clobbered_regs (num_eliminable ? QImode
1885 : caller_save_spill_class != NO_REGS ? HImode
1886 : VOIDmode);
1887
1888 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1889 If that insn didn't set the register (i.e., it copied the register to
1890 memory), just delete that insn instead of the equivalencing insn plus
1891 anything now dead. If we call delete_dead_insn on that insn, we may
1892 delete the insn that actually sets the register if the register dies
1893 there and that is incorrect. */
1894
1895 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1896 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1897 && GET_CODE (reg_equiv_init[i]) != NOTE)
1898 {
1899 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1900 delete_dead_insn (reg_equiv_init[i]);
1901 else
1902 {
1903 PUT_CODE (reg_equiv_init[i], NOTE);
1904 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1905 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1906 }
1907 }
1908
1909 /* Use the reload registers where necessary
1910 by generating move instructions to move the must-be-register
1911 values into or out of the reload registers. */
1912
1913 if (something_needs_reloads || something_needs_elimination
1914 || (caller_save_needed && num_eliminable)
1915 || caller_save_spill_class != NO_REGS)
1916 reload_as_needed (first, global);
1917
1918 /* If we were able to eliminate the frame pointer, show that it is no
1919 longer live at the start of any basic block. If it is live by
1920 virtue of being in a pseudo, that pseudo will be marked live
1921 and hence the frame pointer will be known to be live via that
1922 pseudo. */
1923
1924 if (! frame_pointer_needed)
1925 for (i = 0; i < n_basic_blocks; i++)
1926 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1927 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1928 % REGSET_ELT_BITS));
1929
1930 /* Come here (with failure set nonzero) if we can't get enough spill regs
1931 and we decide not to abort because of it. */
1932 failed:
1933
1934 reload_in_progress = 0;
1935
1936 /* Now eliminate all pseudo regs by modifying them into
1937 their equivalent memory references.
1938 The REG-rtx's for the pseudos are modified in place,
1939 so all insns that used to refer to them now refer to memory.
1940
1941 For a reg that has a reg_equiv_address, all those insns
1942 were changed by reloading so that no insns refer to it any longer;
1943 but the DECL_RTL of a variable decl may refer to it,
1944 and if so this causes the debugging info to mention the variable. */
1945
1946 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1947 {
1948 rtx addr = 0;
1949 int in_struct = 0;
1950 if (reg_equiv_mem[i])
1951 {
1952 addr = XEXP (reg_equiv_mem[i], 0);
1953 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1954 }
1955 if (reg_equiv_address[i])
1956 addr = reg_equiv_address[i];
1957 if (addr)
1958 {
1959 if (reg_renumber[i] < 0)
1960 {
1961 rtx reg = regno_reg_rtx[i];
1962 XEXP (reg, 0) = addr;
1963 REG_USERVAR_P (reg) = 0;
1964 MEM_IN_STRUCT_P (reg) = in_struct;
1965 PUT_CODE (reg, MEM);
1966 }
1967 else if (reg_equiv_mem[i])
1968 XEXP (reg_equiv_mem[i], 0) = addr;
1969 }
1970 }
1971
1972 #ifdef PRESERVE_DEATH_INFO_REGNO_P
1973 /* Make a pass over all the insns and remove death notes for things that
1974 are no longer registers or no longer die in the insn (e.g., an input
1975 and output pseudo being tied). */
1976
1977 for (insn = first; insn; insn = NEXT_INSN (insn))
1978 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1979 {
1980 rtx note, next;
1981
1982 for (note = REG_NOTES (insn); note; note = next)
1983 {
1984 next = XEXP (note, 1);
1985 if (REG_NOTE_KIND (note) == REG_DEAD
1986 && (GET_CODE (XEXP (note, 0)) != REG
1987 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1988 remove_note (insn, note);
1989 }
1990 }
1991 #endif
1992
1993 /* Indicate that we no longer have known memory locations or constants. */
1994 reg_equiv_constant = 0;
1995 reg_equiv_memory_loc = 0;
1996
1997 if (scratch_list)
1998 free (scratch_list);
1999 scratch_list = 0;
2000 if (scratch_block)
2001 free (scratch_block);
2002 scratch_block = 0;
2003
2004 return failure;
2005 }
2006 \f
2007 /* Nonzero if, after spilling reg REGNO for non-groups,
2008 it will still be possible to find a group if we still need one. */
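/* A made-up example of the test below: suppose the only remaining group
   need is one pair in a class containing hard regs 0-3, none of which
   has been spilled yet, REGNO is 2, and reg 0 is in bad_spill_regs.
   Pairs (1,2) and (2,3) are skipped because they contain REGNO, (0,1)
   fails because reg 0 may not be spilled, and (3,4) falls outside the
   class, so we return 0: spilling reg 2 for a non-group need would
   foreclose the last possible group.  Had REGNO been 0, the pair (2,3)
   would still be available and we would return 1.  */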
2009
2010 static int
2011 possible_group_p (regno, max_groups)
2012 int regno;
2013 int *max_groups;
2014 {
2015 int i;
2016 int class = (int) NO_REGS;
2017
2018 for (i = 0; i < (int) N_REG_CLASSES; i++)
2019 if (max_groups[i] > 0)
2020 {
2021 class = i;
2022 break;
2023 }
2024
2025 if (class == (int) NO_REGS)
2026 return 1;
2027
2028 /* Consider each pair of consecutive registers. */
2029 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2030 {
2031 /* Ignore pairs that include reg REGNO. */
2032 if (i == regno || i + 1 == regno)
2033 continue;
2034
2035 /* Ignore pairs that are outside the class that needs the group.
2036 ??? Here we fail to handle the case where two different classes
2037 independently need groups. But this never happens with our
2038 current machine descriptions. */
2039 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2040 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2041 continue;
2042
2043 /* A pair of consecutive regs we can still spill does the trick. */
2044 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2045 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2046 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2047 return 1;
2048
2049 /* A pair of one already spilled and one we can spill does it
2050 provided the one already spilled is not otherwise reserved. */
2051 if (spill_reg_order[i] < 0
2052 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2053 && spill_reg_order[i + 1] >= 0
2054 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2055 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2056 return 1;
2057 if (spill_reg_order[i + 1] < 0
2058 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2059 && spill_reg_order[i] >= 0
2060 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2061 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2062 return 1;
2063 }
2064
2065 return 0;
2066 }
2067 \f
2068 /* Count any groups of CLASS that can be formed from the registers recently
2069 spilled. */
2070
2071 static void
2072 count_possible_groups (group_size, group_mode, max_groups, class)
2073 int *group_size;
2074 enum machine_mode *group_mode;
2075 int *max_groups;
2076 int class;
2077 {
2078 HARD_REG_SET new;
2079 int i, j;
2080
2081 /* Now find all consecutive groups of spilled registers
2082 and mark each group off against the need for such groups.
2083 But don't count them against ordinary need, yet. */
2084
2085 if (group_size[class] == 0)
2086 return;
2087
2088 CLEAR_HARD_REG_SET (new);
2089
2090 /* Make a mask of all the regs that are spill regs in class CLASS. */
2091 for (i = 0; i < n_spills; i++)
2092 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2093 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2094 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2095 SET_HARD_REG_BIT (new, spill_regs[i]);
2096
2097 /* Find each consecutive group of them. */
2098 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2099 if (TEST_HARD_REG_BIT (new, i)
2100 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2101 && HARD_REGNO_MODE_OK (i, group_mode[class]))
2102 {
2103 for (j = 1; j < group_size[class]; j++)
2104 if (! TEST_HARD_REG_BIT (new, i + j))
2105 break;
2106
2107 if (j == group_size[class])
2108 {
2109 /* We found a group. Mark it off against this class's need for
2110 groups, and against each superclass too. */
2111 register enum reg_class *p;
2112
2113 max_groups[class]--;
2114 p = reg_class_superclasses[class];
2115 while (*p != LIM_REG_CLASSES)
2116 max_groups[(int) *p++]--;
2117
2118 /* Don't count these registers again. */
2119 for (j = 0; j < group_size[class]; j++)
2120 SET_HARD_REG_BIT (counted_for_groups, i + j);
2121 }
2122
2123 /* Skip to the last reg in this group. When i is incremented above,
2124 it will then point to the first reg of the next possible group. */
2125 i += j - 1;
2126 }
2127 }
2128 \f
2129 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2130 another mode that needs to be reloaded for the same register class CLASS.
2131 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2132 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2133
2134 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2135 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2136 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2137 causes unnecessary failures on machines requiring alignment of register
2138 groups when the two modes are different sizes, because the larger mode has
2139 more strict alignment rules than the smaller mode. */
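/* For instance (hypothetical target): if CLASS is a class of
   floating-point regs, ALLOCATE_MODE is DFmode, OTHER_MODE is SFmode,
   and every reg in the class that accepts DFmode also accepts SFmode,
   we return 1 and, roughly, the two needs can be counted against the
   same spill regs.  If some reg accepted DFmode but not SFmode, we
   would return 0, since a reg spilled for the DFmode need might be
   useless for the SFmode need.  */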
2140
2141 static int
2142 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2143 enum machine_mode allocate_mode, other_mode;
2144 enum reg_class class;
2145 {
2146 register int regno;
2147 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2148 {
2149 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2150 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2151 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2152 return 0;
2153 }
2154 return 1;
2155 }
2156
2157 /* Handle the failure to find a register to spill.
2158 INSN should be one of the insns which needed this particular spill reg. */
2159
2160 static void
2161 spill_failure (insn)
2162 rtx insn;
2163 {
2164 if (asm_noperands (PATTERN (insn)) >= 0)
2165 error_for_asm (insn, "`asm' needs too many reloads");
2166 else
2167 fatal_insn ("Unable to find a register to spill.", insn);
2168 }
2169
2170 /* Add a new register to the tables of available spill-registers
2171 (as well as spilling all pseudos allocated to the register).
2172 I is the index of this register in potential_reload_regs.
2173 CLASS is the regclass whose need is being satisfied.
2174 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2175 so that this register can count off against them.
2176 MAX_NONGROUPS is 0 if this register is part of a group.
2177 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2178
2179 static int
2180 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2181 int i;
2182 int class;
2183 int *max_needs;
2184 int *max_nongroups;
2185 int global;
2186 FILE *dumpfile;
2187 {
2188 register enum reg_class *p;
2189 int val;
2190 int regno = potential_reload_regs[i];
2191
2192 if (i >= FIRST_PSEUDO_REGISTER)
2193 abort (); /* Caller failed to find any register. */
2194
2195 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2196 fatal ("fixed or forbidden register was spilled.\n\
2197 This may be due to a compiler bug or to impossible asm\n\
2198 statements or clauses.");
2199
2200 /* Make reg REGNO an additional reload reg. */
2201
2202 potential_reload_regs[i] = -1;
2203 spill_regs[n_spills] = regno;
2204 spill_reg_order[regno] = n_spills;
2205 if (dumpfile)
2206 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2207
2208 /* Clear off the needs we just satisfied. */
2209
2210 max_needs[class]--;
2211 p = reg_class_superclasses[class];
2212 while (*p != LIM_REG_CLASSES)
2213 max_needs[(int) *p++]--;
2214
2215 if (max_nongroups && max_nongroups[class] > 0)
2216 {
2217 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2218 max_nongroups[class]--;
2219 p = reg_class_superclasses[class];
2220 while (*p != LIM_REG_CLASSES)
2221 max_nongroups[(int) *p++]--;
2222 }
2223
2224 /* Spill every pseudo reg that was allocated to this reg
2225 or to something that overlaps this reg. */
2226
2227 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2228
2229 /* If there are some registers still to eliminate and this register
2230 wasn't ever used before, additional stack space may have to be
2231 allocated to store this register. Thus, we may have changed the offset
2232 between the stack and frame pointers, so mark that something has changed.
2233 (If new pseudos were spilled, thus requiring more space, VAL would have
2234 been set non-zero by the call to spill_hard_reg above since additional
2235 reloads may be needed in that case.)
2236
2237 One might think that we need only set VAL to 1 if this is a call-used
2238 register. However, the set of registers that must be saved by the
2239 prologue is not identical to the call-used set. For example, the
2240 register used by the call insn for the return PC is a call-used register,
2241 but must be saved by the prologue. */
2242 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2243 val = 1;
2244
2245 regs_ever_live[spill_regs[n_spills]] = 1;
2246 n_spills++;
2247
2248 return val;
2249 }
2250 \f
2251 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2252 data that is dead in INSN. */
2253
2254 static void
2255 delete_dead_insn (insn)
2256 rtx insn;
2257 {
2258 rtx prev = prev_real_insn (insn);
2259 rtx prev_dest;
2260
2261 /* If the previous insn sets a register that dies in our insn, delete it
2262 too. */
2263 if (prev && GET_CODE (PATTERN (prev)) == SET
2264 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2265 && reg_mentioned_p (prev_dest, PATTERN (insn))
2266 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2267 delete_dead_insn (prev);
2268
2269 PUT_CODE (insn, NOTE);
2270 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2271 NOTE_SOURCE_FILE (insn) = 0;
2272 }
2273
2274 /* Modify the home of pseudo-reg I.
2275 The new home is present in reg_renumber[I].
2276
2277 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2278 or it may be -1, meaning there is none or it is not relevant.
2279 This is used so that all pseudos spilled from a given hard reg
2280 can share one stack slot. */
2281
2282 static void
2283 alter_reg (i, from_reg)
2284 register int i;
2285 int from_reg;
2286 {
2287 /* When outputting an inline function, this can happen
2288 for a reg that isn't actually used. */
2289 if (regno_reg_rtx[i] == 0)
2290 return;
2291
2292 /* If the reg got changed to a MEM at rtl-generation time,
2293 ignore it. */
2294 if (GET_CODE (regno_reg_rtx[i]) != REG)
2295 return;
2296
2297 /* Modify the reg-rtx to contain the new hard reg
2298 number or else to contain its pseudo reg number. */
2299 REGNO (regno_reg_rtx[i])
2300 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2301
2302 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2303 allocate a stack slot for it. */
2304
2305 if (reg_renumber[i] < 0
2306 && reg_n_refs[i] > 0
2307 && reg_equiv_constant[i] == 0
2308 && reg_equiv_memory_loc[i] == 0)
2309 {
2310 register rtx x;
2311 int inherent_size = PSEUDO_REGNO_BYTES (i);
2312 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2313 int adjust = 0;
2314
2315 /* Each pseudo reg has an inherent size which comes from its own mode,
2316 and a total size which provides room for paradoxical subregs
2317 which refer to the pseudo reg in wider modes.
2318
2319 We can use a slot already allocated if it provides both
2320 enough inherent space and enough total space.
2321 Otherwise, we allocate a new slot, making sure that it has no less
2322 inherent space, and no less total space, than the previous slot. */
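/* For example (sizes are for a hypothetical 32-bit target): an SImode
   pseudo has an inherent size of 4, but if it is also referenced through
   a paradoxical DImode subreg its reg_max_ref_width is 8, so TOTAL_SIZE
   is 8.  An existing 8-byte slot whose mode is at least 4 bytes wide can
   then be reused; a 4-byte slot cannot.  */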
2323 if (from_reg == -1)
2324 {
2325 /* No known place to spill from => no slot to reuse. */
2326 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2327 #if BYTES_BIG_ENDIAN
2328 /* Cancel the big-endian correction done in assign_stack_local.
2329 Get the address of the beginning of the slot.
2330 This is so we can do a big-endian correction unconditionally
2331 below. */
2332 adjust = inherent_size - total_size;
2333 #endif
2334 }
2335 /* Reuse a stack slot if possible. */
2336 else if (spill_stack_slot[from_reg] != 0
2337 && spill_stack_slot_width[from_reg] >= total_size
2338 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2339 >= inherent_size))
2340 x = spill_stack_slot[from_reg];
2341 /* Allocate a bigger slot. */
2342 else
2343 {
2344 /* Compute maximum size needed, both for inherent size
2345 and for total size. */
2346 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2347 if (spill_stack_slot[from_reg])
2348 {
2349 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2350 > inherent_size)
2351 mode = GET_MODE (spill_stack_slot[from_reg]);
2352 if (spill_stack_slot_width[from_reg] > total_size)
2353 total_size = spill_stack_slot_width[from_reg];
2354 }
2355 /* Make a slot with that size. */
2356 x = assign_stack_local (mode, total_size, -1);
2357 #if BYTES_BIG_ENDIAN
2358 /* Cancel the big-endian correction done in assign_stack_local.
2359 Get the address of the beginning of the slot.
2360 This is so we can do a big-endian correction unconditionally
2361 below. */
2362 adjust = GET_MODE_SIZE (mode) - total_size;
2363 #endif
2364 spill_stack_slot[from_reg] = x;
2365 spill_stack_slot_width[from_reg] = total_size;
2366 }
2367
2368 #if BYTES_BIG_ENDIAN
2369 /* On a big endian machine, the "address" of the slot
2370 is the address of the low part that fits its inherent mode. */
2371 if (inherent_size < total_size)
2372 adjust += (total_size - inherent_size);
2373 #endif /* BYTES_BIG_ENDIAN */
2374
2375 /* If we have any adjustment to make, or if the stack slot is the
2376 wrong mode, make a new stack slot. */
2377 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2378 {
2379 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2380 plus_constant (XEXP (x, 0), adjust));
2381 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2382 }
2383
2384 /* Save the stack slot for later. */
2385 reg_equiv_memory_loc[i] = x;
2386 }
2387 }
2388
2389 /* Mark the slots in regs_ever_live for the hard regs
2390 used by pseudo-reg number REGNO. */
2391
2392 void
2393 mark_home_live (regno)
2394 int regno;
2395 {
2396 register int i, lim;
2397 i = reg_renumber[regno];
2398 if (i < 0)
2399 return;
2400 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2401 while (i < lim)
2402 regs_ever_live[i++] = 1;
2403 }
2404
2405 /* Mark the registers used in SCRATCH as being live. */
2406
2407 static void
2408 mark_scratch_live (scratch)
2409 rtx scratch;
2410 {
2411 register int i;
2412 int regno = REGNO (scratch);
2413 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2414
2415 for (i = regno; i < lim; i++)
2416 regs_ever_live[i] = 1;
2417 }
2418 \f
2419 /* This function handles the tracking of elimination offsets around branches.
2420
2421 X is a piece of RTL being scanned.
2422
2423 INSN is the insn that it came from, if any.
2424
2425 INITIAL_P is non-zero if we are to set the offset to be the initial
2426 offset and zero if we are setting the offset of the label to be the
2427 current offset. */
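/* Hypothetical example (offsets are made up): suppose the sp-for-fp
   elimination has initial offset 8 but the current offset is 12 when we
   scan a branch to label L.  We record 12 as L's offset for that
   elimination; if another path later reaches L with offset 8, the two
   disagree and the elimination is disabled, because code arriving at L
   would not know which offset to use.  */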
2428
2429 static void
2430 set_label_offsets (x, insn, initial_p)
2431 rtx x;
2432 rtx insn;
2433 int initial_p;
2434 {
2435 enum rtx_code code = GET_CODE (x);
2436 rtx tem;
2437 int i;
2438 struct elim_table *p;
2439
2440 switch (code)
2441 {
2442 case LABEL_REF:
2443 if (LABEL_REF_NONLOCAL_P (x))
2444 return;
2445
2446 x = XEXP (x, 0);
2447
2448 /* ... fall through ... */
2449
2450 case CODE_LABEL:
2451 /* If we know nothing about this label, set the desired offsets. Note
2452 that this sets the offset at a label to be the offset before a label
2453 if we don't know anything about the label. This is not correct for
2454 the label after a BARRIER, but is the best guess we can make. If
2455 we guessed wrong, we will suppress an elimination that might have
2456 been possible had we been able to guess correctly. */
2457
2458 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2459 {
2460 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2461 offsets_at[CODE_LABEL_NUMBER (x)][i]
2462 = (initial_p ? reg_eliminate[i].initial_offset
2463 : reg_eliminate[i].offset);
2464 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2465 }
2466
2467 /* Otherwise, if this is the definition of a label and it is
2468 preceded by a BARRIER, set our offsets to the known offset of
2469 that label. */
2470
2471 else if (x == insn
2472 && (tem = prev_nonnote_insn (insn)) != 0
2473 && GET_CODE (tem) == BARRIER)
2474 {
2475 num_not_at_initial_offset = 0;
2476 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2477 {
2478 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2479 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2480 if (reg_eliminate[i].can_eliminate
2481 && (reg_eliminate[i].offset
2482 != reg_eliminate[i].initial_offset))
2483 num_not_at_initial_offset++;
2484 }
2485 }
2486
2487 else
2488 /* If neither of the above cases is true, compare each offset
2489 with those previously recorded and suppress any eliminations
2490 where the offsets disagree. */
2491
2492 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2493 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2494 != (initial_p ? reg_eliminate[i].initial_offset
2495 : reg_eliminate[i].offset))
2496 reg_eliminate[i].can_eliminate = 0;
2497
2498 return;
2499
2500 case JUMP_INSN:
2501 set_label_offsets (PATTERN (insn), insn, initial_p);
2502
2503 /* ... fall through ... */
2504
2505 case INSN:
2506 case CALL_INSN:
2507 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2508 and hence must have all eliminations at their initial offsets. */
2509 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2510 if (REG_NOTE_KIND (tem) == REG_LABEL)
2511 set_label_offsets (XEXP (tem, 0), insn, 1);
2512 return;
2513
2514 case ADDR_VEC:
2515 case ADDR_DIFF_VEC:
2516 /* Each of the labels in the address vector must be at their initial
2517 offsets. We want the first field for ADDR_VEC and the second
2518 field for ADDR_DIFF_VEC. */
2519
2520 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2521 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2522 insn, initial_p);
2523 return;
2524
2525 case SET:
2526 /* We only care about setting PC. If the source is not RETURN,
2527 IF_THEN_ELSE, or a label, disable any eliminations not at
2528 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2529 isn't one of those possibilities. For branches to a label,
2530 call ourselves recursively.
2531
2532 Note that this can disable elimination unnecessarily when we have
2533 a non-local goto since it will look like a non-constant jump to
2534 someplace in the current function. This isn't a significant
2535 problem since such jumps will normally be when all elimination
2536 pairs are back to their initial offsets. */
2537
2538 if (SET_DEST (x) != pc_rtx)
2539 return;
2540
2541 switch (GET_CODE (SET_SRC (x)))
2542 {
2543 case PC:
2544 case RETURN:
2545 return;
2546
2547 case LABEL_REF:
2548 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2549 return;
2550
2551 case IF_THEN_ELSE:
2552 tem = XEXP (SET_SRC (x), 1);
2553 if (GET_CODE (tem) == LABEL_REF)
2554 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2555 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2556 break;
2557
2558 tem = XEXP (SET_SRC (x), 2);
2559 if (GET_CODE (tem) == LABEL_REF)
2560 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2561 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2562 break;
2563 return;
2564 }
2565
2566 /* If we reach here, all eliminations must be at their initial
2567 offset because we are doing a jump to a variable address. */
2568 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2569 if (p->offset != p->initial_offset)
2570 p->can_eliminate = 0;
2571 }
2572 }
2573 \f
2574 /* Used for communication between the next two functions to properly share
2575 the vector for an ASM_OPERANDS. */
2576
2577 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2578
2579 /* Scan X and replace any eliminable registers (such as fp) with a
2580 replacement (such as sp), plus an offset.
2581
2582 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2583 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2584 MEM, we are allowed to replace a sum of a register and the constant zero
2585 with the register, which we cannot do outside a MEM. In addition, we need
2586 to record the fact that a register is referenced outside a MEM.
2587
2588 If INSN is an insn, it is the insn containing X. If we replace a REG
2589 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2590 CLOBBER of the pseudo after INSN so find_equiv_regs will know
2591 that the REG is being modified.
2592
2593 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2594 That's used when we eliminate in expressions stored in notes.
2595 This means, do not set ref_outside_mem even if the reference
2596 is outside of MEMs.
2597
2598 If we see a modification to a register we know about, take the
2599 appropriate action (see case SET, below).
2600
2601 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2602 replacements done assuming all offsets are at their initial values. If
2603 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2604 encounter, return the actual location so that find_reloads will do
2605 the proper thing. */
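/* A small example of the basic substitution (offsets are made up):
   if the frame pointer is being eliminated in favor of the stack pointer
   and the current offset of that elimination is 16, then

       (mem:SI (plus:SI (reg fp) (const_int 8)))

   is rewritten below as

       (mem:SI (plus:SI (reg sp) (const_int 24))).  */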
2606
2607 rtx
2608 eliminate_regs (x, mem_mode, insn)
2609 rtx x;
2610 enum machine_mode mem_mode;
2611 rtx insn;
2612 {
2613 enum rtx_code code = GET_CODE (x);
2614 struct elim_table *ep;
2615 int regno;
2616 rtx new;
2617 int i, j;
2618 char *fmt;
2619 int copied = 0;
2620
2621 switch (code)
2622 {
2623 case CONST_INT:
2624 case CONST_DOUBLE:
2625 case CONST:
2626 case SYMBOL_REF:
2627 case CODE_LABEL:
2628 case PC:
2629 case CC0:
2630 case ASM_INPUT:
2631 case ADDR_VEC:
2632 case ADDR_DIFF_VEC:
2633 case RETURN:
2634 return x;
2635
2636 case REG:
2637 regno = REGNO (x);
2638
2639 /* First handle the case where we encounter a bare register that
2640 is eliminable. Replace it with a PLUS. */
2641 if (regno < FIRST_PSEUDO_REGISTER)
2642 {
2643 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2644 ep++)
2645 if (ep->from_rtx == x && ep->can_eliminate)
2646 {
2647 if (! mem_mode
2648 /* Refs inside notes don't count for this purpose. */
2649 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2650 || GET_CODE (insn) == INSN_LIST)))
2651 ep->ref_outside_mem = 1;
2652 return plus_constant (ep->to_rtx, ep->previous_offset);
2653 }
2654
2655 }
2656 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2657 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2658 {
2659 /* In this case, find_reloads would attempt to either use an
2660 incorrect address (if something is not at its initial offset)
2661 or substitute a replaced address into an insn (which loses
2662 if the offset is changed by some later action). So we simply
2663 return the replaced stack slot (assuming it is changed by
2664 elimination) and ignore the fact that this is actually a
2665 reference to the pseudo. Ensure we make a copy of the
2666 address in case it is shared. */
2667 new = eliminate_regs (reg_equiv_memory_loc[regno],
2668 mem_mode, insn);
2669 if (new != reg_equiv_memory_loc[regno])
2670 {
2671 cannot_omit_stores[regno] = 1;
2672 return copy_rtx (new);
2673 }
2674 }
2675 return x;
2676
2677 case PLUS:
2678 /* If this is the sum of an eliminable register and a constant, rework
2679 the sum. */
2680 if (GET_CODE (XEXP (x, 0)) == REG
2681 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2682 && CONSTANT_P (XEXP (x, 1)))
2683 {
2684 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2685 ep++)
2686 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2687 {
2688 if (! mem_mode
2689 /* Refs inside notes don't count for this purpose. */
2690 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2691 || GET_CODE (insn) == INSN_LIST)))
2692 ep->ref_outside_mem = 1;
2693
2694 /* The only time we want to replace a PLUS with a REG (this
2695 occurs when the constant operand of the PLUS is the negative
2696 of the offset) is when we are inside a MEM. We won't want
2697 to do so at other times because that would change the
2698 structure of the insn in a way that reload can't handle.
2699 We special-case the commonest situation in
2700 eliminate_regs_in_insn, so just replace a PLUS with a
2701 PLUS here, unless inside a MEM. */
2702 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2703 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2704 return ep->to_rtx;
2705 else
2706 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2707 plus_constant (XEXP (x, 1),
2708 ep->previous_offset));
2709 }
2710
2711 /* If the register is not eliminable, we are done since the other
2712 operand is a constant. */
2713 return x;
2714 }
2715
2716 /* If this is part of an address, we want to bring any constant to the
2717 outermost PLUS. We will do this by doing register replacement in
2718 our operands and seeing if a constant shows up in one of them.
2719
2720 We assume here this is part of an address (or a "load address" insn)
2721 since an eliminable register is not likely to appear in any other
2722 context.
2723
2724 If we have (plus (eliminable) (reg)), we want to produce
2725 (plus (plus (replacement) (reg)) (const)). If this was part of a
2726 normal add insn, (plus (replacement) (reg)) will be pushed as a
2727 reload. This is the desired action. */
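/* For instance (made-up registers, fp eliminated to sp at offset 16):
   (plus (reg fp) (reg 3)) has its operands rewritten to
   (plus (reg sp) (const_int 16)) and (reg 3); form_sum then brings the
   constant to the outermost PLUS, giving
   (plus (plus (reg sp) (reg 3)) (const_int 16)), which find_reloads can
   handle as an address or push as a reload.  */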
2728
2729 {
2730 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2731 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2732
2733 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2734 {
2735 /* If one side is a PLUS and the other side is a pseudo that
2736 didn't get a hard register but has a reg_equiv_constant,
2737 we must replace the constant here since it may no longer
2738 be in the position of any operand. */
2739 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2740 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2741 && reg_renumber[REGNO (new1)] < 0
2742 && reg_equiv_constant != 0
2743 && reg_equiv_constant[REGNO (new1)] != 0)
2744 new1 = reg_equiv_constant[REGNO (new1)];
2745 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2746 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2747 && reg_renumber[REGNO (new0)] < 0
2748 && reg_equiv_constant[REGNO (new0)] != 0)
2749 new0 = reg_equiv_constant[REGNO (new0)];
2750
2751 new = form_sum (new0, new1);
2752
2753 /* As above, if we are not inside a MEM we do not want to
2754 turn a PLUS into something else. We might try to do so here
2755 for an addition of 0 if we aren't optimizing. */
2756 if (! mem_mode && GET_CODE (new) != PLUS)
2757 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2758 else
2759 return new;
2760 }
2761 }
2762 return x;
2763
2764 case MULT:
2765 /* If this is the product of an eliminable register and a
2766 constant, apply the distribute law and move the constant out
2767 so that we have (plus (mult ..) ..). This is needed in order
2768 to keep load-address insns valid. This case is pathological.
2769 We ignore the possibility of overflow here. */
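/* For instance (made-up offset): with fp eliminated to sp at offset 16,
   (mult (reg fp) (const_int 4)) becomes
   (plus (mult (reg sp) (const_int 4)) (const_int 64)),
   keeping the constant where a load-address pattern expects it.  */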
2770 if (GET_CODE (XEXP (x, 0)) == REG
2771 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2772 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2773 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2774 ep++)
2775 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2776 {
2777 if (! mem_mode
2778 /* Refs inside notes don't count for this purpose. */
2779 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2780 || GET_CODE (insn) == INSN_LIST)))
2781 ep->ref_outside_mem = 1;
2782
2783 return
2784 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2785 ep->previous_offset * INTVAL (XEXP (x, 1)));
2786 }
2787
2788 /* ... fall through ... */
2789
2790 case CALL:
2791 case COMPARE:
2792 case MINUS:
2793 case DIV: case UDIV:
2794 case MOD: case UMOD:
2795 case AND: case IOR: case XOR:
2796 case ROTATERT: case ROTATE:
2797 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2798 case NE: case EQ:
2799 case GE: case GT: case GEU: case GTU:
2800 case LE: case LT: case LEU: case LTU:
2801 {
2802 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2803 rtx new1
2804 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2805
2806 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2807 return gen_rtx (code, GET_MODE (x), new0, new1);
2808 }
2809 return x;
2810
2811 case EXPR_LIST:
2812 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2813 if (XEXP (x, 0))
2814 {
2815 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2816 if (new != XEXP (x, 0))
2817 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2818 }
2819
2820 /* ... fall through ... */
2821
2822 case INSN_LIST:
2823 /* Now do eliminations in the rest of the chain. If this was
2824 an EXPR_LIST, this might result in allocating more memory than is
2825 strictly needed, but it simplifies the code. */
2826 if (XEXP (x, 1))
2827 {
2828 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2829 if (new != XEXP (x, 1))
2830 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2831 }
2832 return x;
2833
2834 case PRE_INC:
2835 case POST_INC:
2836 case PRE_DEC:
2837 case POST_DEC:
2838 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2839 if (ep->to_rtx == XEXP (x, 0))
2840 {
2841 int size = GET_MODE_SIZE (mem_mode);
2842
2843 /* If more bytes than MEM_MODE are pushed, account for them. */
2844 #ifdef PUSH_ROUNDING
2845 if (ep->to_rtx == stack_pointer_rtx)
2846 size = PUSH_ROUNDING (size);
2847 #endif
2848 if (code == PRE_DEC || code == POST_DEC)
2849 ep->offset += size;
2850 else
2851 ep->offset -= size;
2852 }
2853
2854 /* Fall through to generic unary operation case. */
2855 case USE:
2856 case STRICT_LOW_PART:
2857 case NEG: case NOT:
2858 case SIGN_EXTEND: case ZERO_EXTEND:
2859 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2860 case FLOAT: case FIX:
2861 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2862 case ABS:
2863 case SQRT:
2864 case FFS:
2865 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2866 if (new != XEXP (x, 0))
2867 return gen_rtx (code, GET_MODE (x), new);
2868 return x;
2869
2870 case SUBREG:
2871 /* Similar to above processing, but preserve SUBREG_WORD.
2872 Convert (subreg (mem)) to (mem) if not paradoxical.
2873 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2874 pseudo didn't get a hard reg, we must replace this with the
2875 eliminated version of the memory location because push_reloads
2876 may do the replacement in certain circumstances. */
2877 if (GET_CODE (SUBREG_REG (x)) == REG
2878 && (GET_MODE_SIZE (GET_MODE (x))
2879 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2880 && reg_equiv_memory_loc != 0
2881 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2882 {
2883 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2884 mem_mode, insn);
2885
2886 /* If we didn't change anything, we must retain the pseudo. */
2887 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2888 new = SUBREG_REG (x);
2889 else
2890 {
2891 /* Otherwise, ensure NEW isn't shared in case we have to reload
2892 it. */
2893 new = copy_rtx (new);
2894
2895 /* In this case, we must show that the pseudo is used in this
2896 insn so that delete_output_reload will do the right thing. */
2897 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2898 && GET_CODE (insn) != INSN_LIST)
2899 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
2900 insn);
2901 }
2902 }
2903 else
2904 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2905
2906 if (new != XEXP (x, 0))
2907 {
2908 if (GET_CODE (new) == MEM
2909 && (GET_MODE_SIZE (GET_MODE (x))
2910 <= GET_MODE_SIZE (GET_MODE (new)))
2911 #ifdef LOAD_EXTEND_OP
2912 /* On these machines we will be reloading what is
2913 inside the SUBREG if it originally was a pseudo and
2914 the inner and outer modes are both a word or
2915 smaller. So leave the SUBREG then. */
2916 && ! (GET_CODE (SUBREG_REG (x)) == REG
2917 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2918 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD)
2919 #endif
2920 )
2921 {
2922 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2923 enum machine_mode mode = GET_MODE (x);
2924
2925 #if BYTES_BIG_ENDIAN
2926 offset += (MIN (UNITS_PER_WORD,
2927 GET_MODE_SIZE (GET_MODE (new)))
2928 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2929 #endif
2930
2931 PUT_MODE (new, mode);
2932 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2933 return new;
2934 }
2935 else
2936 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2937 }
2938
2939 return x;
2940
2941 case CLOBBER:
2942 /* If clobbering a register that is the replacement register for an
2943 elimination we still think can be performed, note that it cannot
2944 be performed. Otherwise, we need not be concerned about it. */
2945 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2946 if (ep->to_rtx == XEXP (x, 0))
2947 ep->can_eliminate = 0;
2948
2949 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2950 if (new != XEXP (x, 0))
2951 return gen_rtx (code, GET_MODE (x), new);
2952 return x;
2953
2954 case ASM_OPERANDS:
2955 {
2956 rtx *temp_vec;
2957 /* Properly handle sharing input and constraint vectors. */
2958 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2959 {
2960 /* When we come to a new vector not seen before,
2961 scan all its elements; keep the old vector if none
2962 of them changes; otherwise, make a copy. */
2963 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2964 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2965 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2966 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
2967 mem_mode, insn);
2968
2969 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2970 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2971 break;
2972
2973 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2974 new_asm_operands_vec = old_asm_operands_vec;
2975 else
2976 new_asm_operands_vec
2977 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2978 }
2979
2980 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2981 if (new_asm_operands_vec == old_asm_operands_vec)
2982 return x;
2983
2984 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2985 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2986 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2987 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2988 ASM_OPERANDS_SOURCE_FILE (x),
2989 ASM_OPERANDS_SOURCE_LINE (x));
2990 new->volatil = x->volatil;
2991 return new;
2992 }
2993
2994 case SET:
2995 /* Check for setting a register that we know about. */
2996 if (GET_CODE (SET_DEST (x)) == REG)
2997 {
2998 /* See if this is setting the replacement register for an
2999 elimination.
3000
3001 If DEST is the hard frame pointer, we do nothing because we
3002 assume that all assignments to the frame pointer are for
3003 non-local gotos and are being done at a time when they are valid
3004 and do not disturb anything else. Some machines want to
3005 eliminate a fake argument pointer (or even a fake frame pointer)
3006 with either the real frame or the stack pointer. Assignments to
3007 the hard frame pointer must not prevent this elimination. */
3008
3009 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3010 ep++)
3011 if (ep->to_rtx == SET_DEST (x)
3012 && SET_DEST (x) != hard_frame_pointer_rtx)
3013 {
3014 /* If it is being incremented, adjust the offset. Otherwise,
3015 this elimination can't be done. */
3016 rtx src = SET_SRC (x);
3017
3018 if (GET_CODE (src) == PLUS
3019 && XEXP (src, 0) == SET_DEST (x)
3020 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3021 ep->offset -= INTVAL (XEXP (src, 1));
3022 else
3023 ep->can_eliminate = 0;
3024 }
3025
3026 /* Now check to see if we are assigning to a register that can be
3027 eliminated. If so, it must be as part of a PARALLEL, since we
3028 will not have been called if this is a single SET. So indicate
3029 that we can no longer eliminate this reg. */
3030 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3031 ep++)
3032 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3033 ep->can_eliminate = 0;
3034 }
3035
3036 /* Now avoid the loop below in this common case. */
3037 {
3038 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3039 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3040
3041 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3042 write a CLOBBER insn. */
3043 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3044 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3045 && GET_CODE (insn) != INSN_LIST)
3046 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3047
3048 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3049 return gen_rtx (SET, VOIDmode, new0, new1);
3050 }
3051
3052 return x;
3053
3054 case MEM:
3055 /* Our only special processing is to pass the mode of the MEM to our
3056 recursive call and copy the flags. While we are here, handle this
3057 case more efficiently. */
3058 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3059 if (new != XEXP (x, 0))
3060 {
3061 new = gen_rtx (MEM, GET_MODE (x), new);
3062 new->volatil = x->volatil;
3063 new->unchanging = x->unchanging;
3064 new->in_struct = x->in_struct;
3065 return new;
3066 }
3067 else
3068 return x;
3069 }
3070
3071 /* Process each of our operands recursively. If any have changed, make a
3072 copy of the rtx. */
3073 fmt = GET_RTX_FORMAT (code);
3074 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3075 {
3076 if (*fmt == 'e')
3077 {
3078 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3079 if (new != XEXP (x, i) && ! copied)
3080 {
3081 rtx new_x = rtx_alloc (code);
3082 bcopy ((char *) x, (char *) new_x,
3083 (sizeof (*new_x) - sizeof (new_x->fld)
3084 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3085 x = new_x;
3086 copied = 1;
3087 }
3088 XEXP (x, i) = new;
3089 }
3090 else if (*fmt == 'E')
3091 {
3092 int copied_vec = 0;
3093 for (j = 0; j < XVECLEN (x, i); j++)
3094 {
3095 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3096 if (new != XVECEXP (x, i, j) && ! copied_vec)
3097 {
3098 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3099 &XVECEXP (x, i, 0));
3100 if (! copied)
3101 {
3102 rtx new_x = rtx_alloc (code);
3103 bcopy ((char *) x, (char *) new_x,
3104 (sizeof (*new_x) - sizeof (new_x->fld)
3105 + (sizeof (new_x->fld[0])
3106 * GET_RTX_LENGTH (code))));
3107 x = new_x;
3108 copied = 1;
3109 }
3110 XVEC (x, i) = new_v;
3111 copied_vec = 1;
3112 }
3113 XVECEXP (x, i, j) = new;
3114 }
3115 }
3116 }
3117
3118 return x;
3119 }
3120 \f
3121 /* Scan INSN and eliminate all eliminable registers in it.
3122
3123 If REPLACE is nonzero, do the replacement destructively. Also
3124 delete the insn as dead if it is setting an eliminable register.
3125
3126 If REPLACE is zero, do all our allocations in reload_obstack.
3127
3128 If no eliminations were done and this insn doesn't require any elimination
3129 processing (these are not identical conditions: it might be updating sp,
3130 but not referencing fp; this needs to be seen during reload_as_needed so
3131 that the offset between fp and sp can be taken into consideration), zero
3132 is returned. Otherwise, 1 is returned. */
3133
3134 static int
3135 eliminate_regs_in_insn (insn, replace)
3136 rtx insn;
3137 int replace;
3138 {
3139 rtx old_body = PATTERN (insn);
3140 rtx old_set = single_set (insn);
3141 rtx new_body;
3142 int val = 0;
3143 struct elim_table *ep;
3144
3145 if (! replace)
3146 push_obstacks (&reload_obstack, &reload_obstack);
3147
3148 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3149 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3150 {
3151 /* Check for setting an eliminable register. */
3152 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3153 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3154 {
3155 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3156 /* If this is setting the frame pointer register to the
3157 hardware frame pointer register and this is an elimination
3158 that will be done (tested above), this insn is really
3159 adjusting the frame pointer downward to compensate for
3160 the adjustment done before a nonlocal goto. */
3161 if (ep->from == FRAME_POINTER_REGNUM
3162 && ep->to == HARD_FRAME_POINTER_REGNUM)
3163 {
3164 rtx src = SET_SRC (old_set);
3165 int offset, ok = 0;
3166
3167 if (src == ep->to_rtx)
3168 offset = 0, ok = 1;
3169 else if (GET_CODE (src) == PLUS
3170 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3171 offset = INTVAL (XEXP (src, 0)), ok = 1;
3172
3173 if (ok)
3174 {
3175 if (replace)
3176 {
3177 rtx src
3178 = plus_constant (ep->to_rtx, offset - ep->offset);
3179
3180 /* First see if this insn remains valid when we
3181 make the change. If not, keep the INSN_CODE
3182 the same and let reload fix it up. */
3183 validate_change (insn, &SET_SRC (old_set), src, 1);
3184 validate_change (insn, &SET_DEST (old_set),
3185 ep->to_rtx, 1);
3186 if (! apply_change_group ())
3187 {
3188 SET_SRC (old_set) = src;
3189 SET_DEST (old_set) = ep->to_rtx;
3190 }
3191 }
3192
3193 val = 1;
3194 goto done;
3195 }
3196 }
3197 #endif
3198
3199 /* In this case this insn isn't serving a useful purpose. We
3200 will delete it in reload_as_needed once we know that this
3201 elimination is, in fact, being done.
3202
3203 If REPLACE isn't set, we can't delete this insn, but needn't
3204 process it since it won't be used unless something changes. */
3205 if (replace)
3206 delete_dead_insn (insn);
3207 val = 1;
3208 goto done;
3209 }
3210
3211 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3212 in the insn is the negative of the offset in FROM. Substitute
3213 (set (reg) (reg to)) for the insn and change its code.
3214
3215 We have to do this here, rather than in eliminate_regs, so that we can
3216 change the insn code. */
3217
3218 if (GET_CODE (SET_SRC (old_set)) == PLUS
3219 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3220 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3221 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3222 ep++)
3223 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3224 && ep->can_eliminate)
3225 {
3226 /* We must stop at the first elimination that will be used.
3227 If this one would replace the PLUS with a REG, do it
3228 now. Otherwise, quit the loop and let eliminate_regs
3229 do its normal replacement. */
3230 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3231 {
3232 /* We assume here that we don't need a PARALLEL of
3233 any CLOBBERs for this assignment. There's not
3234 much we can do if we do need it. */
3235 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3236 SET_DEST (old_set), ep->to_rtx);
3237 INSN_CODE (insn) = -1;
3238 val = 1;
3239 goto done;
3240 }
3241
3242 break;
3243 }
3244 }
3245
3246 old_asm_operands_vec = 0;
3247
3248 /* Replace the body of this insn with a substituted form. If we changed
3249 something, return non-zero.
3250
3251 If we are replacing a body that was a (set X (plus Y Z)), try to
3252 re-recognize the insn. We do this in case we had a simple addition
3253 but now can do this as a load-address. This saves an insn in this
3254 common case. */
3255
3256 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3257 if (new_body != old_body)
3258 {
3259 /* If we aren't replacing things permanently and we changed something,
3260 make another copy to ensure that all the RTL is new. Otherwise
3261 things can go wrong if find_reloads swaps commutative operands
3262 and one is inside RTL that has been copied while the other is not. */
3263
3264 /* Don't copy an asm_operands because (1) there's no need and (2)
3265 copy_rtx can't do it properly when there are multiple outputs. */
3266 if (! replace && asm_noperands (old_body) < 0)
3267 new_body = copy_rtx (new_body);
3268
3269 /* If we had a move insn but now we don't, rerecognize it. This will
3270 cause spurious re-recognition if the old move had a PARALLEL since
3271 the new one still will, but we can't call single_set without
3272 having put NEW_BODY into the insn and the re-recognition won't
3273 hurt in this rare case. */
3274 if (old_set != 0
3275 && ((GET_CODE (SET_SRC (old_set)) == REG
3276 && (GET_CODE (new_body) != SET
3277 || GET_CODE (SET_SRC (new_body)) != REG))
3278 /* If this was a load from or store to memory, compare
3279 the MEM in recog_operand to the one in the insn. If they
3280 are not equal, then rerecognize the insn. */
3281 || (old_set != 0
3282 && ((GET_CODE (SET_SRC (old_set)) == MEM
3283 && SET_SRC (old_set) != recog_operand[1])
3284 || (GET_CODE (SET_DEST (old_set)) == MEM
3285 && SET_DEST (old_set) != recog_operand[0])))
3286 /* If this was an add insn before, rerecognize. */
3287 || GET_CODE (SET_SRC (old_set)) == PLUS))
3288 {
3289 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3290 /* If recognition fails, store the new body anyway.
3291 It's normal to have recognition failures here
3292 due to bizarre memory addresses; reloading will fix them. */
3293 PATTERN (insn) = new_body;
3294 }
3295 else
3296 PATTERN (insn) = new_body;
3297
3298 val = 1;
3299 }
3300
3301 /* Loop through all elimination pairs. See if any have changed and
3302 recalculate the number not at initial offset.
3303
3304 Compute the maximum offset (minimum offset if the stack does not
3305 grow downward) for each elimination pair.
3306
3307 We also detect cases where register elimination cannot be done,
3308 namely, if a register would be both changed and referenced outside a MEM
3309 in the resulting insn since such an insn is often undefined and, even if
3310 not, we cannot know what meaning will be given to it. Note that it is
3311 valid to have a register used in an address in an insn that changes it
3312 (presumably with a pre- or post-increment or decrement).
3313
3314 If anything changes, return nonzero. */
3315
3316 num_not_at_initial_offset = 0;
3317 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3318 {
3319 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3320 ep->can_eliminate = 0;
3321
3322 ep->ref_outside_mem = 0;
3323
3324 if (ep->previous_offset != ep->offset)
3325 val = 1;
3326
3327 ep->previous_offset = ep->offset;
3328 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3329 num_not_at_initial_offset++;
3330
3331 #ifdef STACK_GROWS_DOWNWARD
3332 ep->max_offset = MAX (ep->max_offset, ep->offset);
3333 #else
3334 ep->max_offset = MIN (ep->max_offset, ep->offset);
3335 #endif
3336 }
3337
3338 done:
3339 /* If we changed something, perform elimination in REG_NOTES. This is
3340 needed even when REPLACE is zero because a REG_DEAD note might refer
3341 to a register that we eliminate and could cause a different number
3342 of spill registers to be needed in the final reload pass than in
3343 the pre-passes. */
3344 if (val && REG_NOTES (insn) != 0)
3345 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3346
3347 if (! replace)
3348 pop_obstacks ();
3349
3350 return val;
3351 }
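/* Worked example of the sign convention tested above (hypothetical numbers,
   no particular target): if the elimination fp -> sp currently has
   ep->offset == 16, i.e. fp == sp + 16, then an insn computing
   (set r (plus fp -16)) is just r = sp, so the whole PLUS collapses to
   ep->to_rtx -- exactly the `ep->offset == - INTVAL (...)' check.  This
   minimal sketch is standalone and kept under `#if 0'.  */
#if 0
#include <stdio.h>

int
main (void)
{
  long elim_offset = 16;     /* fp = sp + 16 at this point (assumed).  */
  long insn_constant = -16;  /* The CONST_INT inside the PLUS.  */

  if (elim_offset == - insn_constant)
    printf ("collapse (set r (plus fp %ld)) into (set r sp)\n", insn_constant);
  else
    printf ("leave the PLUS for eliminate_regs to rewrite\n");
  return 0;
}
#endif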
3352
3353 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3354 replacement we currently believe is valid, mark it as not eliminable if X
3355 modifies DEST in any way other than by adding a constant integer to it.
3356
3357 If DEST is the frame pointer, we do nothing because we assume that
3358 all assignments to the hard frame pointer are nonlocal gotos and are being
3359 done at a time when they are valid and do not disturb anything else.
3360 Some machines want to eliminate a fake argument pointer with either the
3361 frame or stack pointer. Assignments to the hard frame pointer must not
3362 prevent this elimination.
3363
3364 Called via note_stores from reload before starting its passes to scan
3365 the insns of the function. */
3366
3367 static void
3368 mark_not_eliminable (dest, x)
3369 rtx dest;
3370 rtx x;
3371 {
3372 register int i;
3373
3374 /* A SUBREG of a hard register here is just changing its mode. We should
3375 not see a SUBREG of an eliminable hard register, but check just in
3376 case. */
3377 if (GET_CODE (dest) == SUBREG)
3378 dest = SUBREG_REG (dest);
3379
3380 if (dest == hard_frame_pointer_rtx)
3381 return;
3382
3383 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3384 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3385 && (GET_CODE (x) != SET
3386 || GET_CODE (SET_SRC (x)) != PLUS
3387 || XEXP (SET_SRC (x), 0) != dest
3388 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3389 {
3390 reg_eliminate[i].can_eliminate_previous
3391 = reg_eliminate[i].can_eliminate = 0;
3392 num_eliminable--;
3393 }
3394 }
3395 \f
3396 /* Kick all pseudos out of hard register REGNO.
3397 If GLOBAL is nonzero, try to find someplace else to put them.
3398 If DUMPFILE is nonzero, log actions taken on that file.
3399
3400 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3401 because we found we can't eliminate some register. In that case, no pseudos
3402 are allowed to be in the register, even if they are only in a block that
3403 doesn't require spill registers, unlike the case when we are spilling this
3404 hard reg to produce another spill register.
3405
3406 Return nonzero if any pseudos needed to be kicked out. */
3407
3408 static int
3409 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3410 register int regno;
3411 int global;
3412 FILE *dumpfile;
3413 int cant_eliminate;
3414 {
3415 enum reg_class class = REGNO_REG_CLASS (regno);
3416 int something_changed = 0;
3417 register int i;
3418
3419 SET_HARD_REG_BIT (forbidden_regs, regno);
3420
3421 if (cant_eliminate)
3422 regs_ever_live[regno] = 1;
3423
3424 /* Spill every pseudo reg that was allocated to this reg
3425 or to something that overlaps this reg. */
3426
3427 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3428 if (reg_renumber[i] >= 0
3429 && reg_renumber[i] <= regno
3430 && (reg_renumber[i]
3431 + HARD_REGNO_NREGS (reg_renumber[i],
3432 PSEUDO_REGNO_MODE (i))
3433 > regno))
3434 {
3435 /* If this register belongs solely to a basic block which needed no
3436 spilling of any class that this register is contained in,
3437 leave it be, unless we are spilling this register because
3438 it was a hard register that can't be eliminated. */
3439
3440 if (! cant_eliminate
3441 && basic_block_needs[0]
3442 && reg_basic_block[i] >= 0
3443 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3444 {
3445 enum reg_class *p;
3446
3447 for (p = reg_class_superclasses[(int) class];
3448 *p != LIM_REG_CLASSES; p++)
3449 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3450 break;
3451
3452 if (*p == LIM_REG_CLASSES)
3453 continue;
3454 }
3455
3456 /* Mark it as no longer having a hard register home. */
3457 reg_renumber[i] = -1;
3458 /* We will need to scan everything again. */
3459 something_changed = 1;
3460 if (global)
3461 retry_global_alloc (i, forbidden_regs);
3462
3463 alter_reg (i, regno);
3464 if (dumpfile)
3465 {
3466 if (reg_renumber[i] == -1)
3467 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3468 else
3469 fprintf (dumpfile, " Register %d now in %d.\n\n",
3470 i, reg_renumber[i]);
3471 }
3472 }
3473 for (i = 0; i < scratch_list_length; i++)
3474 {
3475 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3476 {
3477 if (! cant_eliminate && basic_block_needs[0]
3478 && ! basic_block_needs[(int) class][scratch_block[i]])
3479 {
3480 enum reg_class *p;
3481
3482 for (p = reg_class_superclasses[(int) class];
3483 *p != LIM_REG_CLASSES; p++)
3484 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3485 break;
3486
3487 if (*p == LIM_REG_CLASSES)
3488 continue;
3489 }
3490 PUT_CODE (scratch_list[i], SCRATCH);
3491 scratch_list[i] = 0;
3492 something_changed = 1;
3493 continue;
3494 }
3495 }
3496
3497 return something_changed;
3498 }
3499 \f
3500 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3501 Also mark any hard registers used to store user variables as
3502 forbidden from being used for spill registers. */
3503
3504 static void
3505 scan_paradoxical_subregs (x)
3506 register rtx x;
3507 {
3508 register int i;
3509 register char *fmt;
3510 register enum rtx_code code = GET_CODE (x);
3511
3512 switch (code)
3513 {
3514 case REG:
3515 #ifdef SMALL_REGISTER_CLASSES
3516 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3517 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3518 #endif
3519 return;
3520
3521 case CONST_INT:
3522 case CONST:
3523 case SYMBOL_REF:
3524 case LABEL_REF:
3525 case CONST_DOUBLE:
3526 case CC0:
3527 case PC:
3528 case USE:
3529 case CLOBBER:
3530 return;
3531
3532 case SUBREG:
3533 if (GET_CODE (SUBREG_REG (x)) == REG
3534 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3535 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3536 = GET_MODE_SIZE (GET_MODE (x));
3537 return;
3538 }
3539
3540 fmt = GET_RTX_FORMAT (code);
3541 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3542 {
3543 if (fmt[i] == 'e')
3544 scan_paradoxical_subregs (XEXP (x, i));
3545 else if (fmt[i] == 'E')
3546 {
3547 register int j;
3548 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3549 scan_paradoxical_subregs (XVECEXP (x, i, j));
3550 }
3551 }
3552 }
3553 \f
3554 static int
3555 hard_reg_use_compare (p1, p2)
3556 struct hard_reg_n_uses *p1, *p2;
3557 {
3558 int tem = p1->uses - p2->uses;
3559 if (tem != 0) return tem;
3560 /* If regs are equally good, sort by regno,
3561 so that the results of qsort leave nothing to chance. */
3562 return p1->regno - p2->regno;
3563 }
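/* hard_reg_use_compare is a classic two-key comparator: primary key first,
   then a total-order tie-break so that qsort, which is not stable, still
   produces a reproducible permutation.  A standalone ANSI-C sketch of the
   same pattern (hypothetical struct and names, kept under `#if 0'):  */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct item { int uses, regno; };

static int
item_compare (const void *p1, const void *p2)
{
  const struct item *a = (const struct item *) p1;
  const struct item *b = (const struct item *) p2;

  if (a->uses != b->uses)
    return a->uses - b->uses;   /* Fewest uses first.  */
  return a->regno - b->regno;   /* Deterministic tie-break.  */
}

int
main (void)
{
  struct item v[4] = { {2, 3}, {0, 1}, {2, 0}, {0, 2} };
  int i;

  qsort (v, 4, sizeof v[0], item_compare);
  for (i = 0; i < 4; i++)
    printf ("regno %d (uses %d)\n", v[i].regno, v[i].uses);
  return 0;
}
#endif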
3564
3565 /* Choose the order to consider regs for use as reload registers
3566 based on how much trouble would be caused by spilling one.
3567 Store them in order of decreasing preference in potential_reload_regs. */
3568
3569 static void
3570 order_regs_for_reload ()
3571 {
3572 register int i;
3573 register int o = 0;
3574 int large = 0;
3575
3576 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3577
3578 CLEAR_HARD_REG_SET (bad_spill_regs);
3579
3580 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3581 potential_reload_regs[i] = -1;
3582
3583 /* Count number of uses of each hard reg by pseudo regs allocated to it
3584 and then order them by decreasing use. */
3585
3586 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3587 {
3588 hard_reg_n_uses[i].uses = 0;
3589 hard_reg_n_uses[i].regno = i;
3590 }
3591
3592 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3593 {
3594 int regno = reg_renumber[i];
3595 if (regno >= 0)
3596 {
3597 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3598 while (regno < lim)
3599 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3600 }
3601 large += reg_n_refs[i];
3602 }
3603
3604 /* Now fixed registers (which cannot safely be used for reloading)
3605 get a very high use count so they will be considered least desirable.
3606 Registers used explicitly in the rtl code are almost as bad. */
3607
3608 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3609 {
3610 if (fixed_regs[i])
3611 {
3612 hard_reg_n_uses[i].uses += 2 * large + 2;
3613 SET_HARD_REG_BIT (bad_spill_regs, i);
3614 }
3615 else if (regs_explicitly_used[i])
3616 {
3617 hard_reg_n_uses[i].uses += large + 1;
3618 #ifndef SMALL_REGISTER_CLASSES
3619 /* ??? We are doing this here because of the potential that
3620 bad code may be generated if a register explicitly used in
3621 an insn was used as a spill register for that insn. But
3622 not using these as spill registers may lose on some machines.
3623 We'll have to see how this works out. */
3624 SET_HARD_REG_BIT (bad_spill_regs, i);
3625 #endif
3626 }
3627 }
3628 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3629 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3630
3631 #ifdef ELIMINABLE_REGS
3632 /* If registers other than the frame pointer are eliminable, mark them as
3633 poor choices. */
3634 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3635 {
3636 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3637 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3638 }
3639 #endif
3640
3641 /* Prefer registers not so far used, for use in temporary loading.
3642 Among them, if REG_ALLOC_ORDER is defined, use that order.
3643 Otherwise, prefer registers not preserved by calls. */
3644
3645 #ifdef REG_ALLOC_ORDER
3646 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3647 {
3648 int regno = reg_alloc_order[i];
3649
3650 if (hard_reg_n_uses[regno].uses == 0)
3651 potential_reload_regs[o++] = regno;
3652 }
3653 #else
3654 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3655 {
3656 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3657 potential_reload_regs[o++] = i;
3658 }
3659 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3660 {
3661 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3662 potential_reload_regs[o++] = i;
3663 }
3664 #endif
3665
3666 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3667 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3668
3669 /* Now add the regs that are already used,
3670 preferring those used less often. The fixed and otherwise forbidden
3671 registers will be at the end of this list. */
3672
3673 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3674 if (hard_reg_n_uses[i].uses != 0)
3675 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3676 }
3677 \f
3678 /* Used in reload_as_needed to sort the spilled regs. */
3679
3680 static int
3681 compare_spill_regs (r1, r2)
3682 short *r1, *r2;
3683 {
3684 return *r1 - *r2;
3685 }
3686
3687 /* Reload pseudo-registers into hard regs around each insn as needed.
3688 Additional register load insns are output before the insn that needs it
3689 and perhaps store insns after insns that modify the reloaded pseudo reg.
3690
3691 reg_last_reload_reg and reg_reloaded_contents keep track of
3692 which registers are already available in reload registers.
3693 We update these for the reloads that we perform,
3694 as the insns are scanned. */
3695
3696 static void
3697 reload_as_needed (first, live_known)
3698 rtx first;
3699 int live_known;
3700 {
3701 register rtx insn;
3702 register int i;
3703 int this_block = 0;
3704 rtx x;
3705 rtx after_call = 0;
3706
3707 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3708 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
3709 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3710 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
3711 reg_has_output_reload = (char *) alloca (max_regno);
3712 for (i = 0; i < n_spills; i++)
3713 {
3714 reg_reloaded_contents[i] = -1;
3715 reg_reloaded_insn[i] = 0;
3716 }
3717
3718 /* Reset all offsets on eliminable registers to their initial values. */
3719 #ifdef ELIMINABLE_REGS
3720 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3721 {
3722 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3723 reg_eliminate[i].initial_offset);
3724 reg_eliminate[i].previous_offset
3725 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3726 }
3727 #else
3728 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3729 reg_eliminate[0].previous_offset
3730 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3731 #endif
3732
3733 num_not_at_initial_offset = 0;
3734
3735 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
3736 pack registers with group needs. */
3737 if (n_spills > 1)
3738 {
3739 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
3740 for (i = 0; i < n_spills; i++)
3741 spill_reg_order[spill_regs[i]] = i;
3742 }
3743
3744 for (insn = first; insn;)
3745 {
3746 register rtx next = NEXT_INSN (insn);
3747
3748 /* Notice when we move to a new basic block. */
3749 if (live_known && this_block + 1 < n_basic_blocks
3750 && insn == basic_block_head[this_block+1])
3751 ++this_block;
3752
3753 /* If we pass a label, copy the offsets from the label information
3754 into the current offsets of each elimination. */
3755 if (GET_CODE (insn) == CODE_LABEL)
3756 {
3757 num_not_at_initial_offset = 0;
3758 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3759 {
3760 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3761 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3762 if (reg_eliminate[i].can_eliminate
3763 && (reg_eliminate[i].offset
3764 != reg_eliminate[i].initial_offset))
3765 num_not_at_initial_offset++;
3766 }
3767 }
3768
3769 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3770 {
3771 rtx avoid_return_reg = 0;
3772
3773 #ifdef SMALL_REGISTER_CLASSES
3774 /* Set avoid_return_reg if this is an insn
3775 that might use the value of a function call. */
3776 if (GET_CODE (insn) == CALL_INSN)
3777 {
3778 if (GET_CODE (PATTERN (insn)) == SET)
3779 after_call = SET_DEST (PATTERN (insn));
3780 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3781 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3782 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3783 else
3784 after_call = 0;
3785 }
3786 else if (after_call != 0
3787 && !(GET_CODE (PATTERN (insn)) == SET
3788 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3789 {
3790 if (reg_referenced_p (after_call, PATTERN (insn)))
3791 avoid_return_reg = after_call;
3792 after_call = 0;
3793 }
3794 #endif /* SMALL_REGISTER_CLASSES */
3795
3796 /* If this is a USE or CLOBBER of a MEM, ensure that any
3797 references to eliminable registers have been removed. */
3798
3799 if ((GET_CODE (PATTERN (insn)) == USE
3800 || GET_CODE (PATTERN (insn)) == CLOBBER)
3801 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3802 XEXP (XEXP (PATTERN (insn), 0), 0)
3803 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3804 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
3805
3806 /* If we need to do register elimination processing, do so.
3807 This might delete the insn, in which case we are done. */
3808 if (num_eliminable && GET_MODE (insn) == QImode)
3809 {
3810 eliminate_regs_in_insn (insn, 1);
3811 if (GET_CODE (insn) == NOTE)
3812 {
3813 insn = next;
3814 continue;
3815 }
3816 }
3817
3818 if (GET_MODE (insn) == VOIDmode)
3819 n_reloads = 0;
3820 /* First find the pseudo regs that must be reloaded for this insn.
3821 This info is returned in the tables reload_... (see reload.h).
3822 Also modify the body of INSN by substituting RELOAD
3823 rtx's for those pseudo regs. */
3824 else
3825 {
3826 bzero (reg_has_output_reload, max_regno);
3827 CLEAR_HARD_REG_SET (reg_is_output_reload);
3828
3829 find_reloads (insn, 1, spill_indirect_levels, live_known,
3830 spill_reg_order);
3831 }
3832
3833 if (n_reloads > 0)
3834 {
3835 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3836 rtx p;
3837 int class;
3838
3839 /* If this block has not had spilling done for a
3840 particular class and we have any non-optionals that need a
3841 spill reg in that class, report a fatal error.
3842
3843 for (class = 0; class < N_REG_CLASSES; class++)
3844 if (basic_block_needs[class] != 0
3845 && basic_block_needs[class][this_block] == 0)
3846 for (i = 0; i < n_reloads; i++)
3847 if (class == (int) reload_reg_class[i]
3848 && reload_reg_rtx[i] == 0
3849 && ! reload_optional[i]
3850 && (reload_in[i] != 0 || reload_out[i] != 0
3851 || reload_secondary_p[i] != 0))
3852 fatal_insn ("Non-optional registers need a spill register", insn);
3853
3854 /* Now compute which reload regs to reload them into. Perhaps
3855 reusing reload regs from previous insns, or else output
3856 load insns to reload them. Maybe output store insns too.
3857 Record the choices of reload reg in reload_reg_rtx. */
3858 choose_reload_regs (insn, avoid_return_reg);
3859
3860 #ifdef SMALL_REGISTER_CLASSES
3861 /* Merge any reloads that we didn't combine for fear of
3862 increasing the number of spill registers needed but now
3863 discover can be safely merged. */
3864 merge_assigned_reloads (insn);
3865 #endif
3866
3867 /* Generate the insns to reload operands into or out of
3868 their reload regs. */
3869 emit_reload_insns (insn);
3870
3871 /* Substitute the chosen reload regs from reload_reg_rtx
3872 into the insn's body (or perhaps into the bodies of other
3873 load and store insns that we just made for reloading
3874 and that we moved the structure into). */
3875 subst_reloads ();
3876
3877 /* If this was an ASM, make sure that all the reload insns
3878 we have generated are valid. If not, give an error
3879 and delete them. */
3880
3881 if (asm_noperands (PATTERN (insn)) >= 0)
3882 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3883 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3884 && (recog_memoized (p) < 0
3885 || (insn_extract (p),
3886 ! constrain_operands (INSN_CODE (p), 1))))
3887 {
3888 error_for_asm (insn,
3889 "`asm' operand requires impossible reload");
3890 PUT_CODE (p, NOTE);
3891 NOTE_SOURCE_FILE (p) = 0;
3892 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3893 }
3894 }
3895 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3896 is no longer validly lying around to save a future reload.
3897 Note that this does not detect pseudos that were reloaded
3898 for this insn in order to be stored into
3899 (obeying register constraints). That is correct; such reload
3900 registers ARE still valid. */
3901 note_stores (PATTERN (insn), forget_old_reloads_1);
3902
3903 /* There may have been CLOBBER insns placed after INSN. So scan
3904 between INSN and NEXT and use them to forget old reloads. */
3905 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3906 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3907 note_stores (PATTERN (x), forget_old_reloads_1);
3908
3909 #ifdef AUTO_INC_DEC
3910 /* Likewise for regs altered by auto-increment in this insn.
3911 But note that the reg-notes are not changed by reloading:
3912 they still contain the pseudo-regs, not the spill regs. */
3913 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3914 if (REG_NOTE_KIND (x) == REG_INC)
3915 {
3916 /* See if this pseudo reg was reloaded in this insn.
3917 If so, its last-reload info is still valid
3918 because it is based on this insn's reload. */
3919 for (i = 0; i < n_reloads; i++)
3920 if (reload_out[i] == XEXP (x, 0))
3921 break;
3922
3923 if (i == n_reloads)
3924 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
3925 }
3926 #endif
3927 }
3928 /* A reload reg's contents are unknown after a label. */
3929 if (GET_CODE (insn) == CODE_LABEL)
3930 for (i = 0; i < n_spills; i++)
3931 {
3932 reg_reloaded_contents[i] = -1;
3933 reg_reloaded_insn[i] = 0;
3934 }
3935
3936 /* Don't assume a reload reg is still good after a call insn
3937 if it is a call-used reg. */
3938 else if (GET_CODE (insn) == CALL_INSN)
3939 for (i = 0; i < n_spills; i++)
3940 if (call_used_regs[spill_regs[i]])
3941 {
3942 reg_reloaded_contents[i] = -1;
3943 reg_reloaded_insn[i] = 0;
3944 }
3945
3946 /* In case registers overlap, allow certain insns to invalidate
3947 particular hard registers. */
3948
3949 #ifdef INSN_CLOBBERS_REGNO_P
3950 for (i = 0 ; i < n_spills ; i++)
3951 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3952 {
3953 reg_reloaded_contents[i] = -1;
3954 reg_reloaded_insn[i] = 0;
3955 }
3956 #endif
3957
3958 insn = next;
3959
3960 #ifdef USE_C_ALLOCA
3961 alloca (0);
3962 #endif
3963 }
3964 }
3965
3966 /* Discard all record of any value reloaded from X,
3967 or reloaded in X from someplace else;
3968 unless X is an output reload reg of the current insn.
3969
3970 X may be a hard reg (the reload reg)
3971 or it may be a pseudo reg that was reloaded from. */
3972
3973 static void
3974 forget_old_reloads_1 (x, ignored)
3975 rtx x;
3976 rtx ignored;
3977 {
3978 register int regno;
3979 int nr;
3980 int offset = 0;
3981
3982 /* note_stores does give us subregs of hard regs. */
3983 while (GET_CODE (x) == SUBREG)
3984 {
3985 offset += SUBREG_WORD (x);
3986 x = SUBREG_REG (x);
3987 }
3988
3989 if (GET_CODE (x) != REG)
3990 return;
3991
3992 regno = REGNO (x) + offset;
3993
3994 if (regno >= FIRST_PSEUDO_REGISTER)
3995 nr = 1;
3996 else
3997 {
3998 int i;
3999 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4000 /* Storing into a spilled-reg invalidates its contents.
4001 This can happen if a block-local pseudo is allocated to that reg
4002 and it wasn't spilled because this block's total need is 0.
4003 Then some insn might have an optional reload and use this reg. */
4004 for (i = 0; i < nr; i++)
4005 if (spill_reg_order[regno + i] >= 0
4006 /* But don't do this if the reg actually serves as an output
4007 reload reg in the current instruction. */
4008 && (n_reloads == 0
4009 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4010 {
4011 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4012 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4013 }
4014 }
4015
4016 /* Since value of X has changed,
4017 forget any value previously copied from it. */
4018
4019 while (nr-- > 0)
4020 /* But don't forget a copy if this is the output reload
4021 that establishes the copy's validity. */
4022 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4023 reg_last_reload_reg[regno + nr] = 0;
4024 }
4025 \f
4026 /* For each reload, the mode of the reload register. */
4027 static enum machine_mode reload_mode[MAX_RELOADS];
4028
4029 /* For each reload, the largest number of registers it will require. */
4030 static int reload_nregs[MAX_RELOADS];
4031
4032 /* Comparison function for qsort to decide which of two reloads
4033 should be handled first. *P1 and *P2 are the reload numbers. */
4034
4035 static int
4036 reload_reg_class_lower (p1, p2)
4037 short *p1, *p2;
4038 {
4039 register int r1 = *p1, r2 = *p2;
4040 register int t;
4041
4042 /* Consider required reloads before optional ones. */
4043 t = reload_optional[r1] - reload_optional[r2];
4044 if (t != 0)
4045 return t;
4046
4047 /* Count all solitary classes before non-solitary ones. */
4048 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4049 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4050 if (t != 0)
4051 return t;
4052
4053 /* Aside from solitaires, consider all multi-reg groups first. */
4054 t = reload_nregs[r2] - reload_nregs[r1];
4055 if (t != 0)
4056 return t;
4057
4058 /* Consider reloads in order of increasing reg-class number. */
4059 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4060 if (t != 0)
4061 return t;
4062
4063 /* If reloads are equally urgent, sort by reload number,
4064 so that the results of qsort leave nothing to chance. */
4065 return r1 - r2;
4066 }
4067 \f
4068 /* The following HARD_REG_SETs indicate when each hard register is
4069 used for a reload of various parts of the current insn. */
4070
4071 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4072 static HARD_REG_SET reload_reg_used;
4073 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4074 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4075 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4076 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4077 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4078 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4079 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4080 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4081 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4082 static HARD_REG_SET reload_reg_used_in_op_addr;
4083 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4084 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4085 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4086 static HARD_REG_SET reload_reg_used_in_insn;
4087 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4088 static HARD_REG_SET reload_reg_used_in_other_addr;
4089
4090 /* If reg is in use as a reload reg for any sort of reload. */
4091 static HARD_REG_SET reload_reg_used_at_all;
4092
4093 /* If reg is in use as an inherited reload. We just mark the first register
4094 in the group. */
4095 static HARD_REG_SET reload_reg_used_for_inherit;
4096
4097 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4098 TYPE. MODE is used to indicate how many consecutive regs are
4099 actually used. */
4100
4101 static void
4102 mark_reload_reg_in_use (regno, opnum, type, mode)
4103 int regno;
4104 int opnum;
4105 enum reload_type type;
4106 enum machine_mode mode;
4107 {
4108 int nregs = HARD_REGNO_NREGS (regno, mode);
4109 int i;
4110
4111 for (i = regno; i < nregs + regno; i++)
4112 {
4113 switch (type)
4114 {
4115 case RELOAD_OTHER:
4116 SET_HARD_REG_BIT (reload_reg_used, i);
4117 break;
4118
4119 case RELOAD_FOR_INPUT_ADDRESS:
4120 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4121 break;
4122
4123 case RELOAD_FOR_OUTPUT_ADDRESS:
4124 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4125 break;
4126
4127 case RELOAD_FOR_OPERAND_ADDRESS:
4128 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4129 break;
4130
4131 case RELOAD_FOR_OPADDR_ADDR:
4132 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4133 break;
4134
4135 case RELOAD_FOR_OTHER_ADDRESS:
4136 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4137 break;
4138
4139 case RELOAD_FOR_INPUT:
4140 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4141 break;
4142
4143 case RELOAD_FOR_OUTPUT:
4144 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4145 break;
4146
4147 case RELOAD_FOR_INSN:
4148 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4149 break;
4150 }
4151
4152 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4153 }
4154 }
4155
4156 /* Similarly, but show REGNO is no longer in use for a reload. */
4157
4158 static void
4159 clear_reload_reg_in_use (regno, opnum, type, mode)
4160 int regno;
4161 int opnum;
4162 enum reload_type type;
4163 enum machine_mode mode;
4164 {
4165 int nregs = HARD_REGNO_NREGS (regno, mode);
4166 int i;
4167
4168 for (i = regno; i < nregs + regno; i++)
4169 {
4170 switch (type)
4171 {
4172 case RELOAD_OTHER:
4173 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4174 break;
4175
4176 case RELOAD_FOR_INPUT_ADDRESS:
4177 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4178 break;
4179
4180 case RELOAD_FOR_OUTPUT_ADDRESS:
4181 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4182 break;
4183
4184 case RELOAD_FOR_OPERAND_ADDRESS:
4185 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4186 break;
4187
4188 case RELOAD_FOR_OPADDR_ADDR:
4189 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4190 break;
4191
4192 case RELOAD_FOR_OTHER_ADDRESS:
4193 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4194 break;
4195
4196 case RELOAD_FOR_INPUT:
4197 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4198 break;
4199
4200 case RELOAD_FOR_OUTPUT:
4201 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4202 break;
4203
4204 case RELOAD_FOR_INSN:
4205 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4206 break;
4207 }
4208 }
4209 }
4210
4211 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4212 specified by OPNUM and TYPE. */
4213
4214 static int
4215 reload_reg_free_p (regno, opnum, type)
4216 int regno;
4217 int opnum;
4218 enum reload_type type;
4219 {
4220 int i;
4221
4222 /* In use for a RELOAD_OTHER means it's not available for anything except
4223 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4224 to be used only for inputs. */
4225
4226 if (type != RELOAD_FOR_OTHER_ADDRESS
4227 && TEST_HARD_REG_BIT (reload_reg_used, regno))
4228 return 0;
4229
4230 switch (type)
4231 {
4232 case RELOAD_OTHER:
4233 /* In use for anything except RELOAD_FOR_OTHER_ADDRESS means
4234 we can't use it for RELOAD_OTHER. */
4235 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4236 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4237 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4238 return 0;
4239
4240 for (i = 0; i < reload_n_operands; i++)
4241 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4242 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4243 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4244 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4245 return 0;
4246
4247 return 1;
4248
4249 case RELOAD_FOR_INPUT:
4250 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4251 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4252 return 0;
4253
4254 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4255 return 0;
4256
4257 /* If it is used for some other input, can't use it. */
4258 for (i = 0; i < reload_n_operands; i++)
4259 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4260 return 0;
4261
4262 /* If it is used in a later operand's address, can't use it. */
4263 for (i = opnum + 1; i < reload_n_operands; i++)
4264 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4265 return 0;
4266
4267 return 1;
4268
4269 case RELOAD_FOR_INPUT_ADDRESS:
4270 /* Can't use a register if it is used for an input address for this
4271 operand or used as an input in an earlier one. */
4272 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4273 return 0;
4274
4275 for (i = 0; i < opnum; i++)
4276 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4277 return 0;
4278
4279 return 1;
4280
4281 case RELOAD_FOR_OUTPUT_ADDRESS:
4282 /* Can't use a register if it is used for an output address for this
4283 operand or used as an output in this or a later operand. */
4284 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4285 return 0;
4286
4287 for (i = opnum; i < reload_n_operands; i++)
4288 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4289 return 0;
4290
4291 return 1;
4292
4293 case RELOAD_FOR_OPERAND_ADDRESS:
4294 for (i = 0; i < reload_n_operands; i++)
4295 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4296 return 0;
4297
4298 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4299 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4300
4301 case RELOAD_FOR_OPADDR_ADDR:
4302 for (i = 0; i < reload_n_operands; i++)
4303 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4304 return 0;
4305
4306 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4307
4308 case RELOAD_FOR_OUTPUT:
4309 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4310 outputs, or an operand address for this or an earlier output. */
4311 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4312 return 0;
4313
4314 for (i = 0; i < reload_n_operands; i++)
4315 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4316 return 0;
4317
4318 for (i = 0; i <= opnum; i++)
4319 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4320 return 0;
4321
4322 return 1;
4323
4324 case RELOAD_FOR_INSN:
4325 for (i = 0; i < reload_n_operands; i++)
4326 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4327 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4328 return 0;
4329
4330 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4331 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4332
4333 case RELOAD_FOR_OTHER_ADDRESS:
4334 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4335 }
4336 abort ();
4337 }
4338
4339 /* Return 1 if the value in reload reg REGNO, as used by a reload
4340 needed for the part of the insn specified by OPNUM and TYPE,
4341 is not in use for a reload in any prior part of the insn.
4342
4343 We can assume that the reload reg was already tested for availability
4344 at the time it is needed, and we should not check this again,
4345 in case the reg has already been marked in use. */
4346
4347 static int
4348 reload_reg_free_before_p (regno, opnum, type)
4349 int regno;
4350 int opnum;
4351 enum reload_type type;
4352 {
4353 int i;
4354
4355 switch (type)
4356 {
4357 case RELOAD_FOR_OTHER_ADDRESS:
4358 /* These always come first. */
4359 return 1;
4360
4361 case RELOAD_OTHER:
4362 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4363
4364 /* If this use is for part of the insn,
4365 check that the reg is not in use for any prior part. It is tempting
4366 to try to do this by falling through from objects that occur
4367 later in the insn to ones that occur earlier, but that will not
4368 correctly take into account the fact that here we MUST ignore
4369 things that would prevent the register from being allocated in
4370 the first place, since we know that it was allocated. */
4371
4372 case RELOAD_FOR_OUTPUT_ADDRESS:
4373 /* Earlier reloads are for earlier outputs or their addresses,
4374 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4375 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4376 RELOAD_OTHER). */
4377 for (i = 0; i < opnum; i++)
4378 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4379 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4380 return 0;
4381
4382 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4383 return 0;
4384
4385 for (i = 0; i < reload_n_operands; i++)
4386 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4387 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4388 return 0;
4389
4390 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4391 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4392 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4393
4394 case RELOAD_FOR_OUTPUT:
4395 /* This can't be used in the output address for this operand and
4396 anything that can't be used for it, except that we've already
4397 tested for RELOAD_FOR_INSN objects. */
4398
4399 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4400 return 0;
4401
4402 for (i = 0; i < opnum; i++)
4403 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4404 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4405 return 0;
4406
4407 for (i = 0; i < reload_n_operands; i++)
4408 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4409 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4410 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4411 return 0;
4412
4413 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4414
4415 case RELOAD_FOR_OPERAND_ADDRESS:
4416 case RELOAD_FOR_OPADDR_ADDR:
4417 case RELOAD_FOR_INSN:
4418 /* These can't conflict with inputs, or each other, so all we have to
4419 test is input addresses and the addresses of OTHER items. */
4420
4421 for (i = 0; i < reload_n_operands; i++)
4422 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4423 return 0;
4424
4425 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4426
4427 case RELOAD_FOR_INPUT:
4428 /* The only things earlier are the address for this and
4429 earlier inputs, other inputs (which we know we don't conflict
4430 with), and addresses of RELOAD_OTHER objects. */
4431
4432 for (i = 0; i <= opnum; i++)
4433 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4434 return 0;
4435
4436 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4437
4438 case RELOAD_FOR_INPUT_ADDRESS:
4439 /* Similarly, all we have to check is for use in earlier inputs'
4440 addresses. */
4441 for (i = 0; i < opnum; i++)
4442 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4443 return 0;
4444
4445 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4446 }
4447 abort ();
4448 }
4449
4450 /* Return 1 if the value in reload reg REGNO, as used by a reload
4451 needed for the part of the insn specified by OPNUM and TYPE,
4452 is still available in REGNO at the end of the insn.
4453
4454 We can assume that the reload reg was already tested for availability
4455 at the time it is needed, and we should not check this again,
4456 in case the reg has already been marked in use. */
4457
4458 static int
4459 reload_reg_reaches_end_p (regno, opnum, type)
4460 int regno;
4461 int opnum;
4462 enum reload_type type;
4463 {
4464 int i;
4465
4466 switch (type)
4467 {
4468 case RELOAD_OTHER:
4469 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4470 its value must reach the end. */
4471 return 1;
4472
4473 /* If this use is for part of the insn,
4474 its value reaches the end if no subsequent part uses the same register.
4475 Just like the above function, don't try to do this with lots
4476 of fallthroughs. */
4477
4478 case RELOAD_FOR_OTHER_ADDRESS:
4479 /* Here we check for everything else, since these don't conflict
4480 with anything else and everything comes later. */
4481
4482 for (i = 0; i < reload_n_operands; i++)
4483 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4484 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4485 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4486 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4487 return 0;
4488
4489 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4490 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4491 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4492
4493 case RELOAD_FOR_INPUT_ADDRESS:
4494 /* Similar, except that we check only for this and subsequent inputs
4495 and the addresses of only subsequent inputs, and we do not need
4496 to check for RELOAD_OTHER objects since they are known not to
4497 conflict. */
4498
4499 for (i = opnum; i < reload_n_operands; i++)
4500 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4501 return 0;
4502
4503 for (i = opnum + 1; i < reload_n_operands; i++)
4504 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4505 return 0;
4506
4507 for (i = 0; i < reload_n_operands; i++)
4508 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4509 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4510 return 0;
4511
4512 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4513 return 0;
4514
4515 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4516 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4517
4518 case RELOAD_FOR_INPUT:
4519 /* Similar to input address, except we start at the next operand for
4520 both input and input address and we do not check for
4521 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4522 would conflict. */
4523
4524 for (i = opnum + 1; i < reload_n_operands; i++)
4525 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4526 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4527 return 0;
4528
4529 /* ... fall through ... */
4530
4531 case RELOAD_FOR_OPERAND_ADDRESS:
4532 /* Check outputs and their addresses. */
4533
4534 for (i = 0; i < reload_n_operands; i++)
4535 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4536 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4537 return 0;
4538
4539 return 1;
4540
4541 case RELOAD_FOR_OPADDR_ADDR:
4542 for (i = 0; i < reload_n_operands; i++)
4543 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4544 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4545 return 0;
4546
4547 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4548 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4549
4550 case RELOAD_FOR_INSN:
4551 /* These conflict with other outputs and with RELOAD_OTHER. So
4552 we need only check for output addresses. */
4553
4554 opnum = -1;
4555
4556 /* ... fall through ... */
4557
4558 case RELOAD_FOR_OUTPUT:
4559 case RELOAD_FOR_OUTPUT_ADDRESS:
4560 /* We already know these can't conflict with a later output. So the
4561 only things to check are later output addresses. */
4562 for (i = opnum + 1; i < reload_n_operands; i++)
4563 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4564 return 0;
4565
4566 return 1;
4567 }
4568
4569 abort ();
4570 }
4571 \f
4572 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4573 Return 0 otherwise.
4574
4575 This function uses the same algorithm as reload_reg_free_p above. */
4576
4577 static int
4578 reloads_conflict (r1, r2)
4579 int r1, r2;
4580 {
4581 enum reload_type r1_type = reload_when_needed[r1];
4582 enum reload_type r2_type = reload_when_needed[r2];
4583 int r1_opnum = reload_opnum[r1];
4584 int r2_opnum = reload_opnum[r2];
4585
4586 /* RELOAD_OTHER conflicts with everything except RELOAD_FOR_OTHER_ADDRESS. */
4587
4588 if (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS)
4589 return 1;
4590
4591 /* Otherwise, check conflicts differently for each type. */
4592
4593 switch (r1_type)
4594 {
4595 case RELOAD_FOR_INPUT:
4596 return (r2_type == RELOAD_FOR_INSN
4597 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4598 || r2_type == RELOAD_FOR_OPADDR_ADDR
4599 || r2_type == RELOAD_FOR_INPUT
4600 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4601
4602 case RELOAD_FOR_INPUT_ADDRESS:
4603 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4604 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4605
4606 case RELOAD_FOR_OUTPUT_ADDRESS:
4607 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4608 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4609
4610 case RELOAD_FOR_OPERAND_ADDRESS:
4611 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4612 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4613
4614 case RELOAD_FOR_OPADDR_ADDR:
4615 return (r2_type == RELOAD_FOR_INPUT
4616 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4617
4618 case RELOAD_FOR_OUTPUT:
4619 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4620 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4621 && r2_opnum >= r1_opnum));
4622
4623 case RELOAD_FOR_INSN:
4624 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4625 || r2_type == RELOAD_FOR_INSN
4626 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4627
4628 case RELOAD_FOR_OTHER_ADDRESS:
4629 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4630
4631 case RELOAD_OTHER:
4632 return r2_type != RELOAD_FOR_OTHER_ADDRESS;
4633
4634 default:
4635 abort ();
4636 }
4637 }
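/* A reduced standalone model of the classification above (three toy types
   only, hypothetical names, kept under `#if 0'; it is not the real
   predicate).  It shows the two rules that anchor the table: a reload that
   claims the register for the whole insn conflicts with everything except
   the "other address" reloads that finish before the insn proper starts,
   and two inputs of the same insn are live at the same time.  */
#if 0
#include <assert.h>

enum toy_type { TOY_OTHER, TOY_OTHER_ADDRESS, TOY_INPUT };

static int
toy_conflict (enum toy_type t1, enum toy_type t2)
{
  if (t1 == TOY_OTHER)
    return t2 != TOY_OTHER_ADDRESS;
  if (t2 == TOY_OTHER)
    return t1 != TOY_OTHER_ADDRESS;
  return t1 == TOY_INPUT && t2 == TOY_INPUT;
}

int
main (void)
{
  assert (toy_conflict (TOY_OTHER, TOY_INPUT));
  assert (! toy_conflict (TOY_OTHER_ADDRESS, TOY_OTHER));
  assert (! toy_conflict (TOY_OTHER_ADDRESS, TOY_INPUT));
  assert (toy_conflict (TOY_INPUT, TOY_INPUT));
  return 0;
}
#endif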
4638 \f
4639 /* Vector of reload-numbers showing the order in which the reloads should
4640 be processed. */
4641 short reload_order[MAX_RELOADS];
4642
4643 /* Indexed by reload number, 1 if incoming value
4644 inherited from previous insns. */
4645 char reload_inherited[MAX_RELOADS];
4646
4647 /* For an inherited reload, this is the insn the reload was inherited from,
4648 if we know it. Otherwise, this is 0. */
4649 rtx reload_inheritance_insn[MAX_RELOADS];
4650
4651 /* If non-zero, this is a place to get the value of the reload,
4652 rather than using reload_in. */
4653 rtx reload_override_in[MAX_RELOADS];
4654
4655 /* For each reload, the index in spill_regs of the spill register used,
4656 or -1 if we did not need one of the spill registers for this reload. */
4657 int reload_spill_index[MAX_RELOADS];
4658
4659 /* Index of last register assigned as a spill register. We allocate in
4660 a round-robin fashion. */
4661
4662 static int last_spill_reg = 0;
4663
4664 /* Find a spill register to use as a reload register for reload R.
4665 LAST_RELOAD is non-zero if this is the last reload for the insn being
4666 processed.
4667
4668 Set reload_reg_rtx[R] to the register allocated.
4669
4670 If NOERROR is nonzero, we return 1 if successful,
4671 or 0 if we couldn't find a spill reg and we didn't change anything. */
4672
4673 static int
4674 allocate_reload_reg (r, insn, last_reload, noerror)
4675 int r;
4676 rtx insn;
4677 int last_reload;
4678 int noerror;
4679 {
4680 int i;
4681 int pass;
4682 int count;
4683 rtx new;
4684 int regno;
4685
4686 /* If we put this reload ahead, thinking it is a group,
4687 then insist on finding a group. Otherwise we can grab a
4688 reg that some other reload needs.
4689 (That can happen when we have a 68000 DATA_OR_FP_REG
4690 which is a group of data regs or one fp reg.)
4691 We need not be so restrictive if there are no more reloads
4692 for this insn.
4693
4694 ??? Really it would be nicer to have smarter handling
4695 for that kind of reg class, where a problem like this is normal.
4696 Perhaps those classes should be avoided for reloading
4697 by use of more alternatives. */
4698
4699 int force_group = reload_nregs[r] > 1 && ! last_reload;
4700
4701 /* If we want a single register and haven't yet found one,
4702 take any reg in the right class and not in use.
4703 If we want a consecutive group, here is where we look for it.
4704
4705 We use two passes so we can first look for reload regs to
4706 reuse, which are already in use for other reloads in this insn,
4707 and only then use additional registers.
4708 I think that maximizing reuse is needed to make sure we don't
4709 run out of reload regs. Suppose we have three reloads, and
4710 reloads A and B can share regs. These need two regs.
4711 Suppose A and B are given different regs.
4712 That leaves none for C. */
4713 for (pass = 0; pass < 2; pass++)
4714 {
4715 /* I is the index in spill_regs.
4716 We advance it round-robin between insns to use all spill regs
4717 equally, so that inherited reloads have a chance
4718 of leapfrogging each other. Don't do this, however, when we have
4719 group needs and failure would be fatal; if we only have a relatively
4720 small number of spill registers, and more than one of them has
4721 group needs, then by starting in the middle, we may end up
4722 allocating the first one in such a way that we are not left with
4723 sufficient groups to handle the rest. */
4724
4725 if (noerror || ! force_group)
4726 i = last_spill_reg;
4727 else
4728 i = -1;
4729
4730 for (count = 0; count < n_spills; count++)
4731 {
4732 int class = (int) reload_reg_class[r];
4733
4734 i = (i + 1) % n_spills;
4735
4736 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4737 reload_when_needed[r])
4738 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4739 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4740 /* Look first for regs to share, then for unshared. But
4741 don't share regs used for inherited reloads; they are
4742 the ones we want to preserve. */
4743 && (pass
4744 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4745 spill_regs[i])
4746 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4747 spill_regs[i]))))
4748 {
4749 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4750 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4751 (on 68000) got us two FP regs. If NR is 1,
4752 we would reject both of them. */
4753 if (force_group)
4754 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4755 /* If we need only one reg, we have already won. */
4756 if (nr == 1)
4757 {
4758 /* But reject a single reg if we demand a group. */
4759 if (force_group)
4760 continue;
4761 break;
4762 }
4763 /* Otherwise check that as many consecutive regs as we need
4764 are available here.
4765 Also, don't use for a group registers that are
4766 needed for nongroups. */
4767 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4768 while (nr > 1)
4769 {
4770 regno = spill_regs[i] + nr - 1;
4771 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4772 && spill_reg_order[regno] >= 0
4773 && reload_reg_free_p (regno, reload_opnum[r],
4774 reload_when_needed[r])
4775 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4776 regno)))
4777 break;
4778 nr--;
4779 }
4780 if (nr == 1)
4781 break;
4782 }
4783 }
4784
4785 /* If we found something on pass 1, omit pass 2. */
4786 if (count < n_spills)
4787 break;
4788 }
4789
4790 /* We should have found a spill register by now. */
4791 if (count == n_spills)
4792 {
4793 if (noerror)
4794 return 0;
4795 goto failure;
4796 }
4797
4798 /* I is the index in SPILL_REG_RTX of the reload register we are to
4799 allocate. Get an rtx for it and find its register number. */
4800
4801 new = spill_reg_rtx[i];
4802
4803 if (new == 0 || GET_MODE (new) != reload_mode[r])
4804 spill_reg_rtx[i] = new
4805 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4806
4807 regno = true_regnum (new);
4808
4809 /* Detect when the reload reg can't hold the reload mode.
4810 This used to be one `if', but the Sequent compiler can't handle that. */
4811 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4812 {
4813 enum machine_mode test_mode = VOIDmode;
4814 if (reload_in[r])
4815 test_mode = GET_MODE (reload_in[r]);
4816 /* If reload_in[r] has VOIDmode, it means we will load it
4817 in whatever mode the reload reg has: to wit, reload_mode[r].
4818 We have already tested that for validity. */
4819 /* Aside from that, we need to test that the expressions
4820 to reload from or into have modes which are valid for this
4821 reload register. Otherwise the reload insns would be invalid. */
4822 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4823 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4824 if (! (reload_out[r] != 0
4825 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
4826 {
4827 /* The reg is OK. */
4828 last_spill_reg = i;
4829
4830 /* Mark as in use for this insn the reload regs we use
4831 for this. */
4832 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4833 reload_when_needed[r], reload_mode[r]);
4834
4835 reload_reg_rtx[r] = new;
4836 reload_spill_index[r] = i;
4837 return 1;
4838 }
4839 }
4840
4841 /* The reg is not OK. */
4842 if (noerror)
4843 return 0;
4844
4845 failure:
4846 if (asm_noperands (PATTERN (insn)) < 0)
4847 /* It's the compiler's fault. */
4848 fatal_insn ("Could not find a spill register", insn);
4849
4850 /* It's the user's fault; the operand's mode and constraint
4851 don't match. Disable this reload so we don't crash in final. */
4852 error_for_asm (insn,
4853 "`asm' operand constraint incompatible with operand size");
4854 reload_in[r] = 0;
4855 reload_out[r] = 0;
4856 reload_reg_rtx[r] = 0;
4857 reload_optional[r] = 1;
4858 reload_secondary_p[r] = 1;
4859
4860 return 1;
4861 }
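
/* A concrete (hypothetical) way to reach the error_for_asm case above
   is an `asm' whose constraint demands a register class that cannot
   hold the operand's mode, e.g. on a machine whose float registers
   reject QImode:

	asm ("foo %0" : : "f" ((char) c));

   find_reloads accepts the constraint, but no register in the class is
   valid in QImode, so the reload is cancelled here rather than letting
   final crash.  */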
4862 \f
4863 /* Assign hard reg targets for the pseudo-registers we must reload
4864 into hard regs for this insn.
4865 Also output the instructions to copy them in and out of the hard regs.
4866
4867 For machines with register classes, we are responsible for
4868 finding a reload reg in the proper class. */
4869
4870 static void
4871 choose_reload_regs (insn, avoid_return_reg)
4872 rtx insn;
4873 rtx avoid_return_reg;
4874 {
4875 register int i, j;
4876 int max_group_size = 1;
4877 enum reg_class group_class = NO_REGS;
4878 int inheritance;
4879
4880 rtx save_reload_reg_rtx[MAX_RELOADS];
4881 char save_reload_inherited[MAX_RELOADS];
4882 rtx save_reload_inheritance_insn[MAX_RELOADS];
4883 rtx save_reload_override_in[MAX_RELOADS];
4884 int save_reload_spill_index[MAX_RELOADS];
4885 HARD_REG_SET save_reload_reg_used;
4886 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4887 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4888 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4889 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4890 HARD_REG_SET save_reload_reg_used_in_op_addr;
4891 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
4892 HARD_REG_SET save_reload_reg_used_in_insn;
4893 HARD_REG_SET save_reload_reg_used_in_other_addr;
4894 HARD_REG_SET save_reload_reg_used_at_all;
4895
4896 bzero (reload_inherited, MAX_RELOADS);
4897 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4898 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
4899
4900 CLEAR_HARD_REG_SET (reload_reg_used);
4901 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4902 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4903 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
4904 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4905 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
4906
4907 for (i = 0; i < reload_n_operands; i++)
4908 {
4909 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4910 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4911 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4912 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4913 }
4914
4915 #ifdef SMALL_REGISTER_CLASSES
4916 /* Don't bother with avoiding the return reg
4917 if we have no mandatory reload that could use it. */
4918 if (avoid_return_reg)
4919 {
4920 int do_avoid = 0;
4921 int regno = REGNO (avoid_return_reg);
4922 int nregs
4923 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4924 int r;
4925
4926 for (r = regno; r < regno + nregs; r++)
4927 if (spill_reg_order[r] >= 0)
4928 for (j = 0; j < n_reloads; j++)
4929 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4930 && (reload_in[j] != 0 || reload_out[j] != 0
4931 || reload_secondary_p[j])
4932 &&
4933 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4934 do_avoid = 1;
4935 if (!do_avoid)
4936 avoid_return_reg = 0;
4937 }
4938 #endif /* SMALL_REGISTER_CLASSES */
4939
4940 #if 0 /* Not needed, now that we can always retry without inheritance. */
4941 /* See if we have more mandatory reloads than spill regs.
4942 If so, then we cannot risk optimizations that could prevent
4943 reloads from sharing one spill register.
4944
4945 Since we will try finding a better register than reload_reg_rtx
4946 unless it is equal to reload_in or reload_out, count such reloads. */
4947
4948 {
4949 int tem = 0;
4950 #ifdef SMALL_REGISTER_CLASSES
4951 tem = (avoid_return_reg != 0);
4952 #endif
4953 for (j = 0; j < n_reloads; j++)
4954 if (! reload_optional[j]
4955 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4956 && (reload_reg_rtx[j] == 0
4957 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4958 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4959 tem++;
4960 if (tem > n_spills)
4961 must_reuse = 1;
4962 }
4963 #endif
4964
4965 #ifdef SMALL_REGISTER_CLASSES
4966 /* Don't use the subroutine call return reg for a reload
4967 if we are supposed to avoid it. */
4968 if (avoid_return_reg)
4969 {
4970 int regno = REGNO (avoid_return_reg);
4971 int nregs
4972 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4973 int r;
4974
4975 for (r = regno; r < regno + nregs; r++)
4976 if (spill_reg_order[r] >= 0)
4977 SET_HARD_REG_BIT (reload_reg_used, r);
4978 }
4979 #endif /* SMALL_REGISTER_CLASSES */
4980
4981 /* In order to be certain of getting the registers we need,
4982 we must sort the reloads into order of increasing register class.
4983 Then our grabbing of reload registers will parallel the process
4984 that provided the reload registers.
4985
4986 Also note whether any of the reloads wants a consecutive group of regs.
4987 If so, record the maximum size of the group desired and what
4988 register class contains all the groups needed by this insn. */
4989
4990 for (j = 0; j < n_reloads; j++)
4991 {
4992 reload_order[j] = j;
4993 reload_spill_index[j] = -1;
4994
4995 reload_mode[j]
4996 = (reload_inmode[j] == VOIDmode
4997 || (GET_MODE_SIZE (reload_outmode[j])
4998 > GET_MODE_SIZE (reload_inmode[j])))
4999 ? reload_outmode[j] : reload_inmode[j];
5000
5001 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5002
5003 if (reload_nregs[j] > 1)
5004 {
5005 max_group_size = MAX (reload_nregs[j], max_group_size);
5006 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5007 }
5008
5009 /* If we have already decided to use a certain register,
5010 don't use it in another way. */
5011 if (reload_reg_rtx[j])
5012 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5013 reload_when_needed[j], reload_mode[j]);
5014 }
5015
5016 if (n_reloads > 1)
5017 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
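
/* The comparison function named in the qsort call above puts the
   reloads into the "increasing register class" order described in the
   comment before this loop.  A simplified sketch of the idea follows
   (a hypothetical comparator; the real one also puts optional reloads
   last and gives singleton classes priority):  */
#if 0
static int
class_then_size_lower (p1, p2)
     short *p1, *p2;
{
  register int r1 = *p1, r2 = *p2;

  /* Lower-numbered register classes sort first.  */
  if ((int) reload_reg_class[r1] != (int) reload_reg_class[r2])
    return (int) reload_reg_class[r1] - (int) reload_reg_class[r2];

  /* Within a class, bigger groups are allocated first.  */
  return reload_nregs[r2] - reload_nregs[r1];
}
#endif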
5018
5019 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5020 sizeof reload_reg_rtx);
5021 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5022 bcopy ((char *) reload_inheritance_insn,
5023 (char *) save_reload_inheritance_insn,
5024 sizeof reload_inheritance_insn);
5025 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5026 sizeof reload_override_in);
5027 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5028 sizeof reload_spill_index);
5029 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5030 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5031 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5032 reload_reg_used_in_op_addr);
5033
5034 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5035 reload_reg_used_in_op_addr_reload);
5036
5037 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5038 reload_reg_used_in_insn);
5039 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5040 reload_reg_used_in_other_addr);
5041
5042 for (i = 0; i < reload_n_operands; i++)
5043 {
5044 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5045 reload_reg_used_in_output[i]);
5046 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5047 reload_reg_used_in_input[i]);
5048 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5049 reload_reg_used_in_input_addr[i]);
5050 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5051 reload_reg_used_in_output_addr[i]);
5052 }
5053
5054 /* If -O, try first with inheritance, then turning it off.
5055 If not -O, don't do inheritance.
5056 Using inheritance when not optimizing leads to paradoxes
5057 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5058 because one side of the comparison might be inherited. */
5059
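/* In outline, the loop below behaves like this simplified skeleton
   (illustrative only; TRY_TO_INHERIT is a stand-in for the inline
   inheritance and find_equiv_reg code that follows):  */
#if 0
  for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
    {
      for (j = 0; j < n_reloads; j++)	/* pass 1: reuse old copies */
	if (inheritance)
	  TRY_TO_INHERIT (reload_order[j]);

      for (j = 0; j < n_reloads; j++)	/* pass 2: allocate the rest */
	{
	  register int r = reload_order[j];

	  if (reload_reg_rtx[r] == 0 && ! reload_optional[r]
	      && ! allocate_reload_reg (r, insn, j == n_reloads - 1,
					inheritance))
	    break;
	}

      if (j == n_reloads)
	break;			/* every mandatory reload got a register */

      /* Otherwise restore the saved state and retry, this time
	 without inheritance.  */
    }
#endif
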
5060 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5061 {
5062 /* Process the reloads in order of preference just found.
5063 Beyond this point, subregs can be found in reload_reg_rtx.
5064
5065 This used to look for an existing reloaded home for all
5066 of the reloads, and only then perform any new reloads.
5067 But that could lose if the reloads were done out of reg-class order
5068 because a later reload with a looser constraint might have an old
5069 home in a register needed by an earlier reload with a tighter constraint.
5070
5071 To solve this, we make two passes over the reloads, in the order
5072 described above. In the first pass we try to inherit a reload
5073 from a previous insn. If there is a later reload that needs a
5074 class that is a proper subset of the class being processed, we must
5075 also allocate a spill register during the first pass.
5076
5077 Then make a second pass over the reloads to allocate any reloads
5078 that haven't been given registers yet. */
5079
5080 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5081
5082 for (j = 0; j < n_reloads; j++)
5083 {
5084 register int r = reload_order[j];
5085
5086 /* Ignore reloads that got marked inoperative. */
5087 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5088 continue;
5089
5090 /* If find_reloads chose to use reload_in or reload_out as a reload
5091 register, we don't need to choose one. Otherwise, try even if it found
5092 one since we might save an insn if we find the value lying around. */
5093 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5094 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5095 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5096 continue;
5097
5098 #if 0 /* No longer needed for correct operation.
5099 It might give better code, or might not; worth an experiment? */
5100 /* If this is an optional reload, we can't inherit from earlier insns
5101 until we are sure that any non-optional reloads have been allocated.
5102 The following code takes advantage of the fact that optional reloads
5103 are at the end of reload_order. */
5104 if (reload_optional[r] != 0)
5105 for (i = 0; i < j; i++)
5106 if ((reload_out[reload_order[i]] != 0
5107 || reload_in[reload_order[i]] != 0
5108 || reload_secondary_p[reload_order[i]])
5109 && ! reload_optional[reload_order[i]]
5110 && reload_reg_rtx[reload_order[i]] == 0)
5111 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5112 #endif
5113
5114 /* First see if this pseudo is already available as reloaded
5115 for a previous insn. We cannot try to inherit for reloads
5116 that are smaller than the maximum number of registers needed
5117 for groups unless the register we would allocate cannot be used
5118 for the groups.
5119
5120 We could check here to see if this is a secondary reload for
5121 an object that is already in a register of the desired class.
5122 This would avoid the need for the secondary reload register.
5123 But this is complex because we can't easily determine what
5124 objects might want to be loaded via this reload. So let a register
5125 be allocated here. In `emit_reload_insns' we suppress one of the
5126 loads in the case described above. */
5127
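/* For instance (hypothetical register numbers): if the previous insn
   reloaded pseudo 210 into hard reg 3, reg_last_reload_reg[210] is
   (reg:SI 3) and reg_reloaded_contents for that spill reg is 210.  If
   this insn needs pseudo 210 as an input in SImode or narrower, reg 3
   can simply be reused, provided it is still free for this insn, is in
   the right class, and is not being saved for a group.  */
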
5128 if (inheritance)
5129 {
5130 register int regno = -1;
5131 enum machine_mode mode;
5132
5133 if (reload_in[r] == 0)
5134 ;
5135 else if (GET_CODE (reload_in[r]) == REG)
5136 {
5137 regno = REGNO (reload_in[r]);
5138 mode = GET_MODE (reload_in[r]);
5139 }
5140 else if (GET_CODE (reload_in_reg[r]) == REG)
5141 {
5142 regno = REGNO (reload_in_reg[r]);
5143 mode = GET_MODE (reload_in_reg[r]);
5144 }
5145 #if 0
5146 /* This won't work, since REGNO can be a pseudo reg number.
5147 Also, it takes much more hair to keep track of all the things
5148 that can invalidate an inherited reload of part of a pseudoreg. */
5149 else if (GET_CODE (reload_in[r]) == SUBREG
5150 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5151 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5152 #endif
5153
5154 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5155 {
5156 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5157
5158 if (reg_reloaded_contents[i] == regno
5159 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5160 >= GET_MODE_SIZE (mode))
5161 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5162 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5163 spill_regs[i])
5164 && (reload_nregs[r] == max_group_size
5165 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5166 spill_regs[i]))
5167 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5168 reload_when_needed[r])
5169 && reload_reg_free_before_p (spill_regs[i],
5170 reload_opnum[r],
5171 reload_when_needed[r]))
5172 {
5173 /* If a group is needed, verify that all the subsequent
5174 registers still have their values intact. */
5175 int nr
5176 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5177 int k;
5178
5179 for (k = 1; k < nr; k++)
5180 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5181 != regno)
5182 break;
5183
5184 if (k == nr)
5185 {
5186 int i1;
5187
5188 /* We found a register that contains the
5189 value we need. If this register is the
5190 same as an `earlyclobber' operand of the
5191 current insn, just mark it as a place to
5192 reload from since we can't use it as the
5193 reload register itself. */
5194
5195 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5196 if (reg_overlap_mentioned_for_reload_p
5197 (reg_last_reload_reg[regno],
5198 reload_earlyclobbers[i1]))
5199 break;
5200
5201 if (i1 != n_earlyclobbers
5202 /* Don't really use the inherited spill reg
5203 if we need it wider than we've got it. */
5204 || (GET_MODE_SIZE (reload_mode[r])
5205 > GET_MODE_SIZE (mode)))
5206 reload_override_in[r] = reg_last_reload_reg[regno];
5207 else
5208 {
5209 int k;
5210 /* We can use this as a reload reg. */
5211 /* Mark the register as in use for this part of
5212 the insn. */
5213 mark_reload_reg_in_use (spill_regs[i],
5214 reload_opnum[r],
5215 reload_when_needed[r],
5216 reload_mode[r]);
5217 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5218 reload_inherited[r] = 1;
5219 reload_inheritance_insn[r]
5220 = reg_reloaded_insn[i];
5221 reload_spill_index[r] = i;
5222 for (k = 0; k < nr; k++)
5223 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5224 spill_regs[i + k]);
5225 }
5226 }
5227 }
5228 }
5229 }
5230
5231 /* Here's another way to see if the value is already lying around. */
5232 if (inheritance
5233 && reload_in[r] != 0
5234 && ! reload_inherited[r]
5235 && reload_out[r] == 0
5236 && (CONSTANT_P (reload_in[r])
5237 || GET_CODE (reload_in[r]) == PLUS
5238 || GET_CODE (reload_in[r]) == REG
5239 || GET_CODE (reload_in[r]) == MEM)
5240 && (reload_nregs[r] == max_group_size
5241 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5242 {
5243 register rtx equiv
5244 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5245 -1, NULL_PTR, 0, reload_mode[r]);
5246 int regno;
5247
5248 if (equiv != 0)
5249 {
5250 if (GET_CODE (equiv) == REG)
5251 regno = REGNO (equiv);
5252 else if (GET_CODE (equiv) == SUBREG)
5253 {
5254 /* This must be a SUBREG of a hard register.
5255 Make a new REG since this might be used in an
5256 address and not all machines support SUBREGs
5257 there. */
5258 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5259 equiv = gen_rtx (REG, reload_mode[r], regno);
5260 }
5261 else
5262 abort ();
5263 }
5264
5265 /* If we found a spill reg, reject it unless it is free
5266 and of the desired class. */
5267 if (equiv != 0
5268 && ((spill_reg_order[regno] >= 0
5269 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5270 reload_when_needed[r]))
5271 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5272 regno)))
5273 equiv = 0;
5274
5275 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5276 equiv = 0;
5277
5278 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5279 equiv = 0;
5280
5281 /* We found a register that contains the value we need.
5282 If this register is the same as an `earlyclobber' operand
5283 of the current insn, just mark it as a place to reload from
5284 since we can't use it as the reload register itself. */
5285
5286 if (equiv != 0)
5287 for (i = 0; i < n_earlyclobbers; i++)
5288 if (reg_overlap_mentioned_for_reload_p (equiv,
5289 reload_earlyclobbers[i]))
5290 {
5291 reload_override_in[r] = equiv;
5292 equiv = 0;
5293 break;
5294 }
5295
5296 /* JRV: If the equiv register we have found is explicitly
5297 clobbered in the current insn, mark but don't use, as above. */
5298
5299 if (equiv != 0 && regno_clobbered_p (regno, insn))
5300 {
5301 reload_override_in[r] = equiv;
5302 equiv = 0;
5303 }
5304
5305 /* If we found an equivalent reg, say no code need be generated
5306 to load it, and use it as our reload reg. */
5307 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5308 {
5309 reload_reg_rtx[r] = equiv;
5310 reload_inherited[r] = 1;
5311 /* If it is a spill reg,
5312 mark the spill reg as in use for this insn. */
5313 i = spill_reg_order[regno];
5314 if (i >= 0)
5315 {
5316 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5317 int k;
5318 mark_reload_reg_in_use (regno, reload_opnum[r],
5319 reload_when_needed[r],
5320 reload_mode[r]);
5321 for (k = 0; k < nr; k++)
5322 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
5323 }
5324 }
5325 }
5326
5327 /* If we found a register to use already, or if this is an optional
5328 reload, we are done. */
5329 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5330 continue;
5331
5332 #if 0 /* No longer needed for correct operation. Might or might not
5333 give better code on the average. Want to experiment? */
5334
5335 /* See if there is a later reload that has a class different from our
5336 class that intersects our class or that requires fewer registers
5337 than our reload. If so, we must allocate a register to this
5338 reload now, since that reload might inherit a previous reload
5339 and take the only available register in our class. Don't do this
5340 for optional reloads since they will force all previous reloads
5341 to be allocated. Also don't do this for reloads that have been
5342 turned off. */
5343
5344 for (i = j + 1; i < n_reloads; i++)
5345 {
5346 int s = reload_order[i];
5347
5348 if ((reload_in[s] == 0 && reload_out[s] == 0
5349 && ! reload_secondary_p[s])
5350 || reload_optional[s])
5351 continue;
5352
5353 if ((reload_reg_class[s] != reload_reg_class[r]
5354 && reg_classes_intersect_p (reload_reg_class[r],
5355 reload_reg_class[s]))
5356 || reload_nregs[s] < reload_nregs[r])
5357 break;
5358 }
5359
5360 if (i == n_reloads)
5361 continue;
5362
5363 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5364 #endif
5365 }
5366
5367 /* Now allocate reload registers for anything non-optional that
5368 didn't get one yet. */
5369 for (j = 0; j < n_reloads; j++)
5370 {
5371 register int r = reload_order[j];
5372
5373 /* Ignore reloads that got marked inoperative. */
5374 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5375 continue;
5376
5377 /* Skip reloads that already have a register allocated or are
5378 optional. */
5379 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5380 continue;
5381
5382 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5383 break;
5384 }
5385
5386 /* If that loop got all the way, we have won. */
5387 if (j == n_reloads)
5388 break;
5389
5390 fail:
5391 /* Loop around and try without any inheritance. */
5392 /* First undo everything done by the failed attempt
5393 to allocate with inheritance. */
5394 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5395 sizeof reload_reg_rtx);
5396 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5397 sizeof reload_inherited);
5398 bcopy ((char *) save_reload_inheritance_insn,
5399 (char *) reload_inheritance_insn,
5400 sizeof reload_inheritance_insn);
5401 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5402 sizeof reload_override_in);
5403 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5404 sizeof reload_spill_index);
5405 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5406 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5407 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5408 save_reload_reg_used_in_op_addr);
5409 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5410 save_reload_reg_used_in_op_addr_reload);
5411 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5412 save_reload_reg_used_in_insn);
5413 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5414 save_reload_reg_used_in_other_addr);
5415
5416 for (i = 0; i < reload_n_operands; i++)
5417 {
5418 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5419 save_reload_reg_used_in_input[i]);
5420 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5421 save_reload_reg_used_in_output[i]);
5422 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5423 save_reload_reg_used_in_input_addr[i]);
5424 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5425 save_reload_reg_used_in_output_addr[i]);
5426 }
5427 }
5428
5429 /* If we thought we could inherit a reload, because it seemed that
5430 nothing else wanted the same reload register earlier in the insn,
5431 verify that assumption, now that all reloads have been assigned. */
5432
5433 for (j = 0; j < n_reloads; j++)
5434 {
5435 register int r = reload_order[j];
5436
5437 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5438 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5439 reload_opnum[r],
5440 reload_when_needed[r]))
5441 reload_inherited[r] = 0;
5442
5443 /* If we found a better place to reload from,
5444 validate it in the same fashion, if it is a reload reg. */
5445 if (reload_override_in[r]
5446 && (GET_CODE (reload_override_in[r]) == REG
5447 || GET_CODE (reload_override_in[r]) == SUBREG))
5448 {
5449 int regno = true_regnum (reload_override_in[r]);
5450 if (spill_reg_order[regno] >= 0
5451 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5452 reload_when_needed[r]))
5453 reload_override_in[r] = 0;
5454 }
5455 }
5456
5457 /* Now that reload_override_in is known valid,
5458 actually override reload_in. */
5459 for (j = 0; j < n_reloads; j++)
5460 if (reload_override_in[j])
5461 reload_in[j] = reload_override_in[j];
5462
5463 /* If this reload won't be done because it has been cancelled or is
5464 optional and not inherited, clear reload_reg_rtx so other
5465 routines (such as subst_reloads) don't get confused. */
5466 for (j = 0; j < n_reloads; j++)
5467 if (reload_reg_rtx[j] != 0
5468 && ((reload_optional[j] && ! reload_inherited[j])
5469 || (reload_in[j] == 0 && reload_out[j] == 0
5470 && ! reload_secondary_p[j])))
5471 {
5472 int regno = true_regnum (reload_reg_rtx[j]);
5473
5474 if (spill_reg_order[regno] >= 0)
5475 clear_reload_reg_in_use (regno, reload_opnum[j],
5476 reload_when_needed[j], reload_mode[j]);
5477 reload_reg_rtx[j] = 0;
5478 }
5479
5480 /* Record which pseudos and which spill regs have output reloads. */
5481 for (j = 0; j < n_reloads; j++)
5482 {
5483 register int r = reload_order[j];
5484
5485 i = reload_spill_index[r];
5486
5487 /* I is nonneg if this reload used one of the spill regs.
5488 If reload_reg_rtx[r] is 0, this is an optional reload
5489 that we opted to ignore. */
5490 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5491 && reload_reg_rtx[r] != 0)
5492 {
5493 register int nregno = REGNO (reload_out[r]);
5494 int nr = 1;
5495
5496 if (nregno < FIRST_PSEUDO_REGISTER)
5497 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5498
5499 while (--nr >= 0)
5500 reg_has_output_reload[nregno + nr] = 1;
5501
5502 if (i >= 0)
5503 {
5504 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5505 while (--nr >= 0)
5506 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5507 }
5508
5509 if (reload_when_needed[r] != RELOAD_OTHER
5510 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5511 && reload_when_needed[r] != RELOAD_FOR_INSN)
5512 abort ();
5513 }
5514 }
5515 }
5516 \f
5517 /* If SMALL_REGISTER_CLASSES is defined, we may not have merged two
5518 reloads of the same item for fear that we might not have enough reload
5519 registers. However, normally they will get the same reload register
5520 and hence actually need not be loaded twice.
5521
5522 Here we check for the most common case of this phenomenon: when we have
5523 a number of reloads for the same object, each of which was allocated
5524 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5525 reload, and is not modified in the insn itself. If we find such,
5526 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5527 This will not increase the number of spill registers needed and will
5528 prevent redundant code. */
5529
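/* As a (hypothetical) illustration: if operand 1 and the address of
   operand 2 both need the value of pseudo 105, find_reloads may have
   made a RELOAD_FOR_INPUT reload and a RELOAD_FOR_INPUT_ADDRESS reload
   for it.  When both were assigned the same hard reg and nothing else
   touches that reg, one load suffices; the surviving reload becomes
   RELOAD_OTHER so it is emitted early enough for both uses.  */
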
5530 #ifdef SMALL_REGISTER_CLASSES
5531
5532 static void
5533 merge_assigned_reloads (insn)
5534 rtx insn;
5535 {
5536 int i, j;
5537
5538 /* Scan all the reloads looking for ones that only load values and
5539 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5540 assigned and not modified by INSN. */
5541
5542 for (i = 0; i < n_reloads; i++)
5543 {
5544 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5545 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5546 || reg_set_p (reload_reg_rtx[i], insn))
5547 continue;
5548
5549 /* Look at all other reloads. Ensure that the only use of this
5550 reload_reg_rtx is in a reload that just loads the same value
5551 as we do. Note that any secondary reloads must be of the identical
5552 class since the values, modes, and result registers are the
5553 same, so we need not do anything with any secondary reloads. */
5554
5555 for (j = 0; j < n_reloads; j++)
5556 {
5557 if (i == j || reload_reg_rtx[j] == 0
5558 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5559 reload_reg_rtx[i]))
5560 continue;
5561
5562 /* If the reload regs aren't exactly the same (e.g., different modes)
5563 or if the values are different, we can't merge anything with this
5564 reload register. */
5565
5566 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5567 || reload_out[j] != 0 || reload_in[j] == 0
5568 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5569 break;
5570 }
5571
5572 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5573 we, in fact, found any matching reloads. */
5574
5575 if (j == n_reloads)
5576 {
5577 for (j = 0; j < n_reloads; j++)
5578 if (i != j && reload_reg_rtx[j] != 0
5579 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5580 {
5581 reload_when_needed[i] = RELOAD_OTHER;
5582 reload_in[j] = 0;
5583 transfer_replacements (i, j);
5584 }
5585
5586 /* If this is now RELOAD_OTHER, look for any reloads that load
5587 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5588 if they were for inputs, RELOAD_OTHER for outputs. Note that
5589 this test is equivalent to looking for reloads for this operand
5590 number. */
5591
5592 if (reload_when_needed[i] == RELOAD_OTHER)
5593 for (j = 0; j < n_reloads; j++)
5594 if (reload_in[j] != 0
5595 && reload_when_needed[j] != RELOAD_OTHER
5596 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5597 reload_in[i]))
5598 reload_when_needed[j]
5599 = reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
5600 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5601 }
5602 }
5603 }
5604 #endif /* SMALL_REGISTER_CLASSES */
5605 \f
5606 /* Output insns to reload values in and out of the chosen reload regs. */
5607
5608 static void
5609 emit_reload_insns (insn)
5610 rtx insn;
5611 {
5612 register int j;
5613 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5614 rtx other_input_address_reload_insns = 0;
5615 rtx other_input_reload_insns = 0;
5616 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5617 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5618 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5619 rtx operand_reload_insns = 0;
5620 rtx other_operand_reload_insns = 0;
5621 rtx following_insn = NEXT_INSN (insn);
5622 rtx before_insn = insn;
5623 int special;
5624 /* Values to be put in spill_reg_store are put here first. */
5625 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5626
5627 for (j = 0; j < reload_n_operands; j++)
5628 input_reload_insns[j] = input_address_reload_insns[j]
5629 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5630
5631 /* Now output the instructions to copy the data into and out of the
5632 reload registers. Do these in the order that the reloads were reported,
5633 since reloads of base and index registers precede reloads of operands
5634 and the operands may need the base and index registers reloaded. */
5635
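/* Schematically, the loop below does the following for each reload J
   (a simplified outline; the buckets are the sequence heads declared
   above):

	if reload_in[J] is set and not inherited,
	    emit (set reload-reg, reload_in[J]) into the bucket
	    chosen by reload_when_needed[J];
	if reload_out[J] is set,
	    emit (set reload_out[J], reload-reg) into
	    output_reload_insns[operand];

   afterwards the buckets are spliced in around INSN in the fixed order
   documented just before the emit_insns_before calls at the end of
   this function.  */
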
5636 for (j = 0; j < n_reloads; j++)
5637 {
5638 register rtx old;
5639 rtx oldequiv_reg = 0;
5640
5641 if (reload_spill_index[j] >= 0)
5642 new_spill_reg_store[reload_spill_index[j]] = 0;
5643
5644 old = reload_in[j];
5645 if (old != 0 && ! reload_inherited[j]
5646 && ! rtx_equal_p (reload_reg_rtx[j], old)
5647 && reload_reg_rtx[j] != 0)
5648 {
5649 register rtx reloadreg = reload_reg_rtx[j];
5650 rtx oldequiv = 0;
5651 enum machine_mode mode;
5652 rtx *where;
5653
5654 /* Determine the mode to reload in.
5655 This is very tricky because we have three to choose from.
5656 There is the mode the insn operand wants (reload_inmode[J]).
5657 There is the mode of the reload register RELOADREG.
5658 There is the intrinsic mode of the operand, which we could find
5659 by stripping some SUBREGs.
5660 It turns out that RELOADREG's mode is irrelevant:
5661 we can change that arbitrarily.
5662
5663 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5664 then the reload reg may not support QImode moves, so use SImode.
5665 If foo is in memory due to spilling a pseudo reg, this is safe,
5666 because the QImode value is in the least significant part of a
5667 slot big enough for a SImode. If foo is some other sort of
5668 memory reference, then it is impossible to reload this case,
5669 so previous passes had better make sure this never happens.
5670
5671 Then consider a one-word union which has SImode and one of its
5672 members is a float, being fetched as (SUBREG:SF union:SI).
5673 We must fetch that as SFmode because we could be loading into
5674 a float-only register. In this case OLD's mode is correct.
5675
5676 Consider an immediate integer: it has VOIDmode. Here we need
5677 to get a mode from something else.
5678
5679 In some cases, there is a fourth mode, the operand's
5680 containing mode. If the insn specifies a containing mode for
5681 this operand, it overrides all others.
5682
5683 I am not sure whether the algorithm here is always right,
5684 but it does the right things in those cases. */
5685
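/* The rule above boils down to: use the operand's own mode unless it
   is VOIDmode (e.g. a CONST_INT), in which case fall back to the mode
   that find_reloads recorded.  As a self-contained sketch (hypothetical
   helper, not used below):  */
#if 0
static enum machine_mode
input_reload_mode (old, inmode)
     rtx old;
     enum machine_mode inmode;
{
  enum machine_mode mode = GET_MODE (old);

  return mode == VOIDmode ? inmode : mode;
}
#endif
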
5686 mode = GET_MODE (old);
5687 if (mode == VOIDmode)
5688 mode = reload_inmode[j];
5689
5690 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5691 /* If we need a secondary register for this operation, see if
5692 the value is already in a register in that class. Don't
5693 do this if the secondary register will be used as a scratch
5694 register. */
5695
5696 if (reload_secondary_in_reload[j] >= 0
5697 && reload_secondary_in_icode[j] == CODE_FOR_nothing
5698 && optimize)
5699 oldequiv
5700 = find_equiv_reg (old, insn,
5701 reload_reg_class[reload_secondary_in_reload[j]],
5702 -1, NULL_PTR, 0, mode);
5703 #endif
5704
5705 /* If reloading from memory, see if there is a register
5706 that already holds the same value. If so, reload from there.
5707 We can pass 0 as the reload_reg_p argument because
5708 any other reload has either already been emitted,
5709 in which case find_equiv_reg will see the reload-insn,
5710 or has yet to be emitted, in which case it doesn't matter
5711 because we will use this equiv reg right away. */
5712
5713 if (oldequiv == 0 && optimize
5714 && (GET_CODE (old) == MEM
5715 || (GET_CODE (old) == REG
5716 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5717 && reg_renumber[REGNO (old)] < 0)))
5718 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5719 -1, NULL_PTR, 0, mode);
5720
5721 if (oldequiv)
5722 {
5723 int regno = true_regnum (oldequiv);
5724
5725 /* If OLDEQUIV is a spill register, don't use it for this
5726 if any other reload needs it at an earlier stage of this insn
5727 or at this stage. */
5728 if (spill_reg_order[regno] >= 0
5729 && (! reload_reg_free_p (regno, reload_opnum[j],
5730 reload_when_needed[j])
5731 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5732 reload_when_needed[j])))
5733 oldequiv = 0;
5734
5735 /* If OLDEQUIV is not a spill register,
5736 don't use it if any other reload wants it. */
5737 if (spill_reg_order[regno] < 0)
5738 {
5739 int k;
5740 for (k = 0; k < n_reloads; k++)
5741 if (reload_reg_rtx[k] != 0 && k != j
5742 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5743 oldequiv))
5744 {
5745 oldequiv = 0;
5746 break;
5747 }
5748 }
5749
5750 /* If it is no cheaper to copy from OLDEQUIV into the
5751 reload register than it would be to move from memory,
5752 don't use it. Likewise, if we need a secondary register
5753 or memory. */
5754
5755 if (oldequiv != 0
5756 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5757 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5758 reload_reg_class[j])
5759 >= MEMORY_MOVE_COST (mode)))
5760 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5761 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5762 mode, oldequiv)
5763 != NO_REGS)
5764 #endif
5765 #ifdef SECONDARY_MEMORY_NEEDED
5766 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5767 REGNO_REG_CLASS (regno),
5768 mode)
5769 #endif
5770 ))
5771 oldequiv = 0;
5772 }
5773
5774 if (oldequiv == 0)
5775 oldequiv = old;
5776 else if (GET_CODE (oldequiv) == REG)
5777 oldequiv_reg = oldequiv;
5778 else if (GET_CODE (oldequiv) == SUBREG)
5779 oldequiv_reg = SUBREG_REG (oldequiv);
5780
5781 /* If we are reloading from a register that was recently stored in
5782 with an output-reload, see if we can prove there was
5783 actually no need to store the old value in it. */
5784
5785 if (optimize && GET_CODE (oldequiv) == REG
5786 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5787 && spill_reg_order[REGNO (oldequiv)] >= 0
5788 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
5789 && find_reg_note (insn, REG_DEAD, reload_in[j])
5790 /* This is unsafe if operand occurs more than once in current
5791 insn. Perhaps some occurrences weren't reloaded. */
5792 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5793 delete_output_reload
5794 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5795
5796 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5797 then load RELOADREG from OLDEQUIV. Note that we cannot use
5798 gen_lowpart_common since it can do the wrong thing when
5799 RELOADREG has a multi-word mode. Note that RELOADREG
5800 must always be a REG here. */
5801
5802 if (GET_MODE (reloadreg) != mode)
5803 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5804 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5805 oldequiv = SUBREG_REG (oldequiv);
5806 if (GET_MODE (oldequiv) != VOIDmode
5807 && mode != GET_MODE (oldequiv))
5808 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5809
5810 /* Switch to the right place to emit the reload insns. */
5811 switch (reload_when_needed[j])
5812 {
5813 case RELOAD_OTHER:
5814 where = &other_input_reload_insns;
5815 break;
5816 case RELOAD_FOR_INPUT:
5817 where = &input_reload_insns[reload_opnum[j]];
5818 break;
5819 case RELOAD_FOR_INPUT_ADDRESS:
5820 where = &input_address_reload_insns[reload_opnum[j]];
5821 break;
5822 case RELOAD_FOR_OUTPUT_ADDRESS:
5823 where = &output_address_reload_insns[reload_opnum[j]];
5824 break;
5825 case RELOAD_FOR_OPERAND_ADDRESS:
5826 where = &operand_reload_insns;
5827 break;
5828 case RELOAD_FOR_OPADDR_ADDR:
5829 where = &other_operand_reload_insns;
5830 break;
5831 case RELOAD_FOR_OTHER_ADDRESS:
5832 where = &other_input_address_reload_insns;
5833 break;
5834 default:
5835 abort ();
5836 }
5837
5838 push_to_sequence (*where);
5839 special = 0;
5840
5841 /* Auto-increment addresses must be reloaded in a special way. */
5842 if (GET_CODE (oldequiv) == POST_INC
5843 || GET_CODE (oldequiv) == POST_DEC
5844 || GET_CODE (oldequiv) == PRE_INC
5845 || GET_CODE (oldequiv) == PRE_DEC)
5846 {
5847 /* We are not going to bother supporting the case where an
5848 incremented register can't be copied directly from
5849 OLDEQUIV since this seems highly unlikely. */
5850 if (reload_secondary_in_reload[j] >= 0)
5851 abort ();
5852 /* Prevent normal processing of this reload. */
5853 special = 1;
5854 /* Output a special code sequence for this case. */
5855 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5856 }
5857
5858 /* If we are reloading a pseudo-register that was set by the previous
5859 insn, see if we can get rid of that pseudo-register entirely
5860 by redirecting the previous insn into our reload register. */
5861
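/* As a (hypothetical) illustration of the transformation below:

	(set (reg 105) (plus (reg 0) (const_int 4)))
	... insn using (reg 105), reloaded into hard reg 2 ...

   becomes

	(set (reg 2) (plus (reg 0) (const_int 4)))
	... insn using (reg 2) ...

   so the dying pseudo 105 never needs a hard reg of its own.  */
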
5862 else if (optimize && GET_CODE (old) == REG
5863 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5864 && dead_or_set_p (insn, old)
5865 /* This is unsafe if some other reload
5866 uses the same reg first. */
5867 && reload_reg_free_before_p (REGNO (reloadreg),
5868 reload_opnum[j],
5869 reload_when_needed[j]))
5870 {
5871 rtx temp = PREV_INSN (insn);
5872 while (temp && GET_CODE (temp) == NOTE)
5873 temp = PREV_INSN (temp);
5874 if (temp
5875 && GET_CODE (temp) == INSN
5876 && GET_CODE (PATTERN (temp)) == SET
5877 && SET_DEST (PATTERN (temp)) == old
5878 /* Make sure we can access insn_operand_constraint. */
5879 && asm_noperands (PATTERN (temp)) < 0
5880 /* This is unsafe if prev insn rejects our reload reg. */
5881 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5882 reloadreg)
5883 /* This is unsafe if operand occurs more than once in current
5884 insn. Perhaps some occurrences aren't reloaded. */
5885 && count_occurrences (PATTERN (insn), old) == 1
5886 /* Don't risk splitting a matching pair of operands. */
5887 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5888 {
5889 /* Store into the reload register instead of the pseudo. */
5890 SET_DEST (PATTERN (temp)) = reloadreg;
5891 /* If these are the only uses of the pseudo reg,
5892 pretend for GDB it lives in the reload reg we used. */
5893 if (reg_n_deaths[REGNO (old)] == 1
5894 && reg_n_sets[REGNO (old)] == 1)
5895 {
5896 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5897 alter_reg (REGNO (old), -1);
5898 }
5899 special = 1;
5900 }
5901 }
5902
5903 /* We can't do that, so output an insn to load RELOADREG. */
5904
5905 if (! special)
5906 {
5907 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5908 rtx second_reload_reg = 0;
5909 enum insn_code icode;
5910
5911 /* If we have a secondary reload, pick up the secondary register
5912 and icode, if any. If OLDEQUIV and OLD are different or
5913 if this is an in-out reload, recompute whether or not we
5914 still need a secondary register and what the icode should
5915 be. If we still need a secondary register and the class or
5916 icode is different, go back to reloading from OLD if using
5917 OLDEQUIV means that we got the wrong type of register. We
5918 cannot have different class or icode due to an in-out reload
5919 because we don't make such reloads when both the input and
5920 output need secondary reload registers. */
5921
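/* For reference, a machine description defines the macro used below
   along these (hypothetical) lines; NO_REGS means no secondary
   register is needed for the copy:  */
#if 0
/* E.g. loading the float registers from memory might require an
   intermediate general register on some machines.  */
#define SECONDARY_INPUT_RELOAD_CLASS(CLASS, MODE, X)	\
  (((CLASS) == FP_REGS && GET_CODE (X) == MEM)		\
   ? GENERAL_REGS : NO_REGS)
#endif
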
5922 if (reload_secondary_in_reload[j] >= 0)
5923 {
5924 int secondary_reload = reload_secondary_in_reload[j];
5925 rtx real_oldequiv = oldequiv;
5926 rtx real_old = old;
5927
5928 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5929 and similarly for OLD.
5930 See comments in get_secondary_reload in reload.c. */
5931 if (GET_CODE (oldequiv) == REG
5932 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5933 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5934 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5935
5936 if (GET_CODE (old) == REG
5937 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5938 && reg_equiv_mem[REGNO (old)] != 0)
5939 real_old = reg_equiv_mem[REGNO (old)];
5940
5941 second_reload_reg = reload_reg_rtx[secondary_reload];
5942 icode = reload_secondary_in_icode[j];
5943
5944 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5945 || (reload_in[j] != 0 && reload_out[j] != 0))
5946 {
5947 enum reg_class new_class
5948 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5949 mode, real_oldequiv);
5950
5951 if (new_class == NO_REGS)
5952 second_reload_reg = 0;
5953 else
5954 {
5955 enum insn_code new_icode;
5956 enum machine_mode new_mode;
5957
5958 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5959 REGNO (second_reload_reg)))
5960 oldequiv = old, real_oldequiv = real_old;
5961 else
5962 {
5963 new_icode = reload_in_optab[(int) mode];
5964 if (new_icode != CODE_FOR_nothing
5965 && ((insn_operand_predicate[(int) new_icode][0]
5966 && ! ((*insn_operand_predicate[(int) new_icode][0])
5967 (reloadreg, mode)))
5968 || (insn_operand_predicate[(int) new_icode][1]
5969 && ! ((*insn_operand_predicate[(int) new_icode][1])
5970 (real_oldequiv, mode)))))
5971 new_icode = CODE_FOR_nothing;
5972
5973 if (new_icode == CODE_FOR_nothing)
5974 new_mode = mode;
5975 else
5976 new_mode = insn_operand_mode[(int) new_icode][2];
5977
5978 if (GET_MODE (second_reload_reg) != new_mode)
5979 {
5980 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5981 new_mode))
5982 oldequiv = old, real_oldequiv = real_old;
5983 else
5984 second_reload_reg
5985 = gen_rtx (REG, new_mode,
5986 REGNO (second_reload_reg));
5987 }
5988 }
5989 }
5990 }
5991
5992 /* If we still need a secondary reload register, check
5993 to see if it is being used as a scratch or intermediate
5994 register and generate code appropriately. If we need
5995 a scratch register, use REAL_OLDEQUIV since the form of
5996 the insn may depend on the actual address if it is
5997 a MEM. */
5998
5999 if (second_reload_reg)
6000 {
6001 if (icode != CODE_FOR_nothing)
6002 {
6003 rtx pat;
6004 #ifdef SECONDARY_MEMORY_NEEDED
6005 /* If we need a memory location to do the move, do
6006 it that way. */
6007 if (GET_CODE (real_oldequiv) == REG
6008 && REGNO (real_oldequiv) < FIRST_PSEUDO_REGISTER
6009 && SECONDARY_MEMORY_NEEDED
6010 (REGNO_REG_CLASS (REGNO (real_oldequiv)),
6011 REGNO_REG_CLASS (REGNO (second_reload_reg)),
6012 GET_MODE (second_reload_reg)))
6013 {
6014 /* Get the memory to use and rewrite both
6015 registers to its mode. */
6016 rtx loc
6017 = get_secondary_mem (real_oldequiv,
6018 GET_MODE (second_reload_reg),
6019 reload_opnum[j],
6020 reload_when_needed[j]);
6021 rtx tmp_reloadreg;
6022
6023 if (GET_MODE (loc)
6024 != GET_MODE (second_reload_reg))
6025 second_reload_reg
6026 = gen_rtx (REG,
6027 GET_MODE (loc),
6028 REGNO (second_reload_reg));
6029
6030 if (GET_MODE (loc) != GET_MODE (real_oldequiv))
6031 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6032 REGNO (real_oldequiv));
6033 else
6034 tmp_reloadreg = real_oldequiv;
6035
6036 emit_move_insn (loc, tmp_reloadreg);
6037 emit_move_insn (second_reload_reg, loc);
6038 pat = gen_move_insn (reloadreg, second_reload_reg);
6039
6040 }
6041 else
6042 #endif
6043 pat = GEN_FCN (icode) (reloadreg,
6044 real_oldequiv,
6045 second_reload_reg);
6046 emit_insn (pat);
6047 special = 1;
6048 }
6049 else
6050 {
6051 /* See if we need a scratch register to load the
6052 intermediate register (a tertiary reload). */
6053 enum insn_code tertiary_icode
6054 = reload_secondary_in_icode[secondary_reload];
6055
6056 if (tertiary_icode != CODE_FOR_nothing)
6057 {
6058 rtx third_reload_reg
6059 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6060
6061 emit_insn ((GEN_FCN (tertiary_icode)
6062 (second_reload_reg, real_oldequiv,
6063 third_reload_reg)));
6064 }
6065 else
6066 gen_input_reload (second_reload_reg, oldequiv,
6067 reload_opnum[j],
6068 reload_when_needed[j]);
6069
6070 oldequiv = second_reload_reg;
6071 }
6072 }
6073 }
6074 #endif
6075
6076 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6077 gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
6078 reload_when_needed[j]);
6079
6080 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6081 /* We may have to make a REG_DEAD note for the secondary reload
6082 register in the insns we just made. Find the last insn that
6083 mentioned the register. */
6084 if (! special && second_reload_reg
6085 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6086 {
6087 rtx prev;
6088
6089 for (prev = get_last_insn (); prev;
6090 prev = PREV_INSN (prev))
6091 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6092 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6093 PATTERN (prev)))
6094 {
6095 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6096 second_reload_reg,
6097 REG_NOTES (prev));
6098 break;
6099 }
6100 }
6101 #endif
6102 }
6103
6104 /* End this sequence. */
6105 *where = get_insns ();
6106 end_sequence ();
6107 }
6108
6109 /* Add a note saying the input reload reg
6110 dies in this insn, if anyone cares. */
6111 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6112 if (old != 0
6113 && reload_reg_rtx[j] != old
6114 && reload_reg_rtx[j] != 0
6115 && reload_out[j] == 0
6116 && ! reload_inherited[j]
6117 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6118 {
6119 register rtx reloadreg = reload_reg_rtx[j];
6120
6121 #if 0
6122 /* We can't abort here because we need to support this for sched.c.
6123 It's not terrible to miss a REG_DEAD note, but we should try
6124 to figure out how to do this correctly. */
6125 /* The code below is incorrect for address-only reloads. */
6126 if (reload_when_needed[j] != RELOAD_OTHER
6127 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6128 abort ();
6129 #endif
6130
6131 /* Add a death note to this insn, for an input reload. */
6132
6133 if ((reload_when_needed[j] == RELOAD_OTHER
6134 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6135 && ! dead_or_set_p (insn, reloadreg))
6136 REG_NOTES (insn)
6137 = gen_rtx (EXPR_LIST, REG_DEAD,
6138 reloadreg, REG_NOTES (insn));
6139 }
6140
6141 /* When we inherit a reload, the last marked death of the reload reg
6142 may no longer really be a death. */
6143 if (reload_reg_rtx[j] != 0
6144 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6145 && reload_inherited[j])
6146 {
6147 /* Handle inheriting an output reload.
6148 Remove the death note from the output reload insn. */
6149 if (reload_spill_index[j] >= 0
6150 && GET_CODE (reload_in[j]) == REG
6151 && spill_reg_store[reload_spill_index[j]] != 0
6152 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6153 REG_DEAD, REGNO (reload_reg_rtx[j])))
6154 remove_death (REGNO (reload_reg_rtx[j]),
6155 spill_reg_store[reload_spill_index[j]]);
6156 /* Likewise for input reloads that were inherited. */
6157 else if (reload_spill_index[j] >= 0
6158 && GET_CODE (reload_in[j]) == REG
6159 && spill_reg_store[reload_spill_index[j]] == 0
6160 && reload_inheritance_insn[j] != 0
6161 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6162 REGNO (reload_reg_rtx[j])))
6163 remove_death (REGNO (reload_reg_rtx[j]),
6164 reload_inheritance_insn[j]);
6165 else
6166 {
6167 rtx prev;
6168
6169 /* We got this register from find_equiv_reg.
6170 Search back for its last death note and get rid of it.
6171 But don't search back too far.
6172 Don't go past a place where this reg is set,
6173 since a death note before that remains valid. */
6174 for (prev = PREV_INSN (insn);
6175 prev && GET_CODE (prev) != CODE_LABEL;
6176 prev = PREV_INSN (prev))
6177 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6178 && dead_or_set_p (prev, reload_reg_rtx[j]))
6179 {
6180 if (find_regno_note (prev, REG_DEAD,
6181 REGNO (reload_reg_rtx[j])))
6182 remove_death (REGNO (reload_reg_rtx[j]), prev);
6183 break;
6184 }
6185 }
6186 }
6187
6188 /* We might have used find_equiv_reg above to choose an alternate
6189 place from which to reload. If so, and it died, we need to remove
6190 that death and move it to one of the insns we just made. */
6191
6192 if (oldequiv_reg != 0
6193 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6194 {
6195 rtx prev, prev1;
6196
6197 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6198 prev = PREV_INSN (prev))
6199 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6200 && dead_or_set_p (prev, oldequiv_reg))
6201 {
6202 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6203 {
6204 for (prev1 = this_reload_insn;
6205 prev1; prev1 = PREV_INSN (prev1))
6206 if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
6207 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6208 PATTERN (prev1)))
6209 {
6210 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6211 oldequiv_reg,
6212 REG_NOTES (prev1));
6213 break;
6214 }
6215 remove_death (REGNO (oldequiv_reg), prev);
6216 }
6217 break;
6218 }
6219 }
6220 #endif
6221
6222 /* If we are reloading a register that was recently stored in with an
6223 output-reload, see if we can prove there was
6224 actually no need to store the old value in it. */
6225
6226 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6227 && reload_in[j] != 0
6228 && GET_CODE (reload_in[j]) == REG
6229 #if 0
6230 /* There doesn't seem to be any reason to restrict this to pseudos
6231 and doing so loses in the case where we are copying from a
6232 register of the wrong class. */
6233 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6234 #endif
6235 && spill_reg_store[reload_spill_index[j]] != 0
6236 /* This is unsafe if some other reload uses the same reg first. */
6237 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6238 reload_opnum[j], reload_when_needed[j])
6239 && dead_or_set_p (insn, reload_in[j])
6240 /* This is unsafe if operand occurs more than once in current
6241 insn. Perhaps some occurrences weren't reloaded. */
6242 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6243 delete_output_reload (insn, j,
6244 spill_reg_store[reload_spill_index[j]]);
6245
6246 /* Input-reloading is done. Now do output-reloading,
6247 storing the value from the reload-register after the main insn
6248 if reload_out[j] is nonzero.
6249
6250 ??? At some point we need to support handling output reloads of
6251 JUMP_INSNs or insns that set cc0. */
6252 old = reload_out[j];
6253 if (old != 0
6254 && reload_reg_rtx[j] != old
6255 && reload_reg_rtx[j] != 0)
6256 {
6257 register rtx reloadreg = reload_reg_rtx[j];
6258 register rtx second_reloadreg = 0;
6259 rtx note, p;
6260 enum machine_mode mode;
6261 int special = 0;
6262
6263 /* An output operand that dies right away does need a reload,
6264 but need not be copied from it. Show the new location in the
6265 REG_UNUSED note. */
6266 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6267 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6268 {
6269 XEXP (note, 0) = reload_reg_rtx[j];
6270 continue;
6271 }
6272 else if (GET_CODE (old) == SCRATCH)
6273 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6274 but we don't want to make an output reload. */
6275 continue;
6276
6277 #if 0
6278 /* Strip off of OLD any size-increasing SUBREGs such as
6279 (SUBREG:SI foo:QI 0). */
6280
6281 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6282 && (GET_MODE_SIZE (GET_MODE (old))
6283 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6284 old = SUBREG_REG (old);
6285 #endif
6286
6287 /* If this is a JUMP_INSN, we can't support output reloads yet. */
6288 if (GET_CODE (insn) == JUMP_INSN)
6289 abort ();
6290
6291 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6292
6293 /* Determine the mode to reload in.
6294 See comments above (for input reloading). */
6295
6296 mode = GET_MODE (old);
6297 if (mode == VOIDmode)
6298 {
6299 /* VOIDmode should never happen for an output. */
6300 if (asm_noperands (PATTERN (insn)) < 0)
6301 /* It's the compiler's fault. */
6302 fatal_insn ("VOIDmode on an output", insn);
6303 error_for_asm (insn, "output operand is constant in `asm'");
6304 /* Prevent crash--use something we know is valid. */
6305 mode = word_mode;
6306 old = gen_rtx (REG, mode, REGNO (reloadreg));
6307 }
6308
6309 if (GET_MODE (reloadreg) != mode)
6310 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6311
6312 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6313
6314 /* If we need two reload regs, set RELOADREG to the intermediate
6315 one, since it will be stored into OUT. We might need a secondary
6316 register only for an input reload, so check again here. */
6317
6318 if (reload_secondary_out_reload[j] >= 0)
6319 {
6320 rtx real_old = old;
6321
6322 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6323 && reg_equiv_mem[REGNO (old)] != 0)
6324 real_old = reg_equiv_mem[REGNO (old)];
6325
6326 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6327 mode, real_old)
6328 != NO_REGS))
6329 {
6330 second_reloadreg = reloadreg;
6331 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6332
6333 /* See if RELOADREG is to be used as a scratch register
6334 or as an intermediate register. */
6335 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6336 {
6337 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6338 (real_old, second_reloadreg, reloadreg)));
6339 special = 1;
6340 }
6341 else
6342 {
6343 /* See if we need both a scratch and intermediate reload
6344 register. */
6345 int secondary_reload = reload_secondary_out_reload[j];
6346 enum insn_code tertiary_icode
6347 = reload_secondary_out_icode[secondary_reload];
6348 rtx pat;
6349
6350 if (GET_MODE (reloadreg) != mode)
6351 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6352
6353 if (tertiary_icode != CODE_FOR_nothing)
6354 {
6355 rtx third_reloadreg
6356 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6357 pat = (GEN_FCN (tertiary_icode)
6358 (reloadreg, second_reloadreg, third_reloadreg));
6359 }
6360 #ifdef SECONDARY_MEMORY_NEEDED
6361 /* If we need a memory location to do the move, do it that way. */
6362 else if (GET_CODE (reloadreg) == REG
6363 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
6364 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
6365 REGNO_REG_CLASS (REGNO (second_reloadreg)),
6366 GET_MODE (second_reloadreg)))
6367 {
6368 /* Get the memory to use and rewrite both registers
6369 to its mode. */
6370 rtx loc
6371 = get_secondary_mem (reloadreg,
6372 GET_MODE (second_reloadreg),
6373 reload_opnum[j],
6374 reload_when_needed[j]);
6375 rtx tmp_reloadreg;
6376
6377 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
6378 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
6379 REGNO (second_reloadreg));
6380
6381 if (GET_MODE (loc) != GET_MODE (reloadreg))
6382 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6383 REGNO (reloadreg));
6384 else
6385 tmp_reloadreg = reloadreg;
6386
6387 emit_move_insn (loc, second_reloadreg);
6388 pat = gen_move_insn (tmp_reloadreg, loc);
6389 }
6390 #endif
6391 else
6392 pat = gen_move_insn (reloadreg, second_reloadreg);
6393
6394 emit_insn (pat);
6395 }
6396 }
6397 }
6398 #endif
6399
6400 /* Output the last reload insn. */
6401 if (! special)
6402 {
6403 #ifdef SECONDARY_MEMORY_NEEDED
6404 /* If we need a memory location to do the move, do it that way. */
6405 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
6406 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
6407 REGNO_REG_CLASS (REGNO (reloadreg)),
6408 GET_MODE (reloadreg)))
6409 {
6410 /* Get the memory to use and rewrite both registers to
6411 its mode. */
6412 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
6413 reload_opnum[j],
6414 reload_when_needed[j]);
6415
6416 if (GET_MODE (loc) != GET_MODE (reloadreg))
6417 reloadreg = gen_rtx (REG, GET_MODE (loc),
6418 REGNO (reloadreg));
6419
6420 if (GET_MODE (loc) != GET_MODE (old))
6421 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
6422
6423 emit_insn (gen_move_insn (loc, reloadreg));
6424 emit_insn (gen_move_insn (old, loc));
6425 }
6426 else
6427 #endif
6428 emit_insn (gen_move_insn (old, reloadreg));
6429 }
6430
6431 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6432 /* If final will look at death notes for this reg,
6433 put one on the last output-reload insn to use it. Similarly
6434 for any secondary register. */
6435 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6436 for (p = get_last_insn (); p; p = PREV_INSN (p))
6437 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6438 && reg_overlap_mentioned_for_reload_p (reloadreg,
6439 PATTERN (p)))
6440 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6441 reloadreg, REG_NOTES (p));
6442
6443 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6444 if (! special
6445 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6446 for (p = get_last_insn (); p; p = PREV_INSN (p))
6447 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6448 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6449 PATTERN (p)))
6450 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6451 second_reloadreg, REG_NOTES (p));
6452 #endif
6453 #endif
6454 /* Look at all insns we emitted, just to be safe. */
6455 for (p = get_insns (); p; p = NEXT_INSN (p))
6456 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6457 {
6458 /* If this output reload doesn't come from a spill reg,
6459 clear any memory of reloaded copies of the pseudo reg.
6460 If this output reload comes from a spill reg,
6461 reg_has_output_reload will make this do nothing. */
6462 note_stores (PATTERN (p), forget_old_reloads_1);
6463
6464 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6465 && reload_spill_index[j] >= 0)
6466 new_spill_reg_store[reload_spill_index[j]] = p;
6467 }
6468
6469 output_reload_insns[reload_opnum[j]] = get_insns ();
6470 end_sequence ();
6471 }
6472 }
6473
6474 /* Now write all the insns we made for reloads in the order expected by
6475 the allocation functions. Prior to the insn being reloaded, we write
6476 the following reloads:
6477
6478 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6479
6480 RELOAD_OTHER reloads.
6481
6482 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6483 the RELOAD_FOR_INPUT reload for the operand.
6484
6485 RELOAD_FOR_OPADDR_ADDRS reloads.
6486
6487 RELOAD_FOR_OPERAND_ADDRESS reloads.
6488
6489 After the insn being reloaded, we write the following:
6490
6491 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6492 the RELOAD_FOR_OUTPUT reload for that operand. */
6493
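 /* For illustration only: for a hypothetical insn with two operands,
    the emission calls below lay the stream out roughly as

	other_input_address_reload_insns
	other_input_reload_insns
	input_address_reload_insns[0], input_reload_insns[0]
	input_address_reload_insns[1], input_reload_insns[1]
	other_operand_reload_insns
	operand_reload_insns
	the insn being reloaded
	output_address_reload_insns[0], output_reload_insns[0]
	output_address_reload_insns[1], output_reload_insns[1]

    where any of these sequences may be empty.  */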
6494 emit_insns_before (other_input_address_reload_insns, before_insn);
6495 emit_insns_before (other_input_reload_insns, before_insn);
6496
6497 for (j = 0; j < reload_n_operands; j++)
6498 {
6499 emit_insns_before (input_address_reload_insns[j], before_insn);
6500 emit_insns_before (input_reload_insns[j], before_insn);
6501 }
6502
6503 emit_insns_before (other_operand_reload_insns, before_insn);
6504 emit_insns_before (operand_reload_insns, before_insn);
6505
6506 for (j = 0; j < reload_n_operands; j++)
6507 {
6508 emit_insns_before (output_address_reload_insns[j], following_insn);
6509 emit_insns_before (output_reload_insns[j], following_insn);
6510 }
6511
6512 /* Move death notes from INSN
6513 to output-operand-address and output reload insns. */
6514 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6515 {
6516 rtx insn1;
6517 /* Loop over those insns, last ones first. */
6518 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6519 insn1 = PREV_INSN (insn1))
6520 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6521 {
6522 rtx source = SET_SRC (PATTERN (insn1));
6523 rtx dest = SET_DEST (PATTERN (insn1));
6524
6525 /* The note we will examine next. */
6526 rtx reg_notes = REG_NOTES (insn);
6527 /* The place that pointed to this note. */
6528 rtx *prev_reg_note = &REG_NOTES (insn);
6529
6530 /* If the note is for something used in the source of this
6531 reload insn, or in the output address, move the note. */
6532 while (reg_notes)
6533 {
6534 rtx next_reg_notes = XEXP (reg_notes, 1);
6535 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6536 && GET_CODE (XEXP (reg_notes, 0)) == REG
6537 && ((GET_CODE (dest) != REG
6538 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6539 dest))
6540 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6541 source)))
6542 {
6543 *prev_reg_note = next_reg_notes;
6544 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6545 REG_NOTES (insn1) = reg_notes;
6546 }
6547 else
6548 prev_reg_note = &XEXP (reg_notes, 1);
6549
6550 reg_notes = next_reg_notes;
6551 }
6552 }
6553 }
6554 #endif
6555
6556 /* For all the spill regs newly reloaded in this instruction,
6557 record what they were reloaded from, so subsequent instructions
6558 can inherit the reloads.
6559
6560 Update spill_reg_store for the reloads of this insn.
6561 Copy the elements that were updated in the loop above. */
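 /* A hedged example with made-up numbers: if spill reg 4 has just been
    loaded with pseudo 57, recording that fact below (in reg_last_reload_reg
    and reg_reloaded_contents) lets a later insn that wants pseudo 57 in a
    register reuse (reg 4) instead of reloading it from memory.  */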
6562
6563 for (j = 0; j < n_reloads; j++)
6564 {
6565 register int r = reload_order[j];
6566 register int i = reload_spill_index[r];
6567
6568 /* I is nonneg if this reload used one of the spill regs.
6569 If reload_reg_rtx[r] is 0, this is an optional reload
6570 that we opted to ignore.
6571
6572 Also ignore reloads that don't reach the end of the insn,
6573 since we will eventually see the one that does. */
6574
6575 if (i >= 0 && reload_reg_rtx[r] != 0
6576 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6577 reload_when_needed[r]))
6578 {
6579 /* First, clear out memory of what used to be in this spill reg.
6580 If consecutive registers are used, clear them all. */
6581 int nr
6582 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6583 int k;
6584
6585 for (k = 0; k < nr; k++)
6586 {
6587 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6588 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6589 }
6590
6591 /* Maybe the spill reg contains a copy of reload_out. */
6592 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6593 {
6594 register int nregno = REGNO (reload_out[r]);
6595 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6596 : HARD_REGNO_NREGS (nregno,
6597 GET_MODE (reload_reg_rtx[r])));
6598
6599 spill_reg_store[i] = new_spill_reg_store[i];
6600 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6601
6602 /* If NREGNO is a hard register, the reloaded value may occupy more
6603 than one hard register. If it does, say what is in the rest of
6604 those registers, provided the output and the reload register cover
6605 the same number of hard regs; if they do not, invalidate the
6606 subsequent entries. */
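 /* A hedged example with made-up numbers: if hard regs 8/9 hold a DImode
    value that was output-reloaded through the DImode spill pair 4/5, then
    nr == nnr == 2, so slot 9 below gets a single-word REG for hard reg 5
    (in reg_raw_mode[5]); were the counts unequal, slot 9 would be cleared
    instead.  */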
6607
6608 if (nregno < FIRST_PSEUDO_REGISTER)
6609 for (k = 1; k < nnr; k++)
6610 reg_last_reload_reg[nregno + k]
6611 = (nr == nnr ? gen_rtx (REG,
6612 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6613 REGNO (reload_reg_rtx[r]) + k)
6614 : 0);
6615
6616 /* Now do the inverse operation. */
6617 for (k = 0; k < nr; k++)
6618 {
6619 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6620 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6621 : nregno + k);
6622 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6623 }
6624 }
6625
6626 /* Maybe the spill reg contains a copy of reload_in. Only do
6627 something if there will not be an output reload for
6628 the register being reloaded. */
6629 else if (reload_out[r] == 0
6630 && reload_in[r] != 0
6631 && (((GET_CODE (reload_in[r]) == REG
6632 && ! reg_has_output_reload[REGNO (reload_in[r])])
6633 || (GET_CODE (reload_in_reg[r]) == REG
6634 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
6635 {
6636 register int nregno;
6637 int nnr;
6638
6639 if (GET_CODE (reload_in[r]) == REG)
6640 nregno = REGNO (reload_in[r]);
6641 else
6642 nregno = REGNO (reload_in_reg[r]);
6643
6644 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6645 : HARD_REGNO_NREGS (nregno,
6646 GET_MODE (reload_reg_rtx[r])));
6647
6648 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6649
6650 if (nregno < FIRST_PSEUDO_REGISTER)
6651 for (k = 1; k < nnr; k++)
6652 reg_last_reload_reg[nregno + k]
6653 = (nr == nnr ? gen_rtx (REG,
6654 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6655 REGNO (reload_reg_rtx[r]) + k)
6656 : 0);
6657
6658 /* Unless we inherited this reload, show we haven't
6659 recently done a store. */
6660 if (! reload_inherited[r])
6661 spill_reg_store[i] = 0;
6662
6663 for (k = 0; k < nr; k++)
6664 {
6665 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6666 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6667 : nregno + k);
6668 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6669 = insn;
6670 }
6671 }
6672 }
6673
6674 /* The following if-statement was #if 0'd in 1.34 (or before...).
6675 It's reenabled in 1.35 because supposedly nothing else
6676 deals with this problem. */
6677
6678 /* If a register gets output-reloaded from a non-spill register,
6679 that invalidates any previous reloaded copy of it.
6680 But forget_old_reloads_1 won't get to see it, because
6681 it thinks only about the original insn. So invalidate it here. */
6682 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6683 {
6684 register int nregno = REGNO (reload_out[r]);
6685 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
6686
6687 while (num_regs-- > 0)
6688 reg_last_reload_reg[nregno + num_regs] = 0;
6689 }
6690 }
6691 }
6692 \f
6693 /* Emit code to perform an input reload of IN to RELOADREG. IN is from
6694 operand OPNUM with reload type TYPE.
6695
6696 Returns first insn emitted. */
6697
6698 rtx
6699 gen_input_reload (reloadreg, in, opnum, type)
6700 rtx reloadreg;
6701 rtx in;
6702 int opnum;
6703 enum reload_type type;
6704 {
6705 rtx last = get_last_insn ();
6706
6707 /* How to do this reload can get quite tricky. Normally, we are being
6708 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6709 register that didn't get a hard register. In that case we can just
6710 call emit_move_insn.
6711
6712 We can also be asked to reload a PLUS that adds a register or a MEM to
6713 another register, constant or MEM. This can occur during frame pointer
6714 elimination and while reloading addresses. This case is handled by
6715 trying to emit a single insn to perform the add. If it is not valid,
6716 we use a two insn sequence.
6717
6718 Finally, we could be called to handle an 'o' constraint by putting
6719 an address into a register. In that case, we first try to do this
6720 with a named pattern of "reload_load_address". If no such pattern
6721 exists, we just emit a SET insn and hope for the best (it will normally
6722 be valid on machines that use 'o').
6723
6724 This entire process is made complex by two facts: reload will never
6725 process the insns we generate here, so we must ensure that they
6726 satisfy their constraints; and parts of IN may themselves be reloaded
6727 separately and replaced with spill registers. Because of this we are,
6728 in some sense, just guessing the right approach here. The one listed
6729 above seems to work.
6730
6731 ??? At some point, this whole thing needs to be rethought. */
6732
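 /* A hedged illustration with hypothetical operands: IN might arrive here as

	(mem (reg 17))                  -- a simple operand
	(plus (reg fp) (const_int 8))   -- frame-pointer elimination residue
	(symbol_ref "x")                -- an address needed for an `o' constraint

    and the three strategies described above handle those shapes in turn.  */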
6733 if (GET_CODE (in) == PLUS
6734 && (GET_CODE (XEXP (in, 0)) == REG
6735 || GET_CODE (XEXP (in, 0)) == MEM)
6736 && (GET_CODE (XEXP (in, 1)) == REG
6737 || CONSTANT_P (XEXP (in, 1))
6738 || GET_CODE (XEXP (in, 1)) == MEM))
6739 {
6740 /* We need to compute the sum of a register or a MEM and another
6741 register, constant, or MEM, and put it into the reload
6742 register. The best possible way of doing this is if the machine
6743 has a three-operand ADD insn that accepts the required operands.
6744
6745 The simplest approach is to try to generate such an insn and see if it
6746 is recognized and matches its constraints. If so, it can be used.
6747
6748 It might be better not to actually emit the insn unless it is valid,
6749 but we need to pass the insn as an operand to `recog' and
6750 `insn_extract' and it is simpler to emit and then delete the insn if
6751 not valid than to dummy things up. */
6752
6753 rtx op0, op1, tem, insn;
6754 int code;
6755
6756 op0 = find_replacement (&XEXP (in, 0));
6757 op1 = find_replacement (&XEXP (in, 1));
6758
6759 /* Since constraint checking is strict, commutativity won't be
6760 checked, so we need to do that here to avoid spurious failure
6761 if the add instruction is two-address and the second operand
6762 of the add is the same as the reload reg, which is frequently
6763 the case. If the insn would be A = B + A, rearrange it so
6764 it will be A = A + B as constrain_operands expects. */
6765
6766 if (GET_CODE (XEXP (in, 1)) == REG
6767 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
6768 tem = op0, op0 = op1, op1 = tem;
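 /* Worked example with hypothetical registers: if RELOADREG is (reg 3) and
    IN is (plus (reg 5) (reg 3)), the swap just above means we will try
    (set (reg 3) (plus (reg 3) (reg 5))) below, the form a two-address add
    can match.  */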
6769
6770 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6771 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
6772
6773 insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6774 code = recog_memoized (insn);
6775
6776 if (code >= 0)
6777 {
6778 insn_extract (insn);
6779 /* We want constrain operands to treat this insn strictly in
6780 its validity determination, i.e., the way it would after reload
6781 has completed. */
6782 if (constrain_operands (code, 1))
6783 return insn;
6784 }
6785
6786 delete_insns_since (last);
6787
6788 /* If that failed, we must use a conservative two-insn sequence:
6789 use a move to copy the constant, MEM, or pseudo register to the
6790 reload register, since "move" can handle an arbitrary operand,
6791 unlike add, which in general cannot. Then add the registers.
6792
6793 If there is another way to do this for a specific machine, a
6794 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6795 we emit below. */
6796
6797 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6798 || (GET_CODE (op1) == REG
6799 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6800 tem = op0, op0 = op1, op1 = tem;
6801
6802 emit_insn (gen_move_insn (reloadreg, op0));
6803
6804 /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
6805 This fixes a problem on the 32K where the stack pointer cannot
6806 be used as an operand of an add insn. */
6807
6808 if (rtx_equal_p (op0, op1))
6809 op1 = reloadreg;
6810
6811 insn = emit_insn (gen_add2_insn (reloadreg, op1));
6812
6813 /* If the add we just emitted is not recognized or fails its constraints,
6814 fall back below: copy the address register to the reload register, then add the constant. */
6815
6816 code = recog_memoized (insn);
6817
6818 if (code >= 0)
6819 {
6820 insn_extract (insn);
6821 /* We want constrain operands to treat this insn strictly in
6822 its validity determination, i.e., the way it would after reload
6823 has completed. */
6824 if (constrain_operands (code, 1))
6825 return insn;
6826 }
6827
6828 delete_insns_since (last);
6829
6830 emit_insn (gen_move_insn (reloadreg, op1));
6831 emit_insn (gen_add2_insn (reloadreg, op0));
6832 }
6833
6834 #ifdef SECONDARY_MEMORY_NEEDED
6835 /* If we need a memory location to do the move, do it that way. */
6836 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6837 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6838 REGNO_REG_CLASS (REGNO (reloadreg)),
6839 GET_MODE (reloadreg)))
6840 {
6841 /* Get the memory to use and rewrite both registers to its mode. */
6842 rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);
6843
6844 if (GET_MODE (loc) != GET_MODE (reloadreg))
6845 reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
6846
6847 if (GET_MODE (loc) != GET_MODE (in))
6848 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6849
6850 emit_insn (gen_move_insn (loc, in));
6851 emit_insn (gen_move_insn (reloadreg, loc));
6852 }
6853 #endif
6854
6855 /* If IN is a simple operand, use gen_move_insn. */
6856 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
6857 emit_insn (gen_move_insn (reloadreg, in));
6858
6859 #ifdef HAVE_reload_load_address
6860 else if (HAVE_reload_load_address)
6861 emit_insn (gen_reload_load_address (reloadreg, in));
6862 #endif
6863
6864 /* Otherwise, just write (set RELOADREG IN) and hope for the best. */
6865 else
6866 emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6867
6868 /* Return the first insn emitted.
6869 We cannot just return get_last_insn, because there may have
6870 been multiple instructions emitted. Also note that gen_move_insn may
6871 emit more than one insn itself, so we cannot assume that there is one
6872 insn emitted per emit_insn call. */
6873
6874 return last ? NEXT_INSN (last) : get_insns ();
6875 }
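
/* A usage sketch, not compiled: hypothetical hard register numbers and a
   made-up address, showing how a caller might hand an eliminated
   frame-pointer address to gen_input_reload.  */
#if 0
static void
example_gen_input_reload ()
{
  rtx reloadreg = gen_rtx (REG, Pmode, 3);
  rtx in = gen_rtx (PLUS, Pmode, frame_pointer_rtx, GEN_INT (8));

  /* Reload operand 0; the first emitted insn is returned.  */
  gen_input_reload (reloadreg, in, 0, RELOAD_FOR_INPUT);
}
#endif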
6876 \f
6877 /* Delete a previously made output-reload
6878 whose result we now believe is not needed.
6879 First we double-check.
6880
6881 INSN is the insn now being processed.
6882 OUTPUT_RELOAD_INSN is the insn of the output reload.
6883 J is the reload-number for this insn. */
6884
6885 static void
6886 delete_output_reload (insn, j, output_reload_insn)
6887 rtx insn;
6888 int j;
6889 rtx output_reload_insn;
6890 {
6891 register rtx i1;
6892
6893 /* Get the raw pseudo-register referred to. */
6894
6895 rtx reg = reload_in[j];
6896 while (GET_CODE (reg) == SUBREG)
6897 reg = SUBREG_REG (reg);
6898
6899 /* If the pseudo-reg we are reloading is no longer referenced
6900 anywhere between the store into it and here,
6901 and no jumps or labels intervene, then the value can get
6902 here through the reload reg alone.
6903 Otherwise, give up--return. */
6904 for (i1 = NEXT_INSN (output_reload_insn);
6905 i1 != insn; i1 = NEXT_INSN (i1))
6906 {
6907 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6908 return;
6909 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6910 && reg_mentioned_p (reg, PATTERN (i1)))
6911 return;
6912 }
6913
6914 if (cannot_omit_stores[REGNO (reg)])
6915 return;
6916
6917 /* If this insn will store in the pseudo again,
6918 the previous store can be removed. */
6919 if (reload_out[j] == reload_in[j])
6920 delete_insn (output_reload_insn);
6921
6922 /* See if the pseudo reg has been completely replaced
6923 with reload regs. If so, delete the store insn
6924 and forget we had a stack slot for the pseudo. */
6925 else if (reg_n_deaths[REGNO (reg)] == 1
6926 && reg_basic_block[REGNO (reg)] >= 0
6927 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
6928 {
6929 rtx i2;
6930
6931 /* We know that it was used only between here
6932 and the beginning of the current basic block.
6933 (We also know that the last use before INSN was
6934 the output reload we are thinking of deleting, but never mind that.)
6935 Search that range; see if any ref remains. */
6936 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6937 {
6938 rtx set = single_set (i2);
6939
6940 /* Uses which just store in the pseudo don't count,
6941 since if they are the only uses, they are dead. */
6942 if (set != 0 && SET_DEST (set) == reg)
6943 continue;
6944 if (GET_CODE (i2) == CODE_LABEL
6945 || GET_CODE (i2) == JUMP_INSN)
6946 break;
6947 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
6948 && reg_mentioned_p (reg, PATTERN (i2)))
6949 /* Some other ref remains;
6950 we can't do anything. */
6951 return;
6952 }
6953
6954 /* Delete the now-dead stores into this pseudo. */
6955 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6956 {
6957 rtx set = single_set (i2);
6958
6959 if (set != 0 && SET_DEST (set) == reg)
6960 delete_insn (i2);
6961 if (GET_CODE (i2) == CODE_LABEL
6962 || GET_CODE (i2) == JUMP_INSN)
6963 break;
6964 }
6965
6966 /* For the debugging info,
6967 say the pseudo lives in this reload reg. */
6968 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
6969 alter_reg (REGNO (reg), -1);
6970 }
6971 }
6972 \f
6973 /* Output reload-insns to reload VALUE into RELOADREG.
6974 VALUE is an autoincrement or autodecrement RTX whose operand
6975 is a register or memory location;
6976 so reloading involves incrementing that location.
6977
6978 INC_AMOUNT is the number to increment or decrement by (always positive).
6979 This cannot be deduced from VALUE. */
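
/* A hedged example with made-up registers: for VALUE = (post_inc (reg 14))
   and INC_AMOUNT = 4, INCLOC is (reg 14); we first copy (reg 14) into
   RELOADREG and then, when the add can be done in place, emit
   (set (reg 14) (plus (reg 14) (const_int 4))).  */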
6980
6981 static void
6982 inc_for_reload (reloadreg, value, inc_amount)
6983 rtx reloadreg;
6984 rtx value;
6985 int inc_amount;
6986 {
6987 /* REG or MEM to be copied and incremented. */
6988 rtx incloc = XEXP (value, 0);
6989 /* Nonzero if increment after copying. */
6990 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
6991 rtx last;
6992 rtx inc;
6993 rtx add_insn;
6994 int code;
6995
6996 /* No hard register is equivalent to this register after
6997 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
6998 we could inc/dec that register as well (maybe even using it for
6999 the source), but I'm not sure it's worth worrying about. */
7000 if (GET_CODE (incloc) == REG)
7001 reg_last_reload_reg[REGNO (incloc)] = 0;
7002
7003 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7004 inc_amount = - inc_amount;
7005
7006 inc = GEN_INT (inc_amount);
7007
7008 /* If this is post-increment, first copy the location to the reload reg. */
7009 if (post)
7010 emit_insn (gen_move_insn (reloadreg, incloc));
7011
7012 /* See if we can directly increment INCLOC. Use a method similar to that
7013 in gen_input_reload. */
7014
7015 last = get_last_insn ();
7016 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
7017 gen_rtx (PLUS, GET_MODE (incloc),
7018 incloc, inc)));
7019
7020 code = recog_memoized (add_insn);
7021 if (code >= 0)
7022 {
7023 insn_extract (add_insn);
7024 if (constrain_operands (code, 1))
7025 {
7026 /* If this is a pre-increment and we have incremented the value
7027 where it lives, copy the incremented value to RELOADREG to
7028 be used as an address. */
7029
7030 if (! post)
7031 emit_insn (gen_move_insn (reloadreg, incloc));
7032
7033 return;
7034 }
7035 }
7036
7037 delete_insns_since (last);
7038
7039 /* If we couldn't do the increment directly, we must increment in RELOADREG.
7040 How we do this depends on whether this is pre- or post-increment.
7041 For pre-increment, copy INCLOC to the reload register, increment it
7042 there, then store it back. */
7043
7044 if (! post)
7045 {
7046 emit_insn (gen_move_insn (reloadreg, incloc));
7047 emit_insn (gen_add2_insn (reloadreg, inc));
7048 emit_insn (gen_move_insn (incloc, reloadreg));
7049 }
7050 else
7051 {
7052 /* Postincrement.
7053 Because this might be a jump insn or a compare, and because RELOADREG
7054 may not be available after the insn in an input reload, we must do
7055 the incrementation before the insn we are reloading for.
7056
7057 We have already copied INCLOC to RELOADREG. Increment the copy in
7058 RELOADREG, save that back, then decrement RELOADREG so it has
7059 the original value. */
7060
7061 emit_insn (gen_add2_insn (reloadreg, inc));
7062 emit_insn (gen_move_insn (incloc, reloadreg));
7063 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7064 }
7065
7066 return;
7067 }
7068 \f
7069 /* Return 1 if we are certain that the constraint-string STRING allows
7070 the hard register REG. Return 0 if we can't be sure of this. */
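/* For example, assuming REG is an ordinary general-purpose hard register:
   with STRING "g,r" every alternative accepts REG, so the result is 1;
   with "r,m" the second alternative offers neither `g', `r', nor a
   register class containing REG (on typical targets `m' names no register
   class here), so the result is 0.  */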
7071
7072 static int
7073 constraint_accepts_reg_p (string, reg)
7074 char *string;
7075 rtx reg;
7076 {
7077 int value = 0;
7078 int regno = true_regnum (reg);
7079 int c;
7080
7081 /* Initialize for first alternative. */
7082 value = 0;
7083 /* Check that each alternative allows REG (via `g', `r', or a class letter). */
7084 while (1)
7085 switch (c = *string++)
7086 {
7087 case 0:
7088 /* End of the string: the last alternative's verdict is the answer. */
7089 return value;
7090 case ',':
7091 /* If the alternative that just ended did not allow REG, we lose. */
7092 if (value == 0)
7093 return 0;
7094 /* Initialize for next alternative. */
7095 value = 0;
7096 break;
7097 case 'g':
7098 case 'r':
7099 /* Any general reg wins for this alternative. */
7100 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7101 value = 1;
7102 break;
7103 default:
7104 /* Any reg in specified class wins for this alternative. */
7105 {
7106 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7107
7108 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7109 value = 1;
7110 }
7111 }
7112 }
7113 \f
7114 /* Return the number of places FIND appears within X, but don't count
7115 an occurrence if some SET_DEST is FIND. */
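/* For instance, if FIND is the rtx R for some register and X is
   (set R (plus R (reg 7))) built from that same rtx, the SET_DEST
   occurrence is skipped and only the use inside the source counts, so
   the result is 1.  (Matching is by pointer equality, not rtx_equal_p.)  */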
7116
7117 static int
7118 count_occurrences (x, find)
7119 register rtx x, find;
7120 {
7121 register int i, j;
7122 register enum rtx_code code;
7123 register char *format_ptr;
7124 int count;
7125
7126 if (x == find)
7127 return 1;
7128 if (x == 0)
7129 return 0;
7130
7131 code = GET_CODE (x);
7132
7133 switch (code)
7134 {
7135 case REG:
7136 case QUEUED:
7137 case CONST_INT:
7138 case CONST_DOUBLE:
7139 case SYMBOL_REF:
7140 case CODE_LABEL:
7141 case PC:
7142 case CC0:
7143 return 0;
7144
7145 case SET:
7146 if (SET_DEST (x) == find)
7147 return count_occurrences (SET_SRC (x), find);
7148 break;
7149 }
7150
7151 format_ptr = GET_RTX_FORMAT (code);
7152 count = 0;
7153
7154 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7155 {
7156 switch (*format_ptr++)
7157 {
7158 case 'e':
7159 count += count_occurrences (XEXP (x, i), find);
7160 break;
7161
7162 case 'E':
7163 if (XVEC (x, i) != NULL)
7164 {
7165 for (j = 0; j < XVECLEN (x, i); j++)
7166 count += count_occurrences (XVECEXP (x, i, j), find);
7167 }
7168 break;
7169 }
7170 }
7171 return count;
7172 }