(reload): Initialize the previous_offset fields in reg_eliminate
[gcc.git] / gcc / reload1.c
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92-5, 1996 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include <stdio.h>
23 #include "config.h"
24 #include "rtl.h"
25 #include "obstack.h"
26 #include "insn-config.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
29 #include "flags.h"
30 #include "expr.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "reload.h"
34 #include "recog.h"
35 #include "basic-block.h"
36 #include "output.h"
37 #include "real.h"
38
39 /* This file contains the reload pass of the compiler, which is
40 run after register allocation has been done. It checks that
41 each insn is valid (operands required to be in registers really
42 are in registers of the proper class) and fixes up invalid ones
43 by copying values temporarily into registers for the insns
44 that need them.
45
46 The results of register allocation are described by the vector
47 reg_renumber; the insns still contain pseudo regs, but reg_renumber
48 can be used to find which hard reg, if any, a pseudo reg is in.
49
50 The technique we always use is to free up a few hard regs that are
51 called ``reload regs'', and for each place where a pseudo reg
52 must be in a hard reg, copy it temporarily into one of the reload regs.
53
54 All the pseudos that were formerly allocated to the hard regs that
55 are now in use as reload regs must be ``spilled''. This means
56 that they go to other hard regs, or to stack slots if no other
57 available hard regs can be found. Spilling can invalidate more
58 insns, requiring additional need for reloads, so we must keep checking
59 until the process stabilizes.
60
61 For machines with different classes of registers, we must keep track
62 of the register class needed for each reload, and make sure that
63 we allocate enough reload registers of each class.
64
65 The file reload.c contains the code that checks one insn for
66 validity and reports the reloads that it needs. This file
67 is in charge of scanning the entire rtl code, accumulating the
68 reload needs, spilling, assigning reload registers to use for
69 fixing up each insn, and generating the new insns to copy values
70 into the reload registers. */
71
72
73 #ifndef REGISTER_MOVE_COST
74 #define REGISTER_MOVE_COST(x, y) 2
75 #endif
76
77 #ifndef MEMORY_MOVE_COST
78 #define MEMORY_MOVE_COST(x) 4
79 #endif
80 \f
81 /* During reload_as_needed, element N contains a REG rtx for the hard reg
82 into which reg N has been reloaded (perhaps for a previous insn). */
83 static rtx *reg_last_reload_reg;
84
85 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
86 for an output reload that stores into reg N. */
87 static char *reg_has_output_reload;
88
89 /* Indicates which hard regs are reload-registers for an output reload
90 in the current insn. */
91 static HARD_REG_SET reg_is_output_reload;
92
93 /* Element N is the constant value to which pseudo reg N is equivalent,
94 or zero if pseudo reg N is not equivalent to a constant.
95 find_reloads looks at this in order to replace pseudo reg N
96 with the constant it stands for. */
97 rtx *reg_equiv_constant;
98
99 /* Element N is a memory location to which pseudo reg N is equivalent,
100 prior to any register elimination (such as frame pointer to stack
101 pointer). Depending on whether or not it is a valid address, this value
102 is transferred to either reg_equiv_address or reg_equiv_mem. */
103 rtx *reg_equiv_memory_loc;
104
105 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
106 This is used when the address is not valid as a memory address
107 (because its displacement is too big for the machine.) */
108 rtx *reg_equiv_address;
109
110 /* Element N is the memory slot to which pseudo reg N is equivalent,
111 or zero if pseudo reg N is not equivalent to a memory slot. */
112 rtx *reg_equiv_mem;
113
114 /* Widest width in which each pseudo reg is referred to (via subreg). */
115 static int *reg_max_ref_width;
116
117 /* Element N is the insn that initialized reg N from its equivalent
118 constant or memory slot. */
119 static rtx *reg_equiv_init;
120
121 /* During reload_as_needed, element N contains the last pseudo regno
122 reloaded into the Nth reload register. This vector is in parallel
123 with spill_regs. If that pseudo reg occupied more than one register,
124 reg_reloaded_contents points to that pseudo for each spill register in
125 use; all of these must remain set for an inheritance to occur. */
126 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
127
128 /* During reload_as_needed, element N contains the insn for which
129 the Nth reload register was last used. This vector is in parallel
130 with spill_regs, and its contents are significant only when
131 reg_reloaded_contents is significant. */
132 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
133
134 /* Number of spill-regs so far; number of valid elements of spill_regs. */
135 static int n_spills;
136
137 /* In parallel with spill_regs, contains REG rtx's for those regs.
138 Holds the last rtx used for any given reg, or 0 if it has never
139 been used for spilling yet. This rtx is reused, provided it has
140 the proper mode. */
141 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
142
143 /* In parallel with spill_regs, contains nonzero for a spill reg
144 that was stored after the last time it was used.
145 The precise value is the insn generated to do the store. */
146 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
147
148 /* This table is the inverse mapping of spill_regs:
149 indexed by hard reg number,
150 it contains the position of that reg in spill_regs,
151 or -1 for something that is not in spill_regs. */
152 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
153
154 /* This reg set indicates registers that may not be used for retrying global
155 allocation. The registers that may not be used include all spill registers
156 and the frame pointer (if we are using one). */
157 HARD_REG_SET forbidden_regs;
158
159 /* This reg set indicates registers that are not good for spill registers.
160 They will not be used to complete groups of spill registers. This includes
161 all fixed registers, registers that may be eliminated, and, if
162 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
163
164 (spill_reg_order prevents these registers from being used to start a
165 group.) */
166 static HARD_REG_SET bad_spill_regs;
167
168 /* Describes order of use of registers for reloading
169 of spilled pseudo-registers. `spills' is the number of
170 elements that are actually valid; new ones are added at the end. */
171 static short spill_regs[FIRST_PSEUDO_REGISTER];
172
173 /* This reg set indicates those registers that have been used a spill
174 registers. This information is used in reorg.c, to help figure out
175 what registers are live at any point. It is assumed that all spill_regs
176 are dead at every CODE_LABEL. */
177
178 HARD_REG_SET used_spill_regs;
179
180 /* Index of last register assigned as a spill register. We allocate in
181 a round-robin fashion. */
182
183 static int last_spill_reg;
184
185 /* Describes order of preference for putting regs into spill_regs.
186 Contains the numbers of all the hard regs, in order most preferred first.
187 This order is different for each function.
188 It is set up by order_regs_for_reload.
189 Empty elements at the end contain -1. */
190 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
191
192 /* 1 for a hard register that appears explicitly in the rtl
193 (for example, function value registers, special registers
194 used by insns, structure value pointer registers). */
195 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
196
197 /* Indicates if a register was counted against the need for
198 groups. 0 means it can count against max_nongroup instead. */
199 static HARD_REG_SET counted_for_groups;
200
201 /* Indicates if a register was counted against the need for
202 non-groups. 0 means it can become part of a new group.
203 During choose_reload_regs, 1 here means don't use this reg
204 as part of a group, even if it seems to be otherwise ok. */
205 static HARD_REG_SET counted_for_nongroups;
206
207 /* Indexed by pseudo reg number N,
208 says may not delete stores into the real (memory) home of pseudo N.
209 This is set if we already substituted a memory equivalent in some uses,
210 which happens when we have to eliminate the fp from it. */
211 static char *cannot_omit_stores;
212
213 /* Nonzero if indirect addressing is supported on the machine; this means
214 that spilling (REG n) does not require reloading it into a register in
215 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
216 value indicates the level of indirect addressing supported, e.g., two
217 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
218 a hard register. */
219
220 static char spill_indirect_levels;
221
222 /* Nonzero if indirect addressing is supported when the innermost MEM is
223 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
224 which these are valid is the same as spill_indirect_levels, above. */
225
226 char indirect_symref_ok;
227
228 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
229
230 char double_reg_address_ok;
231
232 /* Record the stack slot for each spilled hard register. */
233
234 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
235
236 /* Width allocated so far for that stack slot. */
237
238 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
239
240 /* Indexed by register class and basic block number, nonzero if there is
241 any need for a spill register of that class in that basic block.
242 The pointer is 0 if we did stupid allocation and don't know
243 the structure of basic blocks. */
244
245 char *basic_block_needs[N_REG_CLASSES];
246
247 /* First uid used by insns created by reload in this function.
248 Used in find_equiv_reg. */
249 int reload_first_uid;
250
251 /* Flag set by local-alloc or global-alloc if anything is live in
252 a call-clobbered reg across calls. */
253
254 int caller_save_needed;
255
256 /* Set to 1 while reload_as_needed is operating.
257 Required by some machines to handle any generated moves differently. */
258
259 int reload_in_progress = 0;
260
261 /* These arrays record the insn_code of insns that may be needed to
262 perform input and output reloads of special objects. They provide a
263 place to pass a scratch register. */
264
265 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
266 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
267
268 /* This obstack is used for allocation of rtl during register elimination.
269 The allocated storage can be freed once find_reloads has processed the
270 insn. */
271
272 struct obstack reload_obstack;
273 char *reload_firstobj;
274
275 #define obstack_chunk_alloc xmalloc
276 #define obstack_chunk_free free
277
278 /* List of labels that must never be deleted. */
279 extern rtx forced_labels;
280
281 /* Allocation number table from global register allocation. */
282 extern int *reg_allocno;
283 \f
284 /* This structure is used to record information about register eliminations.
285 Each array entry describes one possible way of eliminating a register
286 in favor of another. If there is more than one way of eliminating a
287 particular register, the most preferred should be specified first. */
288
289 static struct elim_table
290 {
291 int from; /* Register number to be eliminated. */
292 int to; /* Register number used as replacement. */
293 int initial_offset; /* Initial difference between values; recomputed
 at the start of each reload pass (via
 INITIAL_ELIMINATION_OFFSET when available). */
294 int can_eliminate; /* Non-zero if this elimination can be done. */
295 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
296 insns made by reload. */
297 int offset; /* Current offset between the two regs. */
298 int max_offset; /* Maximum offset between the two regs. */
299 int previous_offset; /* Offset at end of previous insn; like OFFSET
 and MAX_OFFSET it is reset to INITIAL_OFFSET
 at the start of every reload pass, so it must
 never be read before that initialization. */
300 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
301 rtx from_rtx; /* REG rtx for the register to be eliminated.
302 We cannot simply compare the number since
303 we might then spuriously replace a hard
304 register corresponding to a pseudo
305 assigned to the reg to be eliminated. */
306 rtx to_rtx; /* REG rtx for the replacement. */
307 } reg_eliminate[] =
308 
309 /* If a set of eliminable registers was specified, define the table from it.
310 Otherwise, default to the normal case of the frame pointer being
311 replaced by the stack pointer. */
312 
313 /* Note: in the default case only the FROM and TO fields are given by the
 initializer; the remaining fields are static storage (hence zero) until
 reload itself computes can_eliminate, from_rtx/to_rtx and the offsets. */
314 #ifdef ELIMINABLE_REGS
315 ELIMINABLE_REGS;
316 #else
317 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
318 #endif
319 
320 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
320
321 /* Record the number of pending eliminations that have an offset not equal
322 to their initial offset. If non-zero, we use a new copy of each
323 replacement result in any insns encountered. */
324 static int num_not_at_initial_offset;
325
326 /* Count the number of registers that we may be able to eliminate. */
327 static int num_eliminable;
328
329 /* For each label, we record the offset of each elimination. If we reach
330 a label by more than one path and an offset differs, we cannot do the
331 elimination. This information is indexed by the number of the label.
332 The first table is an array of flags that records whether we have yet
333 encountered a label and the second table is an array of arrays, one
334 entry in the latter array for each elimination. */
335
336 static char *offsets_known_at;
337 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
338
339 /* Number of labels in the current function. */
340
341 static int num_labels;
342
343 struct hard_reg_n_uses { int regno; int uses; };
344 \f
345 static int possible_group_p PROTO((int, int *));
346 static void count_possible_groups PROTO((int *, enum machine_mode *,
347 int *, int));
348 static int modes_equiv_for_class_p PROTO((enum machine_mode,
349 enum machine_mode,
350 enum reg_class));
351 static void spill_failure PROTO((rtx));
352 static int new_spill_reg PROTO((int, int, int *, int *, int,
353 FILE *));
354 static void delete_dead_insn PROTO((rtx));
355 static void alter_reg PROTO((int, int));
356 static void mark_scratch_live PROTO((rtx));
357 static void set_label_offsets PROTO((rtx, rtx, int));
358 static int eliminate_regs_in_insn PROTO((rtx, int));
359 static void mark_not_eliminable PROTO((rtx, rtx));
360 static int spill_hard_reg PROTO((int, int, FILE *, int));
361 static void scan_paradoxical_subregs PROTO((rtx));
362 static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
363 struct hard_reg_n_uses *));
364 static void order_regs_for_reload PROTO((int));
365 static int compare_spill_regs PROTO((short *, short *));
366 static void reload_as_needed PROTO((rtx, int));
367 static void forget_old_reloads_1 PROTO((rtx, rtx));
368 static int reload_reg_class_lower PROTO((short *, short *));
369 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
370 enum machine_mode));
371 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
372 enum machine_mode));
373 static int reload_reg_free_p PROTO((int, int, enum reload_type));
374 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
375 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
376 static int reloads_conflict PROTO((int, int));
377 static int allocate_reload_reg PROTO((int, rtx, int, int));
378 static void choose_reload_regs PROTO((rtx, rtx));
379 static void merge_assigned_reloads PROTO((rtx));
380 static void emit_reload_insns PROTO((rtx));
381 static void delete_output_reload PROTO((rtx, int, rtx));
382 static void inc_for_reload PROTO((rtx, rtx, int));
383 static int constraint_accepts_reg_p PROTO((char *, rtx));
384 static int count_occurrences PROTO((rtx, rtx));
385 \f
386 /* Initialize the reload pass once per compilation. */
387
388 void
389 init_reload ()
390 {
391 register int i;
392
393 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
394 Set spill_indirect_levels to the number of levels such addressing is
395 permitted, zero if it is not permitted at all. */
396
397 register rtx tem
398 = gen_rtx (MEM, Pmode,
399 gen_rtx (PLUS, Pmode,
400 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
401 GEN_INT (4)));
402 spill_indirect_levels = 0;
403
404 while (memory_address_p (QImode, tem))
405 {
406 spill_indirect_levels++;
407 tem = gen_rtx (MEM, Pmode, tem);
408 }
409
410 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
411
412 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
413 indirect_symref_ok = memory_address_p (QImode, tem);
414
415 /* See if reg+reg is a valid (and offsettable) address. */
416
417 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
418 {
419 tem = gen_rtx (PLUS, Pmode,
420 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
421 gen_rtx (REG, Pmode, i));
422 /* This way, we make sure that reg+reg is an offsettable address. */
423 tem = plus_constant (tem, 4);
424
425 if (memory_address_p (QImode, tem))
426 {
427 double_reg_address_ok = 1;
428 break;
429 }
430 }
431
432 /* Initialize obstack for our rtl allocation. */
433 gcc_obstack_init (&reload_obstack);
434 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
435 }
436
437 /* Main entry point for the reload pass.
438
439 FIRST is the first insn of the function being compiled.
440
441 GLOBAL nonzero means we were called from global_alloc
442 and should attempt to reallocate any pseudoregs that we
443 displace from hard regs we will use for reloads.
444 If GLOBAL is zero, we do not have enough information to do that,
445 so any pseudo reg that is spilled must go to the stack.
446
447 DUMPFILE is the global-reg debugging dump file stream, or 0.
448 If it is nonzero, messages are written to it to describe
449 which registers are seized as reload regs, which pseudo regs
450 are spilled from them, and where the pseudo regs are reallocated to.
451
452 Return value is nonzero if reload failed
453 and we must not do any more for this function. */
454
455 int
456 reload (first, global, dumpfile)
457 rtx first;
458 int global;
459 FILE *dumpfile;
460 {
461 register int class;
462 register int i, j, k;
463 register rtx insn;
464 register struct elim_table *ep;
465
466 int something_changed;
467 int something_needs_reloads;
468 int something_needs_elimination;
469 int new_basic_block_needs;
470 enum reg_class caller_save_spill_class = NO_REGS;
471 int caller_save_group_size = 1;
472
473 /* Nonzero means we couldn't get enough spill regs. */
474 int failure = 0;
475
476 /* The basic block number currently being processed for INSN. */
477 int this_block;
478
479 /* Make sure even insns with volatile mem refs are recognizable. */
480 init_recog ();
481
482 /* Enable find_equiv_reg to distinguish insns made by reload. */
483 reload_first_uid = get_max_uid ();
484
485 for (i = 0; i < N_REG_CLASSES; i++)
486 basic_block_needs[i] = 0;
487
488 #ifdef SECONDARY_MEMORY_NEEDED
489 /* Initialize the secondary memory table. */
490 clear_secondary_mem ();
491 #endif
492
493 /* Remember which hard regs appear explicitly
494 before we merge into `regs_ever_live' the ones in which
495 pseudo regs have been allocated. */
496 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
497
498 /* We don't have a stack slot for any spill reg yet. */
499 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
500 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
501
502 /* Initialize the save area information for caller-save, in case some
503 are needed. */
504 init_save_areas ();
505
506 /* Compute which hard registers are now in use
507 as homes for pseudo registers.
508 This is done here rather than (eg) in global_alloc
509 because this point is reached even if not optimizing. */
510
511 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
512 mark_home_live (i);
513
514 for (i = 0; i < scratch_list_length; i++)
515 if (scratch_list[i])
516 mark_scratch_live (scratch_list[i]);
517
518 /* Make sure that the last insn in the chain
519 is not something that needs reloading. */
520 emit_note (NULL_PTR, NOTE_INSN_DELETED);
521
522 /* Find all the pseudo registers that didn't get hard regs
523 but do have known equivalent constants or memory slots.
524 These include parameters (known equivalent to parameter slots)
525 and cse'd or loop-moved constant memory addresses.
526
527 Record constant equivalents in reg_equiv_constant
528 so they will be substituted by find_reloads.
529 Record memory equivalents in reg_mem_equiv so they can
530 be substituted eventually by altering the REG-rtx's. */
531
532 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
533 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
534 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
535 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
536 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
537 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
538 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
539 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
540 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
541 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
542 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
543 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
544 cannot_omit_stores = (char *) alloca (max_regno);
545 bzero (cannot_omit_stores, max_regno);
546
547 #ifdef SMALL_REGISTER_CLASSES
548 CLEAR_HARD_REG_SET (forbidden_regs);
549 #endif
550
551 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
552 Also find all paradoxical subregs and find largest such for each pseudo.
553 On machines with small register classes, record hard registers that
554 are used for user variables. These can never be used for spills.
555 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
556 caller-saved registers must be marked live. */
557
558 for (insn = first; insn; insn = NEXT_INSN (insn))
559 {
560 rtx set = single_set (insn);
561
562 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
563 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
564 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
565 if (! call_used_regs[i])
566 regs_ever_live[i] = 1;
567
568 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
569 {
570 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
571 if (note
572 #ifdef LEGITIMATE_PIC_OPERAND_P
573 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
574 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
575 #endif
576 )
577 {
578 rtx x = XEXP (note, 0);
579 i = REGNO (SET_DEST (set));
580 if (i > LAST_VIRTUAL_REGISTER)
581 {
582 if (GET_CODE (x) == MEM)
583 reg_equiv_memory_loc[i] = x;
584 else if (CONSTANT_P (x))
585 {
586 if (LEGITIMATE_CONSTANT_P (x))
587 reg_equiv_constant[i] = x;
588 else
589 reg_equiv_memory_loc[i]
590 = force_const_mem (GET_MODE (SET_DEST (set)), x);
591 }
592 else
593 continue;
594
595 /* If this register is being made equivalent to a MEM
596 and the MEM is not SET_SRC, the equivalencing insn
597 is one with the MEM as a SET_DEST and it occurs later.
598 So don't mark this insn now. */
599 if (GET_CODE (x) != MEM
600 || rtx_equal_p (SET_SRC (set), x))
601 reg_equiv_init[i] = insn;
602 }
603 }
604 }
605
606 /* If this insn is setting a MEM from a register equivalent to it,
607 this is the equivalencing insn. */
608 else if (set && GET_CODE (SET_DEST (set)) == MEM
609 && GET_CODE (SET_SRC (set)) == REG
610 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
611 && rtx_equal_p (SET_DEST (set),
612 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
613 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
614
615 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
616 scan_paradoxical_subregs (PATTERN (insn));
617 }
618
619 /* Does this function require a frame pointer? */
620
621 frame_pointer_needed = (! flag_omit_frame_pointer
622 #ifdef EXIT_IGNORE_STACK
623 /* ?? If EXIT_IGNORE_STACK is set, we will not save
624 and restore sp for alloca. So we can't eliminate
625 the frame pointer in that case. At some point,
626 we should improve this by emitting the
627 sp-adjusting insns for this case. */
628 || (current_function_calls_alloca
629 && EXIT_IGNORE_STACK)
630 #endif
631 || FRAME_POINTER_REQUIRED);
632
633 num_eliminable = 0;
634
635 /* Initialize the table of registers to eliminate. The way we do this
636 depends on how the eliminable registers were defined. */
637 #ifdef ELIMINABLE_REGS
638 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
639 {
640 ep->can_eliminate = ep->can_eliminate_previous
641 = (CAN_ELIMINATE (ep->from, ep->to)
642 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
643 }
644 #else
645 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
646 = ! frame_pointer_needed;
647 #endif
648
649 /* Count the number of eliminable registers and build the FROM and TO
650 REG rtx's. Note that code in gen_rtx will cause, e.g.,
651 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
652 We depend on this. */
653 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
654 {
655 num_eliminable += ep->can_eliminate;
656 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
657 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
658 }
659
660 num_labels = max_label_num () - get_first_label_num ();
661
662 /* Allocate the tables used to store offset information at labels. */
663 offsets_known_at = (char *) alloca (num_labels);
664 offsets_at
665 = (int (*)[NUM_ELIMINABLE_REGS])
666 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
667
668 offsets_known_at -= get_first_label_num ();
669 offsets_at -= get_first_label_num ();
670
671 /* Alter each pseudo-reg rtx to contain its hard reg number.
672 Assign stack slots to the pseudos that lack hard regs or equivalents.
673 Do not touch virtual registers. */
674
675 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
676 alter_reg (i, -1);
677
678 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
679 because the stack size may be a part of the offset computation for
680 register elimination. */
681 assign_stack_local (BLKmode, 0, 0);
682
683 /* If we have some registers we think can be eliminated, scan all insns to
684 see if there is an insn that sets one of these registers to something
685 other than itself plus a constant. If so, the register cannot be
686 eliminated. Doing this scan here eliminates an extra pass through the
687 main reload loop in the most common case where register elimination
688 cannot be done. */
689 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
690 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
691 || GET_CODE (insn) == CALL_INSN)
692 note_stores (PATTERN (insn), mark_not_eliminable);
693
694 #ifndef REGISTER_CONSTRAINTS
695 /* If all the pseudo regs have hard regs,
696 except for those that are never referenced,
697 we know that no reloads are needed. */
698 /* But that is not true if there are register constraints, since
699 in that case some pseudos might be in the wrong kind of hard reg. */
700
701 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
702 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
703 break;
704
705 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
706 return;
707 #endif
708
709 /* Compute the order of preference for hard registers to spill.
710 Store them by decreasing preference in potential_reload_regs. */
711
712 order_regs_for_reload (global);
713
714 /* So far, no hard regs have been spilled. */
715 n_spills = 0;
716 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
717 spill_reg_order[i] = -1;
718
719 /* Initialize to -1, which means take the first spill register. */
720 last_spill_reg = -1;
721
722 /* On most machines, we can't use any register explicitly used in the
723 rtl as a spill register. But on some, we have to. Those will have
724 taken care to keep the life of hard regs as short as possible. */
725
726 #ifndef SMALL_REGISTER_CLASSES
727 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
728 #endif
729
730 /* Spill any hard regs that we know we can't eliminate. */
731 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
732 if (! ep->can_eliminate)
733 spill_hard_reg (ep->from, global, dumpfile, 1);
734
735 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
736 if (frame_pointer_needed)
737 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
738 #endif
739
740 if (global)
741 for (i = 0; i < N_REG_CLASSES; i++)
742 {
743 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
744 bzero (basic_block_needs[i], n_basic_blocks);
745 }
746
747 /* From now on, we need to emit any moves without making new pseudos. */
748 reload_in_progress = 1;
749
750 /* This loop scans the entire function each go-round
751 and repeats until one repetition spills no additional hard regs. */
752
753 /* This flag is set when a pseudo reg is spilled,
754 to require another pass. Note that getting an additional reload
755 reg does not necessarily imply any pseudo reg was spilled;
756 sometimes we find a reload reg that no pseudo reg was allocated in. */
757 something_changed = 1;
758 /* This flag is set if there are any insns that require reloading. */
759 something_needs_reloads = 0;
760 /* This flag is set if there are any insns that require register
761 eliminations. */
762 something_needs_elimination = 0;
763 while (something_changed)
764 {
765 rtx after_call = 0;
766
767 /* For each class, number of reload regs needed in that class.
768 This is the maximum over all insns of the needs in that class
769 of the individual insn. */
770 int max_needs[N_REG_CLASSES];
771 /* For each class, size of group of consecutive regs
772 that is needed for the reloads of this class. */
773 int group_size[N_REG_CLASSES];
774 /* For each class, max number of consecutive groups needed.
775 (Each group contains group_size[CLASS] consecutive registers.) */
776 int max_groups[N_REG_CLASSES];
777 /* For each class, max number needed of regs that don't belong
778 to any of the groups. */
779 int max_nongroups[N_REG_CLASSES];
780 /* For each class, the machine mode which requires consecutive
781 groups of regs of that class.
782 If two different modes ever require groups of one class,
783 they must be the same size and equally restrictive for that class,
784 otherwise we can't handle the complexity. */
785 enum machine_mode group_mode[N_REG_CLASSES];
786 /* Record the insn where each maximum need is first found. */
787 rtx max_needs_insn[N_REG_CLASSES];
788 rtx max_groups_insn[N_REG_CLASSES];
789 rtx max_nongroups_insn[N_REG_CLASSES];
790 rtx x;
791 int starting_frame_size = get_frame_size ();
792 int previous_frame_pointer_needed = frame_pointer_needed;
793 static char *reg_class_names[] = REG_CLASS_NAMES;
794
795 something_changed = 0;
796 bzero ((char *) max_needs, sizeof max_needs);
797 bzero ((char *) max_groups, sizeof max_groups);
798 bzero ((char *) max_nongroups, sizeof max_nongroups);
799 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
800 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
801 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
802 bzero ((char *) group_size, sizeof group_size);
803 for (i = 0; i < N_REG_CLASSES; i++)
804 group_mode[i] = VOIDmode;
805
806 /* Keep track of which basic blocks are needing the reloads. */
807 this_block = 0;
808
809 /* Remember whether any element of basic_block_needs
810 changes from 0 to 1 in this pass. */
811 new_basic_block_needs = 0;
812
813 /* Reset all offsets on eliminable registers to their initial values. */
814 #ifdef ELIMINABLE_REGS
815 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
816 {
817 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
818 ep->previous_offset = ep->offset
819 = ep->max_offset = ep->initial_offset;
820 }
821 #else
822 #ifdef INITIAL_FRAME_POINTER_OFFSET
823 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
824 #else
825 if (!FRAME_POINTER_REQUIRED)
826 abort ();
827 reg_eliminate[0].initial_offset = 0;
828 #endif
829 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
830 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
831 #endif
832
833 num_not_at_initial_offset = 0;
834
835 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
836
837 /* Set a known offset for each forced label to be at the initial offset
838 of each elimination. We do this because we assume that all
839 computed jumps occur from a location where each elimination is
840 at its initial offset. */
841
842 for (x = forced_labels; x; x = XEXP (x, 1))
843 if (XEXP (x, 0))
844 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
845
846 /* For each pseudo register that has an equivalent location defined,
847 try to eliminate any eliminable registers (such as the frame pointer)
848 assuming initial offsets for the replacement register, which
849 is the normal case.
850
851 If the resulting location is directly addressable, substitute
852 the MEM we just got directly for the old REG.
853
854 If it is not addressable but is a constant or the sum of a hard reg
855 and constant, it is probably not addressable because the constant is
856 out of range, in that case record the address; we will generate
857 hairy code to compute the address in a register each time it is
858 needed. Similarly if it is a hard register, but one that is not
859 valid as an address register.
860
861 If the location is not addressable, but does not have one of the
862 above forms, assign a stack slot. We have to do this to avoid the
863 potential of producing lots of reloads if, e.g., a location involves
864 a pseudo that didn't get a hard register and has an equivalent memory
865 location that also involves a pseudo that didn't get a hard register.
866
867 Perhaps at some point we will improve reload_when_needed handling
868 so this problem goes away. But that's very hairy. */
869
870 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
871 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
872 {
873 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
874
875 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
876 XEXP (x, 0)))
877 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
878 else if (CONSTANT_P (XEXP (x, 0))
879 || (GET_CODE (XEXP (x, 0)) == REG
880 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
881 || (GET_CODE (XEXP (x, 0)) == PLUS
882 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
883 && (REGNO (XEXP (XEXP (x, 0), 0))
884 < FIRST_PSEUDO_REGISTER)
885 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
886 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
887 else
888 {
889 /* Make a new stack slot. Then indicate that something
890 changed so we go back and recompute offsets for
891 eliminable registers because the allocation of memory
892 below might change some offset. reg_equiv_{mem,address}
893 will be set up for this pseudo on the next pass around
894 the loop. */
895 reg_equiv_memory_loc[i] = 0;
896 reg_equiv_init[i] = 0;
897 alter_reg (i, -1);
898 something_changed = 1;
899 }
900 }
901
902 /* If we allocated another pseudo to the stack, redo elimination
903 bookkeeping. */
904 if (something_changed)
905 continue;
906
907 /* If caller-saves needs a group, initialize the group to include
908 the size and mode required for caller-saves. */
909
910 if (caller_save_group_size > 1)
911 {
912 group_mode[(int) caller_save_spill_class] = Pmode;
913 group_size[(int) caller_save_spill_class] = caller_save_group_size;
914 }
915
916 /* Compute the most additional registers needed by any instruction.
917 Collect information separately for each class of regs. */
918
919 for (insn = first; insn; insn = NEXT_INSN (insn))
920 {
921 if (global && this_block + 1 < n_basic_blocks
922 && insn == basic_block_head[this_block+1])
923 ++this_block;
924
925 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
926 might include REG_LABEL), we need to see what effects this
927 has on the known offsets at labels. */
928
929 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
930 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
931 && REG_NOTES (insn) != 0))
932 set_label_offsets (insn, insn, 0);
933
934 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
935 {
936 /* Nonzero means don't use a reload reg that overlaps
937 the place where a function value can be returned. */
938 rtx avoid_return_reg = 0;
939
940 rtx old_body = PATTERN (insn);
941 int old_code = INSN_CODE (insn);
942 rtx old_notes = REG_NOTES (insn);
943 int did_elimination = 0;
944
945 /* To compute the number of reload registers of each class
946 needed for an insn, we must simulate what choose_reload_regs
947 can do. We do this by splitting an insn into an "input" and
948 an "output" part. RELOAD_OTHER reloads are used in both.
949 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
950 which must be live over the entire input section of reloads,
951 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
952 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
953 inputs.
954
955 The registers needed for output are RELOAD_OTHER and
956 RELOAD_FOR_OUTPUT, which are live for the entire output
957 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
958 reloads for each operand.
959
960 The total number of registers needed is the maximum of the
961 inputs and outputs. */
962
963 struct needs
964 {
965 /* [0] is normal, [1] is nongroup. */
966 int regs[2][N_REG_CLASSES];
967 int groups[N_REG_CLASSES];
968 };
969
970 /* Each `struct needs' corresponds to one RELOAD_... type. */
971 struct {
972 struct needs other;
973 struct needs input;
974 struct needs output;
975 struct needs insn;
976 struct needs other_addr;
977 struct needs op_addr;
978 struct needs op_addr_reload;
979 struct needs in_addr[MAX_RECOG_OPERANDS];
980 struct needs out_addr[MAX_RECOG_OPERANDS];
981 } insn_needs;
982
983 /* If needed, eliminate any eliminable registers. */
984 if (num_eliminable)
985 did_elimination = eliminate_regs_in_insn (insn, 0);
986
987 #ifdef SMALL_REGISTER_CLASSES
988 /* Set avoid_return_reg if this is an insn
989 that might use the value of a function call. */
990 if (GET_CODE (insn) == CALL_INSN)
991 {
992 if (GET_CODE (PATTERN (insn)) == SET)
993 after_call = SET_DEST (PATTERN (insn));
994 else if (GET_CODE (PATTERN (insn)) == PARALLEL
995 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
996 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
997 else
998 after_call = 0;
999 }
1000 else if (after_call != 0
1001 && !(GET_CODE (PATTERN (insn)) == SET
1002 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1003 {
1004 if (reg_referenced_p (after_call, PATTERN (insn)))
1005 avoid_return_reg = after_call;
1006 after_call = 0;
1007 }
1008 #endif /* SMALL_REGISTER_CLASSES */
1009
1010 /* Analyze the instruction. */
1011 find_reloads (insn, 0, spill_indirect_levels, global,
1012 spill_reg_order);
1013
1014 /* Remember for later shortcuts which insns had any reloads or
1015 register eliminations.
1016
1017 One might think that it would be worthwhile to mark insns
1018 that need register replacements but not reloads, but this is
1019 not safe because find_reloads may do some manipulation of
1020 the insn (such as swapping commutative operands), which would
1021 be lost when we restore the old pattern after register
1022 replacement. So the actions of find_reloads must be redone in
1023 subsequent passes or in reload_as_needed.
1024
1025 However, it is safe to mark insns that need reloads
1026 but not register replacement. */
1027
1028 PUT_MODE (insn, (did_elimination ? QImode
1029 : n_reloads ? HImode
1030 : GET_MODE (insn) == DImode ? DImode
1031 : VOIDmode));
1032
1033 /* Discard any register replacements done. */
1034 if (did_elimination)
1035 {
1036 obstack_free (&reload_obstack, reload_firstobj);
1037 PATTERN (insn) = old_body;
1038 INSN_CODE (insn) = old_code;
1039 REG_NOTES (insn) = old_notes;
1040 something_needs_elimination = 1;
1041 }
1042
1043 /* If this insn has no reloads, we need not do anything except
1044 in the case of a CALL_INSN when we have caller-saves and
1045 caller-save needs reloads. */
1046
1047 if (n_reloads == 0
1048 && ! (GET_CODE (insn) == CALL_INSN
1049 && caller_save_spill_class != NO_REGS))
1050 continue;
1051
1052 something_needs_reloads = 1;
1053 bzero ((char *) &insn_needs, sizeof insn_needs);
1054
1055 /* Count each reload once in every class
1056 containing the reload's own class. */
1057
1058 for (i = 0; i < n_reloads; i++)
1059 {
1060 register enum reg_class *p;
1061 enum reg_class class = reload_reg_class[i];
1062 int size;
1063 enum machine_mode mode;
1064 int nongroup_need;
1065 struct needs *this_needs;
1066
1067 /* Don't count the dummy reloads, for which one of the
1068 regs mentioned in the insn can be used for reloading.
1069 Don't count optional reloads.
1070 Don't count reloads that got combined with others. */
1071 if (reload_reg_rtx[i] != 0
1072 || reload_optional[i] != 0
1073 || (reload_out[i] == 0 && reload_in[i] == 0
1074 && ! reload_secondary_p[i]))
1075 continue;
1076
1077 /* Show that a reload register of this class is needed
1078 in this basic block. We do not use insn_needs and
1079 insn_groups because they are overly conservative for
1080 this purpose. */
1081 if (global && ! basic_block_needs[(int) class][this_block])
1082 {
1083 basic_block_needs[(int) class][this_block] = 1;
1084 new_basic_block_needs = 1;
1085 }
1086
1087
1088 mode = reload_inmode[i];
1089 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1090 mode = reload_outmode[i];
1091 size = CLASS_MAX_NREGS (class, mode);
1092
1093 /* If this class doesn't want a group, determine if we have
1094 a nongroup need or a regular need. We have a nongroup
1095 need if this reload conflicts with a group reload whose
1096 class intersects with this reload's class. */
1097
1098 nongroup_need = 0;
1099 if (size == 1)
1100 for (j = 0; j < n_reloads; j++)
1101 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1102 (GET_MODE_SIZE (reload_outmode[j])
1103 > GET_MODE_SIZE (reload_inmode[j]))
1104 ? reload_outmode[j]
1105 : reload_inmode[j])
1106 > 1)
1107 && (!reload_optional[j])
1108 && (reload_in[j] != 0 || reload_out[j] != 0
1109 || reload_secondary_p[j])
1110 && reloads_conflict (i, j)
1111 && reg_classes_intersect_p (class,
1112 reload_reg_class[j]))
1113 {
1114 nongroup_need = 1;
1115 break;
1116 }
1117
1118 /* Decide which time-of-use to count this reload for. */
1119 switch (reload_when_needed[i])
1120 {
1121 case RELOAD_OTHER:
1122 this_needs = &insn_needs.other;
1123 break;
1124 case RELOAD_FOR_INPUT:
1125 this_needs = &insn_needs.input;
1126 break;
1127 case RELOAD_FOR_OUTPUT:
1128 this_needs = &insn_needs.output;
1129 break;
1130 case RELOAD_FOR_INSN:
1131 this_needs = &insn_needs.insn;
1132 break;
1133 case RELOAD_FOR_OTHER_ADDRESS:
1134 this_needs = &insn_needs.other_addr;
1135 break;
1136 case RELOAD_FOR_INPUT_ADDRESS:
1137 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1138 break;
1139 case RELOAD_FOR_OUTPUT_ADDRESS:
1140 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1141 break;
1142 case RELOAD_FOR_OPERAND_ADDRESS:
1143 this_needs = &insn_needs.op_addr;
1144 break;
1145 case RELOAD_FOR_OPADDR_ADDR:
1146 this_needs = &insn_needs.op_addr_reload;
1147 break;
1148 }
1149
1150 if (size > 1)
1151 {
1152 enum machine_mode other_mode, allocate_mode;
1153
1154 /* Count number of groups needed separately from
1155 number of individual regs needed. */
1156 this_needs->groups[(int) class]++;
1157 p = reg_class_superclasses[(int) class];
1158 while (*p != LIM_REG_CLASSES)
1159 this_needs->groups[(int) *p++]++;
1160
1161 /* Record size and mode of a group of this class. */
1162 /* If more than one size group is needed,
1163 make all groups the largest needed size. */
1164 if (group_size[(int) class] < size)
1165 {
1166 other_mode = group_mode[(int) class];
1167 allocate_mode = mode;
1168
1169 group_size[(int) class] = size;
1170 group_mode[(int) class] = mode;
1171 }
1172 else
1173 {
1174 other_mode = mode;
1175 allocate_mode = group_mode[(int) class];
1176 }
1177
1178 /* Crash if two dissimilar machine modes both need
1179 groups of consecutive regs of the same class. */
1180
1181 if (other_mode != VOIDmode && other_mode != allocate_mode
1182 && ! modes_equiv_for_class_p (allocate_mode,
1183 other_mode, class))
1184 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1185 insn);
1186 }
1187 else if (size == 1)
1188 {
1189 this_needs->regs[nongroup_need][(int) class] += 1;
1190 p = reg_class_superclasses[(int) class];
1191 while (*p != LIM_REG_CLASSES)
1192 this_needs->regs[nongroup_need][(int) *p++] += 1;
1193 }
1194 else
1195 abort ();
1196 }
1197
1198 /* All reloads have been counted for this insn;
1199 now merge the various times of use.
1200 This sets insn_needs, etc., to the maximum total number
1201 of registers needed at any point in this insn. */
1202
1203 for (i = 0; i < N_REG_CLASSES; i++)
1204 {
1205 int in_max, out_max;
1206
1207 /* Compute normal and nongroup needs. */
1208 for (j = 0; j <= 1; j++)
1209 {
1210 for (in_max = 0, out_max = 0, k = 0;
1211 k < reload_n_operands; k++)
1212 {
1213 in_max
1214 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1215 out_max
1216 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1217 }
1218
1219 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1220 and operand addresses but not things used to reload
1221 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1222 don't conflict with things needed to reload inputs or
1223 outputs. */
1224
1225 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1226 insn_needs.op_addr_reload.regs[j][i]),
1227 in_max);
1228
1229 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1230
1231 insn_needs.input.regs[j][i]
1232 = MAX (insn_needs.input.regs[j][i]
1233 + insn_needs.op_addr.regs[j][i]
1234 + insn_needs.insn.regs[j][i],
1235 in_max + insn_needs.input.regs[j][i]);
1236
1237 insn_needs.output.regs[j][i] += out_max;
1238 insn_needs.other.regs[j][i]
1239 += MAX (MAX (insn_needs.input.regs[j][i],
1240 insn_needs.output.regs[j][i]),
1241 insn_needs.other_addr.regs[j][i]);
1242
1243 }
1244
1245 /* Now compute group needs. */
1246 for (in_max = 0, out_max = 0, j = 0;
1247 j < reload_n_operands; j++)
1248 {
1249 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1250 out_max
1251 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1252 }
1253
1254 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1255 insn_needs.op_addr_reload.groups[i]),
1256 in_max);
1257 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1258
1259 insn_needs.input.groups[i]
1260 = MAX (insn_needs.input.groups[i]
1261 + insn_needs.op_addr.groups[i]
1262 + insn_needs.insn.groups[i],
1263 in_max + insn_needs.input.groups[i]);
1264
1265 insn_needs.output.groups[i] += out_max;
1266 insn_needs.other.groups[i]
1267 += MAX (MAX (insn_needs.input.groups[i],
1268 insn_needs.output.groups[i]),
1269 insn_needs.other_addr.groups[i]);
1270 }
1271
1272 /* If this is a CALL_INSN and caller-saves will need
1273 a spill register, act as if the spill register is
1274 needed for this insn. However, the spill register
1275 can be used by any reload of this insn, so we only
1276 need do something if no need for that class has
1277 been recorded.
1278
1279 The assumption that every CALL_INSN will trigger a
1280 caller-save is highly conservative, however, the number
1281 of cases where caller-saves will need a spill register but
1282 a block containing a CALL_INSN won't need a spill register
1283 of that class should be quite rare.
1284
1285 If a group is needed, the size and mode of the group will
1286 have been set up at the beginning of this loop. */
1287
1288 if (GET_CODE (insn) == CALL_INSN
1289 && caller_save_spill_class != NO_REGS)
1290 {
1291 /* See if this register would conflict with any reload
1292 that needs a group. */
1293 int nongroup_need = 0;
1294 int *caller_save_needs;
1295
1296 for (j = 0; j < n_reloads; j++)
1297 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1298 (GET_MODE_SIZE (reload_outmode[j])
1299 > GET_MODE_SIZE (reload_inmode[j]))
1300 ? reload_outmode[j]
1301 : reload_inmode[j])
1302 > 1)
1303 && reg_classes_intersect_p (caller_save_spill_class,
1304 reload_reg_class[j]))
1305 {
1306 nongroup_need = 1;
1307 break;
1308 }
1309
1310 caller_save_needs
1311 = (caller_save_group_size > 1
1312 ? insn_needs.other.groups
1313 : insn_needs.other.regs[nongroup_need]);
1314
1315 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1316 {
1317 register enum reg_class *p
1318 = reg_class_superclasses[(int) caller_save_spill_class];
1319
1320 caller_save_needs[(int) caller_save_spill_class]++;
1321
1322 while (*p != LIM_REG_CLASSES)
1323 caller_save_needs[(int) *p++] += 1;
1324 }
1325
1326 /* Show that this basic block will need a register of
1327 this class. */
1328
1329 if (global
1330 && ! (basic_block_needs[(int) caller_save_spill_class]
1331 [this_block]))
1332 {
1333 basic_block_needs[(int) caller_save_spill_class]
1334 [this_block] = 1;
1335 new_basic_block_needs = 1;
1336 }
1337 }
1338
1339 #ifdef SMALL_REGISTER_CLASSES
1340 /* If this insn stores the value of a function call,
1341 and that value is in a register that has been spilled,
1342 and if the insn needs a reload in a class
1343 that might use that register as the reload register,
1344		 then add an extra need in that class.
1345 This makes sure we have a register available that does
1346 not overlap the return value. */
1347
1348 if (avoid_return_reg)
1349 {
1350 int regno = REGNO (avoid_return_reg);
1351 int nregs
1352 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1353 int r;
1354 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1355
1356 /* First compute the "basic needs", which counts a
1357 need only in the smallest class in which it
1358 is required. */
1359
1360 bcopy ((char *) insn_needs.other.regs[0],
1361 (char *) basic_needs, sizeof basic_needs);
1362 bcopy ((char *) insn_needs.other.groups,
1363 (char *) basic_groups, sizeof basic_groups);
1364
1365 for (i = 0; i < N_REG_CLASSES; i++)
1366 {
1367 enum reg_class *p;
1368
1369 if (basic_needs[i] >= 0)
1370 for (p = reg_class_superclasses[i];
1371 *p != LIM_REG_CLASSES; p++)
1372 basic_needs[(int) *p] -= basic_needs[i];
1373
1374 if (basic_groups[i] >= 0)
1375 for (p = reg_class_superclasses[i];
1376 *p != LIM_REG_CLASSES; p++)
1377 basic_groups[(int) *p] -= basic_groups[i];
1378 }
1379
1380 /* Now count extra regs if there might be a conflict with
1381 the return value register. */
1382
1383 for (r = regno; r < regno + nregs; r++)
1384 if (spill_reg_order[r] >= 0)
1385 for (i = 0; i < N_REG_CLASSES; i++)
1386 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1387 {
1388 if (basic_needs[i] > 0)
1389 {
1390 enum reg_class *p;
1391
1392 insn_needs.other.regs[0][i]++;
1393 p = reg_class_superclasses[i];
1394 while (*p != LIM_REG_CLASSES)
1395 insn_needs.other.regs[0][(int) *p++]++;
1396 }
1397 if (basic_groups[i] > 0)
1398 {
1399 enum reg_class *p;
1400
1401 insn_needs.other.groups[i]++;
1402 p = reg_class_superclasses[i];
1403 while (*p != LIM_REG_CLASSES)
1404 insn_needs.other.groups[(int) *p++]++;
1405 }
1406 }
1407 }
1408 #endif /* SMALL_REGISTER_CLASSES */
1409
1410 /* For each class, collect maximum need of any insn. */
1411
1412 for (i = 0; i < N_REG_CLASSES; i++)
1413 {
1414 if (max_needs[i] < insn_needs.other.regs[0][i])
1415 {
1416 max_needs[i] = insn_needs.other.regs[0][i];
1417 max_needs_insn[i] = insn;
1418 }
1419 if (max_groups[i] < insn_needs.other.groups[i])
1420 {
1421 max_groups[i] = insn_needs.other.groups[i];
1422 max_groups_insn[i] = insn;
1423 }
1424 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1425 {
1426 max_nongroups[i] = insn_needs.other.regs[1][i];
1427 max_nongroups_insn[i] = insn;
1428 }
1429 }
1430 }
1431 /* Note that there is a continue statement above. */
1432 }
1433
1434 /* If we allocated any new memory locations, make another pass
1435 since it might have changed elimination offsets. */
1436 if (starting_frame_size != get_frame_size ())
1437 something_changed = 1;
1438
1439 if (dumpfile)
1440 for (i = 0; i < N_REG_CLASSES; i++)
1441 {
1442 if (max_needs[i] > 0)
1443 fprintf (dumpfile,
1444 ";; Need %d reg%s of class %s (for insn %d).\n",
1445 max_needs[i], max_needs[i] == 1 ? "" : "s",
1446 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1447 if (max_nongroups[i] > 0)
1448 fprintf (dumpfile,
1449 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1450 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1451 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1452 if (max_groups[i] > 0)
1453 fprintf (dumpfile,
1454 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1455 max_groups[i], max_groups[i] == 1 ? "" : "s",
1456 mode_name[(int) group_mode[i]],
1457 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1458 }
1459
1460 /* If we have caller-saves, set up the save areas and see if caller-save
1461 will need a spill register. */
1462
1463 if (caller_save_needed)
1464 {
1465 /* Set the offsets for setup_save_areas. */
1466 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
1467 ep++)
1468 ep->previous_offset = ep->max_offset;
1469
1470 if ( ! setup_save_areas (&something_changed)
1471 && caller_save_spill_class == NO_REGS)
1472 {
1473 /* The class we will need depends on whether the machine
1474 supports the sum of two registers for an address; see
1475 find_address_reloads for details. */
1476
1477 caller_save_spill_class
1478 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1479 caller_save_group_size
1480 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1481 something_changed = 1;
1482 }
1483 }
1484
1485 /* See if anything that happened changes which eliminations are valid.
1486 For example, on the Sparc, whether or not the frame pointer can
1487 be eliminated can depend on what registers have been used. We need
1488 not check some conditions again (such as flag_omit_frame_pointer)
1489 since they can't have changed. */
1490
1491 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1492 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1493 #ifdef ELIMINABLE_REGS
1494 || ! CAN_ELIMINATE (ep->from, ep->to)
1495 #endif
1496 )
1497 ep->can_eliminate = 0;
1498
1499 /* Look for the case where we have discovered that we can't replace
1500 register A with register B and that means that we will now be
1501 trying to replace register A with register C. This means we can
1502 no longer replace register C with register B and we need to disable
1503 such an elimination, if it exists. This occurs often with A == ap,
1504 B == sp, and C == fp. */
1505
1506 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1507 {
1508 struct elim_table *op;
1509 register int new_to = -1;
1510
1511 if (! ep->can_eliminate && ep->can_eliminate_previous)
1512 {
1513 /* Find the current elimination for ep->from, if there is a
1514 new one. */
1515 for (op = reg_eliminate;
1516 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1517 if (op->from == ep->from && op->can_eliminate)
1518 {
1519 new_to = op->to;
1520 break;
1521 }
1522
1523 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1524 disable it. */
1525 for (op = reg_eliminate;
1526 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1527 if (op->from == new_to && op->to == ep->to)
1528 op->can_eliminate = 0;
1529 }
1530 }
1531
1532 /* See if any registers that we thought we could eliminate the previous
1533 time are no longer eliminable. If so, something has changed and we
1534 must spill the register. Also, recompute the number of eliminable
1535 registers and see if the frame pointer is needed; it is if there is
1536 no elimination of the frame pointer that we can perform. */
1537
1538 frame_pointer_needed = 1;
1539 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1540 {
1541 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1542 && ep->to != HARD_FRAME_POINTER_REGNUM)
1543 frame_pointer_needed = 0;
1544
1545 if (! ep->can_eliminate && ep->can_eliminate_previous)
1546 {
1547 ep->can_eliminate_previous = 0;
1548 spill_hard_reg (ep->from, global, dumpfile, 1);
1549 something_changed = 1;
1550 num_eliminable--;
1551 }
1552 }
1553
1554 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1555 /* If we didn't need a frame pointer last time, but we do now, spill
1556 the hard frame pointer. */
1557 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1558 {
1559 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1560 something_changed = 1;
1561 }
1562 #endif
1563
1564 /* If all needs are met, we win. */
1565
1566 for (i = 0; i < N_REG_CLASSES; i++)
1567 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1568 break;
1569 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1570 break;
1571
1572 /* Not all needs are met; must spill some hard regs. */
1573
1574 /* Put all registers spilled so far back in potential_reload_regs, but
1575 put them at the front, since we've already spilled most of the
1576 pseudos in them (we might have left some pseudos unspilled if they
1577 were in a block that didn't need any spill registers of a conflicting
1578	 class). We used to try to mark off the need for those registers,
1579 but doing so properly is very complex and reallocating them is the
1580 simpler approach. First, "pack" potential_reload_regs by pushing
1581 any nonnegative entries towards the end. That will leave room
1582 for the registers we already spilled.
1583
1584 Also, undo the marking of the spill registers from the last time
1585	 around in FORBIDDEN_REGS since we will probably be allocating
1586 them again below.
1587
1588 ??? It is theoretically possible that we might end up not using one
1589 of our previously-spilled registers in this allocation, even though
1590 they are at the head of the list. It's not clear what to do about
1591 this, but it was no better before, when we marked off the needs met
1592 by the previously-spilled registers. With the current code, globals
1593 can be allocated into these registers, but locals cannot. */
1594
1595 if (n_spills)
1596 {
1597 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1598 if (potential_reload_regs[i] != -1)
1599 potential_reload_regs[j--] = potential_reload_regs[i];
1600
1601 for (i = 0; i < n_spills; i++)
1602 {
1603 potential_reload_regs[i] = spill_regs[i];
1604 spill_reg_order[spill_regs[i]] = -1;
1605 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1606 }
1607
1608 n_spills = 0;
1609 }
1610
1611 /* Now find more reload regs to satisfy the remaining need
1612 Do it by ascending class number, since otherwise a reg
1613 might be spilled for a big class and might fail to count
1614 for a smaller class even though it belongs to that class.
1615
1616 Count spilled regs in `spills', and add entries to
1617 `spill_regs' and `spill_reg_order'.
1618
1619 ??? Note there is a problem here.
1620 When there is a need for a group in a high-numbered class,
1621 and also need for non-group regs that come from a lower class,
1622 the non-group regs are chosen first. If there aren't many regs,
1623 they might leave no room for a group.
1624
1625 This was happening on the 386. To fix it, we added the code
1626 that calls possible_group_p, so that the lower class won't
1627 break up the last possible group.
1628
1629 Really fixing the problem would require changes above
1630 in counting the regs already spilled, and in choose_reload_regs.
1631 It might be hard to avoid introducing bugs there. */
1632
1633 CLEAR_HARD_REG_SET (counted_for_groups);
1634 CLEAR_HARD_REG_SET (counted_for_nongroups);
1635
1636 for (class = 0; class < N_REG_CLASSES; class++)
1637 {
1638 /* First get the groups of registers.
1639 If we got single registers first, we might fragment
1640 possible groups. */
1641 while (max_groups[class] > 0)
1642 {
1643 /* If any single spilled regs happen to form groups,
1644 count them now. Maybe we don't really need
1645 to spill another group. */
1646 count_possible_groups (group_size, group_mode, max_groups,
1647 class);
1648
1649 if (max_groups[class] <= 0)
1650 break;
1651
1652 /* Groups of size 2 (the only groups used on most machines)
1653 are treated specially. */
1654 if (group_size[class] == 2)
1655 {
1656 /* First, look for a register that will complete a group. */
1657 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1658 {
1659 int other;
1660
1661 j = potential_reload_regs[i];
1662 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1663 &&
1664 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1665 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1666 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1667 && HARD_REGNO_MODE_OK (other, group_mode[class])
1668 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1669 other)
1670 /* We don't want one part of another group.
1671 We could get "two groups" that overlap! */
1672 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1673 ||
1674 (j < FIRST_PSEUDO_REGISTER - 1
1675 && (other = j + 1, spill_reg_order[other] >= 0)
1676 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1677 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1678 && HARD_REGNO_MODE_OK (j, group_mode[class])
1679 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1680 other)
1681 && ! TEST_HARD_REG_BIT (counted_for_groups,
1682 other))))
1683 {
1684 register enum reg_class *p;
1685
1686 /* We have found one that will complete a group,
1687 so count off one group as provided. */
1688 max_groups[class]--;
1689 p = reg_class_superclasses[class];
1690 while (*p != LIM_REG_CLASSES)
1691 {
1692 if (group_size [(int) *p] <= group_size [class])
1693 max_groups[(int) *p]--;
1694 p++;
1695 }
1696
1697 /* Indicate both these regs are part of a group. */
1698 SET_HARD_REG_BIT (counted_for_groups, j);
1699 SET_HARD_REG_BIT (counted_for_groups, other);
1700 break;
1701 }
1702 }
1703 /* We can't complete a group, so start one. */
1704 #ifdef SMALL_REGISTER_CLASSES
1705 /* Look for a pair neither of which is explicitly used. */
1706 if (i == FIRST_PSEUDO_REGISTER)
1707 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1708 {
1709 int k;
1710 j = potential_reload_regs[i];
1711 /* Verify that J+1 is a potential reload reg. */
1712 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1713 if (potential_reload_regs[k] == j + 1)
1714 break;
1715 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1716 && k < FIRST_PSEUDO_REGISTER
1717 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1718 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1719 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1720 && HARD_REGNO_MODE_OK (j, group_mode[class])
1721 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1722 j + 1)
1723 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1724 /* Reject J at this stage
1725 if J+1 was explicitly used. */
1726 && ! regs_explicitly_used[j + 1])
1727 break;
1728 }
1729 #endif
1730 /* Now try any group at all
1731 whose registers are not in bad_spill_regs. */
1732 if (i == FIRST_PSEUDO_REGISTER)
1733 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1734 {
1735 int k;
1736 j = potential_reload_regs[i];
1737 /* Verify that J+1 is a potential reload reg. */
1738 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1739 if (potential_reload_regs[k] == j + 1)
1740 break;
1741 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1742 && k < FIRST_PSEUDO_REGISTER
1743 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1744 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1745 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1746 && HARD_REGNO_MODE_OK (j, group_mode[class])
1747 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1748 j + 1)
1749 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1750 break;
1751 }
1752
1753 /* I should be the index in potential_reload_regs
1754 of the new reload reg we have found. */
1755
1756 if (i >= FIRST_PSEUDO_REGISTER)
1757 {
1758 /* There are no groups left to spill. */
1759 spill_failure (max_groups_insn[class]);
1760 failure = 1;
1761 goto failed;
1762 }
1763 else
1764 something_changed
1765 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1766 global, dumpfile);
1767 }
1768 else
1769 {
1770 /* For groups of more than 2 registers,
1771 look for a sufficient sequence of unspilled registers,
1772 and spill them all at once. */
1773 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1774 {
1775 int k;
1776
1777 j = potential_reload_regs[i];
1778 if (j >= 0
1779 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1780 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1781 {
1782 /* Check each reg in the sequence. */
1783 for (k = 0; k < group_size[class]; k++)
1784 if (! (spill_reg_order[j + k] < 0
1785 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1786 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1787 break;
1788 /* We got a full sequence, so spill them all. */
1789 if (k == group_size[class])
1790 {
1791 register enum reg_class *p;
1792 for (k = 0; k < group_size[class]; k++)
1793 {
1794 int idx;
1795 SET_HARD_REG_BIT (counted_for_groups, j + k);
1796 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1797 if (potential_reload_regs[idx] == j + k)
1798 break;
1799 something_changed
1800 |= new_spill_reg (idx, class,
1801 max_needs, NULL_PTR,
1802 global, dumpfile);
1803 }
1804
1805 /* We have found one that will complete a group,
1806 so count off one group as provided. */
1807 max_groups[class]--;
1808 p = reg_class_superclasses[class];
1809 while (*p != LIM_REG_CLASSES)
1810 {
1811 if (group_size [(int) *p]
1812 <= group_size [class])
1813 max_groups[(int) *p]--;
1814 p++;
1815 }
1816 break;
1817 }
1818 }
1819 }
1820 /* We couldn't find any registers for this reload.
1821 Avoid going into an infinite loop. */
1822 if (i >= FIRST_PSEUDO_REGISTER)
1823 {
1824 /* There are no groups left. */
1825 spill_failure (max_groups_insn[class]);
1826 failure = 1;
1827 goto failed;
1828 }
1829 }
1830 }
1831
1832 /* Now similarly satisfy all need for single registers. */
1833
1834 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1835 {
1836 /* If we spilled enough regs, but they weren't counted
1837 against the non-group need, see if we can count them now.
1838 If so, we can avoid some actual spilling. */
1839 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1840 for (i = 0; i < n_spills; i++)
1841 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1842 spill_regs[i])
1843 && !TEST_HARD_REG_BIT (counted_for_groups,
1844 spill_regs[i])
1845 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1846 spill_regs[i])
1847 && max_nongroups[class] > 0)
1848 {
1849 register enum reg_class *p;
1850
1851 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1852 max_nongroups[class]--;
1853 p = reg_class_superclasses[class];
1854 while (*p != LIM_REG_CLASSES)
1855 max_nongroups[(int) *p++]--;
1856 }
1857 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1858 break;
1859
1860 /* Consider the potential reload regs that aren't
1861 yet in use as reload regs, in order of preference.
1862 Find the most preferred one that's in this class. */
1863
1864 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1865 if (potential_reload_regs[i] >= 0
1866 && TEST_HARD_REG_BIT (reg_class_contents[class],
1867 potential_reload_regs[i])
1868 /* If this reg will not be available for groups,
1869 pick one that does not foreclose possible groups.
1870 This is a kludge, and not very general,
1871 but it should be sufficient to make the 386 work,
1872 and the problem should not occur on machines with
1873 more registers. */
1874 && (max_nongroups[class] == 0
1875 || possible_group_p (potential_reload_regs[i], max_groups)))
1876 break;
1877
1878 /* If we couldn't get a register, try to get one even if we
1879 might foreclose possible groups. This may cause problems
1880 later, but that's better than aborting now, since it is
1881 possible that we will, in fact, be able to form the needed
1882 group even with this allocation. */
1883
1884 if (i >= FIRST_PSEUDO_REGISTER
1885 && (asm_noperands (max_needs[class] > 0
1886 ? max_needs_insn[class]
1887 : max_nongroups_insn[class])
1888 < 0))
1889 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1890 if (potential_reload_regs[i] >= 0
1891 && TEST_HARD_REG_BIT (reg_class_contents[class],
1892 potential_reload_regs[i]))
1893 break;
1894
1895 /* I should be the index in potential_reload_regs
1896 of the new reload reg we have found. */
1897
1898 if (i >= FIRST_PSEUDO_REGISTER)
1899 {
1900 /* There are no possible registers left to spill. */
1901 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1902 : max_nongroups_insn[class]);
1903 failure = 1;
1904 goto failed;
1905 }
1906 else
1907 something_changed
1908 |= new_spill_reg (i, class, max_needs, max_nongroups,
1909 global, dumpfile);
1910 }
1911 }
1912 }
1913
1914 /* If global-alloc was run, notify it of any register eliminations we have
1915 done. */
1916 if (global)
1917 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1918 if (ep->can_eliminate)
1919 mark_elimination (ep->from, ep->to);
1920
1921 /* Insert code to save and restore call-clobbered hard regs
1922 around calls. Tell if what mode to use so that we will process
1923 those insns in reload_as_needed if we have to. */
1924
1925 if (caller_save_needed)
1926 save_call_clobbered_regs (num_eliminable ? QImode
1927 : caller_save_spill_class != NO_REGS ? HImode
1928 : VOIDmode);
1929
1930 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1931 If that insn didn't set the register (i.e., it copied the register to
1932 memory), just delete that insn instead of the equivalencing insn plus
1933 anything now dead. If we call delete_dead_insn on that insn, we may
1934 delete the insn that actually sets the register if the register die
1935 there and that is incorrect. */
1936
1937 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1938 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1939 && GET_CODE (reg_equiv_init[i]) != NOTE)
1940 {
1941 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1942 delete_dead_insn (reg_equiv_init[i]);
1943 else
1944 {
1945 PUT_CODE (reg_equiv_init[i], NOTE);
1946 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1947 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1948 }
1949 }
1950
1951 /* Use the reload registers where necessary
1952 by generating move instructions to move the must-be-register
1953 values into or out of the reload registers. */
1954
1955 if (something_needs_reloads || something_needs_elimination
1956 || (caller_save_needed && num_eliminable)
1957 || caller_save_spill_class != NO_REGS)
1958 reload_as_needed (first, global);
1959
1960 /* If we were able to eliminate the frame pointer, show that it is no
1961 longer live at the start of any basic block. If it ls live by
1962 virtue of being in a pseudo, that pseudo will be marked live
1963 and hence the frame pointer will be known to be live via that
1964 pseudo. */
1965
1966 if (! frame_pointer_needed)
1967 for (i = 0; i < n_basic_blocks; i++)
1968 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1969 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1970 % REGSET_ELT_BITS));
1971
1972 /* Come here (with failure set nonzero) if we can't get enough spill regs
1973 and we decide not to abort about it. */
1974 failed:
1975
1976 reload_in_progress = 0;
1977
1978 /* Now eliminate all pseudo regs by modifying them into
1979 their equivalent memory references.
1980 The REG-rtx's for the pseudos are modified in place,
1981 so all insns that used to refer to them now refer to memory.
1982
1983 For a reg that has a reg_equiv_address, all those insns
1984 were changed by reloading so that no insns refer to it any longer;
1985 but the DECL_RTL of a variable decl may refer to it,
1986 and if so this causes the debugging info to mention the variable. */
1987
1988 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1989 {
1990 rtx addr = 0;
1991 int in_struct = 0;
1992 if (reg_equiv_mem[i])
1993 {
1994 addr = XEXP (reg_equiv_mem[i], 0);
1995 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1996 }
1997 if (reg_equiv_address[i])
1998 addr = reg_equiv_address[i];
1999 if (addr)
2000 {
2001 if (reg_renumber[i] < 0)
2002 {
2003 rtx reg = regno_reg_rtx[i];
2004 XEXP (reg, 0) = addr;
2005 REG_USERVAR_P (reg) = 0;
2006 MEM_IN_STRUCT_P (reg) = in_struct;
2007 PUT_CODE (reg, MEM);
2008 }
2009 else if (reg_equiv_mem[i])
2010 XEXP (reg_equiv_mem[i], 0) = addr;
2011 }
2012 }
2013
2014 #ifdef PRESERVE_DEATH_INFO_REGNO_P
2015 /* Make a pass over all the insns and remove death notes for things that
2016 are no longer registers or no longer die in the insn (e.g., an input
2017 and output pseudo being tied). */
2018
2019 for (insn = first; insn; insn = NEXT_INSN (insn))
2020 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2021 {
2022 rtx note, next;
2023
2024 for (note = REG_NOTES (insn); note; note = next)
2025 {
2026 next = XEXP (note, 1);
2027 if (REG_NOTE_KIND (note) == REG_DEAD
2028 && (GET_CODE (XEXP (note, 0)) != REG
2029 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2030 remove_note (insn, note);
2031 }
2032 }
2033 #endif
2034
2035 /* Indicate that we no longer have known memory locations or constants. */
2036 reg_equiv_constant = 0;
2037 reg_equiv_memory_loc = 0;
2038
2039 if (scratch_list)
2040 free (scratch_list);
2041 scratch_list = 0;
2042 if (scratch_block)
2043 free (scratch_block);
2044 scratch_block = 0;
2045
2046 CLEAR_HARD_REG_SET (used_spill_regs);
2047 for (i = 0; i < n_spills; i++)
2048 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
2049
2050 return failure;
2051 }
2052 \f
2053 /* Nonzero if, after spilling reg REGNO for non-groups,
2054 it will still be possible to find a group if we still need one. */
2055
2056 static int
2057 possible_group_p (regno, max_groups)
2058 int regno;
2059 int *max_groups;
2060 {
2061 int i;
2062 int class = (int) NO_REGS;
2063
2064 for (i = 0; i < (int) N_REG_CLASSES; i++)
2065 if (max_groups[i] > 0)
2066 {
2067 class = i;
2068 break;
2069 }
2070
2071 if (class == (int) NO_REGS)
2072 return 1;
2073
2074 /* Consider each pair of consecutive registers. */
2075 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2076 {
2077 /* Ignore pairs that include reg REGNO. */
2078 if (i == regno || i + 1 == regno)
2079 continue;
2080
2081 /* Ignore pairs that are outside the class that needs the group.
2082 ??? Here we fail to handle the case where two different classes
2083 independently need groups. But this never happens with our
2084 current machine descriptions. */
2085 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2086 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2087 continue;
2088
2089 /* A pair of consecutive regs we can still spill does the trick. */
2090 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2091 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2092 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2093 return 1;
2094
2095 /* A pair of one already spilled and one we can spill does it
2096 provided the one already spilled is not otherwise reserved. */
2097 if (spill_reg_order[i] < 0
2098 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2099 && spill_reg_order[i + 1] >= 0
2100 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2101 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2102 return 1;
2103 if (spill_reg_order[i + 1] < 0
2104 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2105 && spill_reg_order[i] >= 0
2106 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2107 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2108 return 1;
2109 }
2110
2111 return 0;
2112 }
2113 \f
2114 /* Count any groups of CLASS that can be formed from the registers recently
2115 spilled. */
2116
static void
count_possible_groups (group_size, group_mode, max_groups, class)
     int *group_size;
     enum machine_mode *group_mode;
     int *max_groups;
     int class;
{
  /* Set of recently-spilled regs of CLASS not yet reserved for any use.  */
  HARD_REG_SET new;
  int i, j;

  /* Now find all consecutive groups of spilled registers
     and mark each group off against the need for such groups.
     But don't count them against ordinary need, yet.  */

  /* CLASS has no group need of any size; nothing to do.  */
  if (group_size[class] == 0)
    return;

  CLEAR_HARD_REG_SET (new);

  /* Make a mask of all the regs that are spill regs in class I.
     Regs already counted toward a group or a non-group need are
     excluded, so they can't be counted twice.  */
  for (i = 0; i < n_spills; i++)
    if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
	&& ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
	&& ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
      SET_HARD_REG_BIT (new, spill_regs[i]);

  /* Find each consecutive group of them.  A candidate group starts at I,
     must fit below FIRST_PSEUDO_REGISTER, and must be usable in the
     group's mode.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
    if (TEST_HARD_REG_BIT (new, i)
	&& i + group_size[class] <= FIRST_PSEUDO_REGISTER
	&& HARD_REGNO_MODE_OK (i, group_mode[class]))
      {
	/* Check that all GROUP_SIZE[CLASS] regs starting at I are in NEW.
	   On exit, J is either GROUP_SIZE[CLASS] (full group found) or the
	   offset of the first missing reg.  */
	for (j = 1; j < group_size[class]; j++)
	  if (! TEST_HARD_REG_BIT (new, i + j))
	    break;

	if (j == group_size[class])
	  {
	    /* We found a group.  Mark it off against this class's need for
	       groups, and against each superclass too.  */
	    register enum reg_class *p;

	    max_groups[class]--;
	    p = reg_class_superclasses[class];
	    while (*p != LIM_REG_CLASSES)
	      {
		/* Only superclasses whose group is no larger can be
		   satisfied by this group.  */
		if (group_size [(int) *p] <= group_size [class])
		  max_groups[(int) *p]--;
		p++;
	      }

	    /* Don't count these registers again.  */
	    for (j = 0; j < group_size[class]; j++)
	      SET_HARD_REG_BIT (counted_for_groups, i + j);
	  }

	/* Skip to the last reg in this group.  When i is incremented above,
	   it will then point to the first reg of the next possible group.  */
	i += j - 1;
      }
}
2178 \f
2179 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2180 another mode that needs to be reloaded for the same register class CLASS.
2181 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2182 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2183
2184 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2185 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2186 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2187 causes unnecessary failures on machines requiring alignment of register
2188 groups when the two modes are different sizes, because the larger mode has
2189 more strict alignment rules than the smaller mode. */
2190
2191 static int
2192 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2193 enum machine_mode allocate_mode, other_mode;
2194 enum reg_class class;
2195 {
2196 register int regno;
2197 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2198 {
2199 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2200 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2201 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2202 return 0;
2203 }
2204 return 1;
2205 }
2206
2207 /* Handle the failure to find a register to spill.
2208 INSN should be one of the insns which needed this particular spill reg. */
2209
2210 static void
2211 spill_failure (insn)
2212 rtx insn;
2213 {
2214 if (asm_noperands (PATTERN (insn)) >= 0)
2215 error_for_asm (insn, "`asm' needs too many reloads");
2216 else
2217 fatal_insn ("Unable to find a register to spill.", insn);
2218 }
2219
2220 /* Add a new register to the tables of available spill-registers
2221 (as well as spilling all pseudos allocated to the register).
2222 I is the index of this register in potential_reload_regs.
2223 CLASS is the regclass whose need is being satisfied.
2224 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2225 so that this register can count off against them.
2226 MAX_NONGROUPS is 0 if this register is part of a group.
2227 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2228
2229 static int
2230 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2231 int i;
2232 int class;
2233 int *max_needs;
2234 int *max_nongroups;
2235 int global;
2236 FILE *dumpfile;
2237 {
2238 register enum reg_class *p;
2239 int val;
2240 int regno = potential_reload_regs[i];
2241
2242 if (i >= FIRST_PSEUDO_REGISTER)
2243 abort (); /* Caller failed to find any register. */
2244
2245 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2246 fatal ("fixed or forbidden register was spilled.\n\
2247 This may be due to a compiler bug or to impossible asm\n\
2248 statements or clauses.");
2249
2250 /* Make reg REGNO an additional reload reg. */
2251
2252 potential_reload_regs[i] = -1;
2253 spill_regs[n_spills] = regno;
2254 spill_reg_order[regno] = n_spills;
2255 if (dumpfile)
2256 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2257
2258 /* Clear off the needs we just satisfied. */
2259
2260 max_needs[class]--;
2261 p = reg_class_superclasses[class];
2262 while (*p != LIM_REG_CLASSES)
2263 max_needs[(int) *p++]--;
2264
2265 if (max_nongroups && max_nongroups[class] > 0)
2266 {
2267 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2268 max_nongroups[class]--;
2269 p = reg_class_superclasses[class];
2270 while (*p != LIM_REG_CLASSES)
2271 max_nongroups[(int) *p++]--;
2272 }
2273
2274 /* Spill every pseudo reg that was allocated to this reg
2275 or to something that overlaps this reg. */
2276
2277 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2278
2279 /* If there are some registers still to eliminate and this register
2280 wasn't ever used before, additional stack space may have to be
2281 allocated to store this register. Thus, we may have changed the offset
2282 between the stack and frame pointers, so mark that something has changed.
2283 (If new pseudos were spilled, thus requiring more space, VAL would have
2284 been set non-zero by the call to spill_hard_reg above since additional
2285 reloads may be needed in that case.
2286
2287 One might think that we need only set VAL to 1 if this is a call-used
2288 register. However, the set of registers that must be saved by the
2289 prologue is not identical to the call-used set. For example, the
2290 register used by the call insn for the return PC is a call-used register,
2291 but must be saved by the prologue. */
2292 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2293 val = 1;
2294
2295 regs_ever_live[spill_regs[n_spills]] = 1;
2296 n_spills++;
2297
2298 return val;
2299 }
2300 \f
2301 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2302 data that is dead in INSN. */
2303
2304 static void
2305 delete_dead_insn (insn)
2306 rtx insn;
2307 {
2308 rtx prev = prev_real_insn (insn);
2309 rtx prev_dest;
2310
2311 /* If the previous insn sets a register that dies in our insn, delete it
2312 too. */
2313 if (prev && GET_CODE (PATTERN (prev)) == SET
2314 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2315 && reg_mentioned_p (prev_dest, PATTERN (insn))
2316 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2317 delete_dead_insn (prev);
2318
2319 PUT_CODE (insn, NOTE);
2320 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2321 NOTE_SOURCE_FILE (insn) = 0;
2322 }
2323
2324 /* Modify the home of pseudo-reg I.
2325 The new home is present in reg_renumber[I].
2326
2327 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2328 or it may be -1, meaning there is none or it is not relevant.
2329 This is used so that all pseudos spilled from a given hard reg
2330 can share one stack slot. */
2331
static void
alter_reg (i, from_reg)
     register int i;
     int from_reg;
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (GET_CODE (regno_reg_rtx[i]) != REG)
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  REGNO (regno_reg_rtx[i])
    = reg_renumber[i] >= 0 ? reg_renumber[i] : i;

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && reg_n_refs[i] > 0
      && reg_equiv_constant[i] == 0
      && reg_equiv_memory_loc[i] == 0)
    {
      register rtx x;
      /* Size from the pseudo's own mode.  */
      int inherent_size = PSEUDO_REGNO_BYTES (i);
      /* Size including room for any wider (paradoxical) subreg refs.  */
      int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      /* Byte offset applied to the slot address at the end; nonzero only
	 for big-endian corrections.  */
      int adjust = 0;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, than the previous slot.  */
      if (from_reg == -1)
	{
	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
	  if (BYTES_BIG_ENDIAN)
	    /* Cancel the big-endian correction done in assign_stack_local.
	       Get the address of the beginning of the slot.
	       This is so we can do a big-endian correction unconditionally
	       below.  */
	    adjust = inherent_size - total_size;

	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
	}
      /* Reuse a stack slot if possible.  All pseudos spilled from
	 hard reg FROM_REG can share one slot if it is big enough.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size))
	x = spill_stack_slot[from_reg];
      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
	  rtx stack_slot;
	  if (spill_stack_slot[from_reg])
	    {
	      /* Grow to cover whatever the old shared slot needed, so the
		 new slot can replace it for every pseudo using it.  */
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	    }
	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size, -1);
	  stack_slot = x;
	  if (BYTES_BIG_ENDIAN)
	    {
	      /* Cancel the big-endian correction done in assign_stack_local.
		 Get the address of the beginning of the slot.
		 This is so we can do a big-endian correction unconditionally
		 below.  */
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (adjust)
		stack_slot = gen_rtx (MEM, mode_for_size (total_size
							  * BITS_PER_UNIT,
							  MODE_INT, 1),
				      plus_constant (XEXP (x, 0), adjust));
	    }
	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
	adjust += (total_size - inherent_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
	{
	  x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
		       plus_constant (XEXP (x, 0), adjust));
	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
	}

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc[i] = x;
    }
}
2445
2446 /* Mark the slots in regs_ever_live for the hard regs
2447 used by pseudo-reg number REGNO. */
2448
2449 void
2450 mark_home_live (regno)
2451 int regno;
2452 {
2453 register int i, lim;
2454 i = reg_renumber[regno];
2455 if (i < 0)
2456 return;
2457 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2458 while (i < lim)
2459 regs_ever_live[i++] = 1;
2460 }
2461
2462 /* Mark the registers used in SCRATCH as being live. */
2463
2464 static void
2465 mark_scratch_live (scratch)
2466 rtx scratch;
2467 {
2468 register int i;
2469 int regno = REGNO (scratch);
2470 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2471
2472 for (i = regno; i < lim; i++)
2473 regs_ever_live[i] = 1;
2474 }
2475 \f
2476 /* This function handles the tracking of elimination offsets around branches.
2477
2478 X is a piece of RTL being scanned.
2479
2480 INSN is the insn that it came from, if any.
2481
2482 INITIAL_P is non-zero if we are to set the offset to be the initial
2483 offset and zero if we are setting the offset of the label to be the
2484 current offset. */
2485
static void
set_label_offsets (x, insn, initial_p)
     rtx x;
     rtx insn;
     int initial_p;
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      /* Non-local labels are handled elsewhere; nothing to record.  */
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = XEXP (x, 0);

      /* ... fall through ... */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x)][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && GET_CODE (tem) == BARRIER)
	{
	  /* Adopt the label's recorded offsets as the current ones and
	     recount how many pairs are away from their initial offset.  */
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (x)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x)][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_INSN:
      /* Process the jump's pattern for label references, then fall
	 through to handle any REG_LABEL notes like an ordinary insn.  */
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ... */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
	 and hence must have all eliminations at their initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the address vector must be at their initial
	 offsets.  We want the first field for ADDR_VEC and the second
	 field for ADDR_DIFF_VEC.  */

      for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Check both arms; an arm that is neither a label nor PC/RETURN
	     breaks out to the variable-jump handling below.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
    }
}
2630 \f
/* Used for communication between the next two functions to properly share
   the vector for an ASM_OPERANDS.  */
2633
2634 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2635
2636 /* Scan X and replace any eliminable registers (such as fp) with a
2637 replacement (such as sp), plus an offset.
2638
2639 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2640 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2641 MEM, we are allowed to replace a sum of a register and the constant zero
2642 with the register, which we cannot do outside a MEM. In addition, we need
2643 to record the fact that a register is referenced outside a MEM.
2644
   If INSN is an insn, it is the insn containing X.  If we replace a REG
   in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
   CLOBBER of the pseudo after INSN so find_equiv_regs will know
   that the REG is being modified.
2649
2650 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2651 That's used when we eliminate in expressions stored in notes.
2652 This means, do not set ref_outside_mem even if the reference
2653 is outside of MEMs.
2654
2655 If we see a modification to a register we know about, take the
2656 appropriate action (see case SET, below).
2657
2658 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2659 replacements done assuming all offsets are at their initial values. If
2660 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2661 encounter, return the actual location so that find_reloads will do
2662 the proper thing. */
2663
2664 rtx
2665 eliminate_regs (x, mem_mode, insn)
2666 rtx x;
2667 enum machine_mode mem_mode;
2668 rtx insn;
2669 {
2670 enum rtx_code code = GET_CODE (x);
2671 struct elim_table *ep;
2672 int regno;
2673 rtx new;
2674 int i, j;
2675 char *fmt;
2676 int copied = 0;
2677
2678 switch (code)
2679 {
2680 case CONST_INT:
2681 case CONST_DOUBLE:
2682 case CONST:
2683 case SYMBOL_REF:
2684 case CODE_LABEL:
2685 case PC:
2686 case CC0:
2687 case ASM_INPUT:
2688 case ADDR_VEC:
2689 case ADDR_DIFF_VEC:
2690 case RETURN:
2691 return x;
2692
2693 case REG:
2694 regno = REGNO (x);
2695
2696 /* First handle the case where we encounter a bare register that
2697 is eliminable. Replace it with a PLUS. */
2698 if (regno < FIRST_PSEUDO_REGISTER)
2699 {
2700 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2701 ep++)
2702 if (ep->from_rtx == x && ep->can_eliminate)
2703 {
2704 if (! mem_mode
2705 /* Refs inside notes don't count for this purpose. */
2706 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2707 || GET_CODE (insn) == INSN_LIST)))
2708 ep->ref_outside_mem = 1;
2709 return plus_constant (ep->to_rtx, ep->previous_offset);
2710 }
2711
2712 }
2713 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2714 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2715 {
2716 /* In this case, find_reloads would attempt to either use an
2717 incorrect address (if something is not at its initial offset)
2718 or substitute an replaced address into an insn (which loses
2719 if the offset is changed by some later action). So we simply
2720 return the replaced stack slot (assuming it is changed by
2721 elimination) and ignore the fact that this is actually a
2722 reference to the pseudo. Ensure we make a copy of the
2723 address in case it is shared. */
2724 new = eliminate_regs (reg_equiv_memory_loc[regno],
2725 mem_mode, insn);
2726 if (new != reg_equiv_memory_loc[regno])
2727 {
2728 cannot_omit_stores[regno] = 1;
2729 return copy_rtx (new);
2730 }
2731 }
2732 return x;
2733
2734 case PLUS:
2735 /* If this is the sum of an eliminable register and a constant, rework
2736 the sum. */
2737 if (GET_CODE (XEXP (x, 0)) == REG
2738 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2739 && CONSTANT_P (XEXP (x, 1)))
2740 {
2741 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2742 ep++)
2743 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2744 {
2745 if (! mem_mode
2746 /* Refs inside notes don't count for this purpose. */
2747 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2748 || GET_CODE (insn) == INSN_LIST)))
2749 ep->ref_outside_mem = 1;
2750
2751 /* The only time we want to replace a PLUS with a REG (this
2752 occurs when the constant operand of the PLUS is the negative
2753 of the offset) is when we are inside a MEM. We won't want
2754 to do so at other times because that would change the
2755 structure of the insn in a way that reload can't handle.
2756 We special-case the commonest situation in
2757 eliminate_regs_in_insn, so just replace a PLUS with a
2758 PLUS here, unless inside a MEM. */
2759 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2760 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2761 return ep->to_rtx;
2762 else
2763 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2764 plus_constant (XEXP (x, 1),
2765 ep->previous_offset));
2766 }
2767
2768 /* If the register is not eliminable, we are done since the other
2769 operand is a constant. */
2770 return x;
2771 }
2772
2773 /* If this is part of an address, we want to bring any constant to the
2774 outermost PLUS. We will do this by doing register replacement in
2775 our operands and seeing if a constant shows up in one of them.
2776
2777 We assume here this is part of an address (or a "load address" insn)
2778 since an eliminable register is not likely to appear in any other
2779 context.
2780
2781 If we have (plus (eliminable) (reg)), we want to produce
2782 (plus (plus (replacement) (reg) (const))). If this was part of a
2783 normal add insn, (plus (replacement) (reg)) will be pushed as a
2784 reload. This is the desired action. */
2785
2786 {
2787 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2788 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2789
2790 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2791 {
2792 /* If one side is a PLUS and the other side is a pseudo that
2793 didn't get a hard register but has a reg_equiv_constant,
2794 we must replace the constant here since it may no longer
2795 be in the position of any operand. */
2796 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2797 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2798 && reg_renumber[REGNO (new1)] < 0
2799 && reg_equiv_constant != 0
2800 && reg_equiv_constant[REGNO (new1)] != 0)
2801 new1 = reg_equiv_constant[REGNO (new1)];
2802 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2803 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2804 && reg_renumber[REGNO (new0)] < 0
2805 && reg_equiv_constant[REGNO (new0)] != 0)
2806 new0 = reg_equiv_constant[REGNO (new0)];
2807
2808 new = form_sum (new0, new1);
2809
2810 /* As above, if we are not inside a MEM we do not want to
2811 turn a PLUS into something else. We might try to do so here
2812 for an addition of 0 if we aren't optimizing. */
2813 if (! mem_mode && GET_CODE (new) != PLUS)
2814 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2815 else
2816 return new;
2817 }
2818 }
2819 return x;
2820
2821 case MULT:
2822 /* If this is the product of an eliminable register and a
2823 constant, apply the distribute law and move the constant out
2824 so that we have (plus (mult ..) ..). This is needed in order
2825 to keep load-address insns valid. This case is pathological.
2826 We ignore the possibility of overflow here. */
2827 if (GET_CODE (XEXP (x, 0)) == REG
2828 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2829 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2830 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2831 ep++)
2832 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2833 {
2834 if (! mem_mode
2835 /* Refs inside notes don't count for this purpose. */
2836 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2837 || GET_CODE (insn) == INSN_LIST)))
2838 ep->ref_outside_mem = 1;
2839
2840 return
2841 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2842 ep->previous_offset * INTVAL (XEXP (x, 1)));
2843 }
2844
2845 /* ... fall through ... */
2846
2847 case CALL:
2848 case COMPARE:
2849 case MINUS:
2850 case DIV: case UDIV:
2851 case MOD: case UMOD:
2852 case AND: case IOR: case XOR:
2853 case ROTATERT: case ROTATE:
2854 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2855 case NE: case EQ:
2856 case GE: case GT: case GEU: case GTU:
2857 case LE: case LT: case LEU: case LTU:
2858 {
2859 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2860 rtx new1
2861 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2862
2863 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2864 return gen_rtx (code, GET_MODE (x), new0, new1);
2865 }
2866 return x;
2867
2868 case EXPR_LIST:
2869 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2870 if (XEXP (x, 0))
2871 {
2872 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2873 if (new != XEXP (x, 0))
2874 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2875 }
2876
2877 /* ... fall through ... */
2878
2879 case INSN_LIST:
2880 /* Now do eliminations in the rest of the chain. If this was
2881 an EXPR_LIST, this might result in allocating more memory than is
2882 strictly needed, but it simplifies the code. */
2883 if (XEXP (x, 1))
2884 {
2885 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2886 if (new != XEXP (x, 1))
2887 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2888 }
2889 return x;
2890
2891 case PRE_INC:
2892 case POST_INC:
2893 case PRE_DEC:
2894 case POST_DEC:
2895 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2896 if (ep->to_rtx == XEXP (x, 0))
2897 {
2898 int size = GET_MODE_SIZE (mem_mode);
2899
2900 /* If more bytes than MEM_MODE are pushed, account for them. */
2901 #ifdef PUSH_ROUNDING
2902 if (ep->to_rtx == stack_pointer_rtx)
2903 size = PUSH_ROUNDING (size);
2904 #endif
2905 if (code == PRE_DEC || code == POST_DEC)
2906 ep->offset += size;
2907 else
2908 ep->offset -= size;
2909 }
2910
2911 /* Fall through to generic unary operation case. */
2912 case STRICT_LOW_PART:
2913 case NEG: case NOT:
2914 case SIGN_EXTEND: case ZERO_EXTEND:
2915 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2916 case FLOAT: case FIX:
2917 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2918 case ABS:
2919 case SQRT:
2920 case FFS:
2921 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2922 if (new != XEXP (x, 0))
2923 return gen_rtx (code, GET_MODE (x), new);
2924 return x;
2925
2926 case SUBREG:
2927 /* Similar to above processing, but preserve SUBREG_WORD.
2928 Convert (subreg (mem)) to (mem) if not paradoxical.
2929 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2930 pseudo didn't get a hard reg, we must replace this with the
2931 eliminated version of the memory location because push_reloads
2932 may do the replacement in certain circumstances. */
2933 if (GET_CODE (SUBREG_REG (x)) == REG
2934 && (GET_MODE_SIZE (GET_MODE (x))
2935 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2936 && reg_equiv_memory_loc != 0
2937 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2938 {
2939 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2940 mem_mode, insn);
2941
2942 /* If we didn't change anything, we must retain the pseudo. */
2943 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2944 new = SUBREG_REG (x);
2945 else
2946 {
2947 /* Otherwise, ensure NEW isn't shared in case we have to reload
2948 it. */
2949 new = copy_rtx (new);
2950
2951 /* In this case, we must show that the pseudo is used in this
2952 insn so that delete_output_reload will do the right thing. */
2953 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2954 && GET_CODE (insn) != INSN_LIST)
2955 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
2956 insn);
2957 }
2958 }
2959 else
2960 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2961
2962 if (new != XEXP (x, 0))
2963 {
2964 if (GET_CODE (new) == MEM
2965 && (GET_MODE_SIZE (GET_MODE (x))
2966 <= GET_MODE_SIZE (GET_MODE (new)))
2967 #ifdef LOAD_EXTEND_OP
2968 /* On these machines we will be reloading what is
2969 inside the SUBREG if it originally was a pseudo and
2970 the inner and outer modes are both a word or
2971 smaller. So leave the SUBREG then. */
2972 && ! (GET_CODE (SUBREG_REG (x)) == REG
2973 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2974 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD
2975 && (GET_MODE_SIZE (GET_MODE (x))
2976 > GET_MODE_SIZE (GET_MODE (new)))
2977 && INTEGRAL_MODE_P (GET_MODE (new))
2978 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
2979 #endif
2980 )
2981 {
2982 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2983 enum machine_mode mode = GET_MODE (x);
2984
2985 if (BYTES_BIG_ENDIAN)
2986 offset += (MIN (UNITS_PER_WORD,
2987 GET_MODE_SIZE (GET_MODE (new)))
2988 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2989
2990 PUT_MODE (new, mode);
2991 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2992 return new;
2993 }
2994 else
2995 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2996 }
2997
2998 return x;
2999
3000 case USE:
3001 /* If using a register that is the source of an eliminate we still
3002 think can be performed, note it cannot be performed since we don't
3003 know how this register is used. */
3004 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3005 if (ep->from_rtx == XEXP (x, 0))
3006 ep->can_eliminate = 0;
3007
3008 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3009 if (new != XEXP (x, 0))
3010 return gen_rtx (code, GET_MODE (x), new);
3011 return x;
3012
3013 case CLOBBER:
3014 /* If clobbering a register that is the replacement register for an
3015 elimination we still think can be performed, note that it cannot
3016 be performed. Otherwise, we need not be concerned about it. */
3017 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3018 if (ep->to_rtx == XEXP (x, 0))
3019 ep->can_eliminate = 0;
3020
3021 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3022 if (new != XEXP (x, 0))
3023 return gen_rtx (code, GET_MODE (x), new);
3024 return x;
3025
3026 case ASM_OPERANDS:
3027 {
3028 rtx *temp_vec;
3029 /* Properly handle sharing input and constraint vectors. */
3030 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3031 {
3032 /* When we come to a new vector not seen before,
3033 scan all its elements; keep the old vector if none
3034 of them changes; otherwise, make a copy. */
3035 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3036 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3037 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3038 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
3039 mem_mode, insn);
3040
3041 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3042 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3043 break;
3044
3045 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3046 new_asm_operands_vec = old_asm_operands_vec;
3047 else
3048 new_asm_operands_vec
3049 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3050 }
3051
3052 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3053 if (new_asm_operands_vec == old_asm_operands_vec)
3054 return x;
3055
3056 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3057 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3058 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
3059 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3060 ASM_OPERANDS_SOURCE_FILE (x),
3061 ASM_OPERANDS_SOURCE_LINE (x));
3062 new->volatil = x->volatil;
3063 return new;
3064 }
3065
3066 case SET:
3067 /* Check for setting a register that we know about. */
3068 if (GET_CODE (SET_DEST (x)) == REG)
3069 {
3070 /* See if this is setting the replacement register for an
3071 elimination.
3072
3073 If DEST is the hard frame pointer, we do nothing because we
3074 assume that all assignments to the frame pointer are for
3075 non-local gotos and are being done at a time when they are valid
3076 and do not disturb anything else. Some machines want to
3077 eliminate a fake argument pointer (or even a fake frame pointer)
3078 with either the real frame or the stack pointer. Assignments to
3079 the hard frame pointer must not prevent this elimination. */
3080
3081 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3082 ep++)
3083 if (ep->to_rtx == SET_DEST (x)
3084 && SET_DEST (x) != hard_frame_pointer_rtx)
3085 {
3086 /* If it is being incremented, adjust the offset. Otherwise,
3087 this elimination can't be done. */
3088 rtx src = SET_SRC (x);
3089
3090 if (GET_CODE (src) == PLUS
3091 && XEXP (src, 0) == SET_DEST (x)
3092 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3093 ep->offset -= INTVAL (XEXP (src, 1));
3094 else
3095 ep->can_eliminate = 0;
3096 }
3097
3098 /* Now check to see we are assigning to a register that can be
3099 eliminated. If so, it must be as part of a PARALLEL, since we
3100 will not have been called if this is a single SET. So indicate
3101 that we can no longer eliminate this reg. */
3102 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3103 ep++)
3104 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3105 ep->can_eliminate = 0;
3106 }
3107
3108 /* Now avoid the loop below in this common case. */
3109 {
3110 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3111 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3112
3113 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3114 write a CLOBBER insn. */
3115 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3116 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3117 && GET_CODE (insn) != INSN_LIST)
3118 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3119
3120 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3121 return gen_rtx (SET, VOIDmode, new0, new1);
3122 }
3123
3124 return x;
3125
3126 case MEM:
3127 /* Our only special processing is to pass the mode of the MEM to our
3128 recursive call and copy the flags. While we are here, handle this
3129 case more efficiently. */
3130 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3131 if (new != XEXP (x, 0))
3132 {
3133 new = gen_rtx (MEM, GET_MODE (x), new);
3134 new->volatil = x->volatil;
3135 new->unchanging = x->unchanging;
3136 new->in_struct = x->in_struct;
3137 return new;
3138 }
3139 else
3140 return x;
3141 }
3142
3143 /* Process each of our operands recursively. If any have changed, make a
3144 copy of the rtx. */
3145 fmt = GET_RTX_FORMAT (code);
3146 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3147 {
3148 if (*fmt == 'e')
3149 {
3150 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3151 if (new != XEXP (x, i) && ! copied)
3152 {
3153 rtx new_x = rtx_alloc (code);
3154 bcopy ((char *) x, (char *) new_x,
3155 (sizeof (*new_x) - sizeof (new_x->fld)
3156 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3157 x = new_x;
3158 copied = 1;
3159 }
3160 XEXP (x, i) = new;
3161 }
3162 else if (*fmt == 'E')
3163 {
3164 int copied_vec = 0;
3165 for (j = 0; j < XVECLEN (x, i); j++)
3166 {
3167 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3168 if (new != XVECEXP (x, i, j) && ! copied_vec)
3169 {
3170 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3171 &XVECEXP (x, i, 0));
3172 if (! copied)
3173 {
3174 rtx new_x = rtx_alloc (code);
3175 bcopy ((char *) x, (char *) new_x,
3176 (sizeof (*new_x) - sizeof (new_x->fld)
3177 + (sizeof (new_x->fld[0])
3178 * GET_RTX_LENGTH (code))));
3179 x = new_x;
3180 copied = 1;
3181 }
3182 XVEC (x, i) = new_v;
3183 copied_vec = 1;
3184 }
3185 XVECEXP (x, i, j) = new;
3186 }
3187 }
3188 }
3189
3190 return x;
3191 }
3192 \f
/* Scan INSN and eliminate all eliminable registers in it.

   If REPLACE is nonzero, do the replacement destructively.  Also
   delete the insn as dead if it is setting an eliminable register.

   If REPLACE is zero, do all our allocations in reload_obstack.

   If no eliminations were done and this insn doesn't require any elimination
   processing (these are not identical conditions: it might be updating sp,
   but not referencing fp; this needs to be seen during reload_as_needed so
   that the offset between fp and sp can be taken into consideration), zero
   is returned.  Otherwise, 1 is returned.  */

static int
eliminate_regs_in_insn (insn, replace)
     rtx insn;
     int replace;
{
  rtx old_body = PATTERN (insn);
  rtx old_set = single_set (insn);
  rtx new_body;
  int val = 0;			/* Return value: nonzero if anything changed.  */
  struct elim_table *ep;

  if (! replace)
    push_obstacks (&reload_obstack, &reload_obstack);

  if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  {
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	    /* If this is setting the frame pointer register to the
	       hardware frame pointer register and this is an elimination
	       that will be done (tested above), this insn is really
	       adjusting the frame pointer downward to compensate for
	       the adjustment done before a nonlocal goto.  */
	    if (ep->from == FRAME_POINTER_REGNUM
		&& ep->to == HARD_FRAME_POINTER_REGNUM)
	      {
		rtx src = SET_SRC (old_set);
		int offset, ok = 0;

		if (src == ep->to_rtx)
		  offset = 0, ok = 1;
		else if (GET_CODE (src) == PLUS
			 && GET_CODE (XEXP (src, 0)) == CONST_INT)
		  offset = INTVAL (XEXP (src, 0)), ok = 1;

		if (ok)
		  {
		    if (replace)
		      {
			/* NOTE: this SRC intentionally shadows the outer
			   SRC; it is the rewritten source expression.  */
			rtx src
			  = plus_constant (ep->to_rtx, offset - ep->offset);

			/* First see if this insn remains valid when we
			   make the change.  If not, keep the INSN_CODE
			   the same and let reload fit it up.  */
			validate_change (insn, &SET_SRC (old_set), src, 1);
			validate_change (insn, &SET_DEST (old_set),
					 ep->to_rtx, 1);
			if (! apply_change_group ())
			  {
			    SET_SRC (old_set) = src;
			    SET_DEST (old_set) = ep->to_rtx;
			  }
		      }

		    val = 1;
		    goto done;
		  }
	      }
#endif

	    /* In this case this insn isn't serving a useful purpose.  We
	       will delete it in reload_as_needed once we know that this
	       elimination is, in fact, being done.

	       If REPLACE isn't set, we can't delete this insn, but needn't
	       process it since it won't be used unless something changes.  */
	    if (replace)
	      delete_dead_insn (insn);
	    val = 1;
	    goto done;
	  }

      /* Check for (set (reg) (plus (reg from) (offset))) where the offset
	 in the insn is the negative of the offset in FROM.  Substitute
	 (set (reg) (reg to)) for the insn and change its code.

	 We have to do this here, rather than in eliminate_regs, so that we
	 can change the insn code.  */

      if (GET_CODE (SET_SRC (old_set)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
	  && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
	      && ep->can_eliminate)
	    {
	      /* We must stop at the first elimination that will be used.
		 If this one would replace the PLUS with a REG, do it
		 now.  Otherwise, quit the loop and let eliminate_regs
		 do its normal replacement.  */
	      if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
		{
		  /* We assume here that we don't need a PARALLEL of
		     any CLOBBERs for this assignment.  There's not
		     much we can do if we do need it.  */
		  PATTERN (insn) = gen_rtx (SET, VOIDmode,
					    SET_DEST (old_set), ep->to_rtx);
		  INSN_CODE (insn) = -1;
		  val = 1;
		  goto done;
		}

	      break;
	    }
    }

  old_asm_operands_vec = 0;

  /* Replace the body of this insn with a substituted form.  If we changed
     something, return non-zero.

     If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.  */

  new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
  if (new_body != old_body)
    {
      /* If we aren't replacing things permanently and we changed something,
	 make another copy to ensure that all the RTL is new.  Otherwise
	 things can go wrong if find_reload swaps commutative operands
	 and one is inside RTL that has been copied while the other is not.  */

      /* Don't copy an asm_operands because (1) there's no need and (2)
	 copy_rtx can't do it properly when there are multiple outputs.  */
      if (! replace && asm_noperands (old_body) < 0)
	new_body = copy_rtx (new_body);

      /* If we had a move insn but now we don't, rerecognize it.  This will
	 cause spurious re-recognition if the old move had a PARALLEL since
	 the new one still will, but we can't call single_set without
	 having put NEW_BODY into the insn and the re-recognition won't
	 hurt in this rare case.  */
      if (old_set != 0
	  && ((GET_CODE (SET_SRC (old_set)) == REG
	       && (GET_CODE (new_body) != SET
		   || GET_CODE (SET_SRC (new_body)) != REG))
	      /* If this was a load from or store to memory, compare
		 the MEM in recog_operand to the one in the insn.  If they
		 are not equal, then rerecognize the insn.  */
	      || (old_set != 0
		  && ((GET_CODE (SET_SRC (old_set)) == MEM
		       && SET_SRC (old_set) != recog_operand[1])
		      || (GET_CODE (SET_DEST (old_set)) == MEM
			  && SET_DEST (old_set) != recog_operand[0])))
	      /* If this was an add insn before, rerecognize.  */
	      || GET_CODE (SET_SRC (old_set)) == PLUS))
	{
	  if (! validate_change (insn, &PATTERN (insn), new_body, 0))
	    /* If recognition fails, store the new body anyway.
	       It's normal to have recognition failures here
	       due to bizarre memory addresses; reloading will fix them.  */
	    PATTERN (insn) = new_body;
	}
      else
	PATTERN (insn) = new_body;

      val = 1;
    }

  /* Loop through all elimination pairs.  See if any have changed and
     recalculate the number not at initial offset.

     Compute the maximum offset (minimum offset if the stack does not
     grow downward) for each elimination pair.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */

  num_not_at_initial_offset = 0;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;

      if (ep->previous_offset != ep->offset)
	val = 1;

      ep->previous_offset = ep->offset;
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
	num_not_at_initial_offset++;

#ifdef STACK_GROWS_DOWNWARD
      ep->max_offset = MAX (ep->max_offset, ep->offset);
#else
      ep->max_offset = MIN (ep->max_offset, ep->offset);
#endif
    }

 done:
  /* If we changed something, perform elimination in REG_NOTES.  This is
     needed even when REPLACE is zero because a REG_DEAD note might refer
     to a register that we eliminate and could cause a different number
     of spill registers to be needed in the final reload pass than in
     the pre-passes.  */
  if (val && REG_NOTES (insn) != 0)
    REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));

  if (! replace)
    pop_obstacks ();

  return val;
}
3424
3425 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3426 replacement we currently believe is valid, mark it as not eliminable if X
3427 modifies DEST in any way other than by adding a constant integer to it.
3428
3429 If DEST is the frame pointer, we do nothing because we assume that
3430 all assignments to the hard frame pointer are nonlocal gotos and are being
3431 done at a time when they are valid and do not disturb anything else.
3432 Some machines want to eliminate a fake argument pointer with either the
3433 frame or stack pointer. Assignments to the hard frame pointer must not
3434 prevent this elimination.
3435
3436 Called via note_stores from reload before starting its passes to scan
3437 the insns of the function. */
3438
3439 static void
3440 mark_not_eliminable (dest, x)
3441 rtx dest;
3442 rtx x;
3443 {
3444 register int i;
3445
3446 /* A SUBREG of a hard register here is just changing its mode. We should
3447 not see a SUBREG of an eliminable hard register, but check just in
3448 case. */
3449 if (GET_CODE (dest) == SUBREG)
3450 dest = SUBREG_REG (dest);
3451
3452 if (dest == hard_frame_pointer_rtx)
3453 return;
3454
3455 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3456 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3457 && (GET_CODE (x) != SET
3458 || GET_CODE (SET_SRC (x)) != PLUS
3459 || XEXP (SET_SRC (x), 0) != dest
3460 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3461 {
3462 reg_eliminate[i].can_eliminate_previous
3463 = reg_eliminate[i].can_eliminate = 0;
3464 num_eliminable--;
3465 }
3466 }
3467 \f
/* Kick all pseudos out of hard register REGNO.
   If GLOBAL is nonzero, try to find someplace else to put them.
   If DUMPFILE is nonzero, log actions taken on that file.

   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In the case, no pseudos
   are allowed to be in the register, even if they are only in a block that
   doesn't require spill registers, unlike the case when we are spilling this
   hard reg to produce another spill register.

   Return nonzero if any pseudos needed to be kicked out.  */

static int
spill_hard_reg (regno, global, dumpfile, cant_eliminate)
     register int regno;
     int global;
     FILE *dumpfile;
     int cant_eliminate;
{
  enum reg_class class = REGNO_REG_CLASS (regno);
  int something_changed = 0;
  register int i;

  SET_HARD_REG_BIT (forbidden_regs, regno);

  /* If we can't eliminate, the register must stay live so it is never
     allocated for anything else.  */
  if (cant_eliminate)
    regs_ever_live[regno] = 1;

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] >= 0
	&& reg_renumber[i] <= regno
	/* The pseudo occupies HARD_REGNO_NREGS consecutive hard regs
	   starting at reg_renumber[i]; check whether that span covers
	   REGNO.  */
	&& (reg_renumber[i]
	    + HARD_REGNO_NREGS (reg_renumber[i],
				PSEUDO_REGNO_MODE (i))
	    > regno))
      {
	/* If this register belongs solely to a basic block which needed no
	   spilling of any class that this register is contained in,
	   leave it be, unless we are spilling this register because
	   it was a hard register that can't be eliminated.  */

	if (! cant_eliminate
	    && basic_block_needs[0]
	    && reg_basic_block[i] >= 0
	    && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
	  {
	    enum reg_class *p;

	    /* Also check every superclass of CLASS; only if none of them
	       needs spills in this block may the pseudo stay put.  */
	    for (p = reg_class_superclasses[(int) class];
		 *p != LIM_REG_CLASSES; p++)
	      if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
		break;

	    if (*p == LIM_REG_CLASSES)
	      continue;
	  }

	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	/* We will need to scan everything again.  */
	something_changed = 1;
	if (global)
	  retry_global_alloc (i, forbidden_regs);

	alter_reg (i, regno);
	if (dumpfile)
	  {
	    if (reg_renumber[i] == -1)
	      fprintf (dumpfile, " Register %d now on stack.\n\n", i);
	    else
	      fprintf (dumpfile, " Register %d now in %d.\n\n",
		       i, reg_renumber[i]);
	  }
      }
  /* Likewise discard any SCRATCH that was assigned this hard register.  */
  for (i = 0; i < scratch_list_length; i++)
    {
      if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
	{
	  if (! cant_eliminate && basic_block_needs[0]
	      && ! basic_block_needs[(int) class][scratch_block[i]])
	    {
	      enum reg_class *p;

	      for (p = reg_class_superclasses[(int) class];
		   *p != LIM_REG_CLASSES; p++)
		if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
		  break;

	      if (*p == LIM_REG_CLASSES)
		continue;
	    }
	  PUT_CODE (scratch_list[i], SCRATCH);
	  scratch_list[i] = 0;
	  something_changed = 1;
	  continue;
	}
    }

  return something_changed;
}
3571 \f
3572 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3573 Also mark any hard registers used to store user variables as
3574 forbidden from being used for spill registers. */
3575
3576 static void
3577 scan_paradoxical_subregs (x)
3578 register rtx x;
3579 {
3580 register int i;
3581 register char *fmt;
3582 register enum rtx_code code = GET_CODE (x);
3583
3584 switch (code)
3585 {
3586 case REG:
3587 #ifdef SMALL_REGISTER_CLASSES
3588 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3589 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3590 #endif
3591 return;
3592
3593 case CONST_INT:
3594 case CONST:
3595 case SYMBOL_REF:
3596 case LABEL_REF:
3597 case CONST_DOUBLE:
3598 case CC0:
3599 case PC:
3600 case USE:
3601 case CLOBBER:
3602 return;
3603
3604 case SUBREG:
3605 if (GET_CODE (SUBREG_REG (x)) == REG
3606 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3607 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3608 = GET_MODE_SIZE (GET_MODE (x));
3609 return;
3610 }
3611
3612 fmt = GET_RTX_FORMAT (code);
3613 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3614 {
3615 if (fmt[i] == 'e')
3616 scan_paradoxical_subregs (XEXP (x, i));
3617 else if (fmt[i] == 'E')
3618 {
3619 register int j;
3620 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3621 scan_paradoxical_subregs (XVECEXP (x, i, j));
3622 }
3623 }
3624 }
3625 \f
3626 static int
3627 hard_reg_use_compare (p1, p2)
3628 struct hard_reg_n_uses *p1, *p2;
3629 {
3630 int tem = p1->uses - p2->uses;
3631 if (tem != 0) return tem;
3632 /* If regs are equally good, sort by regno,
3633 so that the results of qsort leave nothing to chance. */
3634 return p1->regno - p2->regno;
3635 }
3636
3637 /* Choose the order to consider regs for use as reload registers
3638 based on how much trouble would be caused by spilling one.
3639 Store them in order of decreasing preference in potential_reload_regs. */
3640
static void
order_regs_for_reload (global)
     int global;		/* Nonzero if global register allocation ran.  */
{
  register int i;
  register int o = 0;		/* Next free slot in potential_reload_regs.  */
  int large = 0;		/* Sum of all reg_n_refs; used below as a
				   "huge" count to penalize bad choices.  */

  struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];

  CLEAR_HARD_REG_SET (bad_spill_regs);

  /* -1 marks slots not yet filled in.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    potential_reload_regs[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      hard_reg_n_uses[i].uses = 0;
      hard_reg_n_uses[i].regno = i;
    }

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (regno >= 0)
	{
	  /* Charge the pseudo's reference count to every hard reg
	     it occupies (multi-register values span several).  */
	  int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
	  while (regno < lim)
	    {
	      /* If allocated by local-alloc, show more uses since
		 we're not going to be able to reallocate it, but
		 we might if allocated by global alloc.  */
	      if (global && reg_allocno[i] < 0)
		hard_reg_n_uses[regno].uses += (reg_n_refs[i] + 1) / 2;

	      hard_reg_n_uses[regno++].uses += reg_n_refs[i];
	    }
	}
      large += reg_n_refs[i];
    }

  /* Now fixed registers (which cannot safely be used for reloading)
     get a very high use count so they will be considered least desirable.
     Registers used explicitly in the rtl code are almost as bad.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (fixed_regs[i])
	{
	  hard_reg_n_uses[i].uses += 2 * large + 2;
	  SET_HARD_REG_BIT (bad_spill_regs, i);
	}
      else if (regs_explicitly_used[i])
	{
	  hard_reg_n_uses[i].uses += large + 1;
#ifndef SMALL_REGISTER_CLASSES
	  /* ??? We are doing this here because of the potential that
	     bad code may be generated if a register explicitly used in
	     an insn was used as a spill register for that insn.  But
	     not using these as spill registers may lose on some machine.
	     We'll have to see how this works out.  */
	  SET_HARD_REG_BIT (bad_spill_regs, i);
#endif
	}
    }
  /* The frame pointer is never a good spill register.  */
  hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
  SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);

#ifdef ELIMINABLE_REGS
  /* If registers other than the frame pointer are eliminable, mark them as
     poor choices.  */
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
      SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
    }
#endif

  /* Prefer registers not so far used, for use in temporary loading.
     Among them, if REG_ALLOC_ORDER is defined, use that order.
     Otherwise, prefer registers not preserved by calls.  */

#ifdef REG_ALLOC_ORDER
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int regno = reg_alloc_order[i];

      if (hard_reg_n_uses[regno].uses == 0)
	potential_reload_regs[o++] = regno;
    }
#else
  /* First pass: unused call-clobbered registers.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
  /* Second pass: unused call-saved registers.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
#endif

  qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
	 sizeof hard_reg_n_uses[0], hard_reg_use_compare);

  /* Now add the regs that are already used,
     preferring those used less often.  The fixed and otherwise forbidden
     registers will be at the end of this list.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (hard_reg_n_uses[i].uses != 0)
      potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
}
3758 \f
3759 /* Used in reload_as_needed to sort the spilled regs. */
3760
static int
compare_spill_regs (r1, r2)
     short *r1, *r2;
{
  /* Sort into ascending hard-register-number order.  */
  register int a = *r1;
  register int b = *r2;

  return a - b;
}
3767
3768 /* Reload pseudo-registers into hard regs around each insn as needed.
3769 Additional register load insns are output before the insn that needs it
3770 and perhaps store insns after insns that modify the reloaded pseudo reg.
3771
3772 reg_last_reload_reg and reg_reloaded_contents keep track of
3773 which registers are already available in reload registers.
3774 We update these for the reloads that we perform,
3775 as the insns are scanned. */
3776
static void
reload_as_needed (first, live_known)
     rtx first;			/* First insn of the function.  */
     int live_known;		/* Nonzero if basic-block info is valid.  */
{
  register rtx insn;
  register int i;
  int this_block = 0;		/* Index of basic block containing INSN.  */
  rtx x;
  rtx after_call = 0;		/* Destination of the last CALL_INSN's value,
				   until an insn is seen that consumes it.  */

  bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
  bzero ((char *) spill_reg_store, sizeof spill_reg_store);
  reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
  reg_has_output_reload = (char *) alloca (max_regno);
  /* No spill register holds any known value yet.  */
  for (i = 0; i < n_spills; i++)
    {
      reg_reloaded_contents[i] = -1;
      reg_reloaded_insn[i] = 0;
    }

  /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
				  reg_eliminate[i].initial_offset);
      reg_eliminate[i].previous_offset
	= reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
    }
#else
  INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
  reg_eliminate[0].previous_offset
    = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

  num_not_at_initial_offset = 0;

  /* Order the spilled regs, so that allocate_reload_regs can guarantee to
     pack registers with group needs.  */
  if (n_spills > 1)
    {
      qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
      for (i = 0; i < n_spills; i++)
	spill_reg_order[spill_regs[i]] = i;
    }

  for (insn = first; insn;)
    {
      register rtx next = NEXT_INSN (insn);

      /* Notice when we move to a new basic block.  */
      if (live_known && this_block + 1 < n_basic_blocks
	  && insn == basic_block_head[this_block+1])
	++this_block;

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.  */
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (insn)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  rtx avoid_return_reg = 0;
	  rtx oldpat = PATTERN (insn);

#ifdef SMALL_REGISTER_CLASSES
	  /* Set avoid_return_reg if this is an insn
	     that might use the value of a function call.  */
	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      if (GET_CODE (PATTERN (insn)) == SET)
		after_call = SET_DEST (PATTERN (insn));
	      else if (GET_CODE (PATTERN (insn)) == PARALLEL
		       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
	      else
		after_call = 0;
	    }
	  else if (after_call != 0
		   && !(GET_CODE (PATTERN (insn)) == SET
			&& SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
	    {
	      if (reg_referenced_p (after_call, PATTERN (insn)))
		avoid_return_reg = after_call;
	      after_call = 0;
	    }
#endif /* SMALL_REGISTER_CLASSES */

	  /* If this is a USE and CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.
	     NOTE(review): reload appears to use the insn's mode field as
	     a flag set earlier in this pass -- QImode marking insns that
	     need elimination, VOIDmode insns that need no reloads.  */
	  if (num_eliminable && GET_MODE (insn) == QImode)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      if (GET_CODE (insn) == NOTE)
		{
		  insn = next;
		  continue;
		}
	    }

	  if (GET_MODE (insn) == VOIDmode)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      bzero (reg_has_output_reload, max_regno);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (n_reloads > 0)
	    {
	      /* NOTE: this NEXT shadows the loop's NEXT; PREV and NEXT
		 here bracket the insns emitted for this insn's reloads.  */
	      rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
	      rtx p;
	      int class;

	      /* If this block has not had spilling done for a
		 particular class and we have any non-optionals that need a
		 spill reg in that class, abort.  */

	      for (class = 0; class < N_REG_CLASSES; class++)
		if (basic_block_needs[class] != 0
		    && basic_block_needs[class][this_block] == 0)
		  for (i = 0; i < n_reloads; i++)
		    if (class == (int) reload_reg_class[i]
			&& reload_reg_rtx[i] == 0
			&& ! reload_optional[i]
			&& (reload_in[i] != 0 || reload_out[i] != 0
			    || reload_secondary_p[i] != 0))
		      fatal_insn ("Non-optional registers need a spill register", insn);

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (insn, avoid_return_reg);

#ifdef SMALL_REGISTER_CLASSES
	      /* Merge any reloads that we didn't combine for fear of
		 increasing the number of spill registers needed but now
		 discover can be safely merged.  */
	      merge_assigned_reloads (insn);
#endif

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (insn);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insn that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads ();

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */

	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
		  if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
		      && (recog_memoized (p) < 0
			  || (insn_extract (p),
			      ! constrain_operands (INSN_CODE (p), 1))))
		    {
		      error_for_asm (insn,
				     "`asm' operand requires impossible reload");
		      /* Delete the offending reload insn by turning it
			 into a deleted note.  */
		      PUT_CODE (p, NOTE);
		      NOTE_SOURCE_FILE (p) = 0;
		      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
		    }
	    }
	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.  */
	  note_stores (oldpat, forget_old_reloads_1);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
	    if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (PATTERN (x), forget_old_reloads_1);

#ifdef AUTO_INC_DEC
	  /* Likewise for regs altered by auto-increment in this insn.
	     But note that the reg-notes are not changed by reloading:
	     they still contain the pseudo-regs, not the spill regs.  */
	  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (reload_out[i] == XEXP (x, 0))
		    break;

		if (i == n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
	      }
#endif
	}
      /* A reload reg's contents are unknown after a label.  */
      if (GET_CODE (insn) == CODE_LABEL)
	for (i = 0; i < n_spills; i++)
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg.  */
      else if (GET_CODE (insn) == CALL_INSN)
	for (i = 0; i < n_spills; i++)
	  if (call_used_regs[spill_regs[i]])
	    {
	      reg_reloaded_contents[i] = -1;
	      reg_reloaded_insn[i] = 0;
	    }

      /* In case registers overlap, allow certain insns to invalidate
	 particular hard registers.  */

#ifdef INSN_CLOBBERS_REGNO_P
      for (i = 0 ; i < n_spills ; i++)
	if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }
#endif

      insn = next;

#ifdef USE_C_ALLOCA
      /* With the C alloca, free the memory alloca'd above.  */
      alloca (0);
#endif
    }
}
4047
4048 /* Discard all record of any value reloaded from X,
4049 or reloaded in X from someplace else;
4050 unless X is an output reload reg of the current insn.
4051
4052 X may be a hard reg (the reload reg)
4053 or it may be a pseudo reg that was reloaded from. */
4054
static void
forget_old_reloads_1 (x, ignored)
     rtx x;
     rtx ignored;		/* Second argument that note_stores passes
				   to its callback; unused here.  */
{
  register int regno;
  int nr;			/* Number of hard registers X occupies.  */
  int offset = 0;		/* Word offset accumulated from SUBREGs.  */

  /* note_stores does give us subregs of hard regs.  */
  while (GET_CODE (x) == SUBREG)
    {
      offset += SUBREG_WORD (x);
      x = SUBREG_REG (x);
    }

  if (GET_CODE (x) != REG)
    return;

  regno = REGNO (x) + offset;

  if (regno >= FIRST_PSEUDO_REGISTER)
    nr = 1;
  else
    {
      int i;
      nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
      /* Storing into a spilled-reg invalidates its contents.
	 This can happen if a block-local pseudo is allocated to that reg
	 and it wasn't spilled because this block's total need is 0.
	 Then some insn might have an optional reload and use this reg.  */
      for (i = 0; i < nr; i++)
	if (spill_reg_order[regno + i] >= 0
	    /* But don't do this if the reg actually serves as an output
	       reload reg in the current instruction.  */
	    && (n_reloads == 0
		|| ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
	  {
	    reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
	    reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
	  }
    }

  /* Since value of X has changed,
     forget any value previously copied from it.  */

  while (nr-- > 0)
    /* But don't forget a copy if this is the output reload
       that establishes the copy's validity.  */
    if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
      reg_last_reload_reg[regno + nr] = 0;
}
4107 \f
/* For each reload, the mode of the reload register.  */
static enum machine_mode reload_mode[MAX_RELOADS];

/* For each reload, the largest number of hard registers it will require.  */
static int reload_nregs[MAX_RELOADS];
4113
4114 /* Comparison function for qsort to decide which of two reloads
4115 should be handled first. *P1 and *P2 are the reload numbers. */
4116
4117 static int
4118 reload_reg_class_lower (p1, p2)
4119 short *p1, *p2;
4120 {
4121 register int r1 = *p1, r2 = *p2;
4122 register int t;
4123
4124 /* Consider required reloads before optional ones. */
4125 t = reload_optional[r1] - reload_optional[r2];
4126 if (t != 0)
4127 return t;
4128
4129 /* Count all solitary classes before non-solitary ones. */
4130 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4131 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4132 if (t != 0)
4133 return t;
4134
4135 /* Aside from solitaires, consider all multi-reg groups first. */
4136 t = reload_nregs[r2] - reload_nregs[r1];
4137 if (t != 0)
4138 return t;
4139
4140 /* Consider reloads in order of increasing reg-class number. */
4141 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4142 if (t != 0)
4143 return t;
4144
4145 /* If reloads are equally urgent, sort by reload number,
4146 so that the results of qsort leave nothing to chance. */
4147 return r1 - r2;
4148 }
4149 \f
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  */

/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is in use as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;
4178
4179 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4180 TYPE. MODE is used to indicate how many consecutive regs are
4181 actually used. */
4182
4183 static void
4184 mark_reload_reg_in_use (regno, opnum, type, mode)
4185 int regno;
4186 int opnum;
4187 enum reload_type type;
4188 enum machine_mode mode;
4189 {
4190 int nregs = HARD_REGNO_NREGS (regno, mode);
4191 int i;
4192
4193 for (i = regno; i < nregs + regno; i++)
4194 {
4195 switch (type)
4196 {
4197 case RELOAD_OTHER:
4198 SET_HARD_REG_BIT (reload_reg_used, i);
4199 break;
4200
4201 case RELOAD_FOR_INPUT_ADDRESS:
4202 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4203 break;
4204
4205 case RELOAD_FOR_OUTPUT_ADDRESS:
4206 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4207 break;
4208
4209 case RELOAD_FOR_OPERAND_ADDRESS:
4210 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4211 break;
4212
4213 case RELOAD_FOR_OPADDR_ADDR:
4214 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4215 break;
4216
4217 case RELOAD_FOR_OTHER_ADDRESS:
4218 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4219 break;
4220
4221 case RELOAD_FOR_INPUT:
4222 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4223 break;
4224
4225 case RELOAD_FOR_OUTPUT:
4226 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4227 break;
4228
4229 case RELOAD_FOR_INSN:
4230 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4231 break;
4232 }
4233
4234 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4235 }
4236 }
4237
4238 /* Similarly, but show REGNO is no longer in use for a reload. */
4239
4240 static void
4241 clear_reload_reg_in_use (regno, opnum, type, mode)
4242 int regno;
4243 int opnum;
4244 enum reload_type type;
4245 enum machine_mode mode;
4246 {
4247 int nregs = HARD_REGNO_NREGS (regno, mode);
4248 int i;
4249
4250 for (i = regno; i < nregs + regno; i++)
4251 {
4252 switch (type)
4253 {
4254 case RELOAD_OTHER:
4255 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4256 break;
4257
4258 case RELOAD_FOR_INPUT_ADDRESS:
4259 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4260 break;
4261
4262 case RELOAD_FOR_OUTPUT_ADDRESS:
4263 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4264 break;
4265
4266 case RELOAD_FOR_OPERAND_ADDRESS:
4267 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4268 break;
4269
4270 case RELOAD_FOR_OPADDR_ADDR:
4271 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4272 break;
4273
4274 case RELOAD_FOR_OTHER_ADDRESS:
4275 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4276 break;
4277
4278 case RELOAD_FOR_INPUT:
4279 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4280 break;
4281
4282 case RELOAD_FOR_OUTPUT:
4283 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4284 break;
4285
4286 case RELOAD_FOR_INSN:
4287 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4288 break;
4289 }
4290 }
4291 }
4292
4293 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4294 specified by OPNUM and TYPE. */
4295
static int
reload_reg_free_p (regno, opnum, type)
     int regno;			/* Hard register being considered.  */
     int opnum;			/* Operand number the reload is for.  */
     enum reload_type type;	/* Kind of reload.  */
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything except
     RELOAD_FOR_OTHER_ADDRESS.  Recall that RELOAD_FOR_OTHER_ADDRESS is known
     to be used only for inputs.  */

  if (type != RELOAD_FOR_OTHER_ADDRESS
      && TEST_HARD_REG_BIT (reload_reg_used, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything except RELOAD_FOR_OTHER_ADDRESS means
	 we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      /* An input reload conflicts with anything used across the whole
	 insn or for operand addresses.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Conflicts with any input and with insn-wide or other
	 operand-address reloads.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Conflicts with any input and with other OPADDR_ADDR reloads.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      /* Conflicts with all inputs and outputs and with operand-address
	 or other insn-wide reloads.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }
  /* Unknown reload type.  */
  abort ();
}
4420
4421 /* Return 1 if the value in reload reg REGNO, as used by a reload
4422 needed for the part of the insn specified by OPNUM and TYPE,
4423 is not in use for a reload in any prior part of the insn.
4424
4425 We can assume that the reload reg was already tested for availability
4426 at the time it is needed, and we should not check this again,
4427 in case the reg has already been marked in use. */
4428
static int
reload_reg_free_before_p (regno, opnum, type)
     int regno;			/* Hard register being considered.  */
     int opnum;			/* Operand number the reload is for.  */
     enum reload_type type;	/* Kind of reload.  */
{
  int i;

  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* These always come first.  */
      return 1;

    case RELOAD_OTHER:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

      /* If this use is for part of the insn,
	 check the reg is not in use for any prior part.  It is tempting
	 to try to do this by falling through from objects that occur
	 later in the insn to ones that occur earlier, but that will not
	 correctly take into account the fact that here we MUST ignore
	 things that would prevent the register from being allocated in
	 the first place, since we know that it was allocated.  */

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Earlier reloads are for earlier outputs or their addresses,
	 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
	 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
	 RELOAD_OTHER).  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OUTPUT:
      /* This can't be used in the output address for this operand and
	 anything that can't be used for it, except that we've already
	 tested for RELOAD_FOR_INSN objects.  */

      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_OPERAND_ADDRESS:
    case RELOAD_FOR_OPADDR_ADDR:
    case RELOAD_FOR_INSN:
      /* These can't conflict with inputs, or each other, so all we have to
	 test is input addresses and the addresses of OTHER items.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT:
      /* The only things earlier are the address for this and
	 earlier inputs, other inputs (which we know we don't conflict
	 with), and addresses of RELOAD_OTHER objects.  */

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similarly, all we have to check is for use in earlier inputs'
	 addresses.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }
  /* Unknown reload type.  */
  abort ();
}
4531
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.

   The decision is made by consulting the reload_reg_used_in_* hard-reg
   sets: REGNO reaches the end only if no part of the insn that comes
   after the use described by OPNUM/TYPE has also claimed REGNO.  */

static int
reload_reg_reaches_end_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Earliest use of all: check every later category, since these
	 don't conflict with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      /* Outputs and their addresses always come after all inputs.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through (shares the output checks below) ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPADDR_ADDR:
      /* Same output checks as above, plus the operand address and insn
	 reloads, which come after an opaddr-address reload.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  Setting OPNUM to -1
	 makes the shared loop below scan every output address.  */

      opnum = -1;

      /* ... fall through ...  */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
	  return 0;

      return 1;
    }

  /* Unknown reload type: internal error.  */
  abort ();
}
4653 \f
4654 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4655 Return 0 otherwise.
4656
4657 This function uses the same algorithm as reload_reg_free_p above. */
4658
4659 static int
4660 reloads_conflict (r1, r2)
4661 int r1, r2;
4662 {
4663 enum reload_type r1_type = reload_when_needed[r1];
4664 enum reload_type r2_type = reload_when_needed[r2];
4665 int r1_opnum = reload_opnum[r1];
4666 int r2_opnum = reload_opnum[r2];
4667
4668 /* RELOAD_OTHER conflicts with everything except
4669 RELOAD_FOR_OTHER_ADDRESS. */
4670
4671 if (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS)
4672 return 1;
4673
4674 /* Otherwise, check conflicts differently for each type. */
4675
4676 switch (r1_type)
4677 {
4678 case RELOAD_FOR_INPUT:
4679 return (r2_type == RELOAD_FOR_INSN
4680 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4681 || r2_type == RELOAD_FOR_OPADDR_ADDR
4682 || r2_type == RELOAD_FOR_INPUT
4683 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4684
4685 case RELOAD_FOR_INPUT_ADDRESS:
4686 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4687 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4688
4689 case RELOAD_FOR_OUTPUT_ADDRESS:
4690 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4691 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4692
4693 case RELOAD_FOR_OPERAND_ADDRESS:
4694 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4695 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4696
4697 case RELOAD_FOR_OPADDR_ADDR:
4698 return (r2_type == RELOAD_FOR_INPUT
4699 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4700
4701 case RELOAD_FOR_OUTPUT:
4702 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4703 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4704 && r2_opnum >= r1_opnum));
4705
4706 case RELOAD_FOR_INSN:
4707 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4708 || r2_type == RELOAD_FOR_INSN
4709 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4710
4711 case RELOAD_FOR_OTHER_ADDRESS:
4712 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4713
4714 case RELOAD_OTHER:
4715 return r2_type != RELOAD_FOR_OTHER_ADDRESS;
4716
4717 default:
4718 abort ();
4719 }
4720 }
4721 \f
/* Vector of reload-numbers showing the order in which the reloads should
   be processed.  */
short reload_order[MAX_RELOADS];

/* Indexed by reload number: 1 if the incoming value was
   inherited from a previous insn, 0 otherwise.  */
char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
rtx reload_inheritance_insn[MAX_RELOADS];

/* If non-zero, this is a place to get the value of the reload,
   rather than using reload_in.  */
rtx reload_override_in[MAX_RELOADS];

/* For each reload, the index in spill_regs of the spill register used,
   or -1 if we did not need one of the spill registers for this reload.  */
int reload_spill_index[MAX_RELOADS];
4741
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is non-zero if this is the last reload for the insn being
   processed.

   Set reload_reg_rtx[R] to the register allocated.

   If NOERROR is nonzero, we return 1 if successful,
   or 0 if we couldn't find a spill reg and we didn't change anything.

   If NOERROR is zero and no register can be found, we either abort
   compilation (for a compiler-generated reload) or, for a bad `asm',
   report an error, disable the reload, and still return 1.  */

static int
allocate_reload_reg (r, insn, last_reload, noerror)
     int r;
     rtx insn;
     int last_reload;
     int noerror;
{
  int i;
  int pass;
  int count;
  rtx new;
  int regno;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = reload_nregs[r] > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use two passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers.
     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 2; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  Don't do this, however, when we have
	 group needs and failure would be fatal; if we only have a relatively
	 small number of spill registers, and more than one of them has
	 group needs, then by starting in the middle, we may end up
	 allocating the first one in such a way that we are not left with
	 sufficient groups to handle the rest.  */

      if (noerror || ! force_group)
	i = last_spill_reg;
      else
	i = -1;

      /* Scan all spill regs once, starting just after I (round-robin).  */
      for (count = 0; count < n_spills; count++)
	{
	  int class = (int) reload_reg_class[r];

	  i = (i + 1) % n_spills;

	  /* The candidate must be free for this part of the insn, belong
	     to the required class, and be able to hold the reload mode.
	     On pass 0 we additionally insist on reusing a reg already in
	     use for this insn (but not one holding an inherited value).  */
	  if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
				 reload_when_needed[r])
	      && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
	      && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 spill_regs[i])
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      spill_regs[i]))))
	    {
	      int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.
		 Also, don't use for a group registers that are
		 needed for nongroups.  */
	      if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
		while (nr > 1)
		  {
		    /* Walk the group from the top down; NR reaches 1 only
		       if every member passed the checks.  */
		    regno = spill_regs[i] + nr - 1;
		    if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
			  && spill_reg_order[regno] >= 0
			  && reload_reg_free_p (regno, reload_opnum[r],
						reload_when_needed[r])
			  && ! TEST_HARD_REG_BIT (counted_for_nongroups,
						  regno)))
		      break;
		    nr--;
		  }
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on pass 1, omit pass 2.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count == n_spills)
    {
      if (noerror)
	return 0;
      goto failure;
    }

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  new = spill_reg_rtx[i];

  /* Reuse the cached REG rtx only if its mode matches; otherwise make
     (and cache) a fresh one in the reload's mode.  */
  if (new == 0 || GET_MODE (new) != reload_mode[r])
    spill_reg_rtx[i] = new
      = gen_rtx (REG, reload_mode[r], spill_regs[i]);

  regno = true_regnum (new);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
    {
      enum machine_mode test_mode = VOIDmode;
      if (reload_in[r])
	test_mode = GET_MODE (reload_in[r]);
      /* If reload_in[r] has VOIDmode, it means we will load it
	 in whatever mode the reload reg has: to wit, reload_mode[r].
	 We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
	 to reload from or into have modes which are valid for this
	 reload register.  Otherwise the reload insns would be invalid.  */
      if (! (reload_in[r] != 0 && test_mode != VOIDmode
	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
	if (! (reload_out[r] != 0
	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
	  {
	    /* The reg is OK.  Remember where the round-robin scan left
	       off, record the allocation, and mark the reg in use.  */
	    last_spill_reg = i;

	    /* Mark as in use for this insn the reload regs we use
	       for this.  */
	    mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
				    reload_when_needed[r], reload_mode[r]);

	    reload_reg_rtx[r] = new;
	    reload_spill_index[r] = i;
	    return 1;
	  }
    }

  /* The reg is not OK.  */
  if (noerror)
    return 0;

 failure:
  if (asm_noperands (PATTERN (insn)) < 0)
    /* It's the compiler's fault.  */
    fatal_insn ("Could not find a spill register", insn);

  /* It's the user's fault; the operand's mode and constraint
     don't match.  Disable this reload so we don't crash in final.  */
  error_for_asm (insn,
		 "`asm' operand constraint incompatible with operand size");
  reload_in[r] = 0;
  reload_out[r] = 0;
  reload_reg_rtx[r] = 0;
  /* Make the reload optional and secondary so later passes skip it.  */
  reload_optional[r] = 1;
  reload_secondary_p[r] = 1;

  return 1;
}
4940 \f
4941 /* Assign hard reg targets for the pseudo-registers we must reload
4942 into hard regs for this insn.
4943 Also output the instructions to copy them in and out of the hard regs.
4944
4945 For machines with register classes, we are responsible for
4946 finding a reload reg in the proper class. */
4947
4948 static void
4949 choose_reload_regs (insn, avoid_return_reg)
4950 rtx insn;
4951 rtx avoid_return_reg;
4952 {
4953 register int i, j;
4954 int max_group_size = 1;
4955 enum reg_class group_class = NO_REGS;
4956 int inheritance;
4957
4958 rtx save_reload_reg_rtx[MAX_RELOADS];
4959 char save_reload_inherited[MAX_RELOADS];
4960 rtx save_reload_inheritance_insn[MAX_RELOADS];
4961 rtx save_reload_override_in[MAX_RELOADS];
4962 int save_reload_spill_index[MAX_RELOADS];
4963 HARD_REG_SET save_reload_reg_used;
4964 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4965 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4966 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4967 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4968 HARD_REG_SET save_reload_reg_used_in_op_addr;
4969 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
4970 HARD_REG_SET save_reload_reg_used_in_insn;
4971 HARD_REG_SET save_reload_reg_used_in_other_addr;
4972 HARD_REG_SET save_reload_reg_used_at_all;
4973
4974 bzero (reload_inherited, MAX_RELOADS);
4975 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4976 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
4977
4978 CLEAR_HARD_REG_SET (reload_reg_used);
4979 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4980 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4981 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
4982 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4983 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
4984
4985 for (i = 0; i < reload_n_operands; i++)
4986 {
4987 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4988 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4989 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4990 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4991 }
4992
4993 #ifdef SMALL_REGISTER_CLASSES
4994 /* Don't bother with avoiding the return reg
4995 if we have no mandatory reload that could use it. */
4996 if (avoid_return_reg)
4997 {
4998 int do_avoid = 0;
4999 int regno = REGNO (avoid_return_reg);
5000 int nregs
5001 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5002 int r;
5003
5004 for (r = regno; r < regno + nregs; r++)
5005 if (spill_reg_order[r] >= 0)
5006 for (j = 0; j < n_reloads; j++)
5007 if (!reload_optional[j] && reload_reg_rtx[j] == 0
5008 && (reload_in[j] != 0 || reload_out[j] != 0
5009 || reload_secondary_p[j])
5010 &&
5011 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
5012 do_avoid = 1;
5013 if (!do_avoid)
5014 avoid_return_reg = 0;
5015 }
5016 #endif /* SMALL_REGISTER_CLASSES */
5017
5018 #if 0 /* Not needed, now that we can always retry without inheritance. */
5019 /* See if we have more mandatory reloads than spill regs.
5020 If so, then we cannot risk optimizations that could prevent
5021 reloads from sharing one spill register.
5022
5023 Since we will try finding a better register than reload_reg_rtx
5024 unless it is equal to reload_in or reload_out, count such reloads. */
5025
5026 {
5027 int tem = 0;
5028 #ifdef SMALL_REGISTER_CLASSES
5029 int tem = (avoid_return_reg != 0);
5030 #endif
5031 for (j = 0; j < n_reloads; j++)
5032 if (! reload_optional[j]
5033 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5034 && (reload_reg_rtx[j] == 0
5035 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5036 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5037 tem++;
5038 if (tem > n_spills)
5039 must_reuse = 1;
5040 }
5041 #endif
5042
5043 #ifdef SMALL_REGISTER_CLASSES
5044 /* Don't use the subroutine call return reg for a reload
5045 if we are supposed to avoid it. */
5046 if (avoid_return_reg)
5047 {
5048 int regno = REGNO (avoid_return_reg);
5049 int nregs
5050 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5051 int r;
5052
5053 for (r = regno; r < regno + nregs; r++)
5054 if (spill_reg_order[r] >= 0)
5055 SET_HARD_REG_BIT (reload_reg_used, r);
5056 }
5057 #endif /* SMALL_REGISTER_CLASSES */
5058
5059 /* In order to be certain of getting the registers we need,
5060 we must sort the reloads into order of increasing register class.
5061 Then our grabbing of reload registers will parallel the process
5062 that provided the reload registers.
5063
5064 Also note whether any of the reloads wants a consecutive group of regs.
5065 If so, record the maximum size of the group desired and what
5066 register class contains all the groups needed by this insn. */
5067
5068 for (j = 0; j < n_reloads; j++)
5069 {
5070 reload_order[j] = j;
5071 reload_spill_index[j] = -1;
5072
5073 reload_mode[j]
5074 = (reload_inmode[j] == VOIDmode
5075 || (GET_MODE_SIZE (reload_outmode[j])
5076 > GET_MODE_SIZE (reload_inmode[j])))
5077 ? reload_outmode[j] : reload_inmode[j];
5078
5079 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5080
5081 if (reload_nregs[j] > 1)
5082 {
5083 max_group_size = MAX (reload_nregs[j], max_group_size);
5084 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5085 }
5086
5087 /* If we have already decided to use a certain register,
5088 don't use it in another way. */
5089 if (reload_reg_rtx[j])
5090 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5091 reload_when_needed[j], reload_mode[j]);
5092 }
5093
5094 if (n_reloads > 1)
5095 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5096
5097 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5098 sizeof reload_reg_rtx);
5099 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5100 bcopy ((char *) reload_inheritance_insn,
5101 (char *) save_reload_inheritance_insn,
5102 sizeof reload_inheritance_insn);
5103 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5104 sizeof reload_override_in);
5105 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5106 sizeof reload_spill_index);
5107 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5108 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5109 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5110 reload_reg_used_in_op_addr);
5111
5112 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5113 reload_reg_used_in_op_addr_reload);
5114
5115 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5116 reload_reg_used_in_insn);
5117 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5118 reload_reg_used_in_other_addr);
5119
5120 for (i = 0; i < reload_n_operands; i++)
5121 {
5122 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5123 reload_reg_used_in_output[i]);
5124 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5125 reload_reg_used_in_input[i]);
5126 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5127 reload_reg_used_in_input_addr[i]);
5128 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5129 reload_reg_used_in_output_addr[i]);
5130 }
5131
5132 /* If -O, try first with inheritance, then turning it off.
5133 If not -O, don't do inheritance.
5134 Using inheritance when not optimizing leads to paradoxes
5135 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5136 because one side of the comparison might be inherited. */
5137
5138 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5139 {
5140 /* Process the reloads in order of preference just found.
5141 Beyond this point, subregs can be found in reload_reg_rtx.
5142
5143 This used to look for an existing reloaded home for all
5144 of the reloads, and only then perform any new reloads.
5145 But that could lose if the reloads were done out of reg-class order
5146 because a later reload with a looser constraint might have an old
5147 home in a register needed by an earlier reload with a tighter constraint.
5148
5149 To solve this, we make two passes over the reloads, in the order
5150 described above. In the first pass we try to inherit a reload
5151 from a previous insn. If there is a later reload that needs a
5152 class that is a proper subset of the class being processed, we must
5153 also allocate a spill register during the first pass.
5154
5155 Then make a second pass over the reloads to allocate any reloads
5156 that haven't been given registers yet. */
5157
5158 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5159
5160 for (j = 0; j < n_reloads; j++)
5161 {
5162 register int r = reload_order[j];
5163
5164 /* Ignore reloads that got marked inoperative. */
5165 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5166 continue;
5167
	  /* If find_reloads chose to use reload_in or reload_out as a reload
	     register, we don't need to choose one.  Otherwise, try even if it
	     found one since we might save an insn if we find the value lying
	     around.  */
5171 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5172 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5173 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5174 continue;
5175
5176 #if 0 /* No longer needed for correct operation.
5177 It might give better code, or might not; worth an experiment? */
5178 /* If this is an optional reload, we can't inherit from earlier insns
5179 until we are sure that any non-optional reloads have been allocated.
5180 The following code takes advantage of the fact that optional reloads
5181 are at the end of reload_order. */
5182 if (reload_optional[r] != 0)
5183 for (i = 0; i < j; i++)
5184 if ((reload_out[reload_order[i]] != 0
5185 || reload_in[reload_order[i]] != 0
5186 || reload_secondary_p[reload_order[i]])
5187 && ! reload_optional[reload_order[i]]
5188 && reload_reg_rtx[reload_order[i]] == 0)
5189 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5190 #endif
5191
5192 /* First see if this pseudo is already available as reloaded
5193 for a previous insn. We cannot try to inherit for reloads
5194 that are smaller than the maximum number of registers needed
5195 for groups unless the register we would allocate cannot be used
5196 for the groups.
5197
5198 We could check here to see if this is a secondary reload for
5199 an object that is already in a register of the desired class.
5200 This would avoid the need for the secondary reload register.
5201 But this is complex because we can't easily determine what
5202 objects might want to be loaded via this reload. So let a register
5203 be allocated here. In `emit_reload_insns' we suppress one of the
5204 loads in the case described above. */
5205
5206 if (inheritance)
5207 {
5208 register int regno = -1;
5209 enum machine_mode mode;
5210
5211 if (reload_in[r] == 0)
5212 ;
5213 else if (GET_CODE (reload_in[r]) == REG)
5214 {
5215 regno = REGNO (reload_in[r]);
5216 mode = GET_MODE (reload_in[r]);
5217 }
5218 else if (GET_CODE (reload_in_reg[r]) == REG)
5219 {
5220 regno = REGNO (reload_in_reg[r]);
5221 mode = GET_MODE (reload_in_reg[r]);
5222 }
5223 #if 0
5224 /* This won't work, since REGNO can be a pseudo reg number.
5225 Also, it takes much more hair to keep track of all the things
5226 that can invalidate an inherited reload of part of a pseudoreg. */
5227 else if (GET_CODE (reload_in[r]) == SUBREG
5228 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5229 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5230 #endif
5231
5232 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5233 {
5234 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5235
5236 if (reg_reloaded_contents[i] == regno
5237 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5238 >= GET_MODE_SIZE (mode))
5239 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5240 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5241 spill_regs[i])
5242 && (reload_nregs[r] == max_group_size
5243 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5244 spill_regs[i]))
5245 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5246 reload_when_needed[r])
5247 && reload_reg_free_before_p (spill_regs[i],
5248 reload_opnum[r],
5249 reload_when_needed[r]))
5250 {
5251 /* If a group is needed, verify that all the subsequent
5252 registers still have their values intact. */
5253 int nr
5254 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5255 int k;
5256
5257 for (k = 1; k < nr; k++)
5258 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5259 != regno)
5260 break;
5261
5262 if (k == nr)
5263 {
5264 int i1;
5265
5266 /* We found a register that contains the
5267 value we need. If this register is the
5268 same as an `earlyclobber' operand of the
5269 current insn, just mark it as a place to
5270 reload from since we can't use it as the
5271 reload register itself. */
5272
5273 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5274 if (reg_overlap_mentioned_for_reload_p
5275 (reg_last_reload_reg[regno],
5276 reload_earlyclobbers[i1]))
5277 break;
5278
5279 if (i1 != n_earlyclobbers
5280 /* Don't really use the inherited spill reg
5281 if we need it wider than we've got it. */
5282 || (GET_MODE_SIZE (reload_mode[r])
5283 > GET_MODE_SIZE (mode)))
5284 reload_override_in[r] = reg_last_reload_reg[regno];
5285 else
5286 {
5287 int k;
5288 /* We can use this as a reload reg. */
5289 /* Mark the register as in use for this part of
5290 the insn. */
5291 mark_reload_reg_in_use (spill_regs[i],
5292 reload_opnum[r],
5293 reload_when_needed[r],
5294 reload_mode[r]);
5295 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5296 reload_inherited[r] = 1;
5297 reload_inheritance_insn[r]
5298 = reg_reloaded_insn[i];
5299 reload_spill_index[r] = i;
5300 for (k = 0; k < nr; k++)
5301 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5302 spill_regs[i + k]);
5303 }
5304 }
5305 }
5306 }
5307 }
5308
5309 /* Here's another way to see if the value is already lying around. */
5310 if (inheritance
5311 && reload_in[r] != 0
5312 && ! reload_inherited[r]
5313 && reload_out[r] == 0
5314 && (CONSTANT_P (reload_in[r])
5315 || GET_CODE (reload_in[r]) == PLUS
5316 || GET_CODE (reload_in[r]) == REG
5317 || GET_CODE (reload_in[r]) == MEM)
5318 && (reload_nregs[r] == max_group_size
5319 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5320 {
5321 register rtx equiv
5322 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5323 -1, NULL_PTR, 0, reload_mode[r]);
5324 int regno;
5325
5326 if (equiv != 0)
5327 {
5328 if (GET_CODE (equiv) == REG)
5329 regno = REGNO (equiv);
5330 else if (GET_CODE (equiv) == SUBREG)
5331 {
5332 /* This must be a SUBREG of a hard register.
5333 Make a new REG since this might be used in an
5334 address and not all machines support SUBREGs
5335 there. */
5336 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5337 equiv = gen_rtx (REG, reload_mode[r], regno);
5338 }
5339 else
5340 abort ();
5341 }
5342
5343 /* If we found a spill reg, reject it unless it is free
5344 and of the desired class. */
5345 if (equiv != 0
5346 && ((spill_reg_order[regno] >= 0
5347 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5348 reload_when_needed[r]))
5349 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5350 regno)))
5351 equiv = 0;
5352
5353 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5354 equiv = 0;
5355
5356 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5357 equiv = 0;
5358
5359 /* We found a register that contains the value we need.
5360 If this register is the same as an `earlyclobber' operand
5361 of the current insn, just mark it as a place to reload from
5362 since we can't use it as the reload register itself. */
5363
5364 if (equiv != 0)
5365 for (i = 0; i < n_earlyclobbers; i++)
5366 if (reg_overlap_mentioned_for_reload_p (equiv,
5367 reload_earlyclobbers[i]))
5368 {
5369 reload_override_in[r] = equiv;
5370 equiv = 0;
5371 break;
5372 }
5373
5374 /* JRV: If the equiv register we have found is
5375 explicitly clobbered in the current insn, mark but
5376 don't use, as above. */
5377
5378 if (equiv != 0 && regno_clobbered_p (regno, insn))
5379 {
5380 reload_override_in[r] = equiv;
5381 equiv = 0;
5382 }
5383
5384 /* If we found an equivalent reg, say no code need be generated
5385 to load it, and use it as our reload reg. */
5386 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5387 {
5388 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5389 int k;
5390 reload_reg_rtx[r] = equiv;
5391 reload_inherited[r] = 1;
5392
5393 /* If any of the hard registers in EQUIV are spill
5394 registers, mark them as in use for this insn. */
5395 for (k = 0; k < nr; k++)
5396 {
5397 i = spill_reg_order[regno + k];
5398 if (i >= 0)
5399 {
5400 mark_reload_reg_in_use (regno, reload_opnum[r],
5401 reload_when_needed[r],
5402 reload_mode[r]);
5403 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5404 regno + k);
5405 }
5406 }
5407 }
5408 }
5409
5410 /* If we found a register to use already, or if this is an optional
5411 reload, we are done. */
5412 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5413 continue;
5414
5415 #if 0 /* No longer needed for correct operation. Might or might not
5416 give better code on the average. Want to experiment? */
5417
5418 /* See if there is a later reload that has a class different from our
5419 class that intersects our class or that requires less register
5420 than our reload. If so, we must allocate a register to this
5421 reload now, since that reload might inherit a previous reload
5422 and take the only available register in our class. Don't do this
5423 for optional reloads since they will force all previous reloads
5424 to be allocated. Also don't do this for reloads that have been
5425 turned off. */
5426
5427 for (i = j + 1; i < n_reloads; i++)
5428 {
5429 int s = reload_order[i];
5430
5431 if ((reload_in[s] == 0 && reload_out[s] == 0
5432 && ! reload_secondary_p[s])
5433 || reload_optional[s])
5434 continue;
5435
5436 if ((reload_reg_class[s] != reload_reg_class[r]
5437 && reg_classes_intersect_p (reload_reg_class[r],
5438 reload_reg_class[s]))
5439 || reload_nregs[s] < reload_nregs[r])
5440 break;
5441 }
5442
5443 if (i == n_reloads)
5444 continue;
5445
5446 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5447 #endif
5448 }
5449
5450 /* Now allocate reload registers for anything non-optional that
5451 didn't get one yet. */
5452 for (j = 0; j < n_reloads; j++)
5453 {
5454 register int r = reload_order[j];
5455
5456 /* Ignore reloads that got marked inoperative. */
5457 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5458 continue;
5459
5460 /* Skip reloads that already have a register allocated or are
5461 optional. */
5462 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5463 continue;
5464
5465 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5466 break;
5467 }
5468
5469 /* If that loop got all the way, we have won. */
5470 if (j == n_reloads)
5471 break;
5472
5473 fail:
5474 /* Loop around and try without any inheritance. */
5475 /* First undo everything done by the failed attempt
5476 to allocate with inheritance. */
5477 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5478 sizeof reload_reg_rtx);
5479 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5480 sizeof reload_inherited);
5481 bcopy ((char *) save_reload_inheritance_insn,
5482 (char *) reload_inheritance_insn,
5483 sizeof reload_inheritance_insn);
5484 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5485 sizeof reload_override_in);
5486 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5487 sizeof reload_spill_index);
5488 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5489 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5490 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5491 save_reload_reg_used_in_op_addr);
5492 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5493 save_reload_reg_used_in_op_addr_reload);
5494 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5495 save_reload_reg_used_in_insn);
5496 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5497 save_reload_reg_used_in_other_addr);
5498
5499 for (i = 0; i < reload_n_operands; i++)
5500 {
5501 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5502 save_reload_reg_used_in_input[i]);
5503 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5504 save_reload_reg_used_in_output[i]);
5505 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5506 save_reload_reg_used_in_input_addr[i]);
5507 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5508 save_reload_reg_used_in_output_addr[i]);
5509 }
5510 }
5511
5512 /* If we thought we could inherit a reload, because it seemed that
5513 nothing else wanted the same reload register earlier in the insn,
5514 verify that assumption, now that all reloads have been assigned. */
5515
5516 for (j = 0; j < n_reloads; j++)
5517 {
5518 register int r = reload_order[j];
5519
5520 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5521 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5522 reload_opnum[r],
5523 reload_when_needed[r]))
5524 reload_inherited[r] = 0;
5525
5526 /* If we found a better place to reload from,
5527 validate it in the same fashion, if it is a reload reg. */
5528 if (reload_override_in[r]
5529 && (GET_CODE (reload_override_in[r]) == REG
5530 || GET_CODE (reload_override_in[r]) == SUBREG))
5531 {
5532 int regno = true_regnum (reload_override_in[r]);
5533 if (spill_reg_order[regno] >= 0
5534 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5535 reload_when_needed[r]))
5536 reload_override_in[r] = 0;
5537 }
5538 }
5539
5540 /* Now that reload_override_in is known valid,
5541 actually override reload_in. */
5542 for (j = 0; j < n_reloads; j++)
5543 if (reload_override_in[j])
5544 reload_in[j] = reload_override_in[j];
5545
5546 /* If this reload won't be done because it has been cancelled or is
5547 optional and not inherited, clear reload_reg_rtx so other
5548 routines (such as subst_reloads) don't get confused. */
5549 for (j = 0; j < n_reloads; j++)
5550 if (reload_reg_rtx[j] != 0
5551 && ((reload_optional[j] && ! reload_inherited[j])
5552 || (reload_in[j] == 0 && reload_out[j] == 0
5553 && ! reload_secondary_p[j])))
5554 {
5555 int regno = true_regnum (reload_reg_rtx[j]);
5556
5557 if (spill_reg_order[regno] >= 0)
5558 clear_reload_reg_in_use (regno, reload_opnum[j],
5559 reload_when_needed[j], reload_mode[j]);
5560 reload_reg_rtx[j] = 0;
5561 }
5562
5563 /* Record which pseudos and which spill regs have output reloads. */
5564 for (j = 0; j < n_reloads; j++)
5565 {
5566 register int r = reload_order[j];
5567
5568 i = reload_spill_index[r];
5569
5570 /* I is nonneg if this reload used one of the spill regs.
5571 If reload_reg_rtx[r] is 0, this is an optional reload
5572 that we opted to ignore. */
5573 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5574 && reload_reg_rtx[r] != 0)
5575 {
5576 register int nregno = REGNO (reload_out[r]);
5577 int nr = 1;
5578
5579 if (nregno < FIRST_PSEUDO_REGISTER)
5580 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5581
5582 while (--nr >= 0)
5583 reg_has_output_reload[nregno + nr] = 1;
5584
5585 if (i >= 0)
5586 {
5587 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5588 while (--nr >= 0)
5589 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5590 }
5591
5592 if (reload_when_needed[r] != RELOAD_OTHER
5593 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5594 && reload_when_needed[r] != RELOAD_FOR_INSN)
5595 abort ();
5596 }
5597 }
5598 }
5599 \f
5600 /* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5601 reloads of the same item for fear that we might not have enough reload
5602 registers. However, normally they will get the same reload register
5603 and hence actually need not be loaded twice.
5604
5605 Here we check for the most common case of this phenomenon: when we have
5606 a number of reloads for the same object, each of which were allocated
5607 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5608 reload, and is not modified in the insn itself. If we find such,
5609 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5610 This will not increase the number of spill registers needed and will
5611 prevent redundant code. */
5612
5613 #ifdef SMALL_REGISTER_CLASSES
5614
5615 static void
5616 merge_assigned_reloads (insn)
5617 rtx insn;
5618 {
5619 int i, j;
5620
5621 /* Scan all the reloads looking for ones that only load values and
5622 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5623 assigned and not modified by INSN. */
5624
5625 for (i = 0; i < n_reloads; i++)
5626 {
5627 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5628 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5629 || reg_set_p (reload_reg_rtx[i], insn))
5630 continue;
5631
5632 /* Look at all other reloads. Ensure that the only use of this
5633 reload_reg_rtx is in a reload that just loads the same value
5634 as we do. Note that any secondary reloads must be of the identical
5635 class since the values, modes, and result registers are the
5636 same, so we need not do anything with any secondary reloads. */
5637
5638 for (j = 0; j < n_reloads; j++)
5639 {
5640 if (i == j || reload_reg_rtx[j] == 0
5641 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5642 reload_reg_rtx[i]))
5643 continue;
5644
5645 /* If the reload regs aren't exactly the same (e.g, different modes)
5646 or if the values are different, we can't merge anything with this
5647 reload register. */
5648
5649 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5650 || reload_out[j] != 0 || reload_in[j] == 0
5651 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5652 break;
5653 }
5654
5655 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5656 we, in fact, found any matching reloads. */
5657
5658 if (j == n_reloads)
5659 {
5660 for (j = 0; j < n_reloads; j++)
5661 if (i != j && reload_reg_rtx[j] != 0
5662 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5663 {
5664 reload_when_needed[i] = RELOAD_OTHER;
5665 reload_in[j] = 0;
5666 transfer_replacements (i, j);
5667 }
5668
5669 /* If this is now RELOAD_OTHER, look for any reloads that load
5670 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5671 if they were for inputs, RELOAD_OTHER for outputs. Note that
5672 this test is equivalent to looking for reloads for this operand
5673 number. */
5674
5675 if (reload_when_needed[i] == RELOAD_OTHER)
5676 for (j = 0; j < n_reloads; j++)
5677 if (reload_in[j] != 0
5678 && reload_when_needed[i] != RELOAD_OTHER
5679 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5680 reload_in[i]))
5681 reload_when_needed[j]
5682 = reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5683 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5684 }
5685 }
5686 }
#endif /* SMALL_REGISTER_CLASSES */
5688 \f
5689 /* Output insns to reload values in and out of the chosen reload regs. */
5690
5691 static void
5692 emit_reload_insns (insn)
5693 rtx insn;
5694 {
5695 register int j;
5696 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5697 rtx other_input_address_reload_insns = 0;
5698 rtx other_input_reload_insns = 0;
5699 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5700 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5701 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5702 rtx operand_reload_insns = 0;
5703 rtx other_operand_reload_insns = 0;
5704 rtx following_insn = NEXT_INSN (insn);
5705 rtx before_insn = insn;
5706 int special;
5707 /* Values to be put in spill_reg_store are put here first. */
5708 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5709
5710 for (j = 0; j < reload_n_operands; j++)
5711 input_reload_insns[j] = input_address_reload_insns[j]
5712 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5713
5714 /* Now output the instructions to copy the data into and out of the
5715 reload registers. Do these in the order that the reloads were reported,
5716 since reloads of base and index registers precede reloads of operands
5717 and the operands may need the base and index registers reloaded. */
5718
5719 for (j = 0; j < n_reloads; j++)
5720 {
5721 register rtx old;
5722 rtx oldequiv_reg = 0;
5723 rtx this_reload_insn = 0;
5724
5725 if (reload_spill_index[j] >= 0)
5726 new_spill_reg_store[reload_spill_index[j]] = 0;
5727
5728 old = reload_in[j];
5729 if (old != 0 && ! reload_inherited[j]
5730 && ! rtx_equal_p (reload_reg_rtx[j], old)
5731 && reload_reg_rtx[j] != 0)
5732 {
5733 register rtx reloadreg = reload_reg_rtx[j];
5734 rtx oldequiv = 0;
5735 enum machine_mode mode;
5736 rtx *where;
5737
5738 /* Determine the mode to reload in.
5739 This is very tricky because we have three to choose from.
5740 There is the mode the insn operand wants (reload_inmode[J]).
5741 There is the mode of the reload register RELOADREG.
5742 There is the intrinsic mode of the operand, which we could find
5743 by stripping some SUBREGs.
5744 It turns out that RELOADREG's mode is irrelevant:
5745 we can change that arbitrarily.
5746
5747 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5748 then the reload reg may not support QImode moves, so use SImode.
5749 If foo is in memory due to spilling a pseudo reg, this is safe,
5750 because the QImode value is in the least significant part of a
5751 slot big enough for a SImode. If foo is some other sort of
5752 memory reference, then it is impossible to reload this case,
5753 so previous passes had better make sure this never happens.
5754
5755 Then consider a one-word union which has SImode and one of its
5756 members is a float, being fetched as (SUBREG:SF union:SI).
5757 We must fetch that as SFmode because we could be loading into
5758 a float-only register. In this case OLD's mode is correct.
5759
5760 Consider an immediate integer: it has VOIDmode. Here we need
5761 to get a mode from something else.
5762
5763 In some cases, there is a fourth mode, the operand's
5764 containing mode. If the insn specifies a containing mode for
5765 this operand, it overrides all others.
5766
5767 I am not sure whether the algorithm here is always right,
5768 but it does the right things in those cases. */
5769
5770 mode = GET_MODE (old);
5771 if (mode == VOIDmode)
5772 mode = reload_inmode[j];
5773
5774 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5775 /* If we need a secondary register for this operation, see if
5776 the value is already in a register in that class. Don't
5777 do this if the secondary register will be used as a scratch
5778 register. */
5779
5780 if (reload_secondary_in_reload[j] >= 0
5781 && reload_secondary_in_icode[j] == CODE_FOR_nothing
5782 && optimize)
5783 oldequiv
5784 = find_equiv_reg (old, insn,
5785 reload_reg_class[reload_secondary_in_reload[j]],
5786 -1, NULL_PTR, 0, mode);
5787 #endif
5788
5789 /* If reloading from memory, see if there is a register
5790 that already holds the same value. If so, reload from there.
5791 We can pass 0 as the reload_reg_p argument because
5792 any other reload has either already been emitted,
5793 in which case find_equiv_reg will see the reload-insn,
5794 or has yet to be emitted, in which case it doesn't matter
5795 because we will use this equiv reg right away. */
5796
5797 if (oldequiv == 0 && optimize
5798 && (GET_CODE (old) == MEM
5799 || (GET_CODE (old) == REG
5800 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5801 && reg_renumber[REGNO (old)] < 0)))
5802 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5803 -1, NULL_PTR, 0, mode);
5804
5805 if (oldequiv)
5806 {
5807 int regno = true_regnum (oldequiv);
5808
5809 /* If OLDEQUIV is a spill register, don't use it for this
5810 if any other reload needs it at an earlier stage of this insn
5811 or at this stage. */
5812 if (spill_reg_order[regno] >= 0
5813 && (! reload_reg_free_p (regno, reload_opnum[j],
5814 reload_when_needed[j])
5815 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5816 reload_when_needed[j])))
5817 oldequiv = 0;
5818
5819 /* If OLDEQUIV is not a spill register,
5820 don't use it if any other reload wants it. */
5821 if (spill_reg_order[regno] < 0)
5822 {
5823 int k;
5824 for (k = 0; k < n_reloads; k++)
5825 if (reload_reg_rtx[k] != 0 && k != j
5826 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5827 oldequiv))
5828 {
5829 oldequiv = 0;
5830 break;
5831 }
5832 }
5833
5834 /* If it is no cheaper to copy from OLDEQUIV into the
5835 reload register than it would be to move from memory,
5836 don't use it. Likewise, if we need a secondary register
5837 or memory. */
5838
5839 if (oldequiv != 0
5840 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5841 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5842 reload_reg_class[j])
5843 >= MEMORY_MOVE_COST (mode)))
5844 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5845 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5846 mode, oldequiv)
5847 != NO_REGS)
5848 #endif
5849 #ifdef SECONDARY_MEMORY_NEEDED
5850 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5851 REGNO_REG_CLASS (regno),
5852 mode)
5853 #endif
5854 ))
5855 oldequiv = 0;
5856 }
5857
5858 if (oldequiv == 0)
5859 oldequiv = old;
5860 else if (GET_CODE (oldequiv) == REG)
5861 oldequiv_reg = oldequiv;
5862 else if (GET_CODE (oldequiv) == SUBREG)
5863 oldequiv_reg = SUBREG_REG (oldequiv);
5864
5865 /* If we are reloading from a register that was recently stored in
5866 with an output-reload, see if we can prove there was
5867 actually no need to store the old value in it. */
5868
5869 if (optimize && GET_CODE (oldequiv) == REG
5870 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5871 && spill_reg_order[REGNO (oldequiv)] >= 0
5872 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
5873 && find_reg_note (insn, REG_DEAD, reload_in[j])
5874 /* This is unsafe if operand occurs more than once in current
5875 insn. Perhaps some occurrences weren't reloaded. */
5876 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5877 delete_output_reload
5878 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5879
5880 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5881 then load RELOADREG from OLDEQUIV. Note that we cannot use
5882 gen_lowpart_common since it can do the wrong thing when
5883 RELOADREG has a multi-word mode. Note that RELOADREG
5884 must always be a REG here. */
5885
5886 if (GET_MODE (reloadreg) != mode)
5887 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5888 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5889 oldequiv = SUBREG_REG (oldequiv);
5890 if (GET_MODE (oldequiv) != VOIDmode
5891 && mode != GET_MODE (oldequiv))
5892 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5893
5894 /* Switch to the right place to emit the reload insns. */
5895 switch (reload_when_needed[j])
5896 {
5897 case RELOAD_OTHER:
5898 where = &other_input_reload_insns;
5899 break;
5900 case RELOAD_FOR_INPUT:
5901 where = &input_reload_insns[reload_opnum[j]];
5902 break;
5903 case RELOAD_FOR_INPUT_ADDRESS:
5904 where = &input_address_reload_insns[reload_opnum[j]];
5905 break;
5906 case RELOAD_FOR_OUTPUT_ADDRESS:
5907 where = &output_address_reload_insns[reload_opnum[j]];
5908 break;
5909 case RELOAD_FOR_OPERAND_ADDRESS:
5910 where = &operand_reload_insns;
5911 break;
5912 case RELOAD_FOR_OPADDR_ADDR:
5913 where = &other_operand_reload_insns;
5914 break;
5915 case RELOAD_FOR_OTHER_ADDRESS:
5916 where = &other_input_address_reload_insns;
5917 break;
5918 default:
5919 abort ();
5920 }
5921
5922 push_to_sequence (*where);
5923 special = 0;
5924
5925 /* Auto-increment addresses must be reloaded in a special way. */
5926 if (GET_CODE (oldequiv) == POST_INC
5927 || GET_CODE (oldequiv) == POST_DEC
5928 || GET_CODE (oldequiv) == PRE_INC
5929 || GET_CODE (oldequiv) == PRE_DEC)
5930 {
5931 /* We are not going to bother supporting the case where a
5932 incremented register can't be copied directly from
5933 OLDEQUIV since this seems highly unlikely. */
5934 if (reload_secondary_in_reload[j] >= 0)
5935 abort ();
5936 /* Prevent normal processing of this reload. */
5937 special = 1;
5938 /* Output a special code sequence for this case. */
5939 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5940 }
5941
5942 /* If we are reloading a pseudo-register that was set by the previous
5943 insn, see if we can get rid of that pseudo-register entirely
5944 by redirecting the previous insn into our reload register. */
5945
5946 else if (optimize && GET_CODE (old) == REG
5947 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5948 && dead_or_set_p (insn, old)
5949 /* This is unsafe if some other reload
5950 uses the same reg first. */
5951 && reload_reg_free_before_p (REGNO (reloadreg),
5952 reload_opnum[j],
5953 reload_when_needed[j]))
5954 {
5955 rtx temp = PREV_INSN (insn);
5956 while (temp && GET_CODE (temp) == NOTE)
5957 temp = PREV_INSN (temp);
5958 if (temp
5959 && GET_CODE (temp) == INSN
5960 && GET_CODE (PATTERN (temp)) == SET
5961 && SET_DEST (PATTERN (temp)) == old
5962 /* Make sure we can access insn_operand_constraint. */
5963 && asm_noperands (PATTERN (temp)) < 0
5964 /* This is unsafe if prev insn rejects our reload reg. */
5965 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5966 reloadreg)
5967 /* This is unsafe if operand occurs more than once in current
5968 insn. Perhaps some occurrences aren't reloaded. */
5969 && count_occurrences (PATTERN (insn), old) == 1
5970 /* Don't risk splitting a matching pair of operands. */
5971 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5972 {
5973 /* Store into the reload register instead of the pseudo. */
5974 SET_DEST (PATTERN (temp)) = reloadreg;
5975 /* If these are the only uses of the pseudo reg,
5976 pretend for GDB it lives in the reload reg we used. */
5977 if (reg_n_deaths[REGNO (old)] == 1
5978 && reg_n_sets[REGNO (old)] == 1)
5979 {
5980 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5981 alter_reg (REGNO (old), -1);
5982 }
5983 special = 1;
5984 }
5985 }
5986
5987 /* We can't do that, so output an insn to load RELOADREG. */
5988
5989 if (! special)
5990 {
5991 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5992 rtx second_reload_reg = 0;
5993 enum insn_code icode;
5994
5995 /* If we have a secondary reload, pick up the secondary register
5996 and icode, if any. If OLDEQUIV and OLD are different or
5997 if this is an in-out reload, recompute whether or not we
5998 still need a secondary register and what the icode should
5999 be. If we still need a secondary register and the class or
6000 icode is different, go back to reloading from OLD if using
6001 OLDEQUIV means that we got the wrong type of register. We
6002 cannot have different class or icode due to an in-out reload
6003 because we don't make such reloads when both the input and
6004 output need secondary reload registers. */
6005
6006 if (reload_secondary_in_reload[j] >= 0)
6007 {
6008 int secondary_reload = reload_secondary_in_reload[j];
6009 rtx real_oldequiv = oldequiv;
6010 rtx real_old = old;
6011
6012 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6013 and similarly for OLD.
6014 See comments in get_secondary_reload in reload.c. */
6015 if (GET_CODE (oldequiv) == REG
6016 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6017 && reg_equiv_mem[REGNO (oldequiv)] != 0)
6018 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6019
6020 if (GET_CODE (old) == REG
6021 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6022 && reg_equiv_mem[REGNO (old)] != 0)
6023 real_old = reg_equiv_mem[REGNO (old)];
6024
6025 second_reload_reg = reload_reg_rtx[secondary_reload];
6026 icode = reload_secondary_in_icode[j];
6027
6028 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6029 || (reload_in[j] != 0 && reload_out[j] != 0))
6030 {
6031 enum reg_class new_class
6032 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6033 mode, real_oldequiv);
6034
6035 if (new_class == NO_REGS)
6036 second_reload_reg = 0;
6037 else
6038 {
6039 enum insn_code new_icode;
6040 enum machine_mode new_mode;
6041
6042 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6043 REGNO (second_reload_reg)))
6044 oldequiv = old, real_oldequiv = real_old;
6045 else
6046 {
6047 new_icode = reload_in_optab[(int) mode];
6048 if (new_icode != CODE_FOR_nothing
6049 && ((insn_operand_predicate[(int) new_icode][0]
6050 && ! ((*insn_operand_predicate[(int) new_icode][0])
6051 (reloadreg, mode)))
6052 || (insn_operand_predicate[(int) new_icode][1]
6053 && ! ((*insn_operand_predicate[(int) new_icode][1])
6054 (real_oldequiv, mode)))))
6055 new_icode = CODE_FOR_nothing;
6056
6057 if (new_icode == CODE_FOR_nothing)
6058 new_mode = mode;
6059 else
6060 new_mode = insn_operand_mode[(int) new_icode][2];
6061
6062 if (GET_MODE (second_reload_reg) != new_mode)
6063 {
6064 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6065 new_mode))
6066 oldequiv = old, real_oldequiv = real_old;
6067 else
6068 second_reload_reg
6069 = gen_rtx (REG, new_mode,
6070 REGNO (second_reload_reg));
6071 }
6072 }
6073 }
6074 }
6075
6076 /* If we still need a secondary reload register, check
6077 to see if it is being used as a scratch or intermediate
6078 register and generate code appropriately. If we need
6079 a scratch register, use REAL_OLDEQUIV since the form of
6080 the insn may depend on the actual address if it is
6081 a MEM. */
6082
6083 if (second_reload_reg)
6084 {
6085 if (icode != CODE_FOR_nothing)
6086 {
6087 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6088 second_reload_reg));
6089 special = 1;
6090 }
6091 else
6092 {
6093 /* See if we need a scratch register to load the
6094 intermediate register (a tertiary reload). */
6095 enum insn_code tertiary_icode
6096 = reload_secondary_in_icode[secondary_reload];
6097
6098 if (tertiary_icode != CODE_FOR_nothing)
6099 {
6100 rtx third_reload_reg
6101 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6102
6103 emit_insn ((GEN_FCN (tertiary_icode)
6104 (second_reload_reg, real_oldequiv,
6105 third_reload_reg)));
6106 }
6107 else
6108 gen_reload (second_reload_reg, oldequiv,
6109 reload_opnum[j],
6110 reload_when_needed[j]);
6111
6112 oldequiv = second_reload_reg;
6113 }
6114 }
6115 }
6116 #endif
6117
6118 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6119 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6120 reload_when_needed[j]);
6121
6122 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6123 /* We may have to make a REG_DEAD note for the secondary reload
6124 register in the insns we just made. Find the last insn that
6125 mentioned the register. */
6126 if (! special && second_reload_reg
6127 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6128 {
6129 rtx prev;
6130
6131 for (prev = get_last_insn (); prev;
6132 prev = PREV_INSN (prev))
6133 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
6134 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6135 PATTERN (prev)))
6136 {
6137 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6138 second_reload_reg,
6139 REG_NOTES (prev));
6140 break;
6141 }
6142 }
6143 #endif
6144 }
6145
6146 this_reload_insn = get_last_insn ();
6147 /* End this sequence. */
6148 *where = get_insns ();
6149 end_sequence ();
6150 }
6151
6152 /* Add a note saying the input reload reg
6153 dies in this insn, if anyone cares. */
6154 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6155 if (old != 0
6156 && reload_reg_rtx[j] != old
6157 && reload_reg_rtx[j] != 0
6158 && reload_out[j] == 0
6159 && ! reload_inherited[j]
6160 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6161 {
6162 register rtx reloadreg = reload_reg_rtx[j];
6163
6164 #if 0
6165 /* We can't abort here because we need to support this for sched.c.
6166 It's not terrible to miss a REG_DEAD note, but we should try
6167 to figure out how to do this correctly. */
6168 /* The code below is incorrect for address-only reloads. */
6169 if (reload_when_needed[j] != RELOAD_OTHER
6170 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6171 abort ();
6172 #endif
6173
6174 /* Add a death note to this insn, for an input reload. */
6175
6176 if ((reload_when_needed[j] == RELOAD_OTHER
6177 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6178 && ! dead_or_set_p (insn, reloadreg))
6179 REG_NOTES (insn)
6180 = gen_rtx (EXPR_LIST, REG_DEAD,
6181 reloadreg, REG_NOTES (insn));
6182 }
6183
6184 /* When we inherit a reload, the last marked death of the reload reg
6185 may no longer really be a death. */
6186 if (reload_reg_rtx[j] != 0
6187 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6188 && reload_inherited[j])
6189 {
6190 /* Handle inheriting an output reload.
6191 Remove the death note from the output reload insn. */
6192 if (reload_spill_index[j] >= 0
6193 && GET_CODE (reload_in[j]) == REG
6194 && spill_reg_store[reload_spill_index[j]] != 0
6195 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6196 REG_DEAD, REGNO (reload_reg_rtx[j])))
6197 remove_death (REGNO (reload_reg_rtx[j]),
6198 spill_reg_store[reload_spill_index[j]]);
6199 /* Likewise for input reloads that were inherited. */
6200 else if (reload_spill_index[j] >= 0
6201 && GET_CODE (reload_in[j]) == REG
6202 && spill_reg_store[reload_spill_index[j]] == 0
6203 && reload_inheritance_insn[j] != 0
6204 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6205 REGNO (reload_reg_rtx[j])))
6206 remove_death (REGNO (reload_reg_rtx[j]),
6207 reload_inheritance_insn[j]);
6208 else
6209 {
6210 rtx prev;
6211
6212 /* We got this register from find_equiv_reg.
6213 Search back for its last death note and get rid of it.
6214 But don't search back too far.
6215 Don't go past a place where this reg is set,
6216 since a death note before that remains valid. */
6217 for (prev = PREV_INSN (insn);
6218 prev && GET_CODE (prev) != CODE_LABEL;
6219 prev = PREV_INSN (prev))
6220 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6221 && dead_or_set_p (prev, reload_reg_rtx[j]))
6222 {
6223 if (find_regno_note (prev, REG_DEAD,
6224 REGNO (reload_reg_rtx[j])))
6225 remove_death (REGNO (reload_reg_rtx[j]), prev);
6226 break;
6227 }
6228 }
6229 }
6230
6231 /* We might have used find_equiv_reg above to choose an alternate
6232 place from which to reload. If so, and it died, we need to remove
6233 that death and move it to one of the insns we just made. */
6234
6235 if (oldequiv_reg != 0
6236 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6237 {
6238 rtx prev, prev1;
6239
6240 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6241 prev = PREV_INSN (prev))
6242 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6243 && dead_or_set_p (prev, oldequiv_reg))
6244 {
6245 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6246 {
6247 for (prev1 = this_reload_insn;
6248 prev1; prev1 = PREV_INSN (prev1))
6249 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
6250 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6251 PATTERN (prev1)))
6252 {
6253 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6254 oldequiv_reg,
6255 REG_NOTES (prev1));
6256 break;
6257 }
6258 remove_death (REGNO (oldequiv_reg), prev);
6259 }
6260 break;
6261 }
6262 }
6263 #endif
6264
6265 /* If we are reloading a register that was recently stored in with an
6266 output-reload, see if we can prove there was
6267 actually no need to store the old value in it. */
6268
6269 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6270 && reload_in[j] != 0
6271 && GET_CODE (reload_in[j]) == REG
6272 #if 0
6273 /* There doesn't seem to be any reason to restrict this to pseudos
6274 and doing so loses in the case where we are copying from a
6275 register of the wrong class. */
6276 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6277 #endif
6278 && spill_reg_store[reload_spill_index[j]] != 0
6279 /* This is unsafe if some other reload uses the same reg first. */
6280 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6281 reload_opnum[j], reload_when_needed[j])
6282 && dead_or_set_p (insn, reload_in[j])
6283 /* This is unsafe if operand occurs more than once in current
6284 insn. Perhaps some occurrences weren't reloaded. */
6285 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6286 delete_output_reload (insn, j,
6287 spill_reg_store[reload_spill_index[j]]);
6288
6289 /* Input-reloading is done. Now do output-reloading,
6290 storing the value from the reload-register after the main insn
6291 if reload_out[j] is nonzero.
6292
6293 ??? At some point we need to support handling output reloads of
6294 JUMP_INSNs or insns that set cc0. */
6295 old = reload_out[j];
6296 if (old != 0
6297 && reload_reg_rtx[j] != old
6298 && reload_reg_rtx[j] != 0)
6299 {
6300 register rtx reloadreg = reload_reg_rtx[j];
6301 register rtx second_reloadreg = 0;
6302 rtx note, p;
6303 enum machine_mode mode;
6304 int special = 0;
6305
6306 /* An output operand that dies right away does need a reload,
6307 but need not be copied from it. Show the new location in the
6308 REG_UNUSED note. */
6309 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6310 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6311 {
6312 XEXP (note, 0) = reload_reg_rtx[j];
6313 continue;
6314 }
6315 /* Likewise for a SUBREG of an operand that dies. */
6316 else if (GET_CODE (old) == SUBREG
6317 && GET_CODE (SUBREG_REG (old)) == REG
6318 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6319 SUBREG_REG (old))))
6320 {
6321 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6322 reload_reg_rtx[j]);
6323 continue;
6324 }
6325 else if (GET_CODE (old) == SCRATCH)
6326 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6327 but we don't want to make an output reload. */
6328 continue;
6329
6330 #if 0
6331 /* Strip off of OLD any size-increasing SUBREGs such as
6332 (SUBREG:SI foo:QI 0). */
6333
6334 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6335 && (GET_MODE_SIZE (GET_MODE (old))
6336 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6337 old = SUBREG_REG (old);
6338 #endif
6339
	  /* If this is a JUMP_INSN, we can't support output reloads yet.  */
6341 if (GET_CODE (insn) == JUMP_INSN)
6342 abort ();
6343
6344 if (reload_when_needed[j] == RELOAD_OTHER)
6345 start_sequence ();
6346 else
6347 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6348
6349 /* Determine the mode to reload in.
6350 See comments above (for input reloading). */
6351
6352 mode = GET_MODE (old);
6353 if (mode == VOIDmode)
6354 {
6355 /* VOIDmode should never happen for an output. */
6356 if (asm_noperands (PATTERN (insn)) < 0)
6357 /* It's the compiler's fault. */
6358 fatal_insn ("VOIDmode on an output", insn);
6359 error_for_asm (insn, "output operand is constant in `asm'");
6360 /* Prevent crash--use something we know is valid. */
6361 mode = word_mode;
6362 old = gen_rtx (REG, mode, REGNO (reloadreg));
6363 }
6364
6365 if (GET_MODE (reloadreg) != mode)
6366 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6367
6368 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6369
6370 /* If we need two reload regs, set RELOADREG to the intermediate
6371 one, since it will be stored into OLD. We might need a secondary
6372 register only for an input reload, so check again here. */
6373
6374 if (reload_secondary_out_reload[j] >= 0)
6375 {
6376 rtx real_old = old;
6377
6378 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6379 && reg_equiv_mem[REGNO (old)] != 0)
6380 real_old = reg_equiv_mem[REGNO (old)];
6381
6382 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6383 mode, real_old)
6384 != NO_REGS))
6385 {
6386 second_reloadreg = reloadreg;
6387 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6388
6389 /* See if RELOADREG is to be used as a scratch register
6390 or as an intermediate register. */
6391 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6392 {
6393 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6394 (real_old, second_reloadreg, reloadreg)));
6395 special = 1;
6396 }
6397 else
6398 {
6399 /* See if we need both a scratch and intermediate reload
6400 register. */
6401
6402 int secondary_reload = reload_secondary_out_reload[j];
6403 enum insn_code tertiary_icode
6404 = reload_secondary_out_icode[secondary_reload];
6405
6406 if (GET_MODE (reloadreg) != mode)
6407 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6408
6409 if (tertiary_icode != CODE_FOR_nothing)
6410 {
6411 rtx third_reloadreg
6412 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6413 rtx tem;
6414
		    /* Copy the primary reload reg to the secondary reload reg
		       (note that these have been swapped above), then copy the
		       secondary reload reg to OLD using our insn.  */
6418
6419 /* If REAL_OLD is a paradoxical SUBREG, remove it
6420 and try to put the opposite SUBREG on
6421 RELOADREG. */
6422 if (GET_CODE (real_old) == SUBREG
6423 && (GET_MODE_SIZE (GET_MODE (real_old))
6424 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6425 && 0 != (tem = gen_lowpart_common
6426 (GET_MODE (SUBREG_REG (real_old)),
6427 reloadreg)))
6428 real_old = SUBREG_REG (real_old), reloadreg = tem;
6429
6430 gen_reload (reloadreg, second_reloadreg,
6431 reload_opnum[j], reload_when_needed[j]);
6432 emit_insn ((GEN_FCN (tertiary_icode)
6433 (real_old, reloadreg, third_reloadreg)));
6434 special = 1;
6435 }
6436
6437 else
6438 /* Copy between the reload regs here and then to
6439 OUT later. */
6440
6441 gen_reload (reloadreg, second_reloadreg,
6442 reload_opnum[j], reload_when_needed[j]);
6443 }
6444 }
6445 }
6446 #endif
6447
6448 /* Output the last reload insn. */
6449 if (! special)
6450 gen_reload (old, reloadreg, reload_opnum[j],
6451 reload_when_needed[j]);
6452
6453 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6454 /* If final will look at death notes for this reg,
6455 put one on the last output-reload insn to use it. Similarly
6456 for any secondary register. */
6457 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6458 for (p = get_last_insn (); p; p = PREV_INSN (p))
6459 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6460 && reg_overlap_mentioned_for_reload_p (reloadreg,
6461 PATTERN (p)))
6462 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6463 reloadreg, REG_NOTES (p));
6464
6465 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6466 if (! special
6467 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6468 for (p = get_last_insn (); p; p = PREV_INSN (p))
6469 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6470 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6471 PATTERN (p)))
6472 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6473 second_reloadreg, REG_NOTES (p));
6474 #endif
6475 #endif
6476 /* Look at all insns we emitted, just to be safe. */
6477 for (p = get_insns (); p; p = NEXT_INSN (p))
6478 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6479 {
6480 /* If this output reload doesn't come from a spill reg,
6481 clear any memory of reloaded copies of the pseudo reg.
6482 If this output reload comes from a spill reg,
6483 reg_has_output_reload will make this do nothing. */
6484 note_stores (PATTERN (p), forget_old_reloads_1);
6485
6486 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6487 && reload_spill_index[j] >= 0)
6488 new_spill_reg_store[reload_spill_index[j]] = p;
6489 }
6490
6491 if (reload_when_needed[j] == RELOAD_OTHER)
6492 emit_insns (output_reload_insns[reload_opnum[j]]);
6493
6494 output_reload_insns[reload_opnum[j]] = get_insns ();
6495 end_sequence ();
6496 }
6497 }
6498
6499 /* Now write all the insns we made for reloads in the order expected by
6500 the allocation functions. Prior to the insn being reloaded, we write
6501 the following reloads:
6502
6503 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6504
6505 RELOAD_OTHER reloads, output in ascending order by reload number.
6506
6507 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6508 the RELOAD_FOR_INPUT reload for the operand.
6509
6510 RELOAD_FOR_OPADDR_ADDRS reloads.
6511
6512 RELOAD_FOR_OPERAND_ADDRESS reloads.
6513
6514 After the insn being reloaded, we write the following:
6515
6516 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6517 the RELOAD_FOR_OUTPUT reload for that operand.
6518
6519 Any RELOAD_OTHER output reloads, output in descending order by
6520 reload number. */
6521
6522 emit_insns_before (other_input_address_reload_insns, before_insn);
6523 emit_insns_before (other_input_reload_insns, before_insn);
6524
6525 for (j = 0; j < reload_n_operands; j++)
6526 {
6527 emit_insns_before (input_address_reload_insns[j], before_insn);
6528 emit_insns_before (input_reload_insns[j], before_insn);
6529 }
6530
6531 emit_insns_before (other_operand_reload_insns, before_insn);
6532 emit_insns_before (operand_reload_insns, before_insn);
6533
6534 for (j = 0; j < reload_n_operands; j++)
6535 {
6536 emit_insns_before (output_address_reload_insns[j], following_insn);
6537 emit_insns_before (output_reload_insns[j], following_insn);
6538 }
6539
6540 /* Move death notes from INSN
6541 to output-operand-address and output reload insns. */
6542 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6543 {
6544 rtx insn1;
6545 /* Loop over those insns, last ones first. */
6546 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6547 insn1 = PREV_INSN (insn1))
6548 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6549 {
6550 rtx source = SET_SRC (PATTERN (insn1));
6551 rtx dest = SET_DEST (PATTERN (insn1));
6552
6553 /* The note we will examine next. */
6554 rtx reg_notes = REG_NOTES (insn);
6555 /* The place that pointed to this note. */
6556 rtx *prev_reg_note = &REG_NOTES (insn);
6557
6558 /* If the note is for something used in the source of this
6559 reload insn, or in the output address, move the note. */
6560 while (reg_notes)
6561 {
6562 rtx next_reg_notes = XEXP (reg_notes, 1);
6563 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6564 && GET_CODE (XEXP (reg_notes, 0)) == REG
6565 && ((GET_CODE (dest) != REG
6566 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6567 dest))
6568 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6569 source)))
6570 {
6571 *prev_reg_note = next_reg_notes;
6572 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6573 REG_NOTES (insn1) = reg_notes;
6574 }
6575 else
6576 prev_reg_note = &XEXP (reg_notes, 1);
6577
6578 reg_notes = next_reg_notes;
6579 }
6580 }
6581 }
6582 #endif
6583
6584 /* For all the spill regs newly reloaded in this instruction,
6585 record what they were reloaded from, so subsequent instructions
6586 can inherit the reloads.
6587
6588 Update spill_reg_store for the reloads of this insn.
6589 Copy the elements that were updated in the loop above. */
6590
6591 for (j = 0; j < n_reloads; j++)
6592 {
6593 register int r = reload_order[j];
6594 register int i = reload_spill_index[r];
6595
6596 /* I is nonneg if this reload used one of the spill regs.
6597 If reload_reg_rtx[r] is 0, this is an optional reload
6598 that we opted to ignore. */
6599
6600 if (i >= 0 && reload_reg_rtx[r] != 0)
6601 {
6602 int nr
6603 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6604 int k;
6605 int part_reaches_end = 0;
6606 int all_reaches_end = 1;
6607
6608 /* For a multi register reload, we need to check if all or part
6609 of the value lives to the end. */
6610 for (k = 0; k < nr; k++)
6611 {
6612 if (reload_reg_reaches_end_p (spill_regs[i] + k, reload_opnum[r],
6613 reload_when_needed[r]))
6614 part_reaches_end = 1;
6615 else
6616 all_reaches_end = 0;
6617 }
6618
6619 /* Ignore reloads that don't reach the end of the insn in
6620 entirety. */
6621 if (all_reaches_end)
6622 {
6623 /* First, clear out memory of what used to be in this spill reg.
6624 If consecutive registers are used, clear them all. */
6625
6626 for (k = 0; k < nr; k++)
6627 {
6628 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6629 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6630 }
6631
6632 /* Maybe the spill reg contains a copy of reload_out. */
6633 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6634 {
6635 register int nregno = REGNO (reload_out[r]);
6636 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6637 : HARD_REGNO_NREGS (nregno,
6638 GET_MODE (reload_reg_rtx[r])));
6639
6640 spill_reg_store[i] = new_spill_reg_store[i];
6641 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6642
6643 /* If NREGNO is a hard register, it may occupy more than
6644 one register. If it does, say what is in the
6645 rest of the registers assuming that both registers
6646 agree on how many words the object takes. If not,
6647 invalidate the subsequent registers. */
6648
6649 if (nregno < FIRST_PSEUDO_REGISTER)
6650 for (k = 1; k < nnr; k++)
6651 reg_last_reload_reg[nregno + k]
6652 = (nr == nnr
6653 ? gen_rtx (REG,
6654 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6655 REGNO (reload_reg_rtx[r]) + k)
6656 : 0);
6657
6658 /* Now do the inverse operation. */
6659 for (k = 0; k < nr; k++)
6660 {
6661 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6662 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6663 ? nregno
6664 : nregno + k);
6665 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6666 }
6667 }
6668
6669 /* Maybe the spill reg contains a copy of reload_in. Only do
6670 something if there will not be an output reload for
6671 the register being reloaded. */
6672 else if (reload_out[r] == 0
6673 && reload_in[r] != 0
6674 && ((GET_CODE (reload_in[r]) == REG
6675 && ! reg_has_output_reload[REGNO (reload_in[r])])
6676 || (GET_CODE (reload_in_reg[r]) == REG
6677 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
6678 {
6679 register int nregno;
6680 int nnr;
6681
6682 if (GET_CODE (reload_in[r]) == REG)
6683 nregno = REGNO (reload_in[r]);
6684 else
6685 nregno = REGNO (reload_in_reg[r]);
6686
6687 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6688 : HARD_REGNO_NREGS (nregno,
6689 GET_MODE (reload_reg_rtx[r])));
6690
6691 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6692
6693 if (nregno < FIRST_PSEUDO_REGISTER)
6694 for (k = 1; k < nnr; k++)
6695 reg_last_reload_reg[nregno + k]
6696 = (nr == nnr
6697 ? gen_rtx (REG,
6698 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6699 REGNO (reload_reg_rtx[r]) + k)
6700 : 0);
6701
6702 /* Unless we inherited this reload, show we haven't
6703 recently done a store. */
6704 if (! reload_inherited[r])
6705 spill_reg_store[i] = 0;
6706
6707 for (k = 0; k < nr; k++)
6708 {
6709 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6710 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6711 ? nregno
6712 : nregno + k);
6713 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6714 = insn;
6715 }
6716 }
6717 }
6718
6719 /* However, if part of the reload reaches the end, then we must
6720 invalidate the old info for the part that survives to the end. */
6721 else if (part_reaches_end)
6722 {
6723 for (k = 0; k < nr; k++)
6724 if (reload_reg_reaches_end_p (spill_regs[i] + k,
6725 reload_opnum[r],
6726 reload_when_needed[r]))
6727 {
6728 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6729 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6730 }
6731 }
6732 }
6733
6734 /* The following if-statement was #if 0'd in 1.34 (or before...).
6735 It's reenabled in 1.35 because supposedly nothing else
6736 deals with this problem. */
6737
6738 /* If a register gets output-reloaded from a non-spill register,
6739 that invalidates any previous reloaded copy of it.
6740 But forget_old_reloads_1 won't get to see it, because
6741 it thinks only about the original insn. So invalidate it here. */
6742 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6743 {
6744 register int nregno = REGNO (reload_out[r]);
6745 if (nregno >= FIRST_PSEUDO_REGISTER)
6746 reg_last_reload_reg[nregno] = 0;
6747 else
6748 {
6749 int num_regs = HARD_REGNO_NREGS (nregno,GET_MODE (reload_out[r]));
6750
6751 while (num_regs-- > 0)
6752 reg_last_reload_reg[nregno + num_regs] = 0;
6753 }
6754 }
6755 }
6756 }
6757 \f
/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.

   The insns emitted here are never themselves processed by reload, so
   they must strictly satisfy their constraints as emitted; each strategy
   below is therefore validated with recog/constrain_operands before it
   is accepted.

   Returns first insn emitted.  */

rtx
gen_reload (out, in, opnum, type)
     rtx out;
     rtx in;
     int opnum;
     enum reload_type type;
{
  rtx last = get_last_insn ();
  rtx tem;

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (GET_CODE (in) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (in))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
    in = SUBREG_REG (in), out = tem;
  else if (GET_CODE (out) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (out))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
    out = SUBREG_REG (out), in = tem;

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (GET_CODE (XEXP (in, 0)) == REG
	  || GET_CODE (XEXP (in, 0)) == MEM)
      && (GET_CODE (XEXP (in, 1)) == REG
	  || CONSTANT_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == MEM))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `insn_extract' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      int code;

      /* Substitute any pending replacements (e.g. spill regs chosen for
	 pieces of IN) before building the add.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (GET_CODE (XEXP (in, 1)) == REG
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx (PLUS, GET_MODE (in), op0, op1);

      insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      /* The three-operand add didn't match; remove it and try again.  */
      delete_insns_since (last);

      /* If that failed, we must use a conservative two-insn sequence.
	 Use a move to copy the constant, MEM, or pseudo register to the
	 reload register, since "move" will be able to handle an arbitrary
	 operand, unlike add which can't, in general.  Then add the registers.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
	  || (GET_CODE (op1) == REG
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
	tem = op0, op0 = op1, op1 = tem;

      emit_insn (gen_move_insn (out, op0));

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn (gen_add2_insn (out, op1));

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      /* Last resort for the PLUS case: move OP1, then add OP0.  */
      delete_insns_since (last);

      emit_insn (gen_move_insn (out, op1));
      emit_insn (gen_add2_insn (out, op0));
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
	   && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
				       REGNO_REG_CLASS (REGNO (out)),
				       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx (REG, GET_MODE (loc), REGNO (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx (REG, GET_MODE (loc), REGNO (in));

      /* Move through the intermediate memory location.  */
      emit_insn (gen_move_insn (loc, in));
      emit_insn (gen_move_insn (out, loc));
    }
#endif

  /* If IN is a simple operand, use gen_move_insn.  */
  else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
    emit_insn (gen_move_insn (out, in));

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (out, in));
#endif

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx (SET, VOIDmode, out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
6957 \f
/* Delete a previously made output-reload
   whose result we now believe is not needed.
   First we double-check.

   INSN is the insn now being processed.
   OUTPUT_RELOAD_INSN is the insn of the output reload.
   J is the reload-number for this insn.  */

static void
delete_output_reload (insn, j, output_reload_insn)
     rtx insn;
     int j;
     rtx output_reload_insn;
{
  register rtx i1;

  /* Get the raw pseudo-register referred to.  */

  rtx reg = reload_in[j];
  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and no jumps or labels intervene, then the value can get
     here through the reload reg alone.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
	return;
      if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
	  && reg_mentioned_p (reg, PATTERN (i1)))
	return;
    }

  /* Give up if stores into this pseudo must be preserved.
     NOTE(review): the reason is recorded where cannot_omit_stores
     is set, elsewhere in this file.  */
  if (cannot_omit_stores[REGNO (reg)])
    return;

  /* If this insn will store in the pseudo again,
     the previous store can be removed.  */
  if (reload_out[j] == reload_in[j])
    delete_insn (output_reload_insn);

  /* See if the pseudo reg has been completely replaced
     with reload regs.  If so, delete the store insn
     and forget we had a stack slot for the pseudo.  */
  else if (reg_n_deaths[REGNO (reg)] == 1
	   && reg_basic_block[REGNO (reg)] >= 0
	   && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here
	 and the beginning of the current basic block.
	 (We also know that the last use before INSN was
	 the output reload we are thinking of deleting, but never mind that.)
	 Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	  if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    /* Some other ref remains;
	       we can't do anything.  */
	    return;
	}

      /* Delete the now-dead stores into this pseudo.  Scan backward
	 again, stopping at the same basic-block boundary.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    delete_insn (i2);
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	}

      /* For the debugging info,
	 say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
      alter_reg (REGNO (reg), -1);
    }
}
7053 \f
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.  */

static void
inc_for_reload (reloadreg, value, inc_amount)
     rtx reloadreg;
     rtx value;
     int inc_amount;
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were non-zero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (GET_CODE (incloc) == REG)
    reg_last_reload_reg[REGNO (incloc)] = 0;

  /* A decrement is just an increment by the negated amount.  */
  if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
    inc_amount = - inc_amount;

  inc = GEN_INT (inc_amount);

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post)
    emit_insn (gen_move_insn (reloadreg, incloc));

  /* See if we can directly increment INCLOC.  Use a method similar to that
     in gen_reload.  */

  last = get_last_insn ();
  add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
				 gen_rtx (PLUS, GET_MODE (incloc),
					  incloc, inc)));

  code = recog_memoized (add_insn);
  if (code >= 0)
    {
      insn_extract (add_insn);
      /* Check the insn strictly, i.e., the way it would be checked
	 after reload has completed.  */
      if (constrain_operands (code, 1))
	{
	  /* If this is a pre-increment and we have incremented the value
	     where it lives, copy the incremented value to RELOADREG to
	     be used as an address.  */

	  if (! post)
	    emit_insn (gen_move_insn (reloadreg, incloc));

	  return;
	}
    }

  /* The direct add was not recognized; discard it.  */
  delete_insns_since (last);

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      emit_insn (gen_move_insn (reloadreg, incloc));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied INCLOC to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
    }

  return;
}
7149 \f
7150 /* Return 1 if we are certain that the constraint-string STRING allows
7151 the hard register REG. Return 0 if we can't be sure of this. */
7152
7153 static int
7154 constraint_accepts_reg_p (string, reg)
7155 char *string;
7156 rtx reg;
7157 {
7158 int value = 0;
7159 int regno = true_regnum (reg);
7160 int c;
7161
7162 /* Initialize for first alternative. */
7163 value = 0;
7164 /* Check that each alternative contains `g' or `r'. */
7165 while (1)
7166 switch (c = *string++)
7167 {
7168 case 0:
7169 /* If an alternative lacks `g' or `r', we lose. */
7170 return value;
7171 case ',':
7172 /* If an alternative lacks `g' or `r', we lose. */
7173 if (value == 0)
7174 return 0;
7175 /* Initialize for next alternative. */
7176 value = 0;
7177 break;
7178 case 'g':
7179 case 'r':
7180 /* Any general reg wins for this alternative. */
7181 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7182 value = 1;
7183 break;
7184 default:
7185 /* Any reg in specified class wins for this alternative. */
7186 {
7187 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7188
7189 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7190 value = 1;
7191 }
7192 }
7193 }
7194 \f
7195 /* Return the number of places FIND appears within X, but don't count
7196 an occurrence if some SET_DEST is FIND. */
7197
7198 static int
7199 count_occurrences (x, find)
7200 register rtx x, find;
7201 {
7202 register int i, j;
7203 register enum rtx_code code;
7204 register char *format_ptr;
7205 int count;
7206
7207 if (x == find)
7208 return 1;
7209 if (x == 0)
7210 return 0;
7211
7212 code = GET_CODE (x);
7213
7214 switch (code)
7215 {
7216 case REG:
7217 case QUEUED:
7218 case CONST_INT:
7219 case CONST_DOUBLE:
7220 case SYMBOL_REF:
7221 case CODE_LABEL:
7222 case PC:
7223 case CC0:
7224 return 0;
7225
7226 case SET:
7227 if (SET_DEST (x) == find)
7228 return count_occurrences (SET_SRC (x), find);
7229 break;
7230 }
7231
7232 format_ptr = GET_RTX_FORMAT (code);
7233 count = 0;
7234
7235 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7236 {
7237 switch (*format_ptr++)
7238 {
7239 case 'e':
7240 count += count_occurrences (XEXP (x, i), find);
7241 break;
7242
7243 case 'E':
7244 if (XVEC (x, i) != NULL)
7245 {
7246 for (j = 0; j < XVECLEN (x, i); j++)
7247 count += count_occurrences (XVECEXP (x, i, j), find);
7248 }
7249 break;
7250 }
7251 }
7252 return count;
7253 }