(emit_reload_insns): Fix precedence errors.
[gcc.git] / gcc / reload1.c
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92-5, 1996 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include <stdio.h>
23 #include "config.h"
24 #include "rtl.h"
25 #include "obstack.h"
26 #include "insn-config.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
29 #include "flags.h"
30 #include "expr.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "reload.h"
34 #include "recog.h"
35 #include "basic-block.h"
36 #include "output.h"
37 #include "real.h"
38
39 /* This file contains the reload pass of the compiler, which is
40 run after register allocation has been done. It checks that
41 each insn is valid (operands required to be in registers really
42 are in registers of the proper class) and fixes up invalid ones
43 by copying values temporarily into registers for the insns
44 that need them.
45
46 The results of register allocation are described by the vector
47 reg_renumber; the insns still contain pseudo regs, but reg_renumber
48 can be used to find which hard reg, if any, a pseudo reg is in.
49
50 The technique we always use is to free up a few hard regs that are
51 called ``reload regs'', and for each place where a pseudo reg
52 must be in a hard reg, copy it temporarily into one of the reload regs.
53
54 All the pseudos that were formerly allocated to the hard regs that
55 are now in use as reload regs must be ``spilled''. This means
56 that they go to other hard regs, or to stack slots if no other
57 available hard regs can be found. Spilling can invalidate more
58 insns, requiring additional need for reloads, so we must keep checking
59 until the process stabilizes.
60
61 For machines with different classes of registers, we must keep track
62 of the register class needed for each reload, and make sure that
63 we allocate enough reload registers of each class.
64
65 The file reload.c contains the code that checks one insn for
66 validity and reports the reloads that it needs. This file
67 is in charge of scanning the entire rtl code, accumulating the
68 reload needs, spilling, assigning reload registers to use for
69 fixing up each insn, and generating the new insns to copy values
70 into the reload registers. */
71
72
73 #ifndef REGISTER_MOVE_COST
74 #define REGISTER_MOVE_COST(x, y) 2
75 #endif
76
77 #ifndef MEMORY_MOVE_COST
78 #define MEMORY_MOVE_COST(x) 4
79 #endif
80 \f
/* ---- File-scope state shared by the phases of the reload pass. ---- */

/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which reg N has been reloaded (perhaps for a previous insn).  */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N.  */
static char *reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn.  */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for.  */
rtx *reg_equiv_constant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer).  Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem.  */
rtx *reg_equiv_memory_loc;

/* Element N is the address of stack slot to which pseudo reg N is equivalent.
   This is used when the address is not valid as a memory address
   (because its displacement is too big for the machine.)  */
rtx *reg_equiv_address;

/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot.  */
rtx *reg_equiv_mem;

/* Widest width in which each pseudo reg is referred to (via subreg).  */
static int *reg_max_ref_width;

/* Element N is the insn that initialized reg N from its equivalent
   constant or memory slot.  */
static rtx *reg_equiv_init;

/* During reload_as_needed, element N contains the last pseudo regno
   reloaded into the Nth reload register.  This vector is in parallel
   with spill_regs.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur.  */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   the Nth reload register was last used.  This vector is in parallel
   with spill_regs, and its contents are significant only when
   reg_reloaded_contents is significant.  */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Number of spill-regs so far; number of valid elements of spill_regs.  */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store.  */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];

/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs.  */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];

/* This reg set indicates registers that may not be used for retrying global
   allocation.  The registers that may not be used include all spill registers
   and the frame pointer (if we are using one).  */
HARD_REG_SET forbidden_regs;

/* This reg set indicates registers that are not good for spill registers.
   They will not be used to complete groups of spill registers.  This includes
   all fixed registers, registers that may be eliminated, and, if
   SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.

   (spill_reg_order prevents these registers from being used to start a
   group.)  */
static HARD_REG_SET bad_spill_regs;

/* Describes order of use of registers for reloading
   of spilled pseudo-registers.  `spills' is the number of
   elements that are actually valid; new ones are added at the end.  */
static short spill_regs[FIRST_PSEUDO_REGISTER];

/* This reg set indicates those registers that have been used as spill
   registers.  This information is used in reorg.c, to help figure out
   what registers are live at any point.  It is assumed that all spill_regs
   are dead at every CODE_LABEL.  */

HARD_REG_SET used_spill_regs;

/* Index of last register assigned as a spill register.  We allocate in
   a round-robin fashion.  */

static int last_spill_reg;

/* Describes order of preference for putting regs into spill_regs.
   Contains the numbers of all the hard regs, in order most preferred first.
   This order is different for each function.
   It is set up by order_regs_for_reload.
   Empty elements at the end contain -1.  */
static short potential_reload_regs[FIRST_PSEUDO_REGISTER];

/* 1 for a hard register that appears explicitly in the rtl
   (for example, function value registers, special registers
   used by insns, structure value pointer registers).  */
static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];

/* Indicates if a register was counted against the need for
   groups.  0 means it can count against max_nongroup instead.  */
static HARD_REG_SET counted_for_groups;

/* Indicates if a register was counted against the need for
   non-groups.  0 means it can become part of a new group.
   During choose_reload_regs, 1 here means don't use this reg
   as part of a group, even if it seems to be otherwise ok.  */
static HARD_REG_SET counted_for_nongroups;

/* Indexed by pseudo reg number N,
   says may not delete stores into the real (memory) home of pseudo N.
   This is set if we already substituted a memory equivalent in some uses,
   which happens when we have to eliminate the fp (frame pointer) from it.  */
static char *cannot_omit_stores;

/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register.  */

static char spill_indirect_levels;

/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above.  */

char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */

char double_reg_address_ok;

/* Record the stack slot for each spilled hard register.  */

static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot.  */

static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Indexed by register class and basic block number, nonzero if there is
   any need for a spill register of that class in that basic block.
   The pointer is 0 if we did stupid allocation and don't know
   the structure of basic blocks.  */

char *basic_block_needs[N_REG_CLASSES];

/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */

int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */

int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects.  They provide a
   place to pass a scratch register.  */

enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];

/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */

struct obstack reload_obstack;
char *reload_firstobj;

/* Allocator hooks required by the obstack macros.  */
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* List of labels that must never be deleted.  */
extern rtx forced_labels;

/* Allocation number table from global register allocation.  */
extern int *reg_allocno;
283 \f
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.  If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

static struct elim_table
{
  int from;			/* Register number to be eliminated.  */
  int to;			/* Register number used as replacement.  */
  int initial_offset;		/* Initial difference between values.  */
  int can_eliminate;		/* Non-zero if this elimination can be done.  */
  int can_eliminate_previous;	/* Value of CAN_ELIMINATE in previous scan over
				   insns made by reload.  */
  int offset;			/* Current offset between the two regs.  */
  int max_offset;		/* Maximum offset between the two regs.  */
  int previous_offset;		/* Offset at end of previous insn.  */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated.  */
  rtx to_rtx;			/* REG rtx for the replacement.  */
} reg_eliminate[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

/* Number of entries in the elimination table above.  */
#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
320
/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If non-zero, we use a new copy of each
   replacement result in any insns encountered.  */
static int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate.  */
static int num_eliminable;

/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the number of the label.
   The first table is an array of flags that records whether we have yet
   encountered a label and the second table is an array of arrays, one
   entry in the latter array for each elimination.  */

static char *offsets_known_at;
static int (*offsets_at)[NUM_ELIMINABLE_REGS];

/* Number of labels in the current function.  */

static int num_labels;

/* Pairs a hard register number with a use count; compared by
   hard_reg_use_compare when ordering hard regs for reload.  */
struct hard_reg_n_uses { int regno; int uses; };
344 \f
/* Forward declarations for the static functions defined later in this file.  */

static int possible_group_p		PROTO((int, int *));
static void count_possible_groups	PROTO((int *, enum machine_mode *,
					       int *, int));
static int modes_equiv_for_class_p	PROTO((enum machine_mode,
					       enum machine_mode,
					       enum reg_class));
static void spill_failure		PROTO((rtx));
static int new_spill_reg		PROTO((int, int, int *, int *, int,
					       FILE *));
static void delete_dead_insn		PROTO((rtx));
static void alter_reg			PROTO((int, int));
static void mark_scratch_live		PROTO((rtx));
static void set_label_offsets		PROTO((rtx, rtx, int));
static int eliminate_regs_in_insn	PROTO((rtx, int));
static void mark_not_eliminable		PROTO((rtx, rtx));
static int spill_hard_reg		PROTO((int, int, FILE *, int));
static void scan_paradoxical_subregs	PROTO((rtx));
static int hard_reg_use_compare		PROTO((struct hard_reg_n_uses *,
					       struct hard_reg_n_uses *));
static void order_regs_for_reload	PROTO((int));
static int compare_spill_regs		PROTO((short *, short *));
static void reload_as_needed		PROTO((rtx, int));
static void forget_old_reloads_1	PROTO((rtx, rtx));
static int reload_reg_class_lower	PROTO((short *, short *));
static void mark_reload_reg_in_use	PROTO((int, int, enum reload_type,
					       enum machine_mode));
static void clear_reload_reg_in_use	PROTO((int, int, enum reload_type,
					       enum machine_mode));
static int reload_reg_free_p		PROTO((int, int, enum reload_type));
static int reload_reg_free_before_p	PROTO((int, int, enum reload_type));
static int reload_reg_reaches_end_p	PROTO((int, int, enum reload_type));
static int reloads_conflict		PROTO((int, int));
static int allocate_reload_reg		PROTO((int, rtx, int, int));
static void choose_reload_regs		PROTO((rtx, rtx));
static void merge_assigned_reloads	PROTO((rtx));
static void emit_reload_insns		PROTO((rtx));
static void delete_output_reload	PROTO((rtx, int, rtx));
static void inc_for_reload		PROTO((rtx, rtx, int));
static int constraint_accepts_reg_p	PROTO((char *, rtx));
static int count_occurrences		PROTO((rtx, rtx));
385 \f
386 /* Initialize the reload pass once per compilation. */
387
388 void
389 init_reload ()
390 {
391 register int i;
392
393 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
394 Set spill_indirect_levels to the number of levels such addressing is
395 permitted, zero if it is not permitted at all. */
396
397 register rtx tem
398 = gen_rtx (MEM, Pmode,
399 gen_rtx (PLUS, Pmode,
400 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
401 GEN_INT (4)));
402 spill_indirect_levels = 0;
403
404 while (memory_address_p (QImode, tem))
405 {
406 spill_indirect_levels++;
407 tem = gen_rtx (MEM, Pmode, tem);
408 }
409
410 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
411
412 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
413 indirect_symref_ok = memory_address_p (QImode, tem);
414
415 /* See if reg+reg is a valid (and offsettable) address. */
416
417 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
418 {
419 tem = gen_rtx (PLUS, Pmode,
420 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
421 gen_rtx (REG, Pmode, i));
422 /* This way, we make sure that reg+reg is an offsettable address. */
423 tem = plus_constant (tem, 4);
424
425 if (memory_address_p (QImode, tem))
426 {
427 double_reg_address_ok = 1;
428 break;
429 }
430 }
431
432 /* Initialize obstack for our rtl allocation. */
433 gcc_obstack_init (&reload_obstack);
434 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
435 }
436
437 /* Main entry point for the reload pass.
438
439 FIRST is the first insn of the function being compiled.
440
441 GLOBAL nonzero means we were called from global_alloc
442 and should attempt to reallocate any pseudoregs that we
443 displace from hard regs we will use for reloads.
444 If GLOBAL is zero, we do not have enough information to do that,
445 so any pseudo reg that is spilled must go to the stack.
446
447 DUMPFILE is the global-reg debugging dump file stream, or 0.
448 If it is nonzero, messages are written to it to describe
449 which registers are seized as reload regs, which pseudo regs
450 are spilled from them, and where the pseudo regs are reallocated to.
451
452 Return value is nonzero if reload failed
453 and we must not do any more for this function. */
454
455 int
456 reload (first, global, dumpfile)
457 rtx first;
458 int global;
459 FILE *dumpfile;
460 {
461 register int class;
462 register int i, j, k;
463 register rtx insn;
464 register struct elim_table *ep;
465
466 int something_changed;
467 int something_needs_reloads;
468 int something_needs_elimination;
469 int new_basic_block_needs;
470 enum reg_class caller_save_spill_class = NO_REGS;
471 int caller_save_group_size = 1;
472
473 /* Nonzero means we couldn't get enough spill regs. */
474 int failure = 0;
475
476 /* The basic block number currently being processed for INSN. */
477 int this_block;
478
479 /* Make sure even insns with volatile mem refs are recognizable. */
480 init_recog ();
481
482 /* Enable find_equiv_reg to distinguish insns made by reload. */
483 reload_first_uid = get_max_uid ();
484
485 for (i = 0; i < N_REG_CLASSES; i++)
486 basic_block_needs[i] = 0;
487
488 #ifdef SECONDARY_MEMORY_NEEDED
489 /* Initialize the secondary memory table. */
490 clear_secondary_mem ();
491 #endif
492
493 /* Remember which hard regs appear explicitly
494 before we merge into `regs_ever_live' the ones in which
495 pseudo regs have been allocated. */
496 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
497
498 /* We don't have a stack slot for any spill reg yet. */
499 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
500 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
501
502 /* Initialize the save area information for caller-save, in case some
503 are needed. */
504 init_save_areas ();
505
506 /* Compute which hard registers are now in use
507 as homes for pseudo registers.
508 This is done here rather than (eg) in global_alloc
509 because this point is reached even if not optimizing. */
510
511 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
512 mark_home_live (i);
513
514 for (i = 0; i < scratch_list_length; i++)
515 if (scratch_list[i])
516 mark_scratch_live (scratch_list[i]);
517
518 /* Make sure that the last insn in the chain
519 is not something that needs reloading. */
520 emit_note (NULL_PTR, NOTE_INSN_DELETED);
521
522 /* Find all the pseudo registers that didn't get hard regs
523 but do have known equivalent constants or memory slots.
524 These include parameters (known equivalent to parameter slots)
525 and cse'd or loop-moved constant memory addresses.
526
527 Record constant equivalents in reg_equiv_constant
528 so they will be substituted by find_reloads.
529 Record memory equivalents in reg_mem_equiv so they can
530 be substituted eventually by altering the REG-rtx's. */
531
532 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
533 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
534 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
535 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
536 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
537 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
538 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
539 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
540 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
541 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
542 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
543 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
544 cannot_omit_stores = (char *) alloca (max_regno);
545 bzero (cannot_omit_stores, max_regno);
546
547 #ifdef SMALL_REGISTER_CLASSES
548 CLEAR_HARD_REG_SET (forbidden_regs);
549 #endif
550
551 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
552 Also find all paradoxical subregs and find largest such for each pseudo.
553 On machines with small register classes, record hard registers that
554 are used for user variables. These can never be used for spills. */
555
556 for (insn = first; insn; insn = NEXT_INSN (insn))
557 {
558 rtx set = single_set (insn);
559
560 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
561 {
562 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
563 if (note
564 #ifdef LEGITIMATE_PIC_OPERAND_P
565 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
566 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
567 #endif
568 )
569 {
570 rtx x = XEXP (note, 0);
571 i = REGNO (SET_DEST (set));
572 if (i > LAST_VIRTUAL_REGISTER)
573 {
574 if (GET_CODE (x) == MEM)
575 reg_equiv_memory_loc[i] = x;
576 else if (CONSTANT_P (x))
577 {
578 if (LEGITIMATE_CONSTANT_P (x))
579 reg_equiv_constant[i] = x;
580 else
581 reg_equiv_memory_loc[i]
582 = force_const_mem (GET_MODE (SET_DEST (set)), x);
583 }
584 else
585 continue;
586
587 /* If this register is being made equivalent to a MEM
588 and the MEM is not SET_SRC, the equivalencing insn
589 is one with the MEM as a SET_DEST and it occurs later.
590 So don't mark this insn now. */
591 if (GET_CODE (x) != MEM
592 || rtx_equal_p (SET_SRC (set), x))
593 reg_equiv_init[i] = insn;
594 }
595 }
596 }
597
598 /* If this insn is setting a MEM from a register equivalent to it,
599 this is the equivalencing insn. */
600 else if (set && GET_CODE (SET_DEST (set)) == MEM
601 && GET_CODE (SET_SRC (set)) == REG
602 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
603 && rtx_equal_p (SET_DEST (set),
604 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
605 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
606
607 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
608 scan_paradoxical_subregs (PATTERN (insn));
609 }
610
611 /* Does this function require a frame pointer? */
612
613 frame_pointer_needed = (! flag_omit_frame_pointer
614 #ifdef EXIT_IGNORE_STACK
615 /* ?? If EXIT_IGNORE_STACK is set, we will not save
616 and restore sp for alloca. So we can't eliminate
617 the frame pointer in that case. At some point,
618 we should improve this by emitting the
619 sp-adjusting insns for this case. */
620 || (current_function_calls_alloca
621 && EXIT_IGNORE_STACK)
622 #endif
623 || FRAME_POINTER_REQUIRED);
624
625 num_eliminable = 0;
626
627 /* Initialize the table of registers to eliminate. The way we do this
628 depends on how the eliminable registers were defined. */
629 #ifdef ELIMINABLE_REGS
630 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
631 {
632 ep->can_eliminate = ep->can_eliminate_previous
633 = (CAN_ELIMINATE (ep->from, ep->to)
634 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
635 }
636 #else
637 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
638 = ! frame_pointer_needed;
639 #endif
640
641 /* Count the number of eliminable registers and build the FROM and TO
642 REG rtx's. Note that code in gen_rtx will cause, e.g.,
643 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
644 We depend on this. */
645 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
646 {
647 num_eliminable += ep->can_eliminate;
648 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
649 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
650 }
651
652 num_labels = max_label_num () - get_first_label_num ();
653
654 /* Allocate the tables used to store offset information at labels. */
655 offsets_known_at = (char *) alloca (num_labels);
656 offsets_at
657 = (int (*)[NUM_ELIMINABLE_REGS])
658 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
659
660 offsets_known_at -= get_first_label_num ();
661 offsets_at -= get_first_label_num ();
662
663 /* Alter each pseudo-reg rtx to contain its hard reg number.
664 Assign stack slots to the pseudos that lack hard regs or equivalents.
665 Do not touch virtual registers. */
666
667 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
668 alter_reg (i, -1);
669
670 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
671 because the stack size may be a part of the offset computation for
672 register elimination. */
673 assign_stack_local (BLKmode, 0, 0);
674
675 /* If we have some registers we think can be eliminated, scan all insns to
676 see if there is an insn that sets one of these registers to something
677 other than itself plus a constant. If so, the register cannot be
678 eliminated. Doing this scan here eliminates an extra pass through the
679 main reload loop in the most common case where register elimination
680 cannot be done. */
681 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
682 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
683 || GET_CODE (insn) == CALL_INSN)
684 note_stores (PATTERN (insn), mark_not_eliminable);
685
686 #ifndef REGISTER_CONSTRAINTS
687 /* If all the pseudo regs have hard regs,
688 except for those that are never referenced,
689 we know that no reloads are needed. */
690 /* But that is not true if there are register constraints, since
691 in that case some pseudos might be in the wrong kind of hard reg. */
692
693 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
694 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
695 break;
696
697 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
698 return;
699 #endif
700
701 /* Compute the order of preference for hard registers to spill.
702 Store them by decreasing preference in potential_reload_regs. */
703
704 order_regs_for_reload (global);
705
706 /* So far, no hard regs have been spilled. */
707 n_spills = 0;
708 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
709 spill_reg_order[i] = -1;
710
711 /* Initialize to -1, which means take the first spill register. */
712 last_spill_reg = -1;
713
714 /* On most machines, we can't use any register explicitly used in the
715 rtl as a spill register. But on some, we have to. Those will have
716 taken care to keep the life of hard regs as short as possible. */
717
718 #ifndef SMALL_REGISTER_CLASSES
719 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
720 #endif
721
722 /* Spill any hard regs that we know we can't eliminate. */
723 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
724 if (! ep->can_eliminate)
725 spill_hard_reg (ep->from, global, dumpfile, 1);
726
727 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
728 if (frame_pointer_needed)
729 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
730 #endif
731
732 if (global)
733 for (i = 0; i < N_REG_CLASSES; i++)
734 {
735 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
736 bzero (basic_block_needs[i], n_basic_blocks);
737 }
738
739 /* From now on, we need to emit any moves without making new pseudos. */
740 reload_in_progress = 1;
741
742 /* This loop scans the entire function each go-round
743 and repeats until one repetition spills no additional hard regs. */
744
745 /* This flag is set when a pseudo reg is spilled,
746 to require another pass. Note that getting an additional reload
747 reg does not necessarily imply any pseudo reg was spilled;
748 sometimes we find a reload reg that no pseudo reg was allocated in. */
749 something_changed = 1;
750 /* This flag is set if there are any insns that require reloading. */
751 something_needs_reloads = 0;
752 /* This flag is set if there are any insns that require register
753 eliminations. */
754 something_needs_elimination = 0;
755 while (something_changed)
756 {
757 rtx after_call = 0;
758
759 /* For each class, number of reload regs needed in that class.
760 This is the maximum over all insns of the needs in that class
761 of the individual insn. */
762 int max_needs[N_REG_CLASSES];
763 /* For each class, size of group of consecutive regs
764 that is needed for the reloads of this class. */
765 int group_size[N_REG_CLASSES];
766 /* For each class, max number of consecutive groups needed.
767 (Each group contains group_size[CLASS] consecutive registers.) */
768 int max_groups[N_REG_CLASSES];
769 /* For each class, max number needed of regs that don't belong
770 to any of the groups. */
771 int max_nongroups[N_REG_CLASSES];
772 /* For each class, the machine mode which requires consecutive
773 groups of regs of that class.
774 If two different modes ever require groups of one class,
775 they must be the same size and equally restrictive for that class,
776 otherwise we can't handle the complexity. */
777 enum machine_mode group_mode[N_REG_CLASSES];
778 /* Record the insn where each maximum need is first found. */
779 rtx max_needs_insn[N_REG_CLASSES];
780 rtx max_groups_insn[N_REG_CLASSES];
781 rtx max_nongroups_insn[N_REG_CLASSES];
782 rtx x;
783 int starting_frame_size = get_frame_size ();
784 int previous_frame_pointer_needed = frame_pointer_needed;
785 static char *reg_class_names[] = REG_CLASS_NAMES;
786
787 something_changed = 0;
788 bzero ((char *) max_needs, sizeof max_needs);
789 bzero ((char *) max_groups, sizeof max_groups);
790 bzero ((char *) max_nongroups, sizeof max_nongroups);
791 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
792 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
793 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
794 bzero ((char *) group_size, sizeof group_size);
795 for (i = 0; i < N_REG_CLASSES; i++)
796 group_mode[i] = VOIDmode;
797
798 /* Keep track of which basic blocks are needing the reloads. */
799 this_block = 0;
800
801 /* Remember whether any element of basic_block_needs
802 changes from 0 to 1 in this pass. */
803 new_basic_block_needs = 0;
804
805 /* Reset all offsets on eliminable registers to their initial values. */
806 #ifdef ELIMINABLE_REGS
807 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
808 {
809 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
810 ep->previous_offset = ep->offset
811 = ep->max_offset = ep->initial_offset;
812 }
813 #else
814 #ifdef INITIAL_FRAME_POINTER_OFFSET
815 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
816 #else
817 if (!FRAME_POINTER_REQUIRED)
818 abort ();
819 reg_eliminate[0].initial_offset = 0;
820 #endif
821 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
822 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
823 #endif
824
825 num_not_at_initial_offset = 0;
826
827 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
828
829 /* Set a known offset for each forced label to be at the initial offset
830 of each elimination. We do this because we assume that all
831 computed jumps occur from a location where each elimination is
832 at its initial offset. */
833
834 for (x = forced_labels; x; x = XEXP (x, 1))
835 if (XEXP (x, 0))
836 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
837
838 /* For each pseudo register that has an equivalent location defined,
839 try to eliminate any eliminable registers (such as the frame pointer)
840 assuming initial offsets for the replacement register, which
841 is the normal case.
842
843 If the resulting location is directly addressable, substitute
844 the MEM we just got directly for the old REG.
845
846 If it is not addressable but is a constant or the sum of a hard reg
847 and constant, it is probably not addressable because the constant is
848 out of range, in that case record the address; we will generate
849 hairy code to compute the address in a register each time it is
850 needed. Similarly if it is a hard register, but one that is not
851 valid as an address register.
852
853 If the location is not addressable, but does not have one of the
854 above forms, assign a stack slot. We have to do this to avoid the
855 potential of producing lots of reloads if, e.g., a location involves
856 a pseudo that didn't get a hard register and has an equivalent memory
857 location that also involves a pseudo that didn't get a hard register.
858
859 Perhaps at some point we will improve reload_when_needed handling
860 so this problem goes away. But that's very hairy. */
861
862 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
863 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
864 {
865 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
866
867 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
868 XEXP (x, 0)))
869 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
870 else if (CONSTANT_P (XEXP (x, 0))
871 || (GET_CODE (XEXP (x, 0)) == REG
872 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
873 || (GET_CODE (XEXP (x, 0)) == PLUS
874 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
875 && (REGNO (XEXP (XEXP (x, 0), 0))
876 < FIRST_PSEUDO_REGISTER)
877 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
878 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
879 else
880 {
881 /* Make a new stack slot. Then indicate that something
882 changed so we go back and recompute offsets for
883 eliminable registers because the allocation of memory
884 below might change some offset. reg_equiv_{mem,address}
885 will be set up for this pseudo on the next pass around
886 the loop. */
887 reg_equiv_memory_loc[i] = 0;
888 reg_equiv_init[i] = 0;
889 alter_reg (i, -1);
890 something_changed = 1;
891 }
892 }
893
894 /* If we allocated another pseudo to the stack, redo elimination
895 bookkeeping. */
896 if (something_changed)
897 continue;
898
899 /* If caller-saves needs a group, initialize the group to include
900 the size and mode required for caller-saves. */
901
902 if (caller_save_group_size > 1)
903 {
904 group_mode[(int) caller_save_spill_class] = Pmode;
905 group_size[(int) caller_save_spill_class] = caller_save_group_size;
906 }
907
908 /* Compute the most additional registers needed by any instruction.
909 Collect information separately for each class of regs. */
910
911 for (insn = first; insn; insn = NEXT_INSN (insn))
912 {
913 if (global && this_block + 1 < n_basic_blocks
914 && insn == basic_block_head[this_block+1])
915 ++this_block;
916
917 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
918 might include REG_LABEL), we need to see what effects this
919 has on the known offsets at labels. */
920
921 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
922 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
923 && REG_NOTES (insn) != 0))
924 set_label_offsets (insn, insn, 0);
925
926 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
927 {
928 /* Nonzero means don't use a reload reg that overlaps
929 the place where a function value can be returned. */
930 rtx avoid_return_reg = 0;
931
932 rtx old_body = PATTERN (insn);
933 int old_code = INSN_CODE (insn);
934 rtx old_notes = REG_NOTES (insn);
935 int did_elimination = 0;
936
937 /* To compute the number of reload registers of each class
938 needed for an insn, we must simulate what choose_reload_regs
939 can do. We do this by splitting an insn into an "input" and
940 an "output" part. RELOAD_OTHER reloads are used in both.
941 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
942 which must be live over the entire input section of reloads,
943 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
944 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
945 inputs.
946
947 The registers needed for output are RELOAD_OTHER and
948 RELOAD_FOR_OUTPUT, which are live for the entire output
949 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
950 reloads for each operand.
951
952 The total number of registers needed is the maximum of the
953 inputs and outputs. */
954
955 struct needs
956 {
957 /* [0] is normal, [1] is nongroup. */
958 int regs[2][N_REG_CLASSES];
959 int groups[N_REG_CLASSES];
960 };
961
962 /* Each `struct needs' corresponds to one RELOAD_... type. */
963 struct {
964 struct needs other;
965 struct needs input;
966 struct needs output;
967 struct needs insn;
968 struct needs other_addr;
969 struct needs op_addr;
970 struct needs op_addr_reload;
971 struct needs in_addr[MAX_RECOG_OPERANDS];
972 struct needs out_addr[MAX_RECOG_OPERANDS];
973 } insn_needs;
974
975 /* If needed, eliminate any eliminable registers. */
976 if (num_eliminable)
977 did_elimination = eliminate_regs_in_insn (insn, 0);
978
979 #ifdef SMALL_REGISTER_CLASSES
980 /* Set avoid_return_reg if this is an insn
981 that might use the value of a function call. */
982 if (GET_CODE (insn) == CALL_INSN)
983 {
984 if (GET_CODE (PATTERN (insn)) == SET)
985 after_call = SET_DEST (PATTERN (insn));
986 else if (GET_CODE (PATTERN (insn)) == PARALLEL
987 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
988 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
989 else
990 after_call = 0;
991 }
992 else if (after_call != 0
993 && !(GET_CODE (PATTERN (insn)) == SET
994 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
995 {
996 if (reg_referenced_p (after_call, PATTERN (insn)))
997 avoid_return_reg = after_call;
998 after_call = 0;
999 }
1000 #endif /* SMALL_REGISTER_CLASSES */
1001
1002 /* Analyze the instruction. */
1003 find_reloads (insn, 0, spill_indirect_levels, global,
1004 spill_reg_order);
1005
1006 /* Remember for later shortcuts which insns had any reloads or
1007 register eliminations.
1008
1009 One might think that it would be worthwhile to mark insns
1010 that need register replacements but not reloads, but this is
1011 not safe because find_reloads may do some manipulation of
1012 the insn (such as swapping commutative operands), which would
1013 be lost when we restore the old pattern after register
1014 replacement. So the actions of find_reloads must be redone in
1015 subsequent passes or in reload_as_needed.
1016
1017 However, it is safe to mark insns that need reloads
1018 but not register replacement. */
1019
1020 PUT_MODE (insn, (did_elimination ? QImode
1021 : n_reloads ? HImode
1022 : GET_MODE (insn) == DImode ? DImode
1023 : VOIDmode));
1024
1025 /* Discard any register replacements done. */
1026 if (did_elimination)
1027 {
1028 obstack_free (&reload_obstack, reload_firstobj);
1029 PATTERN (insn) = old_body;
1030 INSN_CODE (insn) = old_code;
1031 REG_NOTES (insn) = old_notes;
1032 something_needs_elimination = 1;
1033 }
1034
1035 /* If this insn has no reloads, we need not do anything except
1036 in the case of a CALL_INSN when we have caller-saves and
1037 caller-save needs reloads. */
1038
1039 if (n_reloads == 0
1040 && ! (GET_CODE (insn) == CALL_INSN
1041 && caller_save_spill_class != NO_REGS))
1042 continue;
1043
1044 something_needs_reloads = 1;
1045 bzero ((char *) &insn_needs, sizeof insn_needs);
1046
1047 /* Count each reload once in every class
1048 containing the reload's own class. */
1049
1050 for (i = 0; i < n_reloads; i++)
1051 {
1052 register enum reg_class *p;
1053 enum reg_class class = reload_reg_class[i];
1054 int size;
1055 enum machine_mode mode;
1056 int nongroup_need;
1057 struct needs *this_needs;
1058
1059 /* Don't count the dummy reloads, for which one of the
1060 regs mentioned in the insn can be used for reloading.
1061 Don't count optional reloads.
1062 Don't count reloads that got combined with others. */
1063 if (reload_reg_rtx[i] != 0
1064 || reload_optional[i] != 0
1065 || (reload_out[i] == 0 && reload_in[i] == 0
1066 && ! reload_secondary_p[i]))
1067 continue;
1068
1069 /* Show that a reload register of this class is needed
1070 in this basic block. We do not use insn_needs and
1071 insn_groups because they are overly conservative for
1072 this purpose. */
1073 if (global && ! basic_block_needs[(int) class][this_block])
1074 {
1075 basic_block_needs[(int) class][this_block] = 1;
1076 new_basic_block_needs = 1;
1077 }
1078
1079
1080 mode = reload_inmode[i];
1081 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1082 mode = reload_outmode[i];
1083 size = CLASS_MAX_NREGS (class, mode);
1084
1085 /* If this class doesn't want a group, determine if we have
1086 a nongroup need or a regular need. We have a nongroup
1087 need if this reload conflicts with a group reload whose
1088 class intersects with this reload's class. */
1089
1090 nongroup_need = 0;
1091 if (size == 1)
1092 for (j = 0; j < n_reloads; j++)
1093 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1094 (GET_MODE_SIZE (reload_outmode[j])
1095 > GET_MODE_SIZE (reload_inmode[j]))
1096 ? reload_outmode[j]
1097 : reload_inmode[j])
1098 > 1)
1099 && (!reload_optional[j])
1100 && (reload_in[j] != 0 || reload_out[j] != 0
1101 || reload_secondary_p[j])
1102 && reloads_conflict (i, j)
1103 && reg_classes_intersect_p (class,
1104 reload_reg_class[j]))
1105 {
1106 nongroup_need = 1;
1107 break;
1108 }
1109
1110 /* Decide which time-of-use to count this reload for. */
1111 switch (reload_when_needed[i])
1112 {
1113 case RELOAD_OTHER:
1114 this_needs = &insn_needs.other;
1115 break;
1116 case RELOAD_FOR_INPUT:
1117 this_needs = &insn_needs.input;
1118 break;
1119 case RELOAD_FOR_OUTPUT:
1120 this_needs = &insn_needs.output;
1121 break;
1122 case RELOAD_FOR_INSN:
1123 this_needs = &insn_needs.insn;
1124 break;
1125 case RELOAD_FOR_OTHER_ADDRESS:
1126 this_needs = &insn_needs.other_addr;
1127 break;
1128 case RELOAD_FOR_INPUT_ADDRESS:
1129 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1130 break;
1131 case RELOAD_FOR_OUTPUT_ADDRESS:
1132 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1133 break;
1134 case RELOAD_FOR_OPERAND_ADDRESS:
1135 this_needs = &insn_needs.op_addr;
1136 break;
1137 case RELOAD_FOR_OPADDR_ADDR:
1138 this_needs = &insn_needs.op_addr_reload;
1139 break;
1140 }
1141
1142 if (size > 1)
1143 {
1144 enum machine_mode other_mode, allocate_mode;
1145
1146 /* Count number of groups needed separately from
1147 number of individual regs needed. */
1148 this_needs->groups[(int) class]++;
1149 p = reg_class_superclasses[(int) class];
1150 while (*p != LIM_REG_CLASSES)
1151 this_needs->groups[(int) *p++]++;
1152
1153 /* Record size and mode of a group of this class. */
1154 /* If more than one size group is needed,
1155 make all groups the largest needed size. */
1156 if (group_size[(int) class] < size)
1157 {
1158 other_mode = group_mode[(int) class];
1159 allocate_mode = mode;
1160
1161 group_size[(int) class] = size;
1162 group_mode[(int) class] = mode;
1163 }
1164 else
1165 {
1166 other_mode = mode;
1167 allocate_mode = group_mode[(int) class];
1168 }
1169
1170 /* Crash if two dissimilar machine modes both need
1171 groups of consecutive regs of the same class. */
1172
1173 if (other_mode != VOIDmode && other_mode != allocate_mode
1174 && ! modes_equiv_for_class_p (allocate_mode,
1175 other_mode, class))
1176 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1177 insn);
1178 }
1179 else if (size == 1)
1180 {
1181 this_needs->regs[nongroup_need][(int) class] += 1;
1182 p = reg_class_superclasses[(int) class];
1183 while (*p != LIM_REG_CLASSES)
1184 this_needs->regs[nongroup_need][(int) *p++] += 1;
1185 }
1186 else
1187 abort ();
1188 }
1189
1190 /* All reloads have been counted for this insn;
1191 now merge the various times of use.
1192 This sets insn_needs, etc., to the maximum total number
1193 of registers needed at any point in this insn. */
1194
1195 for (i = 0; i < N_REG_CLASSES; i++)
1196 {
1197 int in_max, out_max;
1198
1199 /* Compute normal and nongroup needs. */
1200 for (j = 0; j <= 1; j++)
1201 {
1202 for (in_max = 0, out_max = 0, k = 0;
1203 k < reload_n_operands; k++)
1204 {
1205 in_max
1206 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1207 out_max
1208 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1209 }
1210
1211 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1212 and operand addresses but not things used to reload
1213 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1214 don't conflict with things needed to reload inputs or
1215 outputs. */
1216
1217 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1218 insn_needs.op_addr_reload.regs[j][i]),
1219 in_max);
1220
1221 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1222
1223 insn_needs.input.regs[j][i]
1224 = MAX (insn_needs.input.regs[j][i]
1225 + insn_needs.op_addr.regs[j][i]
1226 + insn_needs.insn.regs[j][i],
1227 in_max + insn_needs.input.regs[j][i]);
1228
1229 insn_needs.output.regs[j][i] += out_max;
1230 insn_needs.other.regs[j][i]
1231 += MAX (MAX (insn_needs.input.regs[j][i],
1232 insn_needs.output.regs[j][i]),
1233 insn_needs.other_addr.regs[j][i]);
1234
1235 }
1236
1237 /* Now compute group needs. */
1238 for (in_max = 0, out_max = 0, j = 0;
1239 j < reload_n_operands; j++)
1240 {
1241 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1242 out_max
1243 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1244 }
1245
1246 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1247 insn_needs.op_addr_reload.groups[i]),
1248 in_max);
1249 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1250
1251 insn_needs.input.groups[i]
1252 = MAX (insn_needs.input.groups[i]
1253 + insn_needs.op_addr.groups[i]
1254 + insn_needs.insn.groups[i],
1255 in_max + insn_needs.input.groups[i]);
1256
1257 insn_needs.output.groups[i] += out_max;
1258 insn_needs.other.groups[i]
1259 += MAX (MAX (insn_needs.input.groups[i],
1260 insn_needs.output.groups[i]),
1261 insn_needs.other_addr.groups[i]);
1262 }
1263
1264 /* If this is a CALL_INSN and caller-saves will need
1265 a spill register, act as if the spill register is
1266 needed for this insn. However, the spill register
1267 can be used by any reload of this insn, so we only
1268 need do something if no need for that class has
1269 been recorded.
1270
1271 The assumption that every CALL_INSN will trigger a
 1272 	 caller-save is highly conservative; however, the number
1273 of cases where caller-saves will need a spill register but
1274 a block containing a CALL_INSN won't need a spill register
1275 of that class should be quite rare.
1276
1277 If a group is needed, the size and mode of the group will
1278 have been set up at the beginning of this loop. */
1279
1280 if (GET_CODE (insn) == CALL_INSN
1281 && caller_save_spill_class != NO_REGS)
1282 {
1283 /* See if this register would conflict with any reload
1284 that needs a group. */
1285 int nongroup_need = 0;
1286 int *caller_save_needs;
1287
1288 for (j = 0; j < n_reloads; j++)
1289 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1290 (GET_MODE_SIZE (reload_outmode[j])
1291 > GET_MODE_SIZE (reload_inmode[j]))
1292 ? reload_outmode[j]
1293 : reload_inmode[j])
1294 > 1)
1295 && reg_classes_intersect_p (caller_save_spill_class,
1296 reload_reg_class[j]))
1297 {
1298 nongroup_need = 1;
1299 break;
1300 }
1301
1302 caller_save_needs
1303 = (caller_save_group_size > 1
1304 ? insn_needs.other.groups
1305 : insn_needs.other.regs[nongroup_need]);
1306
1307 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1308 {
1309 register enum reg_class *p
1310 = reg_class_superclasses[(int) caller_save_spill_class];
1311
1312 caller_save_needs[(int) caller_save_spill_class]++;
1313
1314 while (*p != LIM_REG_CLASSES)
1315 caller_save_needs[(int) *p++] += 1;
1316 }
1317
1318 /* Show that this basic block will need a register of
1319 this class. */
1320
1321 if (global
1322 && ! (basic_block_needs[(int) caller_save_spill_class]
1323 [this_block]))
1324 {
1325 basic_block_needs[(int) caller_save_spill_class]
1326 [this_block] = 1;
1327 new_basic_block_needs = 1;
1328 }
1329 }
1330
1331 #ifdef SMALL_REGISTER_CLASSES
1332 /* If this insn stores the value of a function call,
1333 and that value is in a register that has been spilled,
1334 and if the insn needs a reload in a class
1335 that might use that register as the reload register,
 1336 	     then add an extra need in that class.
1337 This makes sure we have a register available that does
1338 not overlap the return value. */
1339
1340 if (avoid_return_reg)
1341 {
1342 int regno = REGNO (avoid_return_reg);
1343 int nregs
1344 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1345 int r;
1346 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1347
1348 /* First compute the "basic needs", which counts a
1349 need only in the smallest class in which it
1350 is required. */
1351
1352 bcopy ((char *) insn_needs.other.regs[0],
1353 (char *) basic_needs, sizeof basic_needs);
1354 bcopy ((char *) insn_needs.other.groups,
1355 (char *) basic_groups, sizeof basic_groups);
1356
1357 for (i = 0; i < N_REG_CLASSES; i++)
1358 {
1359 enum reg_class *p;
1360
1361 if (basic_needs[i] >= 0)
1362 for (p = reg_class_superclasses[i];
1363 *p != LIM_REG_CLASSES; p++)
1364 basic_needs[(int) *p] -= basic_needs[i];
1365
1366 if (basic_groups[i] >= 0)
1367 for (p = reg_class_superclasses[i];
1368 *p != LIM_REG_CLASSES; p++)
1369 basic_groups[(int) *p] -= basic_groups[i];
1370 }
1371
1372 /* Now count extra regs if there might be a conflict with
1373 the return value register. */
1374
1375 for (r = regno; r < regno + nregs; r++)
1376 if (spill_reg_order[r] >= 0)
1377 for (i = 0; i < N_REG_CLASSES; i++)
1378 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1379 {
1380 if (basic_needs[i] > 0)
1381 {
1382 enum reg_class *p;
1383
1384 insn_needs.other.regs[0][i]++;
1385 p = reg_class_superclasses[i];
1386 while (*p != LIM_REG_CLASSES)
1387 insn_needs.other.regs[0][(int) *p++]++;
1388 }
1389 if (basic_groups[i] > 0)
1390 {
1391 enum reg_class *p;
1392
1393 insn_needs.other.groups[i]++;
1394 p = reg_class_superclasses[i];
1395 while (*p != LIM_REG_CLASSES)
1396 insn_needs.other.groups[(int) *p++]++;
1397 }
1398 }
1399 }
1400 #endif /* SMALL_REGISTER_CLASSES */
1401
1402 /* For each class, collect maximum need of any insn. */
1403
1404 for (i = 0; i < N_REG_CLASSES; i++)
1405 {
1406 if (max_needs[i] < insn_needs.other.regs[0][i])
1407 {
1408 max_needs[i] = insn_needs.other.regs[0][i];
1409 max_needs_insn[i] = insn;
1410 }
1411 if (max_groups[i] < insn_needs.other.groups[i])
1412 {
1413 max_groups[i] = insn_needs.other.groups[i];
1414 max_groups_insn[i] = insn;
1415 }
1416 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1417 {
1418 max_nongroups[i] = insn_needs.other.regs[1][i];
1419 max_nongroups_insn[i] = insn;
1420 }
1421 }
1422 }
1423 /* Note that there is a continue statement above. */
1424 }
1425
1426 /* If we allocated any new memory locations, make another pass
1427 since it might have changed elimination offsets. */
1428 if (starting_frame_size != get_frame_size ())
1429 something_changed = 1;
1430
1431 if (dumpfile)
1432 for (i = 0; i < N_REG_CLASSES; i++)
1433 {
1434 if (max_needs[i] > 0)
1435 fprintf (dumpfile,
1436 ";; Need %d reg%s of class %s (for insn %d).\n",
1437 max_needs[i], max_needs[i] == 1 ? "" : "s",
1438 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1439 if (max_nongroups[i] > 0)
1440 fprintf (dumpfile,
1441 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1442 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1443 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1444 if (max_groups[i] > 0)
1445 fprintf (dumpfile,
1446 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1447 max_groups[i], max_groups[i] == 1 ? "" : "s",
1448 mode_name[(int) group_mode[i]],
1449 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1450 }
1451
1452 /* If we have caller-saves, set up the save areas and see if caller-save
1453 will need a spill register. */
1454
1455 if (caller_save_needed
1456 && ! setup_save_areas (&something_changed)
1457 && caller_save_spill_class == NO_REGS)
1458 {
1459 /* The class we will need depends on whether the machine
1460 supports the sum of two registers for an address; see
1461 find_address_reloads for details. */
1462
1463 caller_save_spill_class
1464 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1465 caller_save_group_size
1466 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1467 something_changed = 1;
1468 }
1469
1470 /* See if anything that happened changes which eliminations are valid.
1471 For example, on the Sparc, whether or not the frame pointer can
1472 be eliminated can depend on what registers have been used. We need
1473 not check some conditions again (such as flag_omit_frame_pointer)
1474 since they can't have changed. */
1475
1476 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1477 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1478 #ifdef ELIMINABLE_REGS
1479 || ! CAN_ELIMINATE (ep->from, ep->to)
1480 #endif
1481 )
1482 ep->can_eliminate = 0;
1483
1484 /* Look for the case where we have discovered that we can't replace
1485 register A with register B and that means that we will now be
1486 trying to replace register A with register C. This means we can
1487 no longer replace register C with register B and we need to disable
1488 such an elimination, if it exists. This occurs often with A == ap,
1489 B == sp, and C == fp. */
1490
1491 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1492 {
1493 struct elim_table *op;
1494 register int new_to = -1;
1495
1496 if (! ep->can_eliminate && ep->can_eliminate_previous)
1497 {
1498 /* Find the current elimination for ep->from, if there is a
1499 new one. */
1500 for (op = reg_eliminate;
1501 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1502 if (op->from == ep->from && op->can_eliminate)
1503 {
1504 new_to = op->to;
1505 break;
1506 }
1507
1508 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1509 disable it. */
1510 for (op = reg_eliminate;
1511 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1512 if (op->from == new_to && op->to == ep->to)
1513 op->can_eliminate = 0;
1514 }
1515 }
1516
1517 /* See if any registers that we thought we could eliminate the previous
1518 time are no longer eliminable. If so, something has changed and we
1519 must spill the register. Also, recompute the number of eliminable
1520 registers and see if the frame pointer is needed; it is if there is
1521 no elimination of the frame pointer that we can perform. */
1522
1523 frame_pointer_needed = 1;
1524 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1525 {
1526 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1527 && ep->to != HARD_FRAME_POINTER_REGNUM)
1528 frame_pointer_needed = 0;
1529
1530 if (! ep->can_eliminate && ep->can_eliminate_previous)
1531 {
1532 ep->can_eliminate_previous = 0;
1533 spill_hard_reg (ep->from, global, dumpfile, 1);
1534 something_changed = 1;
1535 num_eliminable--;
1536 }
1537 }
1538
1539 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1540 /* If we didn't need a frame pointer last time, but we do now, spill
1541 the hard frame pointer. */
1542 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1543 {
1544 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1545 something_changed = 1;
1546 }
1547 #endif
1548
1549 /* If all needs are met, we win. */
1550
1551 for (i = 0; i < N_REG_CLASSES; i++)
1552 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1553 break;
1554 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1555 break;
1556
1557 /* Not all needs are met; must spill some hard regs. */
1558
1559 /* Put all registers spilled so far back in potential_reload_regs, but
1560 put them at the front, since we've already spilled most of the
1561 pseudos in them (we might have left some pseudos unspilled if they
1562 were in a block that didn't need any spill registers of a conflicting
 1563 	 class).  We used to try to mark off the need for those registers,
1564 but doing so properly is very complex and reallocating them is the
1565 simpler approach. First, "pack" potential_reload_regs by pushing
1566 any nonnegative entries towards the end. That will leave room
1567 for the registers we already spilled.
1568
1569 Also, undo the marking of the spill registers from the last time
 1570 	 around in FORBIDDEN_REGS since we will probably be allocating
1571 them again below.
1572
1573 ??? It is theoretically possible that we might end up not using one
1574 of our previously-spilled registers in this allocation, even though
1575 they are at the head of the list. It's not clear what to do about
1576 this, but it was no better before, when we marked off the needs met
1577 by the previously-spilled registers. With the current code, globals
1578 can be allocated into these registers, but locals cannot. */
1579
1580 if (n_spills)
1581 {
1582 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1583 if (potential_reload_regs[i] != -1)
1584 potential_reload_regs[j--] = potential_reload_regs[i];
1585
1586 for (i = 0; i < n_spills; i++)
1587 {
1588 potential_reload_regs[i] = spill_regs[i];
1589 spill_reg_order[spill_regs[i]] = -1;
1590 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1591 }
1592
1593 n_spills = 0;
1594 }
1595
 1596       /* Now find more reload regs to satisfy the remaining need.
1597 Do it by ascending class number, since otherwise a reg
1598 might be spilled for a big class and might fail to count
1599 for a smaller class even though it belongs to that class.
1600
1601 Count spilled regs in `spills', and add entries to
1602 `spill_regs' and `spill_reg_order'.
1603
1604 ??? Note there is a problem here.
1605 When there is a need for a group in a high-numbered class,
1606 and also need for non-group regs that come from a lower class,
1607 the non-group regs are chosen first. If there aren't many regs,
1608 they might leave no room for a group.
1609
1610 This was happening on the 386. To fix it, we added the code
1611 that calls possible_group_p, so that the lower class won't
1612 break up the last possible group.
1613
1614 Really fixing the problem would require changes above
1615 in counting the regs already spilled, and in choose_reload_regs.
1616 It might be hard to avoid introducing bugs there. */
1617
1618 CLEAR_HARD_REG_SET (counted_for_groups);
1619 CLEAR_HARD_REG_SET (counted_for_nongroups);
1620
1621 for (class = 0; class < N_REG_CLASSES; class++)
1622 {
1623 /* First get the groups of registers.
1624 If we got single registers first, we might fragment
1625 possible groups. */
1626 while (max_groups[class] > 0)
1627 {
1628 /* If any single spilled regs happen to form groups,
1629 count them now. Maybe we don't really need
1630 to spill another group. */
1631 count_possible_groups (group_size, group_mode, max_groups,
1632 class);
1633
1634 if (max_groups[class] <= 0)
1635 break;
1636
1637 /* Groups of size 2 (the only groups used on most machines)
1638 are treated specially. */
1639 if (group_size[class] == 2)
1640 {
1641 /* First, look for a register that will complete a group. */
1642 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1643 {
1644 int other;
1645
1646 j = potential_reload_regs[i];
1647 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1648 &&
1649 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1650 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1651 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1652 && HARD_REGNO_MODE_OK (other, group_mode[class])
1653 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1654 other)
1655 /* We don't want one part of another group.
1656 We could get "two groups" that overlap! */
1657 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1658 ||
1659 (j < FIRST_PSEUDO_REGISTER - 1
1660 && (other = j + 1, spill_reg_order[other] >= 0)
1661 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1662 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1663 && HARD_REGNO_MODE_OK (j, group_mode[class])
1664 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1665 other)
1666 && ! TEST_HARD_REG_BIT (counted_for_groups,
1667 other))))
1668 {
1669 register enum reg_class *p;
1670
1671 /* We have found one that will complete a group,
1672 so count off one group as provided. */
1673 max_groups[class]--;
1674 p = reg_class_superclasses[class];
1675 while (*p != LIM_REG_CLASSES)
1676 {
1677 if (group_size [(int) *p] <= group_size [class])
1678 max_groups[(int) *p]--;
1679 p++;
1680 }
1681
1682 /* Indicate both these regs are part of a group. */
1683 SET_HARD_REG_BIT (counted_for_groups, j);
1684 SET_HARD_REG_BIT (counted_for_groups, other);
1685 break;
1686 }
1687 }
1688 /* We can't complete a group, so start one. */
1689 #ifdef SMALL_REGISTER_CLASSES
1690 /* Look for a pair neither of which is explicitly used. */
1691 if (i == FIRST_PSEUDO_REGISTER)
1692 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1693 {
1694 int k;
1695 j = potential_reload_regs[i];
1696 /* Verify that J+1 is a potential reload reg. */
1697 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1698 if (potential_reload_regs[k] == j + 1)
1699 break;
1700 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1701 && k < FIRST_PSEUDO_REGISTER
1702 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1703 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1704 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1705 && HARD_REGNO_MODE_OK (j, group_mode[class])
1706 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1707 j + 1)
1708 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1709 /* Reject J at this stage
1710 if J+1 was explicitly used. */
1711 && ! regs_explicitly_used[j + 1])
1712 break;
1713 }
1714 #endif
1715 /* Now try any group at all
1716 whose registers are not in bad_spill_regs. */
1717 if (i == FIRST_PSEUDO_REGISTER)
1718 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1719 {
1720 int k;
1721 j = potential_reload_regs[i];
1722 /* Verify that J+1 is a potential reload reg. */
1723 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1724 if (potential_reload_regs[k] == j + 1)
1725 break;
1726 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1727 && k < FIRST_PSEUDO_REGISTER
1728 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1729 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1730 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1731 && HARD_REGNO_MODE_OK (j, group_mode[class])
1732 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1733 j + 1)
1734 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1735 break;
1736 }
1737
1738 /* I should be the index in potential_reload_regs
1739 of the new reload reg we have found. */
1740
1741 if (i >= FIRST_PSEUDO_REGISTER)
1742 {
1743 /* There are no groups left to spill. */
1744 spill_failure (max_groups_insn[class]);
1745 failure = 1;
1746 goto failed;
1747 }
1748 else
1749 something_changed
1750 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1751 global, dumpfile);
1752 }
1753 else
1754 {
1755 /* For groups of more than 2 registers,
1756 look for a sufficient sequence of unspilled registers,
1757 and spill them all at once. */
1758 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1759 {
1760 int k;
1761
1762 j = potential_reload_regs[i];
1763 if (j >= 0
1764 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1765 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1766 {
1767 /* Check each reg in the sequence. */
1768 for (k = 0; k < group_size[class]; k++)
1769 if (! (spill_reg_order[j + k] < 0
1770 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1771 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1772 break;
1773 /* We got a full sequence, so spill them all. */
1774 if (k == group_size[class])
1775 {
1776 register enum reg_class *p;
1777 for (k = 0; k < group_size[class]; k++)
1778 {
1779 int idx;
1780 SET_HARD_REG_BIT (counted_for_groups, j + k);
1781 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1782 if (potential_reload_regs[idx] == j + k)
1783 break;
1784 something_changed
1785 |= new_spill_reg (idx, class,
1786 max_needs, NULL_PTR,
1787 global, dumpfile);
1788 }
1789
1790 /* We have found one that will complete a group,
1791 so count off one group as provided. */
1792 max_groups[class]--;
1793 p = reg_class_superclasses[class];
1794 while (*p != LIM_REG_CLASSES)
1795 {
1796 if (group_size [(int) *p]
1797 <= group_size [class])
1798 max_groups[(int) *p]--;
1799 p++;
1800 }
1801 break;
1802 }
1803 }
1804 }
1805 /* We couldn't find any registers for this reload.
1806 Avoid going into an infinite loop. */
1807 if (i >= FIRST_PSEUDO_REGISTER)
1808 {
1809 /* There are no groups left. */
1810 spill_failure (max_groups_insn[class]);
1811 failure = 1;
1812 goto failed;
1813 }
1814 }
1815 }
1816
1817 /* Now similarly satisfy all need for single registers. */
1818
1819 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1820 {
1821 #ifdef SMALL_REGISTER_CLASSES
1822 /* This should be right for all machines, but only the 386
1823 is known to need it, so this conditional plays safe.
1824 ??? For 2.5, try making this unconditional. */
1825 /* If we spilled enough regs, but they weren't counted
1826 against the non-group need, see if we can count them now.
1827 If so, we can avoid some actual spilling. */
1828 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1829 for (i = 0; i < n_spills; i++)
1830 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1831 spill_regs[i])
1832 && !TEST_HARD_REG_BIT (counted_for_groups,
1833 spill_regs[i])
1834 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1835 spill_regs[i])
1836 && max_nongroups[class] > 0)
1837 {
1838 register enum reg_class *p;
1839
1840 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1841 max_nongroups[class]--;
1842 p = reg_class_superclasses[class];
1843 while (*p != LIM_REG_CLASSES)
1844 max_nongroups[(int) *p++]--;
1845 }
1846 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1847 break;
1848 #endif
1849
1850 /* Consider the potential reload regs that aren't
1851 yet in use as reload regs, in order of preference.
1852 Find the most preferred one that's in this class. */
1853
1854 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1855 if (potential_reload_regs[i] >= 0
1856 && TEST_HARD_REG_BIT (reg_class_contents[class],
1857 potential_reload_regs[i])
1858 /* If this reg will not be available for groups,
1859 pick one that does not foreclose possible groups.
1860 This is a kludge, and not very general,
1861 but it should be sufficient to make the 386 work,
1862 and the problem should not occur on machines with
1863 more registers. */
1864 && (max_nongroups[class] == 0
1865 || possible_group_p (potential_reload_regs[i], max_groups)))
1866 break;
1867
1868 /* If we couldn't get a register, try to get one even if we
1869 might foreclose possible groups. This may cause problems
1870 later, but that's better than aborting now, since it is
1871 possible that we will, in fact, be able to form the needed
1872 group even with this allocation. */
1873
1874 if (i >= FIRST_PSEUDO_REGISTER
1875 && (asm_noperands (max_needs[class] > 0
1876 ? max_needs_insn[class]
1877 : max_nongroups_insn[class])
1878 < 0))
1879 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1880 if (potential_reload_regs[i] >= 0
1881 && TEST_HARD_REG_BIT (reg_class_contents[class],
1882 potential_reload_regs[i]))
1883 break;
1884
1885 /* I should be the index in potential_reload_regs
1886 of the new reload reg we have found. */
1887
1888 if (i >= FIRST_PSEUDO_REGISTER)
1889 {
1890 /* There are no possible registers left to spill. */
1891 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1892 : max_nongroups_insn[class]);
1893 failure = 1;
1894 goto failed;
1895 }
1896 else
1897 something_changed
1898 |= new_spill_reg (i, class, max_needs, max_nongroups,
1899 global, dumpfile);
1900 }
1901 }
1902 }
1903
1904 /* If global-alloc was run, notify it of any register eliminations we have
1905 done. */
1906 if (global)
1907 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1908 if (ep->can_eliminate)
1909 mark_elimination (ep->from, ep->to);
1910
  /* Insert code to save and restore call-clobbered hard regs
     around calls.  Tell what mode to use so that we will process
     those insns in reload_as_needed if we have to.  */
1914
1915 if (caller_save_needed)
1916 save_call_clobbered_regs (num_eliminable ? QImode
1917 : caller_save_spill_class != NO_REGS ? HImode
1918 : VOIDmode);
1919
1920 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1921 If that insn didn't set the register (i.e., it copied the register to
1922 memory), just delete that insn instead of the equivalencing insn plus
1923 anything now dead. If we call delete_dead_insn on that insn, we may
     delete the insn that actually sets the register if the register dies
     there, and that is incorrect.  */
1926
1927 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1928 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1929 && GET_CODE (reg_equiv_init[i]) != NOTE)
1930 {
1931 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1932 delete_dead_insn (reg_equiv_init[i]);
1933 else
1934 {
1935 PUT_CODE (reg_equiv_init[i], NOTE);
1936 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1937 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1938 }
1939 }
1940
1941 /* Use the reload registers where necessary
1942 by generating move instructions to move the must-be-register
1943 values into or out of the reload registers. */
1944
1945 if (something_needs_reloads || something_needs_elimination
1946 || (caller_save_needed && num_eliminable)
1947 || caller_save_spill_class != NO_REGS)
1948 reload_as_needed (first, global);
1949
1950 /* If we were able to eliminate the frame pointer, show that it is no
     longer live at the start of any basic block.  If it is live by
1952 virtue of being in a pseudo, that pseudo will be marked live
1953 and hence the frame pointer will be known to be live via that
1954 pseudo. */
1955
1956 if (! frame_pointer_needed)
1957 for (i = 0; i < n_basic_blocks; i++)
1958 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1959 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1960 % REGSET_ELT_BITS));
1961
1962 /* Come here (with failure set nonzero) if we can't get enough spill regs
1963 and we decide not to abort about it. */
1964 failed:
1965
1966 reload_in_progress = 0;
1967
1968 /* Now eliminate all pseudo regs by modifying them into
1969 their equivalent memory references.
1970 The REG-rtx's for the pseudos are modified in place,
1971 so all insns that used to refer to them now refer to memory.
1972
1973 For a reg that has a reg_equiv_address, all those insns
1974 were changed by reloading so that no insns refer to it any longer;
1975 but the DECL_RTL of a variable decl may refer to it,
1976 and if so this causes the debugging info to mention the variable. */
1977
1978 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1979 {
1980 rtx addr = 0;
1981 int in_struct = 0;
1982 if (reg_equiv_mem[i])
1983 {
1984 addr = XEXP (reg_equiv_mem[i], 0);
1985 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1986 }
1987 if (reg_equiv_address[i])
1988 addr = reg_equiv_address[i];
1989 if (addr)
1990 {
1991 if (reg_renumber[i] < 0)
1992 {
1993 rtx reg = regno_reg_rtx[i];
1994 XEXP (reg, 0) = addr;
1995 REG_USERVAR_P (reg) = 0;
1996 MEM_IN_STRUCT_P (reg) = in_struct;
1997 PUT_CODE (reg, MEM);
1998 }
1999 else if (reg_equiv_mem[i])
2000 XEXP (reg_equiv_mem[i], 0) = addr;
2001 }
2002 }
2003
2004 #ifdef PRESERVE_DEATH_INFO_REGNO_P
2005 /* Make a pass over all the insns and remove death notes for things that
2006 are no longer registers or no longer die in the insn (e.g., an input
2007 and output pseudo being tied). */
2008
2009 for (insn = first; insn; insn = NEXT_INSN (insn))
2010 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2011 {
2012 rtx note, next;
2013
2014 for (note = REG_NOTES (insn); note; note = next)
2015 {
2016 next = XEXP (note, 1);
2017 if (REG_NOTE_KIND (note) == REG_DEAD
2018 && (GET_CODE (XEXP (note, 0)) != REG
2019 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2020 remove_note (insn, note);
2021 }
2022 }
2023 #endif
2024
2025 /* Indicate that we no longer have known memory locations or constants. */
2026 reg_equiv_constant = 0;
2027 reg_equiv_memory_loc = 0;
2028
2029 if (scratch_list)
2030 free (scratch_list);
2031 scratch_list = 0;
2032 if (scratch_block)
2033 free (scratch_block);
2034 scratch_block = 0;
2035
2036 CLEAR_HARD_REG_SET (used_spill_regs);
2037 for (i = 0; i < n_spills; i++)
2038 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
2039
2040 return failure;
2041 }
2042 \f
2043 /* Nonzero if, after spilling reg REGNO for non-groups,
2044 it will still be possible to find a group if we still need one. */
2045
2046 static int
2047 possible_group_p (regno, max_groups)
2048 int regno;
2049 int *max_groups;
2050 {
2051 int i;
2052 int class = (int) NO_REGS;
2053
2054 for (i = 0; i < (int) N_REG_CLASSES; i++)
2055 if (max_groups[i] > 0)
2056 {
2057 class = i;
2058 break;
2059 }
2060
2061 if (class == (int) NO_REGS)
2062 return 1;
2063
2064 /* Consider each pair of consecutive registers. */
2065 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2066 {
2067 /* Ignore pairs that include reg REGNO. */
2068 if (i == regno || i + 1 == regno)
2069 continue;
2070
2071 /* Ignore pairs that are outside the class that needs the group.
2072 ??? Here we fail to handle the case where two different classes
2073 independently need groups. But this never happens with our
2074 current machine descriptions. */
2075 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2076 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2077 continue;
2078
2079 /* A pair of consecutive regs we can still spill does the trick. */
2080 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2081 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2082 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2083 return 1;
2084
2085 /* A pair of one already spilled and one we can spill does it
2086 provided the one already spilled is not otherwise reserved. */
2087 if (spill_reg_order[i] < 0
2088 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2089 && spill_reg_order[i + 1] >= 0
2090 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2091 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2092 return 1;
2093 if (spill_reg_order[i + 1] < 0
2094 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2095 && spill_reg_order[i] >= 0
2096 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2097 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2098 return 1;
2099 }
2100
2101 return 0;
2102 }
2103 \f
2104 /* Count any groups of CLASS that can be formed from the registers recently
2105 spilled. */
2106
static void
count_possible_groups (group_size, group_mode, max_groups, class)
     int *group_size;
     enum machine_mode *group_mode;
     int *max_groups;
     int class;
{
  /* Set of already-spilled regs in CLASS not yet reserved for groups
     or non-groups; candidates for forming new groups.  */
  HARD_REG_SET new;
  int i, j;

  /* Now find all consecutive groups of spilled registers
     and mark each group off against the need for such groups.
     But don't count them against ordinary need, yet.  */

  if (group_size[class] == 0)
    return;

  CLEAR_HARD_REG_SET (new);

  /* Make a mask of all the regs that are spill regs in class I.  */
  for (i = 0; i < n_spills; i++)
    if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
	&& ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
	&& ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
      SET_HARD_REG_BIT (new, spill_regs[i]);

  /* Find each consecutive group of them.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
    if (TEST_HARD_REG_BIT (new, i)
	&& i + group_size[class] <= FIRST_PSEUDO_REGISTER
	&& HARD_REGNO_MODE_OK (i, group_mode[class]))
      {
	/* Verify the whole run of group_size consecutive regs is free.  */
	for (j = 1; j < group_size[class]; j++)
	  if (! TEST_HARD_REG_BIT (new, i + j))
	    break;

	if (j == group_size[class])
	  {
	    /* We found a group.  Mark it off against this class's need for
	       groups, and against each superclass too.  */
	    register enum reg_class *p;

	    max_groups[class]--;
	    p = reg_class_superclasses[class];
	    while (*p != LIM_REG_CLASSES)
	      {
		/* A superclass whose groups are no larger than ours is
		   satisfied by this group as well.  */
		if (group_size [(int) *p] <= group_size [class])
		  max_groups[(int) *p]--;
		p++;
	      }

	    /* Don't count these registers again.  */
	    for (j = 0; j < group_size[class]; j++)
	      SET_HARD_REG_BIT (counted_for_groups, i + j);
	  }

	/* Skip to the last reg in this group.  When i is incremented above,
	   it will then point to the first reg of the next possible group.  */
	i += j - 1;
      }
}
2168 \f
2169 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2170 another mode that needs to be reloaded for the same register class CLASS.
2171 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2172 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2173
2174 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2175 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2176 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2177 causes unnecessary failures on machines requiring alignment of register
2178 groups when the two modes are different sizes, because the larger mode has
2179 more strict alignment rules than the smaller mode. */
2180
2181 static int
2182 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2183 enum machine_mode allocate_mode, other_mode;
2184 enum reg_class class;
2185 {
2186 register int regno;
2187 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2188 {
2189 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2190 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2191 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2192 return 0;
2193 }
2194 return 1;
2195 }
2196
2197 /* Handle the failure to find a register to spill.
2198 INSN should be one of the insns which needed this particular spill reg. */
2199
2200 static void
2201 spill_failure (insn)
2202 rtx insn;
2203 {
2204 if (asm_noperands (PATTERN (insn)) >= 0)
2205 error_for_asm (insn, "`asm' needs too many reloads");
2206 else
2207 fatal_insn ("Unable to find a register to spill.", insn);
2208 }
2209
2210 /* Add a new register to the tables of available spill-registers
2211 (as well as spilling all pseudos allocated to the register).
2212 I is the index of this register in potential_reload_regs.
2213 CLASS is the regclass whose need is being satisfied.
2214 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2215 so that this register can count off against them.
2216 MAX_NONGROUPS is 0 if this register is part of a group.
2217 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2218
static int
new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
     int i;
     int class;
     int *max_needs;
     int *max_nongroups;
     int global;
     FILE *dumpfile;
{
  register enum reg_class *p;
  int val;
  /* The hard register being turned into a spill reg.  */
  int regno = potential_reload_regs[i];

  if (i >= FIRST_PSEUDO_REGISTER)
    abort ();	/* Caller failed to find any register.  */

  if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
    fatal ("fixed or forbidden register was spilled.\n\
This may be due to a compiler bug or to impossible asm\n\
statements or clauses.");

  /* Make reg REGNO an additional reload reg.  */

  potential_reload_regs[i] = -1;
  spill_regs[n_spills] = regno;
  spill_reg_order[regno] = n_spills;
  if (dumpfile)
    fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);

  /* Clear off the needs we just satisfied.  */

  max_needs[class]--;
  p = reg_class_superclasses[class];
  while (*p != LIM_REG_CLASSES)
    max_needs[(int) *p++]--;

  /* If this reg also satisfies a non-group need, reserve it for
     non-group use and count it off against CLASS and all superclasses.
     (MAX_NONGROUPS is 0 when this reg is part of a group.)  */
  if (max_nongroups && max_nongroups[class] > 0)
    {
      SET_HARD_REG_BIT (counted_for_nongroups, regno);
      max_nongroups[class]--;
      p = reg_class_superclasses[class];
      while (*p != LIM_REG_CLASSES)
	max_nongroups[(int) *p++]--;
    }

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);

  /* If there are some registers still to eliminate and this register
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.
     (If new pseudos were spilled, thus requiring more space, VAL would have
     been set non-zero by the call to spill_hard_reg above since additional
     reloads may be needed in that case.

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */
  if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
    val = 1;

  regs_ever_live[spill_regs[n_spills]] = 1;
  n_spills++;

  return val;
}
2290 \f
2291 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2292 data that is dead in INSN. */
2293
2294 static void
2295 delete_dead_insn (insn)
2296 rtx insn;
2297 {
2298 rtx prev = prev_real_insn (insn);
2299 rtx prev_dest;
2300
2301 /* If the previous insn sets a register that dies in our insn, delete it
2302 too. */
2303 if (prev && GET_CODE (PATTERN (prev)) == SET
2304 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2305 && reg_mentioned_p (prev_dest, PATTERN (insn))
2306 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2307 delete_dead_insn (prev);
2308
2309 PUT_CODE (insn, NOTE);
2310 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2311 NOTE_SOURCE_FILE (insn) = 0;
2312 }
2313
2314 /* Modify the home of pseudo-reg I.
2315 The new home is present in reg_renumber[I].
2316
2317 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2318 or it may be -1, meaning there is none or it is not relevant.
2319 This is used so that all pseudos spilled from a given hard reg
2320 can share one stack slot. */
2321
static void
alter_reg (i, from_reg)
     register int i;
     int from_reg;
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (GET_CODE (regno_reg_rtx[i]) != REG)
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  REGNO (regno_reg_rtx[i])
    = reg_renumber[i] >= 0 ? reg_renumber[i] : i;

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && reg_n_refs[i] > 0
      && reg_equiv_constant[i] == 0
      && reg_equiv_memory_loc[i] == 0)
    {
      register rtx x;
      int inherent_size = PSEUDO_REGNO_BYTES (i);
      int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      /* Byte adjustment applied to the slot address at the end, used
	 to implement the big-endian correction below.  */
      int adjust = 0;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, than the previous slot.  */
      if (from_reg == -1)
	{
	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
	  if (BYTES_BIG_ENDIAN)
	    /* Cancel the  big-endian correction done in assign_stack_local.
	       Get the address of the beginning of the slot.
	       This is so we can do a big-endian correction unconditionally
	       below.  */
	    adjust = inherent_size - total_size;

	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
	}
      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size))
	x = spill_stack_slot[from_reg];
      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
	  rtx stack_slot;
	  if (spill_stack_slot[from_reg])
	    {
	      /* Grow to at least the mode and width of the old slot so
		 everything previously spilled from FROM_REG still fits.  */
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	    }
	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size, -1);
	  stack_slot = x;
	  if (BYTES_BIG_ENDIAN)
	    {
	      /* Cancel the  big-endian correction done in assign_stack_local.
		 Get the address of the beginning of the slot.
		 This is so we can do a big-endian correction unconditionally
		 below.  */
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (adjust)
		stack_slot = gen_rtx (MEM, mode_for_size (total_size
							  * BITS_PER_UNIT,
							  MODE_INT, 1),
				      plus_constant (XEXP (x, 0), adjust));
	    }
	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
	adjust += (total_size - inherent_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
	{
	  x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
		       plus_constant (XEXP (x, 0), adjust));
	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
	}

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc[i] = x;
    }
}
2435
2436 /* Mark the slots in regs_ever_live for the hard regs
2437 used by pseudo-reg number REGNO. */
2438
2439 void
2440 mark_home_live (regno)
2441 int regno;
2442 {
2443 register int i, lim;
2444 i = reg_renumber[regno];
2445 if (i < 0)
2446 return;
2447 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2448 while (i < lim)
2449 regs_ever_live[i++] = 1;
2450 }
2451
2452 /* Mark the registers used in SCRATCH as being live. */
2453
2454 static void
2455 mark_scratch_live (scratch)
2456 rtx scratch;
2457 {
2458 register int i;
2459 int regno = REGNO (scratch);
2460 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2461
2462 for (i = regno; i < lim; i++)
2463 regs_ever_live[i] = 1;
2464 }
2465 \f
2466 /* This function handles the tracking of elimination offsets around branches.
2467
2468 X is a piece of RTL being scanned.
2469
2470 INSN is the insn that it came from, if any.
2471
2472 INITIAL_P is non-zero if we are to set the offset to be the initial
2473 offset and zero if we are setting the offset of the label to be the
2474 current offset. */
2475
static void
set_label_offsets (x, insn, initial_p)
     rtx x;
     rtx insn;
     int initial_p;
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      /* Non-local labels are reached from other functions; their
	 elimination offsets are not tracked here.  */
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = XEXP (x, 0);

      /* ... fall through ... */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x)][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && GET_CODE (tem) == BARRIER)
	{
	  /* Control cannot fall into this label, so adopt the offsets
	     recorded for it and recount which eliminations are off
	     their initial offsets.  */
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (x)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x)][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_INSN:
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ... */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
	 and hence must have all eliminations at their initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the address vector must be at their initial
	 offsets.  We want the first field for ADDR_VEC and the second
	 field for ADDR_DIFF_VEC.  */

      for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Recurse into each label arm; a non-label, non-PC, non-RETURN
	     arm falls out of the switch to the disable-all code below.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
    }
}
2620 \f
2621 /* Used for communication between the next two function to properly share
2622 the vector for an ASM_OPERANDS. */
2623
2624 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2625
2626 /* Scan X and replace any eliminable registers (such as fp) with a
2627 replacement (such as sp), plus an offset.
2628
2629 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2630 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2631 MEM, we are allowed to replace a sum of a register and the constant zero
2632 with the register, which we cannot do outside a MEM. In addition, we need
2633 to record the fact that a register is referenced outside a MEM.
2634
2635 If INSN is an insn, it is the insn containing X. If we replace a REG
2636 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2637 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2638 that the REG is being modified.
2639
2640 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2641 That's used when we eliminate in expressions stored in notes.
2642 This means, do not set ref_outside_mem even if the reference
2643 is outside of MEMs.
2644
2645 If we see a modification to a register we know about, take the
2646 appropriate action (see case SET, below).
2647
2648 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2649 replacements done assuming all offsets are at their initial values. If
2650 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2651 encounter, return the actual location so that find_reloads will do
2652 the proper thing. */
2653
2654 rtx
2655 eliminate_regs (x, mem_mode, insn)
2656      rtx x;
2657      enum machine_mode mem_mode;
2658      rtx insn;
2659 {
2660   enum rtx_code code = GET_CODE (x);
2661   struct elim_table *ep;
2662   int regno;
2663   rtx new;
2664   int i, j;
2665   char *fmt;
     /* Nonzero once X has been replaced by a fresh copy, so the generic
        traversal below may store into it in place.  */
2666   int copied = 0;
2667
2668   switch (code)
2669     {
2670     case CONST_INT:
2671     case CONST_DOUBLE:
2672     case CONST:
2673     case SYMBOL_REF:
2674     case CODE_LABEL:
2675     case PC:
2676     case CC0:
2677     case ASM_INPUT:
2678     case ADDR_VEC:
2679     case ADDR_DIFF_VEC:
2680     case RETURN:
2681       return x;
2682
2683     case REG:
2684       regno = REGNO (x);
2685
2686       /* First handle the case where we encounter a bare register that
2687          is eliminable.  Replace it with a PLUS.  */
2688       if (regno < FIRST_PSEUDO_REGISTER)
2689         {
2690           for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2691                ep++)
2692             if (ep->from_rtx == x && ep->can_eliminate)
2693               {
2694                 if (! mem_mode
2695                     /* Refs inside notes don't count for this purpose. */
2696                     && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2697                                         || GET_CODE (insn) == INSN_LIST)))
2698                   ep->ref_outside_mem = 1;
2699                 return plus_constant (ep->to_rtx, ep->previous_offset);
2700               }
2701
2702         }
2703       else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2704                && (reg_equiv_address[regno] || num_not_at_initial_offset))
2705         {
2706           /* In this case, find_reloads would attempt to either use an
2707              incorrect address (if something is not at its initial offset)
2708              or substitute a replaced address into an insn (which loses
2709              if the offset is changed by some later action).  So we simply
2710              return the replaced stack slot (assuming it is changed by
2711              elimination) and ignore the fact that this is actually a
2712              reference to the pseudo.  Ensure we make a copy of the
2713              address in case it is shared. */
2714           new = eliminate_regs (reg_equiv_memory_loc[regno],
2715                                 mem_mode, insn);
2716           if (new != reg_equiv_memory_loc[regno])
2717             {
2718               cannot_omit_stores[regno] = 1;
2719               return copy_rtx (new);
2720             }
2721         }
2722       return x;
2723
2724     case PLUS:
2725       /* If this is the sum of an eliminable register and a constant, rework
2726          the sum. */
2727       if (GET_CODE (XEXP (x, 0)) == REG
2728           && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2729           && CONSTANT_P (XEXP (x, 1)))
2730         {
2731           for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2732                ep++)
2733             if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2734               {
2735                 if (! mem_mode
2736                     /* Refs inside notes don't count for this purpose. */
2737                     && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2738                                         || GET_CODE (insn) == INSN_LIST)))
2739                   ep->ref_outside_mem = 1;
2740
2741                 /* The only time we want to replace a PLUS with a REG (this
2742                    occurs when the constant operand of the PLUS is the negative
2743                    of the offset) is when we are inside a MEM.  We won't want
2744                    to do so at other times because that would change the
2745                    structure of the insn in a way that reload can't handle.
2746                    We special-case the commonest situation in
2747                    eliminate_regs_in_insn, so just replace a PLUS with a
2748                    PLUS here, unless inside a MEM. */
2749                 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2750                     && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2751                   return ep->to_rtx;
2752                 else
2753                   return gen_rtx (PLUS, Pmode, ep->to_rtx,
2754                                   plus_constant (XEXP (x, 1),
2755                                                  ep->previous_offset));
2756               }
2757
2758           /* If the register is not eliminable, we are done since the other
2759              operand is a constant. */
2760           return x;
2761         }
2762
2763       /* If this is part of an address, we want to bring any constant to the
2764          outermost PLUS.  We will do this by doing register replacement in
2765          our operands and seeing if a constant shows up in one of them.
2766
2767          We assume here this is part of an address (or a "load address" insn)
2768          since an eliminable register is not likely to appear in any other
2769          context.
2770
2771          If we have (plus (eliminable) (reg)), we want to produce
2772          (plus (plus (replacement) (reg) (const))).  If this was part of a
2773          normal add insn, (plus (replacement) (reg)) will be pushed as a
2774          reload.  This is the desired action. */
2775
2776       {
2777         rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2778         rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2779
2780         if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2781           {
2782             /* If one side is a PLUS and the other side is a pseudo that
2783                didn't get a hard register but has a reg_equiv_constant,
2784                we must replace the constant here since it may no longer
2785                be in the position of any operand. */
2786             if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2787                 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2788                 && reg_renumber[REGNO (new1)] < 0
2789                 && reg_equiv_constant != 0
2790                 && reg_equiv_constant[REGNO (new1)] != 0)
2791               new1 = reg_equiv_constant[REGNO (new1)];
2792             else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2793                      && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2794                      && reg_renumber[REGNO (new0)] < 0
2795                      && reg_equiv_constant[REGNO (new0)] != 0)
2796               new0 = reg_equiv_constant[REGNO (new0)];
2797
2798             new = form_sum (new0, new1);
2799
2800             /* As above, if we are not inside a MEM we do not want to
2801                turn a PLUS into something else.  We might try to do so here
2802                for an addition of 0 if we aren't optimizing. */
2803             if (! mem_mode && GET_CODE (new) != PLUS)
2804               return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2805             else
2806               return new;
2807           }
2808       }
2809       return x;
2810
2811     case MULT:
2812       /* If this is the product of an eliminable register and a
2813          constant, apply the distribute law and move the constant out
2814          so that we have (plus (mult ..) ..).  This is needed in order
2815          to keep load-address insns valid.   This case is pathological.
2816          We ignore the possibility of overflow here. */
2817       if (GET_CODE (XEXP (x, 0)) == REG
2818           && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2819           && GET_CODE (XEXP (x, 1)) == CONST_INT)
2820         for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2821              ep++)
2822           if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2823             {
2824               if (! mem_mode
2825                   /* Refs inside notes don't count for this purpose. */
2826                   && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2827                                       || GET_CODE (insn) == INSN_LIST)))
2828                 ep->ref_outside_mem = 1;
2829
2830               return
2831                 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2832                                ep->previous_offset * INTVAL (XEXP (x, 1)));
2833             }
2834
2835       /* ... fall through ... */
2836
2837     case CALL:
2838     case COMPARE:
2839     case MINUS:
2840     case DIV: case UDIV:
2841     case MOD: case UMOD:
2842     case AND: case IOR: case XOR:
2843     case ROTATERT: case ROTATE:
2844     case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2845     case NE: case EQ:
2846     case GE: case GT: case GEU: case GTU:
2847     case LE: case LT: case LEU: case LTU:
2848       {
2849         rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2850         rtx new1
2851           = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2852
2853         if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2854           return gen_rtx (code, GET_MODE (x), new0, new1);
2855       }
2856       return x;
2857
2858     case EXPR_LIST:
2859       /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2860       if (XEXP (x, 0))
2861         {
2862           new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2863           if (new != XEXP (x, 0))
2864             x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2865         }
2866
2867       /* ... fall through ... */
2868
2869     case INSN_LIST:
2870       /* Now do eliminations in the rest of the chain.  If this was
2871          an EXPR_LIST, this might result in allocating more memory than is
2872          strictly needed, but it simplifies the code. */
2873       if (XEXP (x, 1))
2874         {
2875           new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2876           if (new != XEXP (x, 1))
2877             return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2878         }
2879       return x;
2880
2881     case PRE_INC:
2882     case POST_INC:
2883     case PRE_DEC:
2884     case POST_DEC:
     /* An auto-increment of a replacement register changes the offset of
        every elimination that uses it; record the adjustment here.  */
2885       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2886         if (ep->to_rtx == XEXP (x, 0))
2887           {
2888             int size = GET_MODE_SIZE (mem_mode);
2889
2890             /* If more bytes than MEM_MODE are pushed, account for them. */
2891 #ifdef PUSH_ROUNDING
2892             if (ep->to_rtx == stack_pointer_rtx)
2893               size = PUSH_ROUNDING (size);
2894 #endif
2895             if (code == PRE_DEC || code == POST_DEC)
2896               ep->offset += size;
2897             else
2898               ep->offset -= size;
2899           }
2900
2901       /* Fall through to generic unary operation case. */
2902     case USE:
2903     case STRICT_LOW_PART:
2904     case NEG:          case NOT:
2905     case SIGN_EXTEND:  case ZERO_EXTEND:
2906     case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2907     case FLOAT:        case FIX:
2908     case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2909     case ABS:
2910     case SQRT:
2911     case FFS:
2912       new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2913       if (new != XEXP (x, 0))
2914         return gen_rtx (code, GET_MODE (x), new);
2915       return x;
2916
2917     case SUBREG:
2918       /* Similar to above processing, but preserve SUBREG_WORD.
2919          Convert (subreg (mem)) to (mem) if not paradoxical.
2920          Also, if we have a non-paradoxical (subreg (pseudo)) and the
2921          pseudo didn't get a hard reg, we must replace this with the
2922          eliminated version of the memory location because push_reloads
2923          may do the replacement in certain circumstances. */
2924       if (GET_CODE (SUBREG_REG (x)) == REG
2925           && (GET_MODE_SIZE (GET_MODE (x))
2926               <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2927           && reg_equiv_memory_loc != 0
2928           && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2929         {
2930           new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2931                                 mem_mode, insn);
2932
2933           /* If we didn't change anything, we must retain the pseudo. */
2934           if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2935             new = SUBREG_REG (x);
2936           else
2937             {
2938               /* Otherwise, ensure NEW isn't shared in case we have to reload
2939                  it. */
2940               new = copy_rtx (new);
2941
2942               /* In this case, we must show that the pseudo is used in this
2943                  insn so that delete_output_reload will do the right thing. */
2944               if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2945                   && GET_CODE (insn) != INSN_LIST)
2946                 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
2947                                   insn);
2948             }
2949         }
2950       else
2951         new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2952
2953       if (new != XEXP (x, 0))
2954         {
2955           if (GET_CODE (new) == MEM
2956               && (GET_MODE_SIZE (GET_MODE (x))
2957                   <= GET_MODE_SIZE (GET_MODE (new)))
2958 #ifdef LOAD_EXTEND_OP
2959               /* On these machines we will be reloading what is
2960                  inside the SUBREG if it originally was a pseudo and
2961                  the inner and outer modes are both a word or
2962                  smaller.  So leave the SUBREG then. */
2963               && ! (GET_CODE (SUBREG_REG (x)) == REG
2964                     && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2965                     && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD
2966                     && (GET_MODE_SIZE (GET_MODE (x))
2967                         > GET_MODE_SIZE (GET_MODE (new)))
2968                     && INTEGRAL_MODE_P (GET_MODE (new))
2969                     && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
2970 #endif
2971               )
2972             {
     /* Non-paradoxical (subreg (mem)): fold the subreg into the MEM by
        adjusting its address and narrowing its mode.  */
2973               int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2974               enum machine_mode mode = GET_MODE (x);
2975
2976               if (BYTES_BIG_ENDIAN)
2977                 offset += (MIN (UNITS_PER_WORD,
2978                                 GET_MODE_SIZE (GET_MODE (new)))
2979                            - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2980
2981               PUT_MODE (new, mode);
2982               XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2983               return new;
2984             }
2985           else
2986             return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2987         }
2988
2989       return x;
2990
2991     case CLOBBER:
2992       /* If clobbering a register that is the replacement register for an
2993          elimination we still think can be performed, note that it cannot
2994          be performed.  Otherwise, we need not be concerned about it. */
2995       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2996         if (ep->to_rtx == XEXP (x, 0))
2997           ep->can_eliminate = 0;
2998
2999       new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3000       if (new != XEXP (x, 0))
3001         return gen_rtx (code, GET_MODE (x), new);
3002       return x;
3003
3004     case ASM_OPERANDS:
3005       {
3006         rtx *temp_vec;
3007         /* Properly handle sharing input and constraint vectors. */
3008         if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3009           {
3010             /* When we come to a new vector not seen before,
3011                scan all its elements; keep the old vector if none
3012                of them changes; otherwise, make a copy. */
3013             old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3014             temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3015             for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3016               temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
3017                                             mem_mode, insn);
3018
3019             for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3020               if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3021                 break;
3022
3023             if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3024               new_asm_operands_vec = old_asm_operands_vec;
3025             else
3026               new_asm_operands_vec
3027                 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3028           }
3029
3030         /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3031         if (new_asm_operands_vec == old_asm_operands_vec)
3032           return x;
3033
3034         new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3035                        ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3036                        ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
3037                        ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3038                        ASM_OPERANDS_SOURCE_FILE (x),
3039                        ASM_OPERANDS_SOURCE_LINE (x));
3040         new->volatil = x->volatil;
3041         return new;
3042       }
3043
3044     case SET:
3045       /* Check for setting a register that we know about. */
3046       if (GET_CODE (SET_DEST (x)) == REG)
3047         {
3048           /* See if this is setting the replacement register for an
3049              elimination.
3050
3051              If DEST is the hard frame pointer, we do nothing because we
3052              assume that all assignments to the frame pointer are for
3053              non-local gotos and are being done at a time when they are valid
3054              and do not disturb anything else.  Some machines want to
3055              eliminate a fake argument pointer (or even a fake frame pointer)
3056              with either the real frame or the stack pointer.  Assignments to
3057              the hard frame pointer must not prevent this elimination. */
3058
3059           for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3060                ep++)
3061             if (ep->to_rtx == SET_DEST (x)
3062                 && SET_DEST (x) != hard_frame_pointer_rtx)
3063               {
3064                 /* If it is being incremented, adjust the offset.  Otherwise,
3065                    this elimination can't be done. */
3066                 rtx src = SET_SRC (x);
3067
3068                 if (GET_CODE (src) == PLUS
3069                     && XEXP (src, 0) == SET_DEST (x)
3070                     && GET_CODE (XEXP (src, 1)) == CONST_INT)
3071                   ep->offset -= INTVAL (XEXP (src, 1));
3072                 else
3073                   ep->can_eliminate = 0;
3074               }
3075
3076           /* Now check to see we are assigning to a register that can be
3077              eliminated.  If so, it must be as part of a PARALLEL, since we
3078              will not have been called if this is a single SET.  So indicate
3079              that we can no longer eliminate this reg. */
3080           for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3081                ep++)
3082             if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3083               ep->can_eliminate = 0;
3084         }
3085
3086       /* Now avoid the loop below in this common case. */
3087       {
3088         rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3089         rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3090
3091         /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3092            write a CLOBBER insn. */
3093         if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3094             && insn != 0 && GET_CODE (insn) != EXPR_LIST
3095             && GET_CODE (insn) != INSN_LIST)
3096           emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3097
3098         if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3099           return gen_rtx (SET, VOIDmode, new0, new1);
3100       }
3101
3102       return x;
3103
3104     case MEM:
3105       /* Our only special processing is to pass the mode of the MEM to our
3106          recursive call and copy the flags.  While we are here, handle this
3107          case more efficiently. */
3108       new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3109       if (new != XEXP (x, 0))
3110         {
3111           new = gen_rtx (MEM, GET_MODE (x), new);
3112           new->volatil = x->volatil;
3113           new->unchanging = x->unchanging;
3114           new->in_struct = x->in_struct;
3115           return new;
3116         }
3117       else
3118         return x;
3119     }
3120
3121   /* Process each of our operands recursively.  If any have changed, make a
3122      copy of the rtx. */
3123   fmt = GET_RTX_FORMAT (code);
3124   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3125     {
3126       if (*fmt == 'e')
3127         {
3128           new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3129           if (new != XEXP (x, i) && ! copied)
3130             {
3131               rtx new_x = rtx_alloc (code);
3132               bcopy ((char *) x, (char *) new_x,
3133                      (sizeof (*new_x) - sizeof (new_x->fld)
3134                       + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3135               x = new_x;
3136               copied = 1;
3137             }
3138           XEXP (x, i) = new;
3139         }
3140       else if (*fmt == 'E')
3141         {
     /* Nonzero once this particular operand vector has been copied.  */
3142           int copied_vec = 0;
3143           for (j = 0; j < XVECLEN (x, i); j++)
3144             {
3145               new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3146               if (new != XVECEXP (x, i, j) && ! copied_vec)
3147                 {
3148                   rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3149                                              &XVECEXP (x, i, 0));
3150                   if (! copied)
3151                     {
3152                       rtx new_x = rtx_alloc (code);
3153                       bcopy ((char *) x, (char *) new_x,
3154                              (sizeof (*new_x) - sizeof (new_x->fld)
3155                               + (sizeof (new_x->fld[0])
3156                                  * GET_RTX_LENGTH (code))));
3157                       x = new_x;
3158                       copied = 1;
3159                     }
3160                   XVEC (x, i) = new_v;
3161                   copied_vec = 1;
3162                 }
3163               XVECEXP (x, i, j) = new;
3164             }
3165         }
3166     }
3167
3168   return x;
3169 }
3170 \f
3171 /* Scan INSN and eliminate all eliminable registers in it.
3172
3173 If REPLACE is nonzero, do the replacement destructively. Also
3174    delete the insn as dead if it is setting an eliminable register.
3175
3176 If REPLACE is zero, do all our allocations in reload_obstack.
3177
3178 If no eliminations were done and this insn doesn't require any elimination
3179 processing (these are not identical conditions: it might be updating sp,
3180 but not referencing fp; this needs to be seen during reload_as_needed so
3181 that the offset between fp and sp can be taken into consideration), zero
3182 is returned. Otherwise, 1 is returned. */
3183
3184 static int
3185 eliminate_regs_in_insn (insn, replace)
3186      rtx insn;
3187      int replace;
3188 {
3189   rtx old_body = PATTERN (insn);
3190   rtx old_set = single_set (insn);
3191   rtx new_body;
3192   int val = 0;
3193   struct elim_table *ep;
3194
3195   if (! replace)
3196     push_obstacks (&reload_obstack, &reload_obstack);
3197
3198   if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3199       && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3200     {
3201       /* Check for setting an eliminable register. */
3202       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3203         if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3204           {
3205 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3206             /* If this is setting the frame pointer register to the
3207                hardware frame pointer register and this is an elimination
3208                that will be done (tested above), this insn is really
3209                adjusting the frame pointer downward to compensate for
3210                the adjustment done before a nonlocal goto. */
3211             if (ep->from == FRAME_POINTER_REGNUM
3212                 && ep->to == HARD_FRAME_POINTER_REGNUM)
3213               {
3214                 rtx src = SET_SRC (old_set);
3215                 int offset, ok = 0;
3216
3217                 if (src == ep->to_rtx)
3218                   offset = 0, ok = 1;
3219                 else if (GET_CODE (src) == PLUS
3220                          && GET_CODE (XEXP (src, 0)) == CONST_INT)
3221                   offset = INTVAL (XEXP (src, 0)), ok = 1;
3222
3223                 if (ok)
3224                   {
3225                     if (replace)
3226                       {
3227                         rtx src
3228                           = plus_constant (ep->to_rtx, offset - ep->offset);
3229
3230                         /* First see if this insn remains valid when we
3231                            make the change.  If not, keep the INSN_CODE
3232                            the same and let reload fix it up. */
3233                         validate_change (insn, &SET_SRC (old_set), src, 1);
3234                         validate_change (insn, &SET_DEST (old_set),
3235                                          ep->to_rtx, 1);
3236                         if (! apply_change_group ())
3237                           {
3238                             SET_SRC (old_set) = src;
3239                             SET_DEST (old_set) = ep->to_rtx;
3240                           }
3241                       }
3242
3243                     val = 1;
3244                     goto done;
3245                   }
3246               }
3247 #endif
3248
3249             /* In this case this insn isn't serving a useful purpose.  We
3250                will delete it in reload_as_needed once we know that this
3251                elimination is, in fact, being done.
3252
3253                If REPLACE isn't set, we can't delete this insn, but needn't
3254                process it since it won't be used unless something changes. */
3255             if (replace)
3256               delete_dead_insn (insn);
3257             val = 1;
3258             goto done;
3259           }
3260
3261       /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3262          in the insn is the negative of the offset in FROM.  Substitute
3263          (set (reg) (reg to)) for the insn and change its code.
3264
3265          We have to do this here, rather than in eliminate_regs, so that we can
3266          change the insn code. */
3267
3268       if (GET_CODE (SET_SRC (old_set)) == PLUS
3269           && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3270           && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3271         for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3272              ep++)
3273           if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3274               && ep->can_eliminate)
3275             {
3276               /* We must stop at the first elimination that will be used.
3277                  If this one would replace the PLUS with a REG, do it
3278                  now.  Otherwise, quit the loop and let eliminate_regs
3279                  do its normal replacement. */
3280               if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3281                 {
3282                   /* We assume here that we don't need a PARALLEL of
3283                      any CLOBBERs for this assignment.  There's not
3284                      much we can do if we do need it. */
3285                   PATTERN (insn) = gen_rtx (SET, VOIDmode,
3286                                             SET_DEST (old_set), ep->to_rtx);
3287                   INSN_CODE (insn) = -1;
3288                   val = 1;
3289                   goto done;
3290                 }
3291
3292               break;
3293             }
3294     }
3295
3296   old_asm_operands_vec = 0;
3297
3298   /* Replace the body of this insn with a substituted form.  If we changed
3299      something, return non-zero.
3300
3301      If we are replacing a body that was a (set X (plus Y Z)), try to
3302      re-recognize the insn.  We do this in case we had a simple addition
3303      but now can do this as a load-address.  This saves an insn in this
3304      common case. */
3305
3306   new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3307   if (new_body != old_body)
3308     {
3309       /* If we aren't replacing things permanently and we changed something,
3310          make another copy to ensure that all the RTL is new.  Otherwise
3311          things can go wrong if find_reload swaps commutative operands
3312          and one is inside RTL that has been copied while the other is not. */
3313
3314       /* Don't copy an asm_operands because (1) there's no need and (2)
3315          copy_rtx can't do it properly when there are multiple outputs. */
3316       if (! replace && asm_noperands (old_body) < 0)
3317         new_body = copy_rtx (new_body);
3318
3319       /* If we had a move insn but now we don't, rerecognize it.  This will
3320          cause spurious re-recognition if the old move had a PARALLEL since
3321          the new one still will, but we can't call single_set without
3322          having put NEW_BODY into the insn and the re-recognition won't
3323          hurt in this rare case. */
3324       if (old_set != 0
3325           && ((GET_CODE (SET_SRC (old_set)) == REG
3326                && (GET_CODE (new_body) != SET
3327                    || GET_CODE (SET_SRC (new_body)) != REG))
3328               /* If this was a load from or store to memory, compare
3329                  the MEM in recog_operand to the one in the insn.  If they
3330                  are not equal, then rerecognize the insn. */
3331               || (old_set != 0
3332                   && ((GET_CODE (SET_SRC (old_set)) == MEM
3333                        && SET_SRC (old_set) != recog_operand[1])
3334                       || (GET_CODE (SET_DEST (old_set)) == MEM
3335                           && SET_DEST (old_set) != recog_operand[0])))
3336               /* If this was an add insn before, rerecognize. */
3337               || GET_CODE (SET_SRC (old_set)) == PLUS))
3338         {
3339           if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3340             /* If recognition fails, store the new body anyway.
3341                It's normal to have recognition failures here
3342                due to bizarre memory addresses; reloading will fix them. */
3343             PATTERN (insn) = new_body;
3344         }
3345       else
3346         PATTERN (insn) = new_body;
3347
3348       val = 1;
3349     }
3350
3351   /* Loop through all elimination pairs.  See if any have changed and
3352      recalculate the number not at initial offset.
3353
3354      Compute the maximum offset (minimum offset if the stack does not
3355      grow downward) for each elimination pair.
3356
3357      We also detect cases where register elimination cannot be done,
3358      namely, if a register would be both changed and referenced outside a MEM
3359      in the resulting insn since such an insn is often undefined and, even if
3360      not, we cannot know what meaning will be given to it.  Note that it is
3361      valid to have a register used in an address in an insn that changes it
3362      (presumably with a pre- or post-increment or decrement).
3363
3364      If anything changes, return nonzero. */
3365
3366   num_not_at_initial_offset = 0;
3367   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3368     {
3369       if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3370         ep->can_eliminate = 0;
3371
3372       ep->ref_outside_mem = 0;
3373
3374       if (ep->previous_offset != ep->offset)
3375         val = 1;
3376
3377       ep->previous_offset = ep->offset;
3378       if (ep->can_eliminate && ep->offset != ep->initial_offset)
3379         num_not_at_initial_offset++;
3380
3381 #ifdef STACK_GROWS_DOWNWARD
3382       ep->max_offset = MAX (ep->max_offset, ep->offset);
3383 #else
3384       ep->max_offset = MIN (ep->max_offset, ep->offset);
3385 #endif
3386     }
3387
3388  done:
3389   /* If we changed something, perform elimination in REG_NOTES.  This is
3390      needed even when REPLACE is zero because a REG_DEAD note might refer
3391      to a register that we eliminate and could cause a different number
3392      of spill registers to be needed in the final reload pass than in
3393      the pre-passes. */
3394   if (val && REG_NOTES (insn) != 0)
3395     REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3396
3397   if (! replace)
3398     pop_obstacks ();
3399
3400   return val;
3401 }
3402
3403 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3404 replacement we currently believe is valid, mark it as not eliminable if X
3405 modifies DEST in any way other than by adding a constant integer to it.
3406
3407 If DEST is the frame pointer, we do nothing because we assume that
3408 all assignments to the hard frame pointer are nonlocal gotos and are being
3409 done at a time when they are valid and do not disturb anything else.
3410 Some machines want to eliminate a fake argument pointer with either the
3411 frame or stack pointer. Assignments to the hard frame pointer must not
3412 prevent this elimination.
3413
3414 Called via note_stores from reload before starting its passes to scan
3415 the insns of the function. */
3416
3417 static void
3418 mark_not_eliminable (dest, x)
3419 rtx dest;
3420 rtx x;
3421 {
3422 register int i;
3423
3424 /* A SUBREG of a hard register here is just changing its mode. We should
3425 not see a SUBREG of an eliminable hard register, but check just in
3426 case. */
3427 if (GET_CODE (dest) == SUBREG)
3428 dest = SUBREG_REG (dest);
3429
3430 if (dest == hard_frame_pointer_rtx)
3431 return;
3432
3433 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3434 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3435 && (GET_CODE (x) != SET
3436 || GET_CODE (SET_SRC (x)) != PLUS
3437 || XEXP (SET_SRC (x), 0) != dest
3438 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3439 {
3440 reg_eliminate[i].can_eliminate_previous
3441 = reg_eliminate[i].can_eliminate = 0;
3442 num_eliminable--;
3443 }
3444 }
3445 \f
3446 /* Kick all pseudos out of hard register REGNO.
3447 If GLOBAL is nonzero, try to find someplace else to put them.
3448 If DUMPFILE is nonzero, log actions taken on that file.
3449
3450 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3451    because we found we can't eliminate some register.  In that case, no pseudos
3452 are allowed to be in the register, even if they are only in a block that
3453 doesn't require spill registers, unlike the case when we are spilling this
3454 hard reg to produce another spill register.
3455
3456 Return nonzero if any pseudos needed to be kicked out. */
3457
3458 static int
3459 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3460      register int regno;
3461      int global;
3462      FILE *dumpfile;
3463      int cant_eliminate;
3464 {
3465   enum reg_class class = REGNO_REG_CLASS (regno);
3466   int something_changed = 0;
3467   register int i;
3468
3469   SET_HARD_REG_BIT (forbidden_regs, regno);
3470
     /* A register we could not eliminate is marked as always live.  */
3471   if (cant_eliminate)
3472     regs_ever_live[regno] = 1;
3473
3474   /* Spill every pseudo reg that was allocated to this reg
3475      or to something that overlaps this reg. */
3476
3477   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3478     if (reg_renumber[i] >= 0
3479         && reg_renumber[i] <= regno
     /* The pseudo occupies hard regs [reg_renumber[i], reg_renumber[i]+nregs);
        spill it if that range covers REGNO.  */
3480         && (reg_renumber[i]
3481             + HARD_REGNO_NREGS (reg_renumber[i],
3482                                 PSEUDO_REGNO_MODE (i))
3483             > regno))
3484       {
3485         /* If this register belongs solely to a basic block which needed no
3486            spilling of any class that this register is contained in,
3487            leave it be, unless we are spilling this register because
3488            it was a hard register that can't be eliminated. */
3489
3490         if (! cant_eliminate
3491             && basic_block_needs[0]
3492             && reg_basic_block[i] >= 0
3493             && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3494           {
3495             enum reg_class *p;
3496
3497             for (p = reg_class_superclasses[(int) class];
3498                  *p != LIM_REG_CLASSES; p++)
3499               if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3500                 break;
3501
3502             if (*p == LIM_REG_CLASSES)
3503               continue;
3504           }
3505
3506         /* Mark it as no longer having a hard register home. */
3507         reg_renumber[i] = -1;
3508         /* We will need to scan everything again. */
3509         something_changed = 1;
3510         if (global)
3511           retry_global_alloc (i, forbidden_regs);
3512
3513         alter_reg (i, regno);
3514         if (dumpfile)
3515           {
3516             if (reg_renumber[i] == -1)
3517               fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3518             else
3519               fprintf (dumpfile, " Register %d now in %d.\n\n",
3520                        i, reg_renumber[i]);
3521           }
3522       }
     /* Likewise evict any SCRATCH rtx allocated in REGNO: turn it back
        into a plain SCRATCH, unless its block needs no spills of this
        class (same test as for pseudos above).  */
3523   for (i = 0; i < scratch_list_length; i++)
3524     {
3525       if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3526         {
3527           if (! cant_eliminate && basic_block_needs[0]
3528               && ! basic_block_needs[(int) class][scratch_block[i]])
3529             {
3530               enum reg_class *p;
3531
3532               for (p = reg_class_superclasses[(int) class];
3533                    *p != LIM_REG_CLASSES; p++)
3534                 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3535                   break;
3536
3537               if (*p == LIM_REG_CLASSES)
3538                 continue;
3539             }
3540           PUT_CODE (scratch_list[i], SCRATCH);
3541           scratch_list[i] = 0;
3542           something_changed = 1;
3543           continue;
3544         }
3545     }
3546
3547   return something_changed;
3548 }
3549 \f
3550 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3551 Also mark any hard registers used to store user variables as
3552 forbidden from being used for spill registers. */
3553
3554 static void
3555 scan_paradoxical_subregs (x)
3556 register rtx x;
3557 {
3558 register int i;
3559 register char *fmt;
3560 register enum rtx_code code = GET_CODE (x);
3561
3562 switch (code)
3563 {
3564 case REG:
3565 #ifdef SMALL_REGISTER_CLASSES
3566 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3567 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3568 #endif
3569 return;
3570
3571 case CONST_INT:
3572 case CONST:
3573 case SYMBOL_REF:
3574 case LABEL_REF:
3575 case CONST_DOUBLE:
3576 case CC0:
3577 case PC:
3578 case USE:
3579 case CLOBBER:
3580 return;
3581
3582 case SUBREG:
3583 if (GET_CODE (SUBREG_REG (x)) == REG
3584 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3585 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3586 = GET_MODE_SIZE (GET_MODE (x));
3587 return;
3588 }
3589
3590 fmt = GET_RTX_FORMAT (code);
3591 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3592 {
3593 if (fmt[i] == 'e')
3594 scan_paradoxical_subregs (XEXP (x, i));
3595 else if (fmt[i] == 'E')
3596 {
3597 register int j;
3598 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3599 scan_paradoxical_subregs (XVECEXP (x, i, j));
3600 }
3601 }
3602 }
3603 \f
3604 static int
3605 hard_reg_use_compare (p1, p2)
3606 struct hard_reg_n_uses *p1, *p2;
3607 {
3608 int tem = p1->uses - p2->uses;
3609 if (tem != 0) return tem;
3610 /* If regs are equally good, sort by regno,
3611 so that the results of qsort leave nothing to chance. */
3612 return p1->regno - p2->regno;
3613 }
3614
/* Choose the order to consider regs for use as reload registers
   based on how much trouble would be caused by spilling one.
   Store them in order of decreasing preference in potential_reload_regs.

   GLOBAL is nonzero if global register allocation was run, in which
   case regs allocated by local-alloc are penalized (we cannot
   reallocate those).  Also records unusable regs in bad_spill_regs.  */

static void
order_regs_for_reload (global)
     int global;
{
  register int i;
  register int o = 0;		/* Next free slot in potential_reload_regs.  */
  int large = 0;		/* Total ref count; used as a "huge" bias.  */

  struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];

  CLEAR_HARD_REG_SET (bad_spill_regs);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    potential_reload_regs[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      hard_reg_n_uses[i].uses = 0;
      hard_reg_n_uses[i].regno = i;
    }

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (regno >= 0)
	{
	  /* Charge pseudo I's refs to every hard reg it occupies.  */
	  int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
	  while (regno < lim)
	    {
	      /* If allocated by local-alloc, show more uses since
		 we're not going to be able to reallocate it, but
		 we might if allocated by global alloc.  */
	      if (global && reg_allocno[i] < 0)
		hard_reg_n_uses[regno].uses += (reg_n_refs[i] + 1) / 2;

	      hard_reg_n_uses[regno++].uses += reg_n_refs[i];
	    }
	}
      large += reg_n_refs[i];
    }

  /* Now fixed registers (which cannot safely be used for reloading)
     get a very high use count so they will be considered least desirable.
     Registers used explicitly in the rtl code are almost as bad.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (fixed_regs[i])
	{
	  hard_reg_n_uses[i].uses += 2 * large + 2;
	  SET_HARD_REG_BIT (bad_spill_regs, i);
	}
      else if (regs_explicitly_used[i])
	{
	  hard_reg_n_uses[i].uses += large + 1;
#ifndef SMALL_REGISTER_CLASSES
	  /* ??? We are doing this here because of the potential that
	     bad code may be generated if a register explicitly used in
	     an insn was used as a spill register for that insn.  But
	     not using these as spill registers may lose on some machine.
	     We'll have to see how this works out.  */
	  SET_HARD_REG_BIT (bad_spill_regs, i);
#endif
	}
    }
  hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
  SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);

#ifdef ELIMINABLE_REGS
  /* If registers other than the frame pointer are eliminable, mark them as
     poor choices.  */
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
      SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
    }
#endif

  /* Prefer registers not so far used, for use in temporary loading.
     Among them, if REG_ALLOC_ORDER is defined, use that order.
     Otherwise, prefer registers not preserved by calls.  */

#ifdef REG_ALLOC_ORDER
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int regno = reg_alloc_order[i];

      if (hard_reg_n_uses[regno].uses == 0)
	potential_reload_regs[o++] = regno;
    }
#else
  /* Unused call-clobbered regs first, then unused call-saved regs.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
#endif

  qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
	 sizeof hard_reg_n_uses[0], hard_reg_use_compare);

  /* Now add the regs that are already used,
     preferring those used less often.  The fixed and otherwise forbidden
     registers will be at the end of this list.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (hard_reg_n_uses[i].uses != 0)
      potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
}
3736 \f
/* qsort comparison function used in reload_as_needed to sort the
   spilled regs into ascending regno order.  */

static int
compare_spill_regs (r1, r2)
     short *r1, *r2;
{
  int diff = (int) *r1 - (int) *r2;
  return diff;
}
3745
/* Reload pseudo-registers into hard regs around each insn as needed.
   Additional register load insns are output before the insn that needs it
   and perhaps store insns after insns that modify the reloaded pseudo reg.

   reg_last_reload_reg and reg_reloaded_contents keep track of
   which registers are already available in reload registers.
   We update these for the reloads that we perform,
   as the insns are scanned.

   FIRST is the first insn of the function; LIVE_KNOWN is nonzero if we
   have accurate basic-block life information (basic_block_needs).  */

static void
reload_as_needed (first, live_known)
     rtx first;
     int live_known;
{
  register rtx insn;
  register int i;
  int this_block = 0;		/* Index of current basic block.  */
  rtx x;
  rtx after_call = 0;		/* Last function value reg, if any.  */

  /* Clear out per-insn reload-reg tracking state.  */
  bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
  bzero ((char *) spill_reg_store, sizeof spill_reg_store);
  reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
  reg_has_output_reload = (char *) alloca (max_regno);
  for (i = 0; i < n_spills; i++)
    {
      reg_reloaded_contents[i] = -1;
      reg_reloaded_insn[i] = 0;
    }

  /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
				  reg_eliminate[i].initial_offset);
      reg_eliminate[i].previous_offset
	= reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
    }
#else
  INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
  reg_eliminate[0].previous_offset
    = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

  num_not_at_initial_offset = 0;

  /* Order the spilled regs, so that allocate_reload_regs can guarantee to
     pack registers with group needs.  */
  if (n_spills > 1)
    {
      qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
      for (i = 0; i < n_spills; i++)
	spill_reg_order[spill_regs[i]] = i;
    }

  for (insn = first; insn;)
    {
      register rtx next = NEXT_INSN (insn);

      /* Notice when we move to a new basic block.  */
      if (live_known && this_block + 1 < n_basic_blocks
	  && insn == basic_block_head[this_block+1])
	++this_block;

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.  */
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (insn)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  rtx avoid_return_reg = 0;
	  rtx oldpat = PATTERN (insn);

#ifdef SMALL_REGISTER_CLASSES
	  /* Set avoid_return_reg if this is an insn
	     that might use the value of a function call.  */
	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      if (GET_CODE (PATTERN (insn)) == SET)
		after_call = SET_DEST (PATTERN (insn));
	      else if (GET_CODE (PATTERN (insn)) == PARALLEL
		       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
	      else
		after_call = 0;
	    }
	  else if (after_call != 0
		   && !(GET_CODE (PATTERN (insn)) == SET
			&& SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
	    {
	      if (reg_referenced_p (after_call, PATTERN (insn)))
		avoid_return_reg = after_call;
	      after_call = 0;
	    }
#endif /* SMALL_REGISTER_CLASSES */

	  /* If this is a USE and CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.
	     (GET_MODE of an insn is used here as a flag set by an
	     earlier pass, not as a machine mode.)  */
	  if (num_eliminable && GET_MODE (insn) == QImode)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      if (GET_CODE (insn) == NOTE)
		{
		  insn = next;
		  continue;
		}
	    }

	  if (GET_MODE (insn) == VOIDmode)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      bzero (reg_has_output_reload, max_regno);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (n_reloads > 0)
	    {
	      rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
	      rtx p;
	      int class;

	      /* If this block has not had spilling done for a
		 particular class and we have any non-optionals that need a
		 spill reg in that class, abort.  */

	      for (class = 0; class < N_REG_CLASSES; class++)
		if (basic_block_needs[class] != 0
		    && basic_block_needs[class][this_block] == 0)
		  for (i = 0; i < n_reloads; i++)
		    if (class == (int) reload_reg_class[i]
			&& reload_reg_rtx[i] == 0
			&& ! reload_optional[i]
			&& (reload_in[i] != 0 || reload_out[i] != 0
			    || reload_secondary_p[i] != 0))
		      fatal_insn ("Non-optional registers need a spill register", insn);

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (insn, avoid_return_reg);

#ifdef SMALL_REGISTER_CLASSES
	      /* Merge any reloads that we didn't combine for fear of
		 increasing the number of spill registers needed but now
		 discover can be safely merged.  */
	      merge_assigned_reloads (insn);
#endif

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (insn);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insn that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads ();

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */

	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
		  if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
		      && (recog_memoized (p) < 0
			  || (insn_extract (p),
			      ! constrain_operands (INSN_CODE (p), 1))))
		    {
		      error_for_asm (insn,
				     "`asm' operand requires impossible reload");
		      PUT_CODE (p, NOTE);
		      NOTE_SOURCE_FILE (p) = 0;
		      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
		    }
	    }
	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.  */
	  note_stores (oldpat, forget_old_reloads_1);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
	    if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (PATTERN (x), forget_old_reloads_1);

#ifdef AUTO_INC_DEC
	  /* Likewise for regs altered by auto-increment in this insn.
	     But note that the reg-notes are not changed by reloading:
	     they still contain the pseudo-regs, not the spill regs.  */
	  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (reload_out[i] == XEXP (x, 0))
		    break;

		if (i == n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
	      }
#endif
	}
      /* A reload reg's contents are unknown after a label.  */
      if (GET_CODE (insn) == CODE_LABEL)
	for (i = 0; i < n_spills; i++)
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg.  */
      else if (GET_CODE (insn) == CALL_INSN)
	for (i = 0; i < n_spills; i++)
	  if (call_used_regs[spill_regs[i]])
	    {
	      reg_reloaded_contents[i] = -1;
	      reg_reloaded_insn[i] = 0;
	    }

      /* In case registers overlap, allow certain insns to invalidate
	 particular hard registers.  */

#ifdef INSN_CLOBBERS_REGNO_P
      for (i = 0 ; i < n_spills ; i++)
	if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }
#endif

      insn = next;

#ifdef USE_C_ALLOCA
      /* Free the alloca'd memory above when using the C alloca
	 emulation, lest it accumulate across the whole function.  */
      alloca (0);
#endif
    }
}
4025
4026 /* Discard all record of any value reloaded from X,
4027 or reloaded in X from someplace else;
4028 unless X is an output reload reg of the current insn.
4029
4030 X may be a hard reg (the reload reg)
4031 or it may be a pseudo reg that was reloaded from. */
4032
4033 static void
4034 forget_old_reloads_1 (x, ignored)
4035 rtx x;
4036 rtx ignored;
4037 {
4038 register int regno;
4039 int nr;
4040 int offset = 0;
4041
4042 /* note_stores does give us subregs of hard regs. */
4043 while (GET_CODE (x) == SUBREG)
4044 {
4045 offset += SUBREG_WORD (x);
4046 x = SUBREG_REG (x);
4047 }
4048
4049 if (GET_CODE (x) != REG)
4050 return;
4051
4052 regno = REGNO (x) + offset;
4053
4054 if (regno >= FIRST_PSEUDO_REGISTER)
4055 nr = 1;
4056 else
4057 {
4058 int i;
4059 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4060 /* Storing into a spilled-reg invalidates its contents.
4061 This can happen if a block-local pseudo is allocated to that reg
4062 and it wasn't spilled because this block's total need is 0.
4063 Then some insn might have an optional reload and use this reg. */
4064 for (i = 0; i < nr; i++)
4065 if (spill_reg_order[regno + i] >= 0
4066 /* But don't do this if the reg actually serves as an output
4067 reload reg in the current instruction. */
4068 && (n_reloads == 0
4069 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4070 {
4071 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4072 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4073 }
4074 }
4075
4076 /* Since value of X has changed,
4077 forget any value previously copied from it. */
4078
4079 while (nr-- > 0)
4080 /* But don't forget a copy if this is the output reload
4081 that establishes the copy's validity. */
4082 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4083 reg_last_reload_reg[regno + nr] = 0;
4084 }
4085 \f
/* For each reload, the mode of the reload register.  */
static enum machine_mode reload_mode[MAX_RELOADS];

/* For each reload, the largest number of registers it will require
   (considering the widest mode involved in the reload).  */
static int reload_nregs[MAX_RELOADS];
4091
4092 /* Comparison function for qsort to decide which of two reloads
4093 should be handled first. *P1 and *P2 are the reload numbers. */
4094
4095 static int
4096 reload_reg_class_lower (p1, p2)
4097 short *p1, *p2;
4098 {
4099 register int r1 = *p1, r2 = *p2;
4100 register int t;
4101
4102 /* Consider required reloads before optional ones. */
4103 t = reload_optional[r1] - reload_optional[r2];
4104 if (t != 0)
4105 return t;
4106
4107 /* Count all solitary classes before non-solitary ones. */
4108 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4109 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4110 if (t != 0)
4111 return t;
4112
4113 /* Aside from solitaires, consider all multi-reg groups first. */
4114 t = reload_nregs[r2] - reload_nregs[r1];
4115 if (t != 0)
4116 return t;
4117
4118 /* Consider reloads in order of increasing reg-class number. */
4119 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4120 if (t != 0)
4121 return t;
4122
4123 /* If reloads are equally urgent, sort by reload number,
4124 so that the results of qsort leave nothing to chance. */
4125 return r1 - r2;
4126 }
4127 \f
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  */

/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is in use as an inherited reload.  We just mark the first
   register in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;
4156
4157 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4158 TYPE. MODE is used to indicate how many consecutive regs are
4159 actually used. */
4160
4161 static void
4162 mark_reload_reg_in_use (regno, opnum, type, mode)
4163 int regno;
4164 int opnum;
4165 enum reload_type type;
4166 enum machine_mode mode;
4167 {
4168 int nregs = HARD_REGNO_NREGS (regno, mode);
4169 int i;
4170
4171 for (i = regno; i < nregs + regno; i++)
4172 {
4173 switch (type)
4174 {
4175 case RELOAD_OTHER:
4176 SET_HARD_REG_BIT (reload_reg_used, i);
4177 break;
4178
4179 case RELOAD_FOR_INPUT_ADDRESS:
4180 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4181 break;
4182
4183 case RELOAD_FOR_OUTPUT_ADDRESS:
4184 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4185 break;
4186
4187 case RELOAD_FOR_OPERAND_ADDRESS:
4188 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4189 break;
4190
4191 case RELOAD_FOR_OPADDR_ADDR:
4192 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4193 break;
4194
4195 case RELOAD_FOR_OTHER_ADDRESS:
4196 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4197 break;
4198
4199 case RELOAD_FOR_INPUT:
4200 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4201 break;
4202
4203 case RELOAD_FOR_OUTPUT:
4204 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4205 break;
4206
4207 case RELOAD_FOR_INSN:
4208 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4209 break;
4210 }
4211
4212 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4213 }
4214 }
4215
4216 /* Similarly, but show REGNO is no longer in use for a reload. */
4217
4218 static void
4219 clear_reload_reg_in_use (regno, opnum, type, mode)
4220 int regno;
4221 int opnum;
4222 enum reload_type type;
4223 enum machine_mode mode;
4224 {
4225 int nregs = HARD_REGNO_NREGS (regno, mode);
4226 int i;
4227
4228 for (i = regno; i < nregs + regno; i++)
4229 {
4230 switch (type)
4231 {
4232 case RELOAD_OTHER:
4233 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4234 break;
4235
4236 case RELOAD_FOR_INPUT_ADDRESS:
4237 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4238 break;
4239
4240 case RELOAD_FOR_OUTPUT_ADDRESS:
4241 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4242 break;
4243
4244 case RELOAD_FOR_OPERAND_ADDRESS:
4245 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4246 break;
4247
4248 case RELOAD_FOR_OPADDR_ADDR:
4249 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4250 break;
4251
4252 case RELOAD_FOR_OTHER_ADDRESS:
4253 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4254 break;
4255
4256 case RELOAD_FOR_INPUT:
4257 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4258 break;
4259
4260 case RELOAD_FOR_OUTPUT:
4261 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4262 break;
4263
4264 case RELOAD_FOR_INSN:
4265 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4266 break;
4267 }
4268 }
4269 }
4270
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.  A register is free if it does not
   conflict with any reload of a different type, or any reload of the
   same type for a conflicting operand.  */

static int
reload_reg_free_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything except
     RELOAD_FOR_OTHER_ADDRESS.  Recall that RELOAD_FOR_OTHER_ADDRESS is known
     to be used only for inputs.  */

  if (type != RELOAD_FOR_OTHER_ADDRESS
      && TEST_HARD_REG_BIT (reload_reg_used, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything except RELOAD_FOR_OTHER_ADDRESS means
	 we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Conflicts with any input, any RELOAD_FOR_INSN reload,
	 and any other operand-address reload.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Conflicts with any input and any other opaddr-address reload.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      /* Conflicts with any input, output, operand-address reload,
	 or other RELOAD_FOR_INSN reload.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }
  abort ();
}
4398
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is not in use for a reload in any prior part of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_free_before_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* These always come first.  */
      return 1;

    case RELOAD_OTHER:
      /* Only RELOAD_FOR_OTHER_ADDRESS reloads precede a RELOAD_OTHER.  */
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

      /* If this use is for part of the insn,
	 check the reg is not in use for any prior part.  It is tempting
	 to try to do this by falling through from objects that occur
	 later in the insn to ones that occur earlier, but that will not
	 correctly take into account the fact that here we MUST ignore
	 things that would prevent the register from being allocated in
	 the first place, since we know that it was allocated.  */

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Earlier reloads are for earlier outputs or their addresses,
	 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
	 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
	 RELOAD_OTHER)..  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OUTPUT:
      /* This can't be used in the output address for this operand and
	 anything that can't be used for it, except that we've already
	 tested for RELOAD_FOR_INSN objects.  */

      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_OPERAND_ADDRESS:
    case RELOAD_FOR_OPADDR_ADDR:
    case RELOAD_FOR_INSN:
      /* These can't conflict with inputs, or each other, so all we have to
	 test is input addresses and the addresses of OTHER items.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT:
      /* The only things earlier are the address for this and
	 earlier inputs, other inputs (which we know we don't conflict
	 with), and addresses of RELOAD_OTHER objects.  */

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similarly, all we have to check is for use in earlier inputs'
	 addresses.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }
  abort ();
}
4509
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is still available in REGNO at the end of the insn.

   REGNO is a hard register number; OPNUM and TYPE identify which
   reload of the current insn the register is serving, with the same
   meaning as in reload_reg_free_p above.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register. 
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      /* This input and all later inputs.  */
      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* Addresses of strictly later inputs only.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      /* All outputs and their addresses.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for 
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  (the output checks below apply as well).  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPADDR_ADDR:
      /* Like RELOAD_FOR_OPERAND_ADDRESS, but additionally reject the
	 register if it is used for an operand-address or insn reload.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      /* Setting OPNUM to -1 makes the loop below scan every operand's
	 output address, then fall into the OUTPUT/OUTPUT_ADDRESS code.  */
      opnum = -1;

      /* ... fall through ... */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
	  return 0;

      return 1;
    }

  /* Unknown reload TYPE.  */
  abort ();
}
4631 \f
4632 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4633 Return 0 otherwise.
4634
4635 This function uses the same algorithm as reload_reg_free_p above. */
4636
4637 static int
4638 reloads_conflict (r1, r2)
4639 int r1, r2;
4640 {
4641 enum reload_type r1_type = reload_when_needed[r1];
4642 enum reload_type r2_type = reload_when_needed[r2];
4643 int r1_opnum = reload_opnum[r1];
4644 int r2_opnum = reload_opnum[r2];
4645
4646 /* RELOAD_OTHER conflicts with everything except RELOAD_FOR_OTHER_ADDRESS. */
4647
4648 if (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS)
4649 return 1;
4650
4651 /* Otherwise, check conflicts differently for each type. */
4652
4653 switch (r1_type)
4654 {
4655 case RELOAD_FOR_INPUT:
4656 return (r2_type == RELOAD_FOR_INSN
4657 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4658 || r2_type == RELOAD_FOR_OPADDR_ADDR
4659 || r2_type == RELOAD_FOR_INPUT
4660 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4661
4662 case RELOAD_FOR_INPUT_ADDRESS:
4663 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4664 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4665
4666 case RELOAD_FOR_OUTPUT_ADDRESS:
4667 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4668 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4669
4670 case RELOAD_FOR_OPERAND_ADDRESS:
4671 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4672 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4673
4674 case RELOAD_FOR_OPADDR_ADDR:
4675 return (r2_type == RELOAD_FOR_INPUT
4676 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4677
4678 case RELOAD_FOR_OUTPUT:
4679 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4680 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4681 && r2_opnum >= r1_opnum));
4682
4683 case RELOAD_FOR_INSN:
4684 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4685 || r2_type == RELOAD_FOR_INSN
4686 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4687
4688 case RELOAD_FOR_OTHER_ADDRESS:
4689 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4690
4691 case RELOAD_OTHER:
4692 return r2_type != RELOAD_FOR_OTHER_ADDRESS;
4693
4694 default:
4695 abort ();
4696 }
4697 }
4698 \f
/* Vector of reload-numbers showing the order in which the reloads should
   be processed.  Filled in and sorted (by increasing register class)
   in choose_reload_regs.  */
short reload_order[MAX_RELOADS];

/* Indexed by reload number, 1 if incoming value
   inherited from previous insns, i.e. the value was found already
   sitting in a register so no load need be emitted for it.  */
char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
rtx reload_inheritance_insn[MAX_RELOADS];

/* If non-zero, this is a place to get the value of the reload,
   rather than using reload_in.  */
rtx reload_override_in[MAX_RELOADS];

/* For each reload, the index in spill_regs of the spill register used,
   or -1 if we did not need one of the spill registers for this reload.  */
int reload_spill_index[MAX_RELOADS];
4718
/* Find a spill register to use as a reload register for reload R.

   R is the reload number (an index into the reload_* arrays).
   INSN is the insn being processed; it is used only for error
   reporting when no suitable register can be found.
   LAST_RELOAD is non-zero if this is the last reload for the insn being
   processed; in that case we can be less strict about demanding a
   full register group for a multi-register reload.
   NOERROR is nonzero when the caller can recover from failure.

   Set reload_reg_rtx[R] to the register allocated.

   If NOERROR is nonzero, we return 1 if successful,
   or 0 if we couldn't find a spill reg and we didn't change anything.
   If NOERROR is zero and no register is found, the compile is aborted
   with fatal_insn for a compiler-generated insn; for an asm insn,
   reload R is disabled after an error message and 1 is returned.  */

static int
allocate_reload_reg (r, insn, last_reload, noerror)
     int r;
     rtx insn;
     int last_reload;
     int noerror;
{
  int i;
  int pass;
  int count;
  rtx new;
  int regno;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = reload_nregs[r] > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use two passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers.
     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 2; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  Don't do this, however, when we have
	 group needs and failure would be fatal; if we only have a relatively
	 small number of spill registers, and more than one of them has
	 group needs, then by starting in the middle, we may end up 
	 allocating the first one in such a way that we are not left with
	 sufficient groups to handle the rest.  */

      if (noerror || ! force_group)
	i = last_spill_reg;
      else
	i = -1;

      for (count = 0; count < n_spills; count++)
	{
	  int class = (int) reload_reg_class[r];

	  /* Round-robin step: wrap around the spill_regs array.  */
	  i = (i + 1) % n_spills;

	  /* Candidate spill_regs[i] must be free for this part of the
	     insn, belong to the required class, and accept the mode.  */
	  if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
				 reload_when_needed[r])
	      && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
	      && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 spill_regs[i])
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      spill_regs[i]))))
	    {
	      int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.
		 Also, don't use for a group registers that are
		 needed for nongroups.  */
	      if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
		while (nr > 1)
		  {
		    /* Check group members from the top down; NR reaches 1
		       only if every member passed.  */
		    regno = spill_regs[i] + nr - 1;
		    if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
			  && spill_reg_order[regno] >= 0
			  && reload_reg_free_p (regno, reload_opnum[r],
						reload_when_needed[r])
			  && ! TEST_HARD_REG_BIT (counted_for_nongroups,
						  regno)))
		      break;
		    nr--;
		  }
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on pass 1, omit pass 2.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count == n_spills)
    {
      if (noerror)
	return 0;
      goto failure;
    }

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  new = spill_reg_rtx[i];

  if (new == 0 || GET_MODE (new) != reload_mode[r])
    spill_reg_rtx[i] = new
      = gen_rtx (REG, reload_mode[r], spill_regs[i]);

  regno = true_regnum (new);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
    {
      enum machine_mode test_mode = VOIDmode;
      if (reload_in[r])
	test_mode = GET_MODE (reload_in[r]);
      /* If reload_in[r] has VOIDmode, it means we will load it
	 in whatever mode the reload reg has: to wit, reload_mode[r].
	 We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
	 to reload from or into have modes which are valid for this
	 reload register.  Otherwise the reload insns would be invalid.  */
      if (! (reload_in[r] != 0 && test_mode != VOIDmode
	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
	if (! (reload_out[r] != 0
	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
	  {
	    /* The reg is OK.  Remember it so the next insn's round-robin
	       search starts after it.  */
	    last_spill_reg = i;

	    /* Mark as in use for this insn the reload regs we use
	       for this.  */
	    mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
				    reload_when_needed[r], reload_mode[r]);

	    reload_reg_rtx[r] = new;
	    reload_spill_index[r] = i;
	    return 1;
	  }
    }

  /* The reg is not OK.  */
  if (noerror)
    return 0;

 failure:
  if (asm_noperands (PATTERN (insn)) < 0)
    /* It's the compiler's fault.  */
    fatal_insn ("Could not find a spill register", insn);

  /* It's the user's fault; the operand's mode and constraint
     don't match.  Disable this reload so we don't crash in final.  */
  error_for_asm (insn,
		 "`asm' operand constraint incompatible with operand size");
  reload_in[r] = 0;
  reload_out[r] = 0;
  reload_reg_rtx[r] = 0;
  reload_optional[r] = 1;
  reload_secondary_p[r] = 1;

  return 1;
}
4917 \f
4918 /* Assign hard reg targets for the pseudo-registers we must reload
4919 into hard regs for this insn.
4920 Also output the instructions to copy them in and out of the hard regs.
4921
4922 For machines with register classes, we are responsible for
4923 finding a reload reg in the proper class. */
4924
4925 static void
4926 choose_reload_regs (insn, avoid_return_reg)
4927 rtx insn;
4928 rtx avoid_return_reg;
4929 {
4930 register int i, j;
4931 int max_group_size = 1;
4932 enum reg_class group_class = NO_REGS;
4933 int inheritance;
4934
4935 rtx save_reload_reg_rtx[MAX_RELOADS];
4936 char save_reload_inherited[MAX_RELOADS];
4937 rtx save_reload_inheritance_insn[MAX_RELOADS];
4938 rtx save_reload_override_in[MAX_RELOADS];
4939 int save_reload_spill_index[MAX_RELOADS];
4940 HARD_REG_SET save_reload_reg_used;
4941 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4942 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4943 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4944 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4945 HARD_REG_SET save_reload_reg_used_in_op_addr;
4946 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
4947 HARD_REG_SET save_reload_reg_used_in_insn;
4948 HARD_REG_SET save_reload_reg_used_in_other_addr;
4949 HARD_REG_SET save_reload_reg_used_at_all;
4950
4951 bzero (reload_inherited, MAX_RELOADS);
4952 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4953 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
4954
4955 CLEAR_HARD_REG_SET (reload_reg_used);
4956 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4957 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4958 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
4959 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4960 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
4961
4962 for (i = 0; i < reload_n_operands; i++)
4963 {
4964 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4965 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4966 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4967 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4968 }
4969
4970 #ifdef SMALL_REGISTER_CLASSES
4971 /* Don't bother with avoiding the return reg
4972 if we have no mandatory reload that could use it. */
4973 if (avoid_return_reg)
4974 {
4975 int do_avoid = 0;
4976 int regno = REGNO (avoid_return_reg);
4977 int nregs
4978 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4979 int r;
4980
4981 for (r = regno; r < regno + nregs; r++)
4982 if (spill_reg_order[r] >= 0)
4983 for (j = 0; j < n_reloads; j++)
4984 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4985 && (reload_in[j] != 0 || reload_out[j] != 0
4986 || reload_secondary_p[j])
4987 &&
4988 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4989 do_avoid = 1;
4990 if (!do_avoid)
4991 avoid_return_reg = 0;
4992 }
4993 #endif /* SMALL_REGISTER_CLASSES */
4994
4995 #if 0 /* Not needed, now that we can always retry without inheritance. */
4996 /* See if we have more mandatory reloads than spill regs.
4997 If so, then we cannot risk optimizations that could prevent
4998 reloads from sharing one spill register.
4999
5000 Since we will try finding a better register than reload_reg_rtx
5001 unless it is equal to reload_in or reload_out, count such reloads. */
5002
5003 {
5004 int tem = 0;
5005 #ifdef SMALL_REGISTER_CLASSES
5006 int tem = (avoid_return_reg != 0);
5007 #endif
5008 for (j = 0; j < n_reloads; j++)
5009 if (! reload_optional[j]
5010 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5011 && (reload_reg_rtx[j] == 0
5012 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5013 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5014 tem++;
5015 if (tem > n_spills)
5016 must_reuse = 1;
5017 }
5018 #endif
5019
5020 #ifdef SMALL_REGISTER_CLASSES
5021 /* Don't use the subroutine call return reg for a reload
5022 if we are supposed to avoid it. */
5023 if (avoid_return_reg)
5024 {
5025 int regno = REGNO (avoid_return_reg);
5026 int nregs
5027 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5028 int r;
5029
5030 for (r = regno; r < regno + nregs; r++)
5031 if (spill_reg_order[r] >= 0)
5032 SET_HARD_REG_BIT (reload_reg_used, r);
5033 }
5034 #endif /* SMALL_REGISTER_CLASSES */
5035
5036 /* In order to be certain of getting the registers we need,
5037 we must sort the reloads into order of increasing register class.
5038 Then our grabbing of reload registers will parallel the process
5039 that provided the reload registers.
5040
5041 Also note whether any of the reloads wants a consecutive group of regs.
5042 If so, record the maximum size of the group desired and what
5043 register class contains all the groups needed by this insn. */
5044
5045 for (j = 0; j < n_reloads; j++)
5046 {
5047 reload_order[j] = j;
5048 reload_spill_index[j] = -1;
5049
5050 reload_mode[j]
5051 = (reload_inmode[j] == VOIDmode
5052 || (GET_MODE_SIZE (reload_outmode[j])
5053 > GET_MODE_SIZE (reload_inmode[j])))
5054 ? reload_outmode[j] : reload_inmode[j];
5055
5056 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5057
5058 if (reload_nregs[j] > 1)
5059 {
5060 max_group_size = MAX (reload_nregs[j], max_group_size);
5061 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5062 }
5063
5064 /* If we have already decided to use a certain register,
5065 don't use it in another way. */
5066 if (reload_reg_rtx[j])
5067 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5068 reload_when_needed[j], reload_mode[j]);
5069 }
5070
5071 if (n_reloads > 1)
5072 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5073
5074 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5075 sizeof reload_reg_rtx);
5076 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5077 bcopy ((char *) reload_inheritance_insn,
5078 (char *) save_reload_inheritance_insn,
5079 sizeof reload_inheritance_insn);
5080 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5081 sizeof reload_override_in);
5082 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5083 sizeof reload_spill_index);
5084 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5085 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5086 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5087 reload_reg_used_in_op_addr);
5088
5089 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5090 reload_reg_used_in_op_addr_reload);
5091
5092 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5093 reload_reg_used_in_insn);
5094 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5095 reload_reg_used_in_other_addr);
5096
5097 for (i = 0; i < reload_n_operands; i++)
5098 {
5099 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5100 reload_reg_used_in_output[i]);
5101 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5102 reload_reg_used_in_input[i]);
5103 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5104 reload_reg_used_in_input_addr[i]);
5105 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5106 reload_reg_used_in_output_addr[i]);
5107 }
5108
5109 /* If -O, try first with inheritance, then turning it off.
5110 If not -O, don't do inheritance.
5111 Using inheritance when not optimizing leads to paradoxes
5112 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5113 because one side of the comparison might be inherited. */
5114
5115 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5116 {
5117 /* Process the reloads in order of preference just found.
5118 Beyond this point, subregs can be found in reload_reg_rtx.
5119
5120 This used to look for an existing reloaded home for all
5121 of the reloads, and only then perform any new reloads.
5122 But that could lose if the reloads were done out of reg-class order
5123 because a later reload with a looser constraint might have an old
5124 home in a register needed by an earlier reload with a tighter constraint.
5125
5126 To solve this, we make two passes over the reloads, in the order
5127 described above. In the first pass we try to inherit a reload
5128 from a previous insn. If there is a later reload that needs a
5129 class that is a proper subset of the class being processed, we must
5130 also allocate a spill register during the first pass.
5131
5132 Then make a second pass over the reloads to allocate any reloads
5133 that haven't been given registers yet. */
5134
5135 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5136
5137 for (j = 0; j < n_reloads; j++)
5138 {
5139 register int r = reload_order[j];
5140
5141 /* Ignore reloads that got marked inoperative. */
5142 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5143 continue;
5144
5145 /* If find_reloads chose a to use reload_in or reload_out as a reload
5146 register, we don't need to chose one. Otherwise, try even if it found
5147 one since we might save an insn if we find the value lying around. */
5148 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5149 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5150 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5151 continue;
5152
5153 #if 0 /* No longer needed for correct operation.
5154 It might give better code, or might not; worth an experiment? */
5155 /* If this is an optional reload, we can't inherit from earlier insns
5156 until we are sure that any non-optional reloads have been allocated.
5157 The following code takes advantage of the fact that optional reloads
5158 are at the end of reload_order. */
5159 if (reload_optional[r] != 0)
5160 for (i = 0; i < j; i++)
5161 if ((reload_out[reload_order[i]] != 0
5162 || reload_in[reload_order[i]] != 0
5163 || reload_secondary_p[reload_order[i]])
5164 && ! reload_optional[reload_order[i]]
5165 && reload_reg_rtx[reload_order[i]] == 0)
5166 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5167 #endif
5168
5169 /* First see if this pseudo is already available as reloaded
5170 for a previous insn. We cannot try to inherit for reloads
5171 that are smaller than the maximum number of registers needed
5172 for groups unless the register we would allocate cannot be used
5173 for the groups.
5174
5175 We could check here to see if this is a secondary reload for
5176 an object that is already in a register of the desired class.
5177 This would avoid the need for the secondary reload register.
5178 But this is complex because we can't easily determine what
5179 objects might want to be loaded via this reload. So let a register
5180 be allocated here. In `emit_reload_insns' we suppress one of the
5181 loads in the case described above. */
5182
5183 if (inheritance)
5184 {
5185 register int regno = -1;
5186 enum machine_mode mode;
5187
5188 if (reload_in[r] == 0)
5189 ;
5190 else if (GET_CODE (reload_in[r]) == REG)
5191 {
5192 regno = REGNO (reload_in[r]);
5193 mode = GET_MODE (reload_in[r]);
5194 }
5195 else if (GET_CODE (reload_in_reg[r]) == REG)
5196 {
5197 regno = REGNO (reload_in_reg[r]);
5198 mode = GET_MODE (reload_in_reg[r]);
5199 }
5200 #if 0
5201 /* This won't work, since REGNO can be a pseudo reg number.
5202 Also, it takes much more hair to keep track of all the things
5203 that can invalidate an inherited reload of part of a pseudoreg. */
5204 else if (GET_CODE (reload_in[r]) == SUBREG
5205 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5206 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5207 #endif
5208
5209 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5210 {
5211 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5212
5213 if (reg_reloaded_contents[i] == regno
5214 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5215 >= GET_MODE_SIZE (mode))
5216 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5217 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5218 spill_regs[i])
5219 && (reload_nregs[r] == max_group_size
5220 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5221 spill_regs[i]))
5222 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5223 reload_when_needed[r])
5224 && reload_reg_free_before_p (spill_regs[i],
5225 reload_opnum[r],
5226 reload_when_needed[r]))
5227 {
5228 /* If a group is needed, verify that all the subsequent
5229 registers still have their values intact. */
5230 int nr
5231 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5232 int k;
5233
5234 for (k = 1; k < nr; k++)
5235 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5236 != regno)
5237 break;
5238
5239 if (k == nr)
5240 {
5241 int i1;
5242
5243 /* We found a register that contains the
5244 value we need. If this register is the
5245 same as an `earlyclobber' operand of the
5246 current insn, just mark it as a place to
5247 reload from since we can't use it as the
5248 reload register itself. */
5249
5250 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5251 if (reg_overlap_mentioned_for_reload_p
5252 (reg_last_reload_reg[regno],
5253 reload_earlyclobbers[i1]))
5254 break;
5255
5256 if (i1 != n_earlyclobbers
5257 /* Don't really use the inherited spill reg
5258 if we need it wider than we've got it. */
5259 || (GET_MODE_SIZE (reload_mode[r])
5260 > GET_MODE_SIZE (mode)))
5261 reload_override_in[r] = reg_last_reload_reg[regno];
5262 else
5263 {
5264 int k;
5265 /* We can use this as a reload reg. */
5266 /* Mark the register as in use for this part of
5267 the insn. */
5268 mark_reload_reg_in_use (spill_regs[i],
5269 reload_opnum[r],
5270 reload_when_needed[r],
5271 reload_mode[r]);
5272 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5273 reload_inherited[r] = 1;
5274 reload_inheritance_insn[r]
5275 = reg_reloaded_insn[i];
5276 reload_spill_index[r] = i;
5277 for (k = 0; k < nr; k++)
5278 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5279 spill_regs[i + k]);
5280 }
5281 }
5282 }
5283 }
5284 }
5285
5286 /* Here's another way to see if the value is already lying around. */
5287 if (inheritance
5288 && reload_in[r] != 0
5289 && ! reload_inherited[r]
5290 && reload_out[r] == 0
5291 && (CONSTANT_P (reload_in[r])
5292 || GET_CODE (reload_in[r]) == PLUS
5293 || GET_CODE (reload_in[r]) == REG
5294 || GET_CODE (reload_in[r]) == MEM)
5295 && (reload_nregs[r] == max_group_size
5296 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5297 {
5298 register rtx equiv
5299 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5300 -1, NULL_PTR, 0, reload_mode[r]);
5301 int regno;
5302
5303 if (equiv != 0)
5304 {
5305 if (GET_CODE (equiv) == REG)
5306 regno = REGNO (equiv);
5307 else if (GET_CODE (equiv) == SUBREG)
5308 {
5309 /* This must be a SUBREG of a hard register.
5310 Make a new REG since this might be used in an
5311 address and not all machines support SUBREGs
5312 there. */
5313 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5314 equiv = gen_rtx (REG, reload_mode[r], regno);
5315 }
5316 else
5317 abort ();
5318 }
5319
5320 /* If we found a spill reg, reject it unless it is free
5321 and of the desired class. */
5322 if (equiv != 0
5323 && ((spill_reg_order[regno] >= 0
5324 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5325 reload_when_needed[r]))
5326 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5327 regno)))
5328 equiv = 0;
5329
5330 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5331 equiv = 0;
5332
5333 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5334 equiv = 0;
5335
5336 /* We found a register that contains the value we need.
5337 If this register is the same as an `earlyclobber' operand
5338 of the current insn, just mark it as a place to reload from
5339 since we can't use it as the reload register itself. */
5340
5341 if (equiv != 0)
5342 for (i = 0; i < n_earlyclobbers; i++)
5343 if (reg_overlap_mentioned_for_reload_p (equiv,
5344 reload_earlyclobbers[i]))
5345 {
5346 reload_override_in[r] = equiv;
5347 equiv = 0;
5348 break;
5349 }
5350
5351 /* JRV: If the equiv register we have found is explicitly
5352 clobbered in the current insn, mark but don't use, as above. */
5353
5354 if (equiv != 0 && regno_clobbered_p (regno, insn))
5355 {
5356 reload_override_in[r] = equiv;
5357 equiv = 0;
5358 }
5359
5360 /* If we found an equivalent reg, say no code need be generated
5361 to load it, and use it as our reload reg. */
5362 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5363 {
5364 reload_reg_rtx[r] = equiv;
5365 reload_inherited[r] = 1;
5366 /* If it is a spill reg,
5367 mark the spill reg as in use for this insn. */
5368 i = spill_reg_order[regno];
5369 if (i >= 0)
5370 {
5371 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5372 int k;
5373 mark_reload_reg_in_use (regno, reload_opnum[r],
5374 reload_when_needed[r],
5375 reload_mode[r]);
5376 for (k = 0; k < nr; k++)
5377 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
5378 }
5379 }
5380 }
5381
5382 /* If we found a register to use already, or if this is an optional
5383 reload, we are done. */
5384 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5385 continue;
5386
5387 #if 0 /* No longer needed for correct operation. Might or might not
5388 give better code on the average. Want to experiment? */
5389
5390 /* See if there is a later reload that has a class different from our
5391 class that intersects our class or that requires less register
5392 than our reload. If so, we must allocate a register to this
5393 reload now, since that reload might inherit a previous reload
5394 and take the only available register in our class. Don't do this
5395 for optional reloads since they will force all previous reloads
5396 to be allocated. Also don't do this for reloads that have been
5397 turned off. */
5398
5399 for (i = j + 1; i < n_reloads; i++)
5400 {
5401 int s = reload_order[i];
5402
5403 if ((reload_in[s] == 0 && reload_out[s] == 0
5404 && ! reload_secondary_p[s])
5405 || reload_optional[s])
5406 continue;
5407
5408 if ((reload_reg_class[s] != reload_reg_class[r]
5409 && reg_classes_intersect_p (reload_reg_class[r],
5410 reload_reg_class[s]))
5411 || reload_nregs[s] < reload_nregs[r])
5412 break;
5413 }
5414
5415 if (i == n_reloads)
5416 continue;
5417
5418 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5419 #endif
5420 }
5421
5422 /* Now allocate reload registers for anything non-optional that
5423 didn't get one yet. */
5424 for (j = 0; j < n_reloads; j++)
5425 {
5426 register int r = reload_order[j];
5427
5428 /* Ignore reloads that got marked inoperative. */
5429 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5430 continue;
5431
5432 /* Skip reloads that already have a register allocated or are
5433 optional. */
5434 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5435 continue;
5436
5437 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5438 break;
5439 }
5440
5441 /* If that loop got all the way, we have won. */
5442 if (j == n_reloads)
5443 break;
5444
5445 fail:
5446 /* Loop around and try without any inheritance. */
5447 /* First undo everything done by the failed attempt
5448 to allocate with inheritance. */
5449 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5450 sizeof reload_reg_rtx);
5451 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5452 sizeof reload_inherited);
5453 bcopy ((char *) save_reload_inheritance_insn,
5454 (char *) reload_inheritance_insn,
5455 sizeof reload_inheritance_insn);
5456 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5457 sizeof reload_override_in);
5458 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5459 sizeof reload_spill_index);
5460 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5461 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5462 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5463 save_reload_reg_used_in_op_addr);
5464 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5465 save_reload_reg_used_in_op_addr_reload);
5466 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5467 save_reload_reg_used_in_insn);
5468 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5469 save_reload_reg_used_in_other_addr);
5470
5471 for (i = 0; i < reload_n_operands; i++)
5472 {
5473 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5474 save_reload_reg_used_in_input[i]);
5475 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5476 save_reload_reg_used_in_output[i]);
5477 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5478 save_reload_reg_used_in_input_addr[i]);
5479 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5480 save_reload_reg_used_in_output_addr[i]);
5481 }
5482 }
5483
5484 /* If we thought we could inherit a reload, because it seemed that
5485 nothing else wanted the same reload register earlier in the insn,
5486 verify that assumption, now that all reloads have been assigned. */
5487
5488 for (j = 0; j < n_reloads; j++)
5489 {
5490 register int r = reload_order[j];
5491
5492 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5493 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5494 reload_opnum[r],
5495 reload_when_needed[r]))
5496 reload_inherited[r] = 0;
5497
5498 /* If we found a better place to reload from,
5499 validate it in the same fashion, if it is a reload reg. */
5500 if (reload_override_in[r]
5501 && (GET_CODE (reload_override_in[r]) == REG
5502 || GET_CODE (reload_override_in[r]) == SUBREG))
5503 {
5504 int regno = true_regnum (reload_override_in[r]);
5505 if (spill_reg_order[regno] >= 0
5506 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5507 reload_when_needed[r]))
5508 reload_override_in[r] = 0;
5509 }
5510 }
5511
5512 /* Now that reload_override_in is known valid,
5513 actually override reload_in. */
5514 for (j = 0; j < n_reloads; j++)
5515 if (reload_override_in[j])
5516 reload_in[j] = reload_override_in[j];
5517
5518 /* If this reload won't be done because it has been cancelled or is
5519 optional and not inherited, clear reload_reg_rtx so other
5520 routines (such as subst_reloads) don't get confused. */
5521 for (j = 0; j < n_reloads; j++)
5522 if (reload_reg_rtx[j] != 0
5523 && ((reload_optional[j] && ! reload_inherited[j])
5524 || (reload_in[j] == 0 && reload_out[j] == 0
5525 && ! reload_secondary_p[j])))
5526 {
5527 int regno = true_regnum (reload_reg_rtx[j]);
5528
5529 if (spill_reg_order[regno] >= 0)
5530 clear_reload_reg_in_use (regno, reload_opnum[j],
5531 reload_when_needed[j], reload_mode[j]);
5532 reload_reg_rtx[j] = 0;
5533 }
5534
5535 /* Record which pseudos and which spill regs have output reloads. */
5536 for (j = 0; j < n_reloads; j++)
5537 {
5538 register int r = reload_order[j];
5539
5540 i = reload_spill_index[r];
5541
5542 /* I is nonneg if this reload used one of the spill regs.
5543 If reload_reg_rtx[r] is 0, this is an optional reload
5544 that we opted to ignore. */
5545 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5546 && reload_reg_rtx[r] != 0)
5547 {
5548 register int nregno = REGNO (reload_out[r]);
5549 int nr = 1;
5550
5551 if (nregno < FIRST_PSEUDO_REGISTER)
5552 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5553
5554 while (--nr >= 0)
5555 reg_has_output_reload[nregno + nr] = 1;
5556
5557 if (i >= 0)
5558 {
5559 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5560 while (--nr >= 0)
5561 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5562 }
5563
5564 if (reload_when_needed[r] != RELOAD_OTHER
5565 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5566 && reload_when_needed[r] != RELOAD_FOR_INSN)
5567 abort ();
5568 }
5569 }
5570 }
5571 \f
5572 /* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5573 reloads of the same item for fear that we might not have enough reload
5574 registers. However, normally they will get the same reload register
5575 and hence actually need not be loaded twice.
5576
5577 Here we check for the most common case of this phenomenon: when we have
5578 a number of reloads for the same object, each of which were allocated
5579 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5580 reload, and is not modified in the insn itself. If we find such,
5581 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5582 This will not increase the number of spill registers needed and will
5583 prevent redundant code. */
5584
#ifdef SMALL_REGISTER_CLASSES

/* Merge any reloads of the same value that were assigned the same
   reload register (see the comment block above for why this happens
   with SMALL_REGISTER_CLASSES), making the surviving reload
   RELOAD_OTHER so it is emitted exactly once.

   INSN is the insn whose reloads are being processed; it is consulted
   only to verify that a candidate reload register is not modified by
   the insn itself.  Operates by side effect on the global reload
   arrays (reload_in, reload_when_needed, ...); returns nothing.  */

static void
merge_assigned_reloads (insn)
     rtx insn;
{
  int i, j;

  /* Scan all the reloads looking for ones that only load values and
     are not already RELOAD_OTHER and ones whose reload_reg_rtx are
     assigned and not modified by INSN.  */

  for (i = 0; i < n_reloads; i++)
    {
      if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
	  || reload_out[i] != 0 || reload_reg_rtx[i] == 0
	  || reg_set_p (reload_reg_rtx[i], insn))
	continue;

      /* Look at all other reloads.  Ensure that the only use of this
	 reload_reg_rtx is in a reload that just loads the same value
	 as we do.  Note that any secondary reloads must be of the identical
	 class since the values, modes, and result registers are the
	 same, so we need not do anything with any secondary reloads.  */

      for (j = 0; j < n_reloads; j++)
	{
	  if (i == j || reload_reg_rtx[j] == 0
	      || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
					    reload_reg_rtx[i]))
	    continue;

	  /* If the reload regs aren't exactly the same (e.g, different modes)
	     or if the values are different, we can't merge anything with this
	     reload register.  */

	  if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
	      || reload_out[j] != 0 || reload_in[j] == 0
	      || ! rtx_equal_p (reload_in[i], reload_in[j]))
	    break;
	}

      /* If all is OK, merge the reloads.  Only set this to RELOAD_OTHER if
	 we, in fact, found any matching reloads.  */

      if (j == n_reloads)
	{
	  for (j = 0; j < n_reloads; j++)
	    if (i != j && reload_reg_rtx[j] != 0
		&& rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
	      {
		reload_when_needed[i] = RELOAD_OTHER;
		reload_in[j] = 0;
		transfer_replacements (i, j);
	      }

	  /* If this is now RELOAD_OTHER, look for any reloads that load
	     parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
	     if they were for inputs, RELOAD_OTHER for outputs.  Note that
	     this test is equivalent to looking for reloads for this operand
	     number.

	     The subsidiary reloads are indexed by J here: testing
	     reload_when_needed[I] instead would make this loop body dead
	     code, since the guard just above established that
	     reload_when_needed[I] == RELOAD_OTHER.  */

	  if (reload_when_needed[i] == RELOAD_OTHER)
	    for (j = 0; j < n_reloads; j++)
	      if (reload_in[j] != 0
		  && reload_when_needed[j] != RELOAD_OTHER
		  && reg_overlap_mentioned_for_reload_p (reload_in[j],
							 reload_in[i]))
		reload_when_needed[j]
		  = (reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
		     ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
	}
    }
}
#endif /* SMALL_REGISTER_CLASSES */
5660 \f
5661 /* Output insns to reload values in and out of the chosen reload regs. */
5662
5663 static void
5664 emit_reload_insns (insn)
5665 rtx insn;
5666 {
5667 register int j;
5668 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5669 rtx other_input_address_reload_insns = 0;
5670 rtx other_input_reload_insns = 0;
5671 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5672 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5673 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5674 rtx operand_reload_insns = 0;
5675 rtx other_operand_reload_insns = 0;
5676 rtx other_output_reload_insns = 0;
5677 rtx following_insn = NEXT_INSN (insn);
5678 rtx before_insn = insn;
5679 int special;
5680 /* Values to be put in spill_reg_store are put here first. */
5681 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5682
5683 for (j = 0; j < reload_n_operands; j++)
5684 input_reload_insns[j] = input_address_reload_insns[j]
5685 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5686
5687 /* Now output the instructions to copy the data into and out of the
5688 reload registers. Do these in the order that the reloads were reported,
5689 since reloads of base and index registers precede reloads of operands
5690 and the operands may need the base and index registers reloaded. */
5691
5692 for (j = 0; j < n_reloads; j++)
5693 {
5694 register rtx old;
5695 rtx oldequiv_reg = 0;
5696
5697 if (reload_spill_index[j] >= 0)
5698 new_spill_reg_store[reload_spill_index[j]] = 0;
5699
5700 old = reload_in[j];
5701 if (old != 0 && ! reload_inherited[j]
5702 && ! rtx_equal_p (reload_reg_rtx[j], old)
5703 && reload_reg_rtx[j] != 0)
5704 {
5705 register rtx reloadreg = reload_reg_rtx[j];
5706 rtx oldequiv = 0;
5707 enum machine_mode mode;
5708 rtx *where;
5709
5710 /* Determine the mode to reload in.
5711 This is very tricky because we have three to choose from.
5712 There is the mode the insn operand wants (reload_inmode[J]).
5713 There is the mode of the reload register RELOADREG.
5714 There is the intrinsic mode of the operand, which we could find
5715 by stripping some SUBREGs.
5716 It turns out that RELOADREG's mode is irrelevant:
5717 we can change that arbitrarily.
5718
5719 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5720 then the reload reg may not support QImode moves, so use SImode.
5721 If foo is in memory due to spilling a pseudo reg, this is safe,
5722 because the QImode value is in the least significant part of a
5723 slot big enough for a SImode. If foo is some other sort of
5724 memory reference, then it is impossible to reload this case,
5725 so previous passes had better make sure this never happens.
5726
5727 Then consider a one-word union which has SImode and one of its
5728 members is a float, being fetched as (SUBREG:SF union:SI).
5729 We must fetch that as SFmode because we could be loading into
5730 a float-only register. In this case OLD's mode is correct.
5731
5732 Consider an immediate integer: it has VOIDmode. Here we need
5733 to get a mode from something else.
5734
5735 In some cases, there is a fourth mode, the operand's
5736 containing mode. If the insn specifies a containing mode for
5737 this operand, it overrides all others.
5738
5739 I am not sure whether the algorithm here is always right,
5740 but it does the right things in those cases. */
5741
5742 mode = GET_MODE (old);
5743 if (mode == VOIDmode)
5744 mode = reload_inmode[j];
5745
5746 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5747 /* If we need a secondary register for this operation, see if
5748 the value is already in a register in that class. Don't
5749 do this if the secondary register will be used as a scratch
5750 register. */
5751
5752 if (reload_secondary_in_reload[j] >= 0
5753 && reload_secondary_in_icode[j] == CODE_FOR_nothing
5754 && optimize)
5755 oldequiv
5756 = find_equiv_reg (old, insn,
5757 reload_reg_class[reload_secondary_in_reload[j]],
5758 -1, NULL_PTR, 0, mode);
5759 #endif
5760
5761 /* If reloading from memory, see if there is a register
5762 that already holds the same value. If so, reload from there.
5763 We can pass 0 as the reload_reg_p argument because
5764 any other reload has either already been emitted,
5765 in which case find_equiv_reg will see the reload-insn,
5766 or has yet to be emitted, in which case it doesn't matter
5767 because we will use this equiv reg right away. */
5768
5769 if (oldequiv == 0 && optimize
5770 && (GET_CODE (old) == MEM
5771 || (GET_CODE (old) == REG
5772 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5773 && reg_renumber[REGNO (old)] < 0)))
5774 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5775 -1, NULL_PTR, 0, mode);
5776
5777 if (oldequiv)
5778 {
5779 int regno = true_regnum (oldequiv);
5780
5781 /* If OLDEQUIV is a spill register, don't use it for this
5782 if any other reload needs it at an earlier stage of this insn
5783 or at this stage. */
5784 if (spill_reg_order[regno] >= 0
5785 && (! reload_reg_free_p (regno, reload_opnum[j],
5786 reload_when_needed[j])
5787 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5788 reload_when_needed[j])))
5789 oldequiv = 0;
5790
5791 /* If OLDEQUIV is not a spill register,
5792 don't use it if any other reload wants it. */
5793 if (spill_reg_order[regno] < 0)
5794 {
5795 int k;
5796 for (k = 0; k < n_reloads; k++)
5797 if (reload_reg_rtx[k] != 0 && k != j
5798 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5799 oldequiv))
5800 {
5801 oldequiv = 0;
5802 break;
5803 }
5804 }
5805
5806 /* If it is no cheaper to copy from OLDEQUIV into the
5807 reload register than it would be to move from memory,
5808 don't use it. Likewise, if we need a secondary register
5809 or memory. */
5810
5811 if (oldequiv != 0
5812 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5813 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5814 reload_reg_class[j])
5815 >= MEMORY_MOVE_COST (mode)))
5816 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5817 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5818 mode, oldequiv)
5819 != NO_REGS)
5820 #endif
5821 #ifdef SECONDARY_MEMORY_NEEDED
5822 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5823 REGNO_REG_CLASS (regno),
5824 mode)
5825 #endif
5826 ))
5827 oldequiv = 0;
5828 }
5829
5830 if (oldequiv == 0)
5831 oldequiv = old;
5832 else if (GET_CODE (oldequiv) == REG)
5833 oldequiv_reg = oldequiv;
5834 else if (GET_CODE (oldequiv) == SUBREG)
5835 oldequiv_reg = SUBREG_REG (oldequiv);
5836
5837 /* If we are reloading from a register that was recently stored in
5838 with an output-reload, see if we can prove there was
5839 actually no need to store the old value in it. */
5840
5841 if (optimize && GET_CODE (oldequiv) == REG
5842 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5843 && spill_reg_order[REGNO (oldequiv)] >= 0
5844 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
5845 && find_reg_note (insn, REG_DEAD, reload_in[j])
5846 /* This is unsafe if operand occurs more than once in current
5847 insn. Perhaps some occurrences weren't reloaded. */
5848 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5849 delete_output_reload
5850 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5851
5852 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5853 then load RELOADREG from OLDEQUIV. Note that we cannot use
5854 gen_lowpart_common since it can do the wrong thing when
5855 RELOADREG has a multi-word mode. Note that RELOADREG
5856 must always be a REG here. */
5857
5858 if (GET_MODE (reloadreg) != mode)
5859 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5860 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5861 oldequiv = SUBREG_REG (oldequiv);
5862 if (GET_MODE (oldequiv) != VOIDmode
5863 && mode != GET_MODE (oldequiv))
5864 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5865
5866 /* Switch to the right place to emit the reload insns. */
5867 switch (reload_when_needed[j])
5868 {
5869 case RELOAD_OTHER:
5870 where = &other_input_reload_insns;
5871 break;
5872 case RELOAD_FOR_INPUT:
5873 where = &input_reload_insns[reload_opnum[j]];
5874 break;
5875 case RELOAD_FOR_INPUT_ADDRESS:
5876 where = &input_address_reload_insns[reload_opnum[j]];
5877 break;
5878 case RELOAD_FOR_OUTPUT_ADDRESS:
5879 where = &output_address_reload_insns[reload_opnum[j]];
5880 break;
5881 case RELOAD_FOR_OPERAND_ADDRESS:
5882 where = &operand_reload_insns;
5883 break;
5884 case RELOAD_FOR_OPADDR_ADDR:
5885 where = &other_operand_reload_insns;
5886 break;
5887 case RELOAD_FOR_OTHER_ADDRESS:
5888 where = &other_input_address_reload_insns;
5889 break;
5890 default:
5891 abort ();
5892 }
5893
5894 push_to_sequence (*where);
5895 special = 0;
5896
5897 /* Auto-increment addresses must be reloaded in a special way. */
5898 if (GET_CODE (oldequiv) == POST_INC
5899 || GET_CODE (oldequiv) == POST_DEC
5900 || GET_CODE (oldequiv) == PRE_INC
5901 || GET_CODE (oldequiv) == PRE_DEC)
5902 {
5903 /* We are not going to bother supporting the case where a
5904 incremented register can't be copied directly from
5905 OLDEQUIV since this seems highly unlikely. */
5906 if (reload_secondary_in_reload[j] >= 0)
5907 abort ();
5908 /* Prevent normal processing of this reload. */
5909 special = 1;
5910 /* Output a special code sequence for this case. */
5911 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5912 }
5913
5914 /* If we are reloading a pseudo-register that was set by the previous
5915 insn, see if we can get rid of that pseudo-register entirely
5916 by redirecting the previous insn into our reload register. */
5917
5918 else if (optimize && GET_CODE (old) == REG
5919 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5920 && dead_or_set_p (insn, old)
5921 /* This is unsafe if some other reload
5922 uses the same reg first. */
5923 && reload_reg_free_before_p (REGNO (reloadreg),
5924 reload_opnum[j],
5925 reload_when_needed[j]))
5926 {
5927 rtx temp = PREV_INSN (insn);
5928 while (temp && GET_CODE (temp) == NOTE)
5929 temp = PREV_INSN (temp);
5930 if (temp
5931 && GET_CODE (temp) == INSN
5932 && GET_CODE (PATTERN (temp)) == SET
5933 && SET_DEST (PATTERN (temp)) == old
5934 /* Make sure we can access insn_operand_constraint. */
5935 && asm_noperands (PATTERN (temp)) < 0
5936 /* This is unsafe if prev insn rejects our reload reg. */
5937 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5938 reloadreg)
5939 /* This is unsafe if operand occurs more than once in current
5940 insn. Perhaps some occurrences aren't reloaded. */
5941 && count_occurrences (PATTERN (insn), old) == 1
5942 /* Don't risk splitting a matching pair of operands. */
5943 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5944 {
5945 /* Store into the reload register instead of the pseudo. */
5946 SET_DEST (PATTERN (temp)) = reloadreg;
5947 /* If these are the only uses of the pseudo reg,
5948 pretend for GDB it lives in the reload reg we used. */
5949 if (reg_n_deaths[REGNO (old)] == 1
5950 && reg_n_sets[REGNO (old)] == 1)
5951 {
5952 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5953 alter_reg (REGNO (old), -1);
5954 }
5955 special = 1;
5956 }
5957 }
5958
5959 /* We can't do that, so output an insn to load RELOADREG. */
5960
5961 if (! special)
5962 {
5963 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5964 rtx second_reload_reg = 0;
5965 enum insn_code icode;
5966
5967 /* If we have a secondary reload, pick up the secondary register
5968 and icode, if any. If OLDEQUIV and OLD are different or
5969 if this is an in-out reload, recompute whether or not we
5970 still need a secondary register and what the icode should
5971 be. If we still need a secondary register and the class or
5972 icode is different, go back to reloading from OLD if using
5973 OLDEQUIV means that we got the wrong type of register. We
5974 cannot have different class or icode due to an in-out reload
5975 because we don't make such reloads when both the input and
5976 output need secondary reload registers. */
5977
5978 if (reload_secondary_in_reload[j] >= 0)
5979 {
5980 int secondary_reload = reload_secondary_in_reload[j];
5981 rtx real_oldequiv = oldequiv;
5982 rtx real_old = old;
5983
5984 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5985 and similarly for OLD.
5986 See comments in get_secondary_reload in reload.c. */
5987 if (GET_CODE (oldequiv) == REG
5988 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5989 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5990 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5991
5992 if (GET_CODE (old) == REG
5993 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5994 && reg_equiv_mem[REGNO (old)] != 0)
5995 real_old = reg_equiv_mem[REGNO (old)];
5996
5997 second_reload_reg = reload_reg_rtx[secondary_reload];
5998 icode = reload_secondary_in_icode[j];
5999
6000 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6001 || (reload_in[j] != 0 && reload_out[j] != 0))
6002 {
6003 enum reg_class new_class
6004 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6005 mode, real_oldequiv);
6006
6007 if (new_class == NO_REGS)
6008 second_reload_reg = 0;
6009 else
6010 {
6011 enum insn_code new_icode;
6012 enum machine_mode new_mode;
6013
6014 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6015 REGNO (second_reload_reg)))
6016 oldequiv = old, real_oldequiv = real_old;
6017 else
6018 {
6019 new_icode = reload_in_optab[(int) mode];
6020 if (new_icode != CODE_FOR_nothing
6021 && ((insn_operand_predicate[(int) new_icode][0]
6022 && ! ((*insn_operand_predicate[(int) new_icode][0])
6023 (reloadreg, mode)))
6024 || (insn_operand_predicate[(int) new_icode][1]
6025 && ! ((*insn_operand_predicate[(int) new_icode][1])
6026 (real_oldequiv, mode)))))
6027 new_icode = CODE_FOR_nothing;
6028
6029 if (new_icode == CODE_FOR_nothing)
6030 new_mode = mode;
6031 else
6032 new_mode = insn_operand_mode[(int) new_icode][2];
6033
6034 if (GET_MODE (second_reload_reg) != new_mode)
6035 {
6036 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6037 new_mode))
6038 oldequiv = old, real_oldequiv = real_old;
6039 else
6040 second_reload_reg
6041 = gen_rtx (REG, new_mode,
6042 REGNO (second_reload_reg));
6043 }
6044 }
6045 }
6046 }
6047
6048 /* If we still need a secondary reload register, check
6049 to see if it is being used as a scratch or intermediate
6050 register and generate code appropriately. If we need
6051 a scratch register, use REAL_OLDEQUIV since the form of
6052 the insn may depend on the actual address if it is
6053 a MEM. */
6054
6055 if (second_reload_reg)
6056 {
6057 if (icode != CODE_FOR_nothing)
6058 {
6059 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6060 second_reload_reg));
6061 special = 1;
6062 }
6063 else
6064 {
6065 /* See if we need a scratch register to load the
6066 intermediate register (a tertiary reload). */
6067 enum insn_code tertiary_icode
6068 = reload_secondary_in_icode[secondary_reload];
6069
6070 if (tertiary_icode != CODE_FOR_nothing)
6071 {
6072 rtx third_reload_reg
6073 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6074
6075 emit_insn ((GEN_FCN (tertiary_icode)
6076 (second_reload_reg, real_oldequiv,
6077 third_reload_reg)));
6078 }
6079 else
6080 gen_reload (second_reload_reg, oldequiv,
6081 reload_opnum[j],
6082 reload_when_needed[j]);
6083
6084 oldequiv = second_reload_reg;
6085 }
6086 }
6087 }
6088 #endif
6089
6090 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6091 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6092 reload_when_needed[j]);
6093
6094 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6095 /* We may have to make a REG_DEAD note for the secondary reload
6096 register in the insns we just made. Find the last insn that
6097 mentioned the register. */
6098 if (! special && second_reload_reg
6099 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6100 {
6101 rtx prev;
6102
6103 for (prev = get_last_insn (); prev;
6104 prev = PREV_INSN (prev))
6105 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
6106 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6107 PATTERN (prev)))
6108 {
6109 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6110 second_reload_reg,
6111 REG_NOTES (prev));
6112 break;
6113 }
6114 }
6115 #endif
6116 }
6117
6118 /* End this sequence. */
6119 *where = get_insns ();
6120 end_sequence ();
6121 }
6122
6123 /* Add a note saying the input reload reg
6124 dies in this insn, if anyone cares. */
6125 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6126 if (old != 0
6127 && reload_reg_rtx[j] != old
6128 && reload_reg_rtx[j] != 0
6129 && reload_out[j] == 0
6130 && ! reload_inherited[j]
6131 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6132 {
6133 register rtx reloadreg = reload_reg_rtx[j];
6134
6135 #if 0
6136 /* We can't abort here because we need to support this for sched.c.
6137 It's not terrible to miss a REG_DEAD note, but we should try
6138 to figure out how to do this correctly. */
6139 /* The code below is incorrect for address-only reloads. */
6140 if (reload_when_needed[j] != RELOAD_OTHER
6141 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6142 abort ();
6143 #endif
6144
6145 /* Add a death note to this insn, for an input reload. */
6146
6147 if ((reload_when_needed[j] == RELOAD_OTHER
6148 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6149 && ! dead_or_set_p (insn, reloadreg))
6150 REG_NOTES (insn)
6151 = gen_rtx (EXPR_LIST, REG_DEAD,
6152 reloadreg, REG_NOTES (insn));
6153 }
6154
6155 /* When we inherit a reload, the last marked death of the reload reg
6156 may no longer really be a death. */
6157 if (reload_reg_rtx[j] != 0
6158 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6159 && reload_inherited[j])
6160 {
6161 /* Handle inheriting an output reload.
6162 Remove the death note from the output reload insn. */
6163 if (reload_spill_index[j] >= 0
6164 && GET_CODE (reload_in[j]) == REG
6165 && spill_reg_store[reload_spill_index[j]] != 0
6166 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6167 REG_DEAD, REGNO (reload_reg_rtx[j])))
6168 remove_death (REGNO (reload_reg_rtx[j]),
6169 spill_reg_store[reload_spill_index[j]]);
6170 /* Likewise for input reloads that were inherited. */
6171 else if (reload_spill_index[j] >= 0
6172 && GET_CODE (reload_in[j]) == REG
6173 && spill_reg_store[reload_spill_index[j]] == 0
6174 && reload_inheritance_insn[j] != 0
6175 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6176 REGNO (reload_reg_rtx[j])))
6177 remove_death (REGNO (reload_reg_rtx[j]),
6178 reload_inheritance_insn[j]);
6179 else
6180 {
6181 rtx prev;
6182
6183 /* We got this register from find_equiv_reg.
6184 Search back for its last death note and get rid of it.
6185 But don't search back too far.
6186 Don't go past a place where this reg is set,
6187 since a death note before that remains valid. */
6188 for (prev = PREV_INSN (insn);
6189 prev && GET_CODE (prev) != CODE_LABEL;
6190 prev = PREV_INSN (prev))
6191 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6192 && dead_or_set_p (prev, reload_reg_rtx[j]))
6193 {
6194 if (find_regno_note (prev, REG_DEAD,
6195 REGNO (reload_reg_rtx[j])))
6196 remove_death (REGNO (reload_reg_rtx[j]), prev);
6197 break;
6198 }
6199 }
6200 }
6201
6202 /* We might have used find_equiv_reg above to choose an alternate
6203 place from which to reload. If so, and it died, we need to remove
6204 that death and move it to one of the insns we just made. */
6205
6206 if (oldequiv_reg != 0
6207 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6208 {
6209 rtx prev, prev1;
6210
6211 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6212 prev = PREV_INSN (prev))
6213 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6214 && dead_or_set_p (prev, oldequiv_reg))
6215 {
6216 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6217 {
6218 for (prev1 = this_reload_insn;
6219 prev1; prev1 = PREV_INSN (prev1))
6220 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
6221 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6222 PATTERN (prev1)))
6223 {
6224 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6225 oldequiv_reg,
6226 REG_NOTES (prev1));
6227 break;
6228 }
6229 remove_death (REGNO (oldequiv_reg), prev);
6230 }
6231 break;
6232 }
6233 }
6234 #endif
6235
6236 /* If we are reloading a register that was recently stored in with an
6237 output-reload, see if we can prove there was
6238 actually no need to store the old value in it. */
6239
6240 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6241 && reload_in[j] != 0
6242 && GET_CODE (reload_in[j]) == REG
6243 #if 0
6244 /* There doesn't seem to be any reason to restrict this to pseudos
6245 and doing so loses in the case where we are copying from a
6246 register of the wrong class. */
6247 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6248 #endif
6249 && spill_reg_store[reload_spill_index[j]] != 0
6250 /* This is unsafe if some other reload uses the same reg first. */
6251 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6252 reload_opnum[j], reload_when_needed[j])
6253 && dead_or_set_p (insn, reload_in[j])
6254 /* This is unsafe if operand occurs more than once in current
6255 insn. Perhaps some occurrences weren't reloaded. */
6256 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6257 delete_output_reload (insn, j,
6258 spill_reg_store[reload_spill_index[j]]);
6259
6260 /* Input-reloading is done. Now do output-reloading,
6261 storing the value from the reload-register after the main insn
6262 if reload_out[j] is nonzero.
6263
6264 ??? At some point we need to support handling output reloads of
6265 JUMP_INSNs or insns that set cc0. */
6266 old = reload_out[j];
6267 if (old != 0
6268 && reload_reg_rtx[j] != old
6269 && reload_reg_rtx[j] != 0)
6270 {
6271 register rtx reloadreg = reload_reg_rtx[j];
6272 register rtx second_reloadreg = 0;
6273 rtx note, p;
6274 enum machine_mode mode;
6275 int special = 0;
6276
6277 /* An output operand that dies right away does need a reload,
6278 but need not be copied from it. Show the new location in the
6279 REG_UNUSED note. */
6280 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6281 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6282 {
6283 XEXP (note, 0) = reload_reg_rtx[j];
6284 continue;
6285 }
6286 /* Likewise for a SUBREG of an operand that dies. */
6287 else if (GET_CODE (old) == SUBREG
6288 && GET_CODE (SUBREG_REG (old)) == REG
6289 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6290 SUBREG_REG (old))))
6291 {
6292 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6293 reload_reg_rtx[j]);
6294 continue;
6295 }
6296 else if (GET_CODE (old) == SCRATCH)
6297 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6298 but we don't want to make an output reload. */
6299 continue;
6300
6301 #if 0
6302 /* Strip off of OLD any size-increasing SUBREGs such as
6303 (SUBREG:SI foo:QI 0). */
6304
6305 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6306 && (GET_MODE_SIZE (GET_MODE (old))
6307 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6308 old = SUBREG_REG (old);
6309 #endif
6310
6311 /* If is a JUMP_INSN, we can't support output reloads yet. */
6312 if (GET_CODE (insn) == JUMP_INSN)
6313 abort ();
6314
6315 if (reload_when_needed[j] == RELOAD_OTHER)
6316 start_sequence ();
6317 else
6318 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6319
6320 /* Determine the mode to reload in.
6321 See comments above (for input reloading). */
6322
6323 mode = GET_MODE (old);
6324 if (mode == VOIDmode)
6325 {
6326 /* VOIDmode should never happen for an output. */
6327 if (asm_noperands (PATTERN (insn)) < 0)
6328 /* It's the compiler's fault. */
6329 fatal_insn ("VOIDmode on an output", insn);
6330 error_for_asm (insn, "output operand is constant in `asm'");
6331 /* Prevent crash--use something we know is valid. */
6332 mode = word_mode;
6333 old = gen_rtx (REG, mode, REGNO (reloadreg));
6334 }
6335
6336 if (GET_MODE (reloadreg) != mode)
6337 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6338
6339 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6340
6341 /* If we need two reload regs, set RELOADREG to the intermediate
6342 one, since it will be stored into OLD. We might need a secondary
6343 register only for an input reload, so check again here. */
6344
6345 if (reload_secondary_out_reload[j] >= 0)
6346 {
6347 rtx real_old = old;
6348
6349 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6350 && reg_equiv_mem[REGNO (old)] != 0)
6351 real_old = reg_equiv_mem[REGNO (old)];
6352
6353 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6354 mode, real_old)
6355 != NO_REGS))
6356 {
6357 second_reloadreg = reloadreg;
6358 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6359
6360 /* See if RELOADREG is to be used as a scratch register
6361 or as an intermediate register. */
6362 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6363 {
6364 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6365 (real_old, second_reloadreg, reloadreg)));
6366 special = 1;
6367 }
6368 else
6369 {
6370 /* See if we need both a scratch and intermediate reload
6371 register. */
6372
6373 int secondary_reload = reload_secondary_out_reload[j];
6374 enum insn_code tertiary_icode
6375 = reload_secondary_out_icode[secondary_reload];
6376
6377 if (GET_MODE (reloadreg) != mode)
6378 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6379
6380 if (tertiary_icode != CODE_FOR_nothing)
6381 {
6382 rtx third_reloadreg
6383 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6384 rtx tem;
6385
6386 /* Copy primary reload reg to secondary reload reg.
6387 (Note that these have been swapped above, then
6388 secondary reload reg to OLD using our insn. */
6389
6390 /* If REAL_OLD is a paradoxical SUBREG, remove it
6391 and try to put the opposite SUBREG on
6392 RELOADREG. */
6393 if (GET_CODE (real_old) == SUBREG
6394 && (GET_MODE_SIZE (GET_MODE (real_old))
6395 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6396 && 0 != (tem = gen_lowpart_common
6397 (GET_MODE (SUBREG_REG (real_old)),
6398 reloadreg)))
6399 real_old = SUBREG_REG (real_old), reloadreg = tem;
6400
6401 gen_reload (reloadreg, second_reloadreg,
6402 reload_opnum[j], reload_when_needed[j]);
6403 emit_insn ((GEN_FCN (tertiary_icode)
6404 (real_old, reloadreg, third_reloadreg)));
6405 special = 1;
6406 }
6407
6408 else
6409 /* Copy between the reload regs here and then to
6410 OUT later. */
6411
6412 gen_reload (reloadreg, second_reloadreg,
6413 reload_opnum[j], reload_when_needed[j]);
6414 }
6415 }
6416 }
6417 #endif
6418
6419 /* Output the last reload insn. */
6420 if (! special)
6421 gen_reload (old, reloadreg, reload_opnum[j],
6422 reload_when_needed[j]);
6423
6424 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6425 /* If final will look at death notes for this reg,
6426 put one on the last output-reload insn to use it. Similarly
6427 for any secondary register. */
6428 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6429 for (p = get_last_insn (); p; p = PREV_INSN (p))
6430 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6431 && reg_overlap_mentioned_for_reload_p (reloadreg,
6432 PATTERN (p)))
6433 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6434 reloadreg, REG_NOTES (p));
6435
6436 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6437 if (! special
6438 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6439 for (p = get_last_insn (); p; p = PREV_INSN (p))
6440 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6441 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6442 PATTERN (p)))
6443 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6444 second_reloadreg, REG_NOTES (p));
6445 #endif
6446 #endif
6447 /* Look at all insns we emitted, just to be safe. */
6448 for (p = get_insns (); p; p = NEXT_INSN (p))
6449 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6450 {
6451 /* If this output reload doesn't come from a spill reg,
6452 clear any memory of reloaded copies of the pseudo reg.
6453 If this output reload comes from a spill reg,
6454 reg_has_output_reload will make this do nothing. */
6455 note_stores (PATTERN (p), forget_old_reloads_1);
6456
6457 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6458 && reload_spill_index[j] >= 0)
6459 new_spill_reg_store[reload_spill_index[j]] = p;
6460 }
6461
6462 if (reload_when_needed[j] == RELOAD_OTHER)
6463 {
6464 if (other_output_reload_insns)
6465 emit_insns (other_output_reload_insns);
6466 other_output_reload_insns = get_insns ();
6467 }
6468 else
6469 output_reload_insns[reload_opnum[j]] = get_insns ();
6470
6471 end_sequence ();
6472 }
6473 }
6474
6475 /* Now write all the insns we made for reloads in the order expected by
6476 the allocation functions. Prior to the insn being reloaded, we write
6477 the following reloads:
6478
6479 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6480
6481 RELOAD_OTHER reloads, output in ascending order by reload number.
6482
6483 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6484 the RELOAD_FOR_INPUT reload for the operand.
6485
6486 RELOAD_FOR_OPADDR_ADDRS reloads.
6487
6488 RELOAD_FOR_OPERAND_ADDRESS reloads.
6489
6490 After the insn being reloaded, we write the following:
6491
6492 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6493 the RELOAD_FOR_OUTPUT reload for that operand.
6494
6495 Any RELOAD_OTHER output reloads, output in descending order by
6496 reload number. */
6497
6498 emit_insns_before (other_input_address_reload_insns, before_insn);
6499 emit_insns_before (other_input_reload_insns, before_insn);
6500
6501 for (j = 0; j < reload_n_operands; j++)
6502 {
6503 emit_insns_before (input_address_reload_insns[j], before_insn);
6504 emit_insns_before (input_reload_insns[j], before_insn);
6505 }
6506
6507 emit_insns_before (other_operand_reload_insns, before_insn);
6508 emit_insns_before (operand_reload_insns, before_insn);
6509
6510 for (j = 0; j < reload_n_operands; j++)
6511 {
6512 emit_insns_before (output_address_reload_insns[j], following_insn);
6513 emit_insns_before (output_reload_insns[j], following_insn);
6514 }
6515
6516 emit_insns_before (other_output_reload_insns, following_insn);
6517
6518 /* Move death notes from INSN
6519 to output-operand-address and output reload insns. */
6520 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6521 {
6522 rtx insn1;
6523 /* Loop over those insns, last ones first. */
6524 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6525 insn1 = PREV_INSN (insn1))
6526 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6527 {
6528 rtx source = SET_SRC (PATTERN (insn1));
6529 rtx dest = SET_DEST (PATTERN (insn1));
6530
6531 /* The note we will examine next. */
6532 rtx reg_notes = REG_NOTES (insn);
6533 /* The place that pointed to this note. */
6534 rtx *prev_reg_note = &REG_NOTES (insn);
6535
6536 /* If the note is for something used in the source of this
6537 reload insn, or in the output address, move the note. */
6538 while (reg_notes)
6539 {
6540 rtx next_reg_notes = XEXP (reg_notes, 1);
6541 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6542 && GET_CODE (XEXP (reg_notes, 0)) == REG
6543 && ((GET_CODE (dest) != REG
6544 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6545 dest))
6546 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6547 source)))
6548 {
6549 *prev_reg_note = next_reg_notes;
6550 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6551 REG_NOTES (insn1) = reg_notes;
6552 }
6553 else
6554 prev_reg_note = &XEXP (reg_notes, 1);
6555
6556 reg_notes = next_reg_notes;
6557 }
6558 }
6559 }
6560 #endif
6561
6562 /* For all the spill regs newly reloaded in this instruction,
6563 record what they were reloaded from, so subsequent instructions
6564 can inherit the reloads.
6565
6566 Update spill_reg_store for the reloads of this insn.
6567 Copy the elements that were updated in the loop above. */
6568
6569 for (j = 0; j < n_reloads; j++)
6570 {
6571 register int r = reload_order[j];
6572 register int i = reload_spill_index[r];
6573
6574 /* I is nonneg if this reload used one of the spill regs.
6575 If reload_reg_rtx[r] is 0, this is an optional reload
6576 that we opted to ignore.
6577
6578 Also ignore reloads that don't reach the end of the insn,
6579 since we will eventually see the one that does. */
6580
6581 if (i >= 0 && reload_reg_rtx[r] != 0
6582 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6583 reload_when_needed[r]))
6584 {
6585 /* First, clear out memory of what used to be in this spill reg.
6586 If consecutive registers are used, clear them all. */
6587 int nr
6588 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6589 int k;
6590
6591 for (k = 0; k < nr; k++)
6592 {
6593 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6594 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6595 }
6596
6597 /* Maybe the spill reg contains a copy of reload_out. */
6598 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6599 {
6600 register int nregno = REGNO (reload_out[r]);
6601 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6602 : HARD_REGNO_NREGS (nregno,
6603 GET_MODE (reload_reg_rtx[r])));
6604
6605 spill_reg_store[i] = new_spill_reg_store[i];
6606 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6607
6608 /* If NREGNO is a hard register, it may occupy more than
6609 one register. If it does, say what is in the
6610 rest of the registers assuming that both registers
6611 agree on how many words the object takes. If not,
6612 invalidate the subsequent registers. */
6613
6614 if (nregno < FIRST_PSEUDO_REGISTER)
6615 for (k = 1; k < nnr; k++)
6616 reg_last_reload_reg[nregno + k]
6617 = (nr == nnr ? gen_rtx (REG,
6618 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6619 REGNO (reload_reg_rtx[r]) + k)
6620 : 0);
6621
6622 /* Now do the inverse operation. */
6623 for (k = 0; k < nr; k++)
6624 {
6625 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6626 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6627 : nregno + k);
6628 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6629 }
6630 }
6631
6632 /* Maybe the spill reg contains a copy of reload_in. Only do
6633 something if there will not be an output reload for
6634 the register being reloaded. */
6635 else if (reload_out[r] == 0
6636 && reload_in[r] != 0
6637 && ((GET_CODE (reload_in[r]) == REG
6638 && ! reg_has_output_reload[REGNO (reload_in[r])])
6639 || (GET_CODE (reload_in_reg[r]) == REG
6640 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
6641 {
6642 register int nregno;
6643 int nnr;
6644
6645 if (GET_CODE (reload_in[r]) == REG)
6646 nregno = REGNO (reload_in[r]);
6647 else
6648 nregno = REGNO (reload_in_reg[r]);
6649
6650 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6651 : HARD_REGNO_NREGS (nregno,
6652 GET_MODE (reload_reg_rtx[r])));
6653
6654 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6655
6656 if (nregno < FIRST_PSEUDO_REGISTER)
6657 for (k = 1; k < nnr; k++)
6658 reg_last_reload_reg[nregno + k]
6659 = (nr == nnr ? gen_rtx (REG,
6660 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6661 REGNO (reload_reg_rtx[r]) + k)
6662 : 0);
6663
6664 /* Unless we inherited this reload, show we haven't
6665 recently done a store. */
6666 if (! reload_inherited[r])
6667 spill_reg_store[i] = 0;
6668
6669 for (k = 0; k < nr; k++)
6670 {
6671 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6672 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6673 : nregno + k);
6674 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6675 = insn;
6676 }
6677 }
6678 }
6679
6680 /* The following if-statement was #if 0'd in 1.34 (or before...).
6681 It's reenabled in 1.35 because supposedly nothing else
6682 deals with this problem. */
6683
6684 /* If a register gets output-reloaded from a non-spill register,
6685 that invalidates any previous reloaded copy of it.
6686 But forget_old_reloads_1 won't get to see it, because
6687 it thinks only about the original insn. So invalidate it here. */
6688 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6689 {
6690 register int nregno = REGNO (reload_out[r]);
6691 if (nregno >= FIRST_PSEUDO_REGISTER)
6692 reg_last_reload_reg[nregno] = 0;
6693 else
6694 {
6695 int num_regs = HARD_REGNO_NREGS (nregno,GET_MODE (reload_out[r]));
6696
6697 while (num_regs-- > 0)
6698 reg_last_reload_reg[nregno + num_regs] = 0;
6699 }
6700 }
6701 }
6702 }
6703 \f
/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.
   OPNUM and TYPE are consulted only when a secondary memory location
   is needed for the copy (see SECONDARY_MEMORY_NEEDED below).

   Returns first insn emitted.  */

rtx
gen_reload (out, in, opnum, type)
     rtx out;
     rtx in;
     int opnum;
     enum reload_type type;
{
  /* Remember the last insn before we emit anything, so we can both
     delete failed attempts and return the first insn we emitted.  */
  rtx last = get_last_insn ();
  rtx tem;

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (GET_CODE (in) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (in))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
    in = SUBREG_REG (in), out = tem;
  else if (GET_CODE (out) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (out))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
    out = SUBREG_REG (out), in = tem;

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (GET_CODE (XEXP (in, 0)) == REG
	  || GET_CODE (XEXP (in, 0)) == MEM)
      && (GET_CODE (XEXP (in, 1)) == REG
	  || CONSTANT_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == MEM))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `insn_extract' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      int code;

      /* Resolve any parts of IN that are themselves being reloaded and
	 have replacement rtx recorded for them.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (GET_CODE (XEXP (in, 1)) == REG
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx (PLUS, GET_MODE (in), op0, op1);

      /* Tentatively emit the three-operand add; if it does not
	 recognize, we delete it below and fall back.  */
      insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      delete_insns_since (last);

      /* If that failed, we must use a conservative two-insn sequence.
	 use move to copy constant, MEM, or pseudo register to the reload
	 register since "move" will be able to handle an arbitrary operand,
	 unlike add which can't, in general.  Then add the registers.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
	  || (GET_CODE (op1) == REG
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
	tem = op0, op0 = op1, op1 = tem;

      emit_insn (gen_move_insn (out, op0));

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn (gen_add2_insn (out, op1));

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      delete_insns_since (last);

      emit_insn (gen_move_insn (out, op1));
      emit_insn (gen_add2_insn (out, op0));
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
	   && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
				       REGNO_REG_CLASS (REGNO (out)),
				       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx (REG, GET_MODE (loc), REGNO (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx (REG, GET_MODE (loc), REGNO (in));

      emit_insn (gen_move_insn (loc, in));
      emit_insn (gen_move_insn (out, loc));
    }
#endif

  /* If IN is a simple operand, use gen_move_insn.  */
  else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
    emit_insn (gen_move_insn (out, in));

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (out, in));
#endif

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx (SET, VOIDmode, out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
6903 \f
/* Delete a previously made output-reload
   whose result we now believe is not needed.
   First we double-check.

   INSN is the insn now being processed.
   OUTPUT_RELOAD_INSN is the insn of the output reload.
   J is the reload-number for this insn.  */

static void
delete_output_reload (insn, j, output_reload_insn)
     rtx insn;
     int j;
     rtx output_reload_insn;
{
  register rtx i1;

  /* Get the raw pseudo-register referred to, stripping any SUBREGs.  */

  rtx reg = reload_in[j];
  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and no jumps or labels intervene, then the value can get
     here through the reload reg alone.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      /* A label or jump means control may reach INSN another way,
	 so the stored value might be needed.  */
      if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
	return;
      if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
	  && reg_mentioned_p (reg, PATTERN (i1)))
	return;
    }

  /* Some stores into this pseudo must be kept for reasons recorded
     elsewhere (see where cannot_omit_stores is set).  */
  if (cannot_omit_stores[REGNO (reg)])
    return;

  /* If this insn will store in the pseudo again,
     the previous store can be removed.  */
  if (reload_out[j] == reload_in[j])
    delete_insn (output_reload_insn);

  /* See if the pseudo reg has been completely replaced
     with reload regs.  If so, delete the store insn
     and forget we had a stack slot for the pseudo.  */
  else if (reg_n_deaths[REGNO (reg)] == 1
	   && reg_basic_block[REGNO (reg)] >= 0
	   && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here
	 and the beginning of the current basic block.
	 (We also know that the last use before INSN was
	 the output reload we are thinking of deleting, but never mind that.)
	 Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  /* Stop at the start of the basic block.  */
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	  if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    /* Some other ref remains;
	       we can't do anything.  */
	    return;
	}

      /* Delete the now-dead stores into this pseudo.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    delete_insn (i2);
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	}

      /* For the debugging info,
	 say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
      alter_reg (REGNO (reg), -1);
    }
}
6999 \f
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.  */

static void
inc_for_reload (reloadreg, value, inc_amount)
     rtx reloadreg;
     rtx value;
     int inc_amount;
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were non-zero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (GET_CODE (incloc) == REG)
    reg_last_reload_reg[REGNO (incloc)] = 0;

  /* INC_AMOUNT arrives positive; negate it for the decrement forms.  */
  if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
    inc_amount = - inc_amount;

  inc = GEN_INT (inc_amount);

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post)
    emit_insn (gen_move_insn (reloadreg, incloc));

  /* See if we can directly increment INCLOC.  Use a method similar to that
     in gen_reload: emit the add tentatively, then delete it if it is
     not recognized or fails strict constraint checking.  */

  last = get_last_insn ();
  add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
				 gen_rtx (PLUS, GET_MODE (incloc),
					  incloc, inc)));

  code = recog_memoized (add_insn);
  if (code >= 0)
    {
      insn_extract (add_insn);
      /* Check constraints strictly, the way they would be checked
	 after reload has completed.  */
      if (constrain_operands (code, 1))
	{
	  /* If this is a pre-increment and we have incremented the value
	     where it lives, copy the incremented value to RELOADREG to
	     be used as an address.  */

	  if (! post)
	    emit_insn (gen_move_insn (reloadreg, incloc));

	  return;
	}
    }

  /* The direct add was not valid; discard it and fall back.  */
  delete_insns_since (last);

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      emit_insn (gen_move_insn (reloadreg, incloc));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied INCLOC to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
    }

  return;
}
7095 \f
7096 /* Return 1 if we are certain that the constraint-string STRING allows
7097 the hard register REG. Return 0 if we can't be sure of this. */
7098
7099 static int
7100 constraint_accepts_reg_p (string, reg)
7101 char *string;
7102 rtx reg;
7103 {
7104 int value = 0;
7105 int regno = true_regnum (reg);
7106 int c;
7107
7108 /* Initialize for first alternative. */
7109 value = 0;
7110 /* Check that each alternative contains `g' or `r'. */
7111 while (1)
7112 switch (c = *string++)
7113 {
7114 case 0:
7115 /* If an alternative lacks `g' or `r', we lose. */
7116 return value;
7117 case ',':
7118 /* If an alternative lacks `g' or `r', we lose. */
7119 if (value == 0)
7120 return 0;
7121 /* Initialize for next alternative. */
7122 value = 0;
7123 break;
7124 case 'g':
7125 case 'r':
7126 /* Any general reg wins for this alternative. */
7127 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7128 value = 1;
7129 break;
7130 default:
7131 /* Any reg in specified class wins for this alternative. */
7132 {
7133 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7134
7135 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7136 value = 1;
7137 }
7138 }
7139 }
7140 \f
7141 /* Return the number of places FIND appears within X, but don't count
7142 an occurrence if some SET_DEST is FIND. */
7143
7144 static int
7145 count_occurrences (x, find)
7146 register rtx x, find;
7147 {
7148 register int i, j;
7149 register enum rtx_code code;
7150 register char *format_ptr;
7151 int count;
7152
7153 if (x == find)
7154 return 1;
7155 if (x == 0)
7156 return 0;
7157
7158 code = GET_CODE (x);
7159
7160 switch (code)
7161 {
7162 case REG:
7163 case QUEUED:
7164 case CONST_INT:
7165 case CONST_DOUBLE:
7166 case SYMBOL_REF:
7167 case CODE_LABEL:
7168 case PC:
7169 case CC0:
7170 return 0;
7171
7172 case SET:
7173 if (SET_DEST (x) == find)
7174 return count_occurrences (SET_SRC (x), find);
7175 break;
7176 }
7177
7178 format_ptr = GET_RTX_FORMAT (code);
7179 count = 0;
7180
7181 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7182 {
7183 switch (*format_ptr++)
7184 {
7185 case 'e':
7186 count += count_occurrences (XEXP (x, i), find);
7187 break;
7188
7189 case 'E':
7190 if (XVEC (x, i) != NULL)
7191 {
7192 for (j = 0; j < XVECLEN (x, i); j++)
7193 count += count_occurrences (XVECEXP (x, i, j), find);
7194 }
7195 break;
7196 }
7197 }
7198 return count;
7199 }