Don't always align reload stack slots to BIGGEST_ALIGNMENT
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92-6, 1997 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include <stdio.h>
23 #include "config.h"
24 #include "rtl.h"
25 #include "obstack.h"
26 #include "insn-config.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
29 #include "flags.h"
30 #include "expr.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "reload.h"
34 #include "recog.h"
35 #include "basic-block.h"
36 #include "output.h"
37 #include "real.h"
38
39 /* This file contains the reload pass of the compiler, which is
40 run after register allocation has been done. It checks that
41 each insn is valid (operands required to be in registers really
42 are in registers of the proper class) and fixes up invalid ones
43 by copying values temporarily into registers for the insns
44 that need them.
45
46 The results of register allocation are described by the vector
47 reg_renumber; the insns still contain pseudo regs, but reg_renumber
48 can be used to find which hard reg, if any, a pseudo reg is in.
49
50 The technique we always use is to free up a few hard regs that are
51 called ``reload regs'', and for each place where a pseudo reg
52 must be in a hard reg, copy it temporarily into one of the reload regs.
53
54 All the pseudos that were formerly allocated to the hard regs that
55 are now in use as reload regs must be ``spilled''. This means
56 that they go to other hard regs, or to stack slots if no other
57 available hard regs can be found. Spilling can invalidate more
58 insns, requiring additional need for reloads, so we must keep checking
59 until the process stabilizes.
60
61 For machines with different classes of registers, we must keep track
62 of the register class needed for each reload, and make sure that
63 we allocate enough reload registers of each class.
64
65 The file reload.c contains the code that checks one insn for
66 validity and reports the reloads that it needs. This file
67 is in charge of scanning the entire rtl code, accumulating the
68 reload needs, spilling, assigning reload registers to use for
69 fixing up each insn, and generating the new insns to copy values
70 into the reload registers. */
71
72
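/* Fallback cost estimates, used only when the target does not supply its
   own definitions: a register-register move is assumed to cost 2 and a
   move between a register and memory 4.  */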
73 #ifndef REGISTER_MOVE_COST
74 #define REGISTER_MOVE_COST(x, y) 2
75 #endif
76
77 #ifndef MEMORY_MOVE_COST
78 #define MEMORY_MOVE_COST(x) 4
79 #endif
80 \f
81 /* During reload_as_needed, element N contains a REG rtx for the hard reg
82 into which reg N has been reloaded (perhaps for a previous insn). */
83 static rtx *reg_last_reload_reg;
84
85 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
86 for an output reload that stores into reg N. */
87 static char *reg_has_output_reload;
88
89 /* Indicates which hard regs are reload-registers for an output reload
90 in the current insn. */
91 static HARD_REG_SET reg_is_output_reload;
92
93 /* Element N is the constant value to which pseudo reg N is equivalent,
94 or zero if pseudo reg N is not equivalent to a constant.
95 find_reloads looks at this in order to replace pseudo reg N
96 with the constant it stands for. */
97 rtx *reg_equiv_constant;
98
99 /* Element N is a memory location to which pseudo reg N is equivalent,
100 prior to any register elimination (such as frame pointer to stack
101 pointer). Depending on whether or not it is a valid address, this value
102 is transferred to either reg_equiv_address or reg_equiv_mem. */
103 rtx *reg_equiv_memory_loc;
104
105 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
106 This is used when the address is not valid as a memory address
107 (because its displacement is too big for the machine.) */
108 rtx *reg_equiv_address;
109
110 /* Element N is the memory slot to which pseudo reg N is equivalent,
111 or zero if pseudo reg N is not equivalent to a memory slot. */
112 rtx *reg_equiv_mem;
113
114 /* Widest width in which each pseudo reg is referred to (via subreg). */
115 static int *reg_max_ref_width;
116
117 /* Element N is the insn that initialized reg N from its equivalent
118 constant or memory slot. */
119 static rtx *reg_equiv_init;
120
121 /* During reload_as_needed, element N contains the last pseudo regno
122 reloaded into the Nth reload register. This vector is in parallel
123 with spill_regs. If that pseudo reg occupied more than one register,
124 reg_reloaded_contents points to that pseudo for each spill register in
125 use; all of these must remain set for an inheritance to occur. */
126 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
127
128 /* During reload_as_needed, element N contains the insn for which
129 the Nth reload register was last used. This vector is in parallel
130 with spill_regs, and its contents are significant only when
131 reg_reloaded_contents is significant. */
132 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
133
134 /* Number of spill-regs so far; number of valid elements of spill_regs. */
135 static int n_spills;
136
137 /* In parallel with spill_regs, contains REG rtx's for those regs.
138 Holds the last rtx used for any given reg, or 0 if it has never
139 been used for spilling yet. This rtx is reused, provided it has
140 the proper mode. */
141 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
142
143 /* In parallel with spill_regs, contains nonzero for a spill reg
144 that was stored after the last time it was used.
145 The precise value is the insn generated to do the store. */
146 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
147
148 /* This table is the inverse mapping of spill_regs:
149 indexed by hard reg number,
150 it contains the position of that reg in spill_regs,
151 or -1 for something that is not in spill_regs. */
152 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
153
154 /* This reg set indicates registers that may not be used for retrying global
155 allocation. The registers that may not be used include all spill registers
156 and the frame pointer (if we are using one). */
157 HARD_REG_SET forbidden_regs;
158
159 /* This reg set indicates registers that are not good for spill registers.
160 They will not be used to complete groups of spill registers. This includes
161 all fixed registers, registers that may be eliminated, and, if
162 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
163
164 (spill_reg_order prevents these registers from being used to start a
165 group.) */
166 static HARD_REG_SET bad_spill_regs;
167
168 /* Describes order of use of registers for reloading
169 of spilled pseudo-registers. `spills' is the number of
170 elements that are actually valid; new ones are added at the end. */
171 static short spill_regs[FIRST_PSEUDO_REGISTER];
172
173 /* This reg set indicates those registers that have been used a spill
174 registers. This information is used in reorg.c, to help figure out
175 what registers are live at any point. It is assumed that all spill_regs
176 are dead at every CODE_LABEL. */
177
178 HARD_REG_SET used_spill_regs;
179
180 /* Index of last register assigned as a spill register. We allocate in
181 a round-robin fashion. */
182
183 static int last_spill_reg;
184
185 /* Describes order of preference for putting regs into spill_regs.
186 Contains the numbers of all the hard regs, in order most preferred first.
187 This order is different for each function.
188 It is set up by order_regs_for_reload.
189 Empty elements at the end contain -1. */
190 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
191
192 /* 1 for a hard register that appears explicitly in the rtl
193 (for example, function value registers, special registers
194 used by insns, structure value pointer registers). */
195 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
196
197 /* Indicates if a register was counted against the need for
198 groups. 0 means it can count against max_nongroup instead. */
199 static HARD_REG_SET counted_for_groups;
200
201 /* Indicates if a register was counted against the need for
202 non-groups. 0 means it can become part of a new group.
203 During choose_reload_regs, 1 here means don't use this reg
204 as part of a group, even if it seems to be otherwise ok. */
205 static HARD_REG_SET counted_for_nongroups;
206
207 /* Indexed by pseudo reg number N,
208 says may not delete stores into the real (memory) home of pseudo N.
209 This is set if we already substituted a memory equivalent in some uses,
210 which happens when we have to eliminate the fp from it. */
211 static char *cannot_omit_stores;
212
213 /* Nonzero if indirect addressing is supported on the machine; this means
214 that spilling (REG n) does not require reloading it into a register in
215 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
216 value indicates the level of indirect addressing supported, e.g., two
217 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
218 a hard register. */
219
220 static char spill_indirect_levels;
221
222 /* Nonzero if indirect addressing is supported when the innermost MEM is
223 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
224 which these are valid is the same as spill_indirect_levels, above. */
225
226 char indirect_symref_ok;
227
228 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
229
230 char double_reg_address_ok;
231
232 /* Record the stack slot for each spilled hard register. */
233
234 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
235
236 /* Width allocated so far for that stack slot. */
237
238 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
239
240 /* Indexed by register class and basic block number, nonzero if there is
241 any need for a spill register of that class in that basic block.
242 The pointer is 0 if we did stupid allocation and don't know
243 the structure of basic blocks. */
244
245 char *basic_block_needs[N_REG_CLASSES];
246
247 /* First uid used by insns created by reload in this function.
248 Used in find_equiv_reg. */
249 int reload_first_uid;
250
251 /* Flag set by local-alloc or global-alloc if anything is live in
252 a call-clobbered reg across calls. */
253
254 int caller_save_needed;
255
256 /* The register class to use for a base register when reloading an
257 address. This is normally BASE_REG_CLASS, but it may be different
258 when using SMALL_REGISTER_CLASSES and passing parameters in
259 registers. */
260 enum reg_class reload_address_base_reg_class;
261
262 /* The register class to use for an index register when reloading an
263 address. This is normally INDEX_REG_CLASS, but it may be different
264 when using SMALL_REGISTER_CLASSES and passing parameters in
265 registers. */
266 enum reg_class reload_address_index_reg_class;
267
268 /* Set to 1 while reload_as_needed is operating.
269 Required by some machines to handle any generated moves differently. */
270
271 int reload_in_progress = 0;
272
273 /* These arrays record the insn_code of insns that may be needed to
274 perform input and output reloads of special objects. They provide a
275 place to pass a scratch register. */
276
277 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
278 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
279
280 /* This obstack is used for allocation of rtl during register elimination.
281 The allocated storage can be freed once find_reloads has processed the
282 insn. */
283
284 struct obstack reload_obstack;
285 char *reload_firstobj;
286
287 #define obstack_chunk_alloc xmalloc
288 #define obstack_chunk_free free
289
290 /* List of labels that must never be deleted. */
291 extern rtx forced_labels;
292
293 /* Allocation number table from global register allocation. */
294 extern int *reg_allocno;
295 \f
296 /* This structure is used to record information about register eliminations.
297 Each array entry describes one possible way of eliminating a register
298 in favor of another. If there is more than one way of eliminating a
299 particular register, the most preferred should be specified first. */
300
301 static struct elim_table
302 {
303 int from; /* Register number to be eliminated. */
304 int to; /* Register number used as replacement. */
305 int initial_offset; /* Initial difference between values. */
306 int can_eliminate; /* Non-zero if this elimination can be done. */
307 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
308 insns made by reload. */
309 int offset; /* Current offset between the two regs. */
310 int max_offset; /* Maximum offset between the two regs. */
311 int previous_offset; /* Offset at end of previous insn. */
312 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
313 rtx from_rtx; /* REG rtx for the register to be eliminated.
314 We cannot simply compare the number since
315 we might then spuriously replace a hard
316 register corresponding to a pseudo
317 assigned to the reg to be eliminated. */
318 rtx to_rtx; /* REG rtx for the replacement. */
319 } reg_eliminate[] =
320
321 /* If a set of eliminable registers was specified, define the table from it.
322 Otherwise, default to the normal case of the frame pointer being
323 replaced by the stack pointer. */
324
325 #ifdef ELIMINABLE_REGS
326 ELIMINABLE_REGS;
327 #else
328 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
329 #endif
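/* For illustration only: a target that defines ELIMINABLE_REGS supplies an
   initializer of (from, to) pairs, most preferred first, for instance the
   hypothetical definition

	#define ELIMINABLE_REGS					\
	{{ ARG_POINTER_REGNUM,	 STACK_POINTER_REGNUM },	\
	 { ARG_POINTER_REGNUM,	 FRAME_POINTER_REGNUM },	\
	 { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}

   Only the from and to fields come from the target; the remaining
   elim_table fields are filled in by the code below.  */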
330
331 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
332
333 /* Record the number of pending eliminations that have an offset not equal
334 to their initial offset. If non-zero, we use a new copy of each
335 replacement result in any insns encountered. */
336 static int num_not_at_initial_offset;
337
338 /* Count the number of registers that we may be able to eliminate. */
339 static int num_eliminable;
340
341 /* For each label, we record the offset of each elimination. If we reach
342 a label by more than one path and an offset differs, we cannot do the
343 elimination. This information is indexed by the number of the label.
344 The first table is an array of flags that records whether we have yet
345 encountered a label and the second table is an array of arrays, one
346 entry in the latter array for each elimination. */
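/* For instance, if the frame pointer is being replaced by the stack pointer
   and one path reaches a label with an sp-fp offset of 16 while another
   reaches it with an offset of 24, the offsets disagree and that
   elimination must be abandoned.  */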
347
348 static char *offsets_known_at;
349 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
350
351 /* Number of labels in the current function. */
352
353 static int num_labels;
354
355 struct hard_reg_n_uses { int regno; int uses; };
356 \f
357 static int possible_group_p PROTO((int, int *));
358 static void count_possible_groups PROTO((int *, enum machine_mode *,
359 int *, int));
360 static int modes_equiv_for_class_p PROTO((enum machine_mode,
361 enum machine_mode,
362 enum reg_class));
363 static void spill_failure PROTO((rtx));
364 static int new_spill_reg PROTO((int, int, int *, int *, int,
365 FILE *));
366 static void delete_dead_insn PROTO((rtx));
367 static void alter_reg PROTO((int, int));
368 static void mark_scratch_live PROTO((rtx));
369 static void set_label_offsets PROTO((rtx, rtx, int));
370 static int eliminate_regs_in_insn PROTO((rtx, int));
371 static void mark_not_eliminable PROTO((rtx, rtx));
372 static int spill_hard_reg PROTO((int, int, FILE *, int));
373 static void scan_paradoxical_subregs PROTO((rtx));
374 static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR));
375 static void order_regs_for_reload PROTO((int));
376 static int compare_spill_regs PROTO((const GENERIC_PTR, const GENERIC_PTR));
377 static void reload_as_needed PROTO((rtx, int));
378 static void forget_old_reloads_1 PROTO((rtx, rtx));
379 static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR));
380 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
381 enum machine_mode));
382 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
383 enum machine_mode));
384 static int reload_reg_free_p PROTO((int, int, enum reload_type));
385 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
386 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
387 static int reloads_conflict PROTO((int, int));
388 static int allocate_reload_reg PROTO((int, rtx, int, int));
389 static void choose_reload_regs PROTO((rtx, rtx));
390 static void merge_assigned_reloads PROTO((rtx));
391 static void emit_reload_insns PROTO((rtx));
392 static void delete_output_reload PROTO((rtx, int, rtx));
393 static void inc_for_reload PROTO((rtx, rtx, int));
394 static int constraint_accepts_reg_p PROTO((char *, rtx));
395 static int count_occurrences PROTO((rtx, rtx));
396 \f
397 /* Initialize the reload pass once per compilation. */
398
399 void
400 init_reload ()
401 {
402 register int i;
403
404 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
405 Set spill_indirect_levels to the number of levels such addressing is
406 permitted, zero if it is not permitted at all. */
407
408 register rtx tem
409 = gen_rtx (MEM, Pmode,
410 gen_rtx (PLUS, Pmode,
411 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
412 GEN_INT (4)));
413 spill_indirect_levels = 0;
414
415 while (memory_address_p (QImode, tem))
416 {
417 spill_indirect_levels++;
418 tem = gen_rtx (MEM, Pmode, tem);
419 }
420
421 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
422
423 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
424 indirect_symref_ok = memory_address_p (QImode, tem);
425
426 /* See if reg+reg is a valid (and offsettable) address. */
427
428 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
429 {
430 tem = gen_rtx (PLUS, Pmode,
431 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
432 gen_rtx (REG, Pmode, i));
433 /* This way, we make sure that reg+reg is an offsettable address. */
434 tem = plus_constant (tem, 4);
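/* The address tested below is therefore of the form
   (plus (plus (reg fp) (reg i)) (const_int 4)).  */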
435
436 if (memory_address_p (QImode, tem))
437 {
438 double_reg_address_ok = 1;
439 break;
440 }
441 }
442
443 /* Initialize obstack for our rtl allocation. */
444 gcc_obstack_init (&reload_obstack);
445 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
446
447 /* Decide which register class should be used when reloading
448 addresses. If we are using SMALL_REGISTER_CLASSES, and any
449 parameters are passed in registers, then we do not want to use
450 those registers when reloading an address. Otherwise, if a
451 function argument needs a reload, we may wind up clobbering
452 another argument to the function which was already computed. If
453 we find a subset class which simply avoids those registers, we
454 use it instead. ??? It would be better to only use the
455 restricted class when we actually are loading function arguments,
456 but that is hard to determine. */
457 reload_address_base_reg_class = BASE_REG_CLASS;
458 reload_address_index_reg_class = INDEX_REG_CLASS;
459 #ifdef SMALL_REGISTER_CLASSES
460 if (SMALL_REGISTER_CLASSES)
461 {
462 int regno;
463 HARD_REG_SET base, index;
464 enum reg_class *p;
465
466 COPY_HARD_REG_SET (base, reg_class_contents[BASE_REG_CLASS]);
467 COPY_HARD_REG_SET (index, reg_class_contents[INDEX_REG_CLASS]);
468 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
469 {
470 if (FUNCTION_ARG_REGNO_P (regno))
471 {
472 CLEAR_HARD_REG_BIT (base, regno);
473 CLEAR_HARD_REG_BIT (index, regno);
474 }
475 }
476
477 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[BASE_REG_CLASS],
478 baseok);
479 for (p = reg_class_subclasses[BASE_REG_CLASS];
480 *p != LIM_REG_CLASSES;
481 p++)
482 {
483 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[*p], usebase);
484 continue;
485 usebase:
486 reload_address_base_reg_class = *p;
487 break;
488 }
489 baseok:;
490
491 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[INDEX_REG_CLASS],
492 indexok);
493 for (p = reg_class_subclasses[INDEX_REG_CLASS];
494 *p != LIM_REG_CLASSES;
495 p++)
496 {
497 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[*p], useindex);
498 continue;
499 useindex:
500 reload_address_index_reg_class = *p;
501 break;
502 }
503 indexok:;
504 }
505 #endif /* SMALL_REGISTER_CLASSES */
506 }
507
508 /* Main entry point for the reload pass.
509
510 FIRST is the first insn of the function being compiled.
511
512 GLOBAL nonzero means we were called from global_alloc
513 and should attempt to reallocate any pseudoregs that we
514 displace from hard regs we will use for reloads.
515 If GLOBAL is zero, we do not have enough information to do that,
516 so any pseudo reg that is spilled must go to the stack.
517
518 DUMPFILE is the global-reg debugging dump file stream, or 0.
519 If it is nonzero, messages are written to it to describe
520 which registers are seized as reload regs, which pseudo regs
521 are spilled from them, and where the pseudo regs are reallocated to.
522
523 Return value is nonzero if reload failed
524 and we must not do any more for this function. */
525
526 int
527 reload (first, global, dumpfile)
528 rtx first;
529 int global;
530 FILE *dumpfile;
531 {
532 register int class;
533 register int i, j, k;
534 register rtx insn;
535 register struct elim_table *ep;
536
537 int something_changed;
538 int something_needs_reloads;
539 int something_needs_elimination;
540 int new_basic_block_needs;
541 enum reg_class caller_save_spill_class = NO_REGS;
542 int caller_save_group_size = 1;
543
544 /* Nonzero means we couldn't get enough spill regs. */
545 int failure = 0;
546
547 /* The basic block number currently being processed for INSN. */
548 int this_block;
549
550 /* Make sure even insns with volatile mem refs are recognizable. */
551 init_recog ();
552
553 /* Enable find_equiv_reg to distinguish insns made by reload. */
554 reload_first_uid = get_max_uid ();
555
556 for (i = 0; i < N_REG_CLASSES; i++)
557 basic_block_needs[i] = 0;
558
559 #ifdef SECONDARY_MEMORY_NEEDED
560 /* Initialize the secondary memory table. */
561 clear_secondary_mem ();
562 #endif
563
564 /* Remember which hard regs appear explicitly
565 before we merge into `regs_ever_live' the ones in which
566 pseudo regs have been allocated. */
567 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
568
569 /* We don't have a stack slot for any spill reg yet. */
570 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
571 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
572
573 /* Initialize the save area information for caller-save, in case some
574 are needed. */
575 init_save_areas ();
576
577 /* Compute which hard registers are now in use
578 as homes for pseudo registers.
579 This is done here rather than (eg) in global_alloc
580 because this point is reached even if not optimizing. */
581
582 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
583 mark_home_live (i);
584
585 for (i = 0; i < scratch_list_length; i++)
586 if (scratch_list[i])
587 mark_scratch_live (scratch_list[i]);
588
589 /* Make sure that the last insn in the chain
590 is not something that needs reloading. */
591 emit_note (NULL_PTR, NOTE_INSN_DELETED);
592
593 /* Find all the pseudo registers that didn't get hard regs
594 but do have known equivalent constants or memory slots.
595 These include parameters (known equivalent to parameter slots)
596 and cse'd or loop-moved constant memory addresses.
597
598 Record constant equivalents in reg_equiv_constant
599 so they will be substituted by find_reloads.
600 Record memory equivalents in reg_equiv_memory_loc so they can
601 be substituted eventually by altering the REG-rtx's. */
602
603 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
604 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
605 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
606 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
607 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
608 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
609 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
610 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
611 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
612 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
613 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
614 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
615 cannot_omit_stores = (char *) alloca (max_regno);
616 bzero (cannot_omit_stores, max_regno);
617
618 #ifdef SMALL_REGISTER_CLASSES
619 if (SMALL_REGISTER_CLASSES)
620 CLEAR_HARD_REG_SET (forbidden_regs);
621 #endif
622
623 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
624 Also find all paradoxical subregs and find largest such for each pseudo.
625 On machines with small register classes, record hard registers that
626 are used for user variables. These can never be used for spills.
627 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
628 caller-saved registers must be marked live. */
629
630 for (insn = first; insn; insn = NEXT_INSN (insn))
631 {
632 rtx set = single_set (insn);
633
634 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
635 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
636 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
637 if (! call_used_regs[i])
638 regs_ever_live[i] = 1;
639
640 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
641 {
642 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
643 if (note
644 #ifdef LEGITIMATE_PIC_OPERAND_P
645 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
646 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
647 #endif
648 )
649 {
650 rtx x = XEXP (note, 0);
651 i = REGNO (SET_DEST (set));
652 if (i > LAST_VIRTUAL_REGISTER)
653 {
654 if (GET_CODE (x) == MEM)
655 reg_equiv_memory_loc[i] = x;
656 else if (CONSTANT_P (x))
657 {
658 if (LEGITIMATE_CONSTANT_P (x))
659 reg_equiv_constant[i] = x;
660 else
661 reg_equiv_memory_loc[i]
662 = force_const_mem (GET_MODE (SET_DEST (set)), x);
663 }
664 else
665 continue;
666
667 /* If this register is being made equivalent to a MEM
668 and the MEM is not SET_SRC, the equivalencing insn
669 is one with the MEM as a SET_DEST and it occurs later.
670 So don't mark this insn now. */
671 if (GET_CODE (x) != MEM
672 || rtx_equal_p (SET_SRC (set), x))
673 reg_equiv_init[i] = insn;
674 }
675 }
676 }
677
678 /* If this insn is setting a MEM from a register equivalent to it,
679 this is the equivalencing insn. */
680 else if (set && GET_CODE (SET_DEST (set)) == MEM
681 && GET_CODE (SET_SRC (set)) == REG
682 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
683 && rtx_equal_p (SET_DEST (set),
684 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
685 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
686
687 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
688 scan_paradoxical_subregs (PATTERN (insn));
689 }
690
691 /* Does this function require a frame pointer? */
692
693 frame_pointer_needed = (! flag_omit_frame_pointer
694 #ifdef EXIT_IGNORE_STACK
695 /* ?? If EXIT_IGNORE_STACK is set, we will not save
696 and restore sp for alloca. So we can't eliminate
697 the frame pointer in that case. At some point,
698 we should improve this by emitting the
699 sp-adjusting insns for this case. */
700 || (current_function_calls_alloca
701 && EXIT_IGNORE_STACK)
702 #endif
703 || FRAME_POINTER_REQUIRED);
704
705 num_eliminable = 0;
706
707 /* Initialize the table of registers to eliminate. The way we do this
708 depends on how the eliminable registers were defined. */
709 #ifdef ELIMINABLE_REGS
710 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
711 {
712 ep->can_eliminate = ep->can_eliminate_previous
713 = (CAN_ELIMINATE (ep->from, ep->to)
714 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
715 }
716 #else
717 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
718 = ! frame_pointer_needed;
719 #endif
720
721 /* Count the number of eliminable registers and build the FROM and TO
722 REG rtx's. Note that code in gen_rtx will cause, e.g.,
723 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
724 We depend on this. */
725 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
726 {
727 num_eliminable += ep->can_eliminate;
728 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
729 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
730 }
731
732 num_labels = max_label_num () - get_first_label_num ();
733
734 /* Allocate the tables used to store offset information at labels. */
735 offsets_known_at = (char *) alloca (num_labels);
736 offsets_at
737 = (int (*)[NUM_ELIMINABLE_REGS])
738 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
739
740 offsets_known_at -= get_first_label_num ();
741 offsets_at -= get_first_label_num ();
742
743 /* Alter each pseudo-reg rtx to contain its hard reg number.
744 Assign stack slots to the pseudos that lack hard regs or equivalents.
745 Do not touch virtual registers. */
746
747 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
748 alter_reg (i, -1);
749
750 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
751 because the stack size may be a part of the offset computation for
752 register elimination. */
753 assign_stack_local (BLKmode, 0, 0);
754
755 /* If we have some registers we think can be eliminated, scan all insns to
756 see if there is an insn that sets one of these registers to something
757 other than itself plus a constant. If so, the register cannot be
758 eliminated. Doing this scan here eliminates an extra pass through the
759 main reload loop in the most common case where register elimination
760 cannot be done. */
761 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
762 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
763 || GET_CODE (insn) == CALL_INSN)
764 note_stores (PATTERN (insn), mark_not_eliminable);
765
766 #ifndef REGISTER_CONSTRAINTS
767 /* If all the pseudo regs have hard regs,
768 except for those that are never referenced,
769 we know that no reloads are needed. */
770 /* But that is not true if there are register constraints, since
771 in that case some pseudos might be in the wrong kind of hard reg. */
772
773 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
774 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
775 break;
776
777 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
778 return;
779 #endif
780
781 /* Compute the order of preference for hard registers to spill.
782 Store them by decreasing preference in potential_reload_regs. */
783
784 order_regs_for_reload (global);
785
786 /* So far, no hard regs have been spilled. */
787 n_spills = 0;
788 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
789 spill_reg_order[i] = -1;
790
791 /* Initialize to -1, which means take the first spill register. */
792 last_spill_reg = -1;
793
794 /* On most machines, we can't use any register explicitly used in the
795 rtl as a spill register. But on some, we have to. Those will have
796 taken care to keep the life of hard regs as short as possible. */
797
798 #ifdef SMALL_REGISTER_CLASSES
799 if (! SMALL_REGISTER_CLASSES)
800 #endif
801 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
802
803 /* Spill any hard regs that we know we can't eliminate. */
804 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
805 if (! ep->can_eliminate)
806 spill_hard_reg (ep->from, global, dumpfile, 1);
807
808 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
809 if (frame_pointer_needed)
810 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
811 #endif
812
813 if (global)
814 for (i = 0; i < N_REG_CLASSES; i++)
815 {
816 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
817 bzero (basic_block_needs[i], n_basic_blocks);
818 }
819
820 /* From now on, we need to emit any moves without making new pseudos. */
821 reload_in_progress = 1;
822
823 /* This loop scans the entire function each go-round
824 and repeats until one repetition spills no additional hard regs. */
825
826 /* This flag is set when a pseudo reg is spilled,
827 to require another pass. Note that getting an additional reload
828 reg does not necessarily imply any pseudo reg was spilled;
829 sometimes we find a reload reg that no pseudo reg was allocated in. */
830 something_changed = 1;
831 /* This flag is set if there are any insns that require reloading. */
832 something_needs_reloads = 0;
833 /* This flag is set if there are any insns that require register
834 eliminations. */
835 something_needs_elimination = 0;
836 while (something_changed)
837 {
838 rtx after_call = 0;
839
840 /* For each class, number of reload regs needed in that class.
841 This is the maximum over all insns of the needs in that class
842 of the individual insn. */
843 int max_needs[N_REG_CLASSES];
844 /* For each class, size of group of consecutive regs
845 that is needed for the reloads of this class. */
846 int group_size[N_REG_CLASSES];
847 /* For each class, max number of consecutive groups needed.
848 (Each group contains group_size[CLASS] consecutive registers.) */
849 int max_groups[N_REG_CLASSES];
850 /* For each class, max number needed of regs that don't belong
851 to any of the groups. */
852 int max_nongroups[N_REG_CLASSES];
853 /* For each class, the machine mode which requires consecutive
854 groups of regs of that class.
855 If two different modes ever require groups of one class,
856 they must be the same size and equally restrictive for that class,
857 otherwise we can't handle the complexity. */
858 enum machine_mode group_mode[N_REG_CLASSES];
859 /* Record the insn where each maximum need is first found. */
860 rtx max_needs_insn[N_REG_CLASSES];
861 rtx max_groups_insn[N_REG_CLASSES];
862 rtx max_nongroups_insn[N_REG_CLASSES];
863 rtx x;
864 HOST_WIDE_INT starting_frame_size = get_frame_size ();
865 int previous_frame_pointer_needed = frame_pointer_needed;
866 static char *reg_class_names[] = REG_CLASS_NAMES;
867
868 something_changed = 0;
869 bzero ((char *) max_needs, sizeof max_needs);
870 bzero ((char *) max_groups, sizeof max_groups);
871 bzero ((char *) max_nongroups, sizeof max_nongroups);
872 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
873 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
874 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
875 bzero ((char *) group_size, sizeof group_size);
876 for (i = 0; i < N_REG_CLASSES; i++)
877 group_mode[i] = VOIDmode;
878
879 /* Keep track of which basic blocks are needing the reloads. */
880 this_block = 0;
881
882 /* Remember whether any element of basic_block_needs
883 changes from 0 to 1 in this pass. */
884 new_basic_block_needs = 0;
885
886 /* Reset all offsets on eliminable registers to their initial values. */
887 #ifdef ELIMINABLE_REGS
888 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
889 {
890 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
891 ep->previous_offset = ep->offset
892 = ep->max_offset = ep->initial_offset;
893 }
894 #else
895 #ifdef INITIAL_FRAME_POINTER_OFFSET
896 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
897 #else
898 if (!FRAME_POINTER_REQUIRED)
899 abort ();
900 reg_eliminate[0].initial_offset = 0;
901 #endif
902 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
903 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
904 #endif
905
906 num_not_at_initial_offset = 0;
907
908 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
909
910 /* Set a known offset for each forced label to be at the initial offset
911 of each elimination. We do this because we assume that all
912 computed jumps occur from a location where each elimination is
913 at its initial offset. */
914
915 for (x = forced_labels; x; x = XEXP (x, 1))
916 if (XEXP (x, 0))
917 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
918
919 /* For each pseudo register that has an equivalent location defined,
920 try to eliminate any eliminable registers (such as the frame pointer)
921 assuming initial offsets for the replacement register, which
922 is the normal case.
923
924 If the resulting location is directly addressable, substitute
925 the MEM we just got directly for the old REG.
926
927 If it is not addressable but is a constant or the sum of a hard reg
928 and constant, it is probably not addressable because the constant is
928 out of range; in that case, record the address. We will generate
930 hairy code to compute the address in a register each time it is
931 needed. Similarly if it is a hard register, but one that is not
932 valid as an address register.
933
934 If the location is not addressable, but does not have one of the
935 above forms, assign a stack slot. We have to do this to avoid the
936 potential of producing lots of reloads if, e.g., a location involves
937 a pseudo that didn't get a hard register and has an equivalent memory
938 location that also involves a pseudo that didn't get a hard register.
939
940 Perhaps at some point we will improve reload_when_needed handling
941 so this problem goes away. But that's very hairy. */
942
943 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
944 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
945 {
946 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
947
948 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
949 XEXP (x, 0)))
950 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
951 else if (CONSTANT_P (XEXP (x, 0))
952 || (GET_CODE (XEXP (x, 0)) == REG
953 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
954 || (GET_CODE (XEXP (x, 0)) == PLUS
955 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
956 && (REGNO (XEXP (XEXP (x, 0), 0))
957 < FIRST_PSEUDO_REGISTER)
958 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
959 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
960 else
961 {
962 /* Make a new stack slot. Then indicate that something
963 changed so we go back and recompute offsets for
964 eliminable registers because the allocation of memory
965 below might change some offset. reg_equiv_{mem,address}
966 will be set up for this pseudo on the next pass around
967 the loop. */
968 reg_equiv_memory_loc[i] = 0;
969 reg_equiv_init[i] = 0;
970 alter_reg (i, -1);
971 something_changed = 1;
972 }
973 }
974
975 /* If we allocated another pseudo to the stack, redo elimination
976 bookkeeping. */
977 if (something_changed)
978 continue;
979
980 /* If caller-saves needs a group, initialize the group to include
981 the size and mode required for caller-saves. */
982
983 if (caller_save_group_size > 1)
984 {
985 group_mode[(int) caller_save_spill_class] = Pmode;
986 group_size[(int) caller_save_spill_class] = caller_save_group_size;
987 }
988
989 /* Compute the most additional registers needed by any instruction.
990 Collect information separately for each class of regs. */
991
992 for (insn = first; insn; insn = NEXT_INSN (insn))
993 {
994 if (global && this_block + 1 < n_basic_blocks
995 && insn == basic_block_head[this_block+1])
996 ++this_block;
997
998 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
999 might include REG_LABEL), we need to see what effects this
1000 has on the known offsets at labels. */
1001
1002 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1003 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1004 && REG_NOTES (insn) != 0))
1005 set_label_offsets (insn, insn, 0);
1006
1007 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1008 {
1009 /* Nonzero means don't use a reload reg that overlaps
1010 the place where a function value can be returned. */
1011 rtx avoid_return_reg = 0;
1012
1013 rtx old_body = PATTERN (insn);
1014 int old_code = INSN_CODE (insn);
1015 rtx old_notes = REG_NOTES (insn);
1016 int did_elimination = 0;
1017
1018 /* To compute the number of reload registers of each class
1019 needed for an insn, we must simulate what choose_reload_regs
1020 can do. We do this by splitting an insn into an "input" and
1021 an "output" part. RELOAD_OTHER reloads are used in both.
1022 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
1023 which must be live over the entire input section of reloads,
1024 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1025 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
1026 inputs.
1027
1028 The registers needed for output are RELOAD_OTHER and
1029 RELOAD_FOR_OUTPUT, which are live for the entire output
1030 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
1031 reloads for each operand.
1032
1033 The total number of registers needed is the maximum of the
1034 inputs and outputs. */
1035
1036 struct needs
1037 {
1038 /* [0] is normal, [1] is nongroup. */
1039 int regs[2][N_REG_CLASSES];
1040 int groups[N_REG_CLASSES];
1041 };
1042
1043 /* Each `struct needs' corresponds to one RELOAD_... type. */
1044 struct {
1045 struct needs other;
1046 struct needs input;
1047 struct needs output;
1048 struct needs insn;
1049 struct needs other_addr;
1050 struct needs op_addr;
1051 struct needs op_addr_reload;
1052 struct needs in_addr[MAX_RECOG_OPERANDS];
1053 struct needs in_addr_addr[MAX_RECOG_OPERANDS];
1054 struct needs out_addr[MAX_RECOG_OPERANDS];
1055 struct needs out_addr_addr[MAX_RECOG_OPERANDS];
1056 } insn_needs;
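/* So, for example, insn_needs.in_addr[2].regs[1][(int) CLASS] counts the
   `nongroup' spill regs of class CLASS needed by RELOAD_FOR_INPUT_ADDRESS
   reloads of operand 2, and the .groups arrays count groups analogously.  */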
1057
1058 /* If needed, eliminate any eliminable registers. */
1059 if (num_eliminable)
1060 did_elimination = eliminate_regs_in_insn (insn, 0);
1061
1062 #ifdef SMALL_REGISTER_CLASSES
1063 /* Set avoid_return_reg if this is an insn
1064 that might use the value of a function call. */
1065 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
1066 {
1067 if (GET_CODE (PATTERN (insn)) == SET)
1068 after_call = SET_DEST (PATTERN (insn));
1069 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1070 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1071 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1072 else
1073 after_call = 0;
1074 }
1075 else if (SMALL_REGISTER_CLASSES
1076 && after_call != 0
1077 && !(GET_CODE (PATTERN (insn)) == SET
1078 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1079 {
1080 if (reg_referenced_p (after_call, PATTERN (insn)))
1081 avoid_return_reg = after_call;
1082 after_call = 0;
1083 }
1084 #endif /* SMALL_REGISTER_CLASSES */
1085
1086 /* Analyze the instruction. */
1087 find_reloads (insn, 0, spill_indirect_levels, global,
1088 spill_reg_order);
1089
1090 /* Remember for later shortcuts which insns had any reloads or
1091 register eliminations.
1092
1093 One might think that it would be worthwhile to mark insns
1094 that need register replacements but not reloads, but this is
1095 not safe because find_reloads may do some manipulation of
1096 the insn (such as swapping commutative operands), which would
1097 be lost when we restore the old pattern after register
1098 replacement. So the actions of find_reloads must be redone in
1099 subsequent passes or in reload_as_needed.
1100
1101 However, it is safe to mark insns that need reloads
1102 but not register replacement. */
1103
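/* Record the result in the insn's machine-mode field: QImode marks an
   insn that needed register elimination, HImode one that needs reloads,
   VOIDmode one that needs neither; an insn already marked DImode keeps
   that mark.  */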
1104 PUT_MODE (insn, (did_elimination ? QImode
1105 : n_reloads ? HImode
1106 : GET_MODE (insn) == DImode ? DImode
1107 : VOIDmode));
1108
1109 /* Discard any register replacements done. */
1110 if (did_elimination)
1111 {
1112 obstack_free (&reload_obstack, reload_firstobj);
1113 PATTERN (insn) = old_body;
1114 INSN_CODE (insn) = old_code;
1115 REG_NOTES (insn) = old_notes;
1116 something_needs_elimination = 1;
1117 }
1118
1119 /* If this insn has no reloads, we need not do anything except
1120 in the case of a CALL_INSN when we have caller-saves and
1121 caller-save needs reloads. */
1122
1123 if (n_reloads == 0
1124 && ! (GET_CODE (insn) == CALL_INSN
1125 && caller_save_spill_class != NO_REGS))
1126 continue;
1127
1128 something_needs_reloads = 1;
1129 bzero ((char *) &insn_needs, sizeof insn_needs);
1130
1131 /* Count each reload once in every class
1132 containing the reload's own class. */
1133
1134 for (i = 0; i < n_reloads; i++)
1135 {
1136 register enum reg_class *p;
1137 enum reg_class class = reload_reg_class[i];
1138 int size;
1139 enum machine_mode mode;
1140 int nongroup_need;
1141 struct needs *this_needs;
1142
1143 /* Don't count the dummy reloads, for which one of the
1144 regs mentioned in the insn can be used for reloading.
1145 Don't count optional reloads.
1146 Don't count reloads that got combined with others. */
1147 if (reload_reg_rtx[i] != 0
1148 || reload_optional[i] != 0
1149 || (reload_out[i] == 0 && reload_in[i] == 0
1150 && ! reload_secondary_p[i]))
1151 continue;
1152
1153 /* Show that a reload register of this class is needed
1154 in this basic block. We do not use insn_needs and
1155 insn_groups because they are overly conservative for
1156 this purpose. */
1157 if (global && ! basic_block_needs[(int) class][this_block])
1158 {
1159 basic_block_needs[(int) class][this_block] = 1;
1160 new_basic_block_needs = 1;
1161 }
1162
1163
1164 mode = reload_inmode[i];
1165 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1166 mode = reload_outmode[i];
1167 size = CLASS_MAX_NREGS (class, mode);
1168
1169 /* If this class doesn't want a group, determine if we have
1170 a nongroup need or a regular need. We have a nongroup
1171 need if this reload conflicts with a group reload whose
1172 class intersects with this reload's class. */
1173
1174 nongroup_need = 0;
1175 if (size == 1)
1176 for (j = 0; j < n_reloads; j++)
1177 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1178 (GET_MODE_SIZE (reload_outmode[j])
1179 > GET_MODE_SIZE (reload_inmode[j]))
1180 ? reload_outmode[j]
1181 : reload_inmode[j])
1182 > 1)
1183 && (!reload_optional[j])
1184 && (reload_in[j] != 0 || reload_out[j] != 0
1185 || reload_secondary_p[j])
1186 && reloads_conflict (i, j)
1187 && reg_classes_intersect_p (class,
1188 reload_reg_class[j]))
1189 {
1190 nongroup_need = 1;
1191 break;
1192 }
1193
1194 /* Decide which time-of-use to count this reload for. */
1195 switch (reload_when_needed[i])
1196 {
1197 case RELOAD_OTHER:
1198 this_needs = &insn_needs.other;
1199 break;
1200 case RELOAD_FOR_INPUT:
1201 this_needs = &insn_needs.input;
1202 break;
1203 case RELOAD_FOR_OUTPUT:
1204 this_needs = &insn_needs.output;
1205 break;
1206 case RELOAD_FOR_INSN:
1207 this_needs = &insn_needs.insn;
1208 break;
1209 case RELOAD_FOR_OTHER_ADDRESS:
1210 this_needs = &insn_needs.other_addr;
1211 break;
1212 case RELOAD_FOR_INPUT_ADDRESS:
1213 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1214 break;
1215 case RELOAD_FOR_INPADDR_ADDRESS:
1216 this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
1217 break;
1218 case RELOAD_FOR_OUTPUT_ADDRESS:
1219 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1220 break;
1221 case RELOAD_FOR_OUTADDR_ADDRESS:
1222 this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
1223 break;
1224 case RELOAD_FOR_OPERAND_ADDRESS:
1225 this_needs = &insn_needs.op_addr;
1226 break;
1227 case RELOAD_FOR_OPADDR_ADDR:
1228 this_needs = &insn_needs.op_addr_reload;
1229 break;
1230 }
1231
1232 if (size > 1)
1233 {
1234 enum machine_mode other_mode, allocate_mode;
1235
1236 /* Count number of groups needed separately from
1237 number of individual regs needed. */
1238 this_needs->groups[(int) class]++;
1239 p = reg_class_superclasses[(int) class];
1240 while (*p != LIM_REG_CLASSES)
1241 this_needs->groups[(int) *p++]++;
1242
1243 /* Record size and mode of a group of this class. */
1244 /* If more than one size group is needed,
1245 make all groups the largest needed size. */
1246 if (group_size[(int) class] < size)
1247 {
1248 other_mode = group_mode[(int) class];
1249 allocate_mode = mode;
1250
1251 group_size[(int) class] = size;
1252 group_mode[(int) class] = mode;
1253 }
1254 else
1255 {
1256 other_mode = mode;
1257 allocate_mode = group_mode[(int) class];
1258 }
1259
1260 /* Crash if two dissimilar machine modes both need
1261 groups of consecutive regs of the same class. */
1262
1263 if (other_mode != VOIDmode && other_mode != allocate_mode
1264 && ! modes_equiv_for_class_p (allocate_mode,
1265 other_mode, class))
1266 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1267 insn);
1268 }
1269 else if (size == 1)
1270 {
1271 this_needs->regs[nongroup_need][(int) class] += 1;
1272 p = reg_class_superclasses[(int) class];
1273 while (*p != LIM_REG_CLASSES)
1274 this_needs->regs[nongroup_need][(int) *p++] += 1;
1275 }
1276 else
1277 abort ();
1278 }
1279
1280 /* All reloads have been counted for this insn;
1281 now merge the various times of use.
1282 This sets insn_needs, etc., to the maximum total number
1283 of registers needed at any point in this insn. */
1284
1285 for (i = 0; i < N_REG_CLASSES; i++)
1286 {
1287 int in_max, out_max;
1288
1289 /* Compute normal and nongroup needs. */
1290 for (j = 0; j <= 1; j++)
1291 {
1292 for (in_max = 0, out_max = 0, k = 0;
1293 k < reload_n_operands; k++)
1294 {
1295 in_max
1296 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1297 in_max
1298 = MAX (in_max,
1299 insn_needs.in_addr_addr[k].regs[j][i]);
1300 out_max
1301 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1302 out_max
1303 = MAX (out_max,
1304 insn_needs.out_addr_addr[k].regs[j][i]);
1305 }
1306
1307 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1308 and operand addresses but not things used to reload
1309 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1310 don't conflict with things needed to reload inputs or
1311 outputs. */
1312
1313 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1314 insn_needs.op_addr_reload.regs[j][i]),
1315 in_max);
1316
1317 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1318
1319 insn_needs.input.regs[j][i]
1320 = MAX (insn_needs.input.regs[j][i]
1321 + insn_needs.op_addr.regs[j][i]
1322 + insn_needs.insn.regs[j][i],
1323 in_max + insn_needs.input.regs[j][i]);
1324
1325 insn_needs.output.regs[j][i] += out_max;
1326 insn_needs.other.regs[j][i]
1327 += MAX (MAX (insn_needs.input.regs[j][i],
1328 insn_needs.output.regs[j][i]),
1329 insn_needs.other_addr.regs[j][i]);
1330
1331 }
1332
1333 /* Now compute group needs. */
1334 for (in_max = 0, out_max = 0, j = 0;
1335 j < reload_n_operands; j++)
1336 {
1337 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1338 in_max = MAX (in_max,
1339 insn_needs.in_addr_addr[j].groups[i]);
1340 out_max
1341 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1342 out_max
1343 = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
1344 }
1345
1346 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1347 insn_needs.op_addr_reload.groups[i]),
1348 in_max);
1349 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1350
1351 insn_needs.input.groups[i]
1352 = MAX (insn_needs.input.groups[i]
1353 + insn_needs.op_addr.groups[i]
1354 + insn_needs.insn.groups[i],
1355 in_max + insn_needs.input.groups[i]);
1356
1357 insn_needs.output.groups[i] += out_max;
1358 insn_needs.other.groups[i]
1359 += MAX (MAX (insn_needs.input.groups[i],
1360 insn_needs.output.groups[i]),
1361 insn_needs.other_addr.groups[i]);
1362 }
1363
1364 /* If this is a CALL_INSN and caller-saves will need
1365 a spill register, act as if the spill register is
1366 needed for this insn. However, the spill register
1367 can be used by any reload of this insn, so we only
1368 need do something if no need for that class has
1369 been recorded.
1370
1371 The assumption that every CALL_INSN will trigger a
1372 caller-save is highly conservative; however, the number
1373 of cases where caller-saves will need a spill register but
1374 a block containing a CALL_INSN won't need a spill register
1375 of that class should be quite rare.
1376
1377 If a group is needed, the size and mode of the group will
1378 have been set up at the beginning of this loop. */
1379
1380 if (GET_CODE (insn) == CALL_INSN
1381 && caller_save_spill_class != NO_REGS)
1382 {
1383 /* See if this register would conflict with any reload
1384 that needs a group. */
1385 int nongroup_need = 0;
1386 int *caller_save_needs;
1387
1388 for (j = 0; j < n_reloads; j++)
1389 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1390 (GET_MODE_SIZE (reload_outmode[j])
1391 > GET_MODE_SIZE (reload_inmode[j]))
1392 ? reload_outmode[j]
1393 : reload_inmode[j])
1394 > 1)
1395 && reg_classes_intersect_p (caller_save_spill_class,
1396 reload_reg_class[j]))
1397 {
1398 nongroup_need = 1;
1399 break;
1400 }
1401
1402 caller_save_needs
1403 = (caller_save_group_size > 1
1404 ? insn_needs.other.groups
1405 : insn_needs.other.regs[nongroup_need]);
1406
1407 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1408 {
1409 register enum reg_class *p
1410 = reg_class_superclasses[(int) caller_save_spill_class];
1411
1412 caller_save_needs[(int) caller_save_spill_class]++;
1413
1414 while (*p != LIM_REG_CLASSES)
1415 caller_save_needs[(int) *p++] += 1;
1416 }
1417
1418 /* Show that this basic block will need a register of
1419 this class. */
1420
1421 if (global
1422 && ! (basic_block_needs[(int) caller_save_spill_class]
1423 [this_block]))
1424 {
1425 basic_block_needs[(int) caller_save_spill_class]
1426 [this_block] = 1;
1427 new_basic_block_needs = 1;
1428 }
1429 }
1430
1431 #ifdef SMALL_REGISTER_CLASSES
1432 /* If this insn stores the value of a function call,
1433 and that value is in a register that has been spilled,
1434 and if the insn needs a reload in a class
1435 that might use that register as the reload register,
1436 then add an extra need in that class.
1437 This makes sure we have a register available that does
1438 not overlap the return value. */
1439
1440 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
1441 {
1442 int regno = REGNO (avoid_return_reg);
1443 int nregs
1444 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1445 int r;
1446 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1447
1448 /* First compute the "basic needs", which counts a
1449 need only in the smallest class in which it
1450 is required. */
1451
1452 bcopy ((char *) insn_needs.other.regs[0],
1453 (char *) basic_needs, sizeof basic_needs);
1454 bcopy ((char *) insn_needs.other.groups,
1455 (char *) basic_groups, sizeof basic_groups);
1456
1457 for (i = 0; i < N_REG_CLASSES; i++)
1458 {
1459 enum reg_class *p;
1460
1461 if (basic_needs[i] >= 0)
1462 for (p = reg_class_superclasses[i];
1463 *p != LIM_REG_CLASSES; p++)
1464 basic_needs[(int) *p] -= basic_needs[i];
1465
1466 if (basic_groups[i] >= 0)
1467 for (p = reg_class_superclasses[i];
1468 *p != LIM_REG_CLASSES; p++)
1469 basic_groups[(int) *p] -= basic_groups[i];
1470 }
1471
1472 /* Now count extra regs if there might be a conflict with
1473 the return value register. */
1474
1475 for (r = regno; r < regno + nregs; r++)
1476 if (spill_reg_order[r] >= 0)
1477 for (i = 0; i < N_REG_CLASSES; i++)
1478 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1479 {
1480 if (basic_needs[i] > 0)
1481 {
1482 enum reg_class *p;
1483
1484 insn_needs.other.regs[0][i]++;
1485 p = reg_class_superclasses[i];
1486 while (*p != LIM_REG_CLASSES)
1487 insn_needs.other.regs[0][(int) *p++]++;
1488 }
1489 if (basic_groups[i] > 0)
1490 {
1491 enum reg_class *p;
1492
1493 insn_needs.other.groups[i]++;
1494 p = reg_class_superclasses[i];
1495 while (*p != LIM_REG_CLASSES)
1496 insn_needs.other.groups[(int) *p++]++;
1497 }
1498 }
1499 }
1500 #endif /* SMALL_REGISTER_CLASSES */
1501
1502 /* For each class, collect maximum need of any insn. */
1503
1504 for (i = 0; i < N_REG_CLASSES; i++)
1505 {
1506 if (max_needs[i] < insn_needs.other.regs[0][i])
1507 {
1508 max_needs[i] = insn_needs.other.regs[0][i];
1509 max_needs_insn[i] = insn;
1510 }
1511 if (max_groups[i] < insn_needs.other.groups[i])
1512 {
1513 max_groups[i] = insn_needs.other.groups[i];
1514 max_groups_insn[i] = insn;
1515 }
1516 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1517 {
1518 max_nongroups[i] = insn_needs.other.regs[1][i];
1519 max_nongroups_insn[i] = insn;
1520 }
1521 }
1522 }
1523 /* Note that there is a continue statement above. */
1524 }
1525
1526 /* If we allocated any new memory locations, make another pass
1527 since it might have changed elimination offsets. */
1528 if (starting_frame_size != get_frame_size ())
1529 something_changed = 1;
1530
1531 if (dumpfile)
1532 for (i = 0; i < N_REG_CLASSES; i++)
1533 {
1534 if (max_needs[i] > 0)
1535 fprintf (dumpfile,
1536 ";; Need %d reg%s of class %s (for insn %d).\n",
1537 max_needs[i], max_needs[i] == 1 ? "" : "s",
1538 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1539 if (max_nongroups[i] > 0)
1540 fprintf (dumpfile,
1541 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1542 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1543 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1544 if (max_groups[i] > 0)
1545 fprintf (dumpfile,
1546 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1547 max_groups[i], max_groups[i] == 1 ? "" : "s",
1548 mode_name[(int) group_mode[i]],
1549 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1550 }
1551
1552 /* If we have caller-saves, set up the save areas and see if caller-save
1553 will need a spill register. */
1554
1555 if (caller_save_needed)
1556 {
1557 /* Set the offsets for setup_save_areas. */
1558 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
1559 ep++)
1560 ep->previous_offset = ep->max_offset;
1561
1562 if ( ! setup_save_areas (&something_changed)
1563 && caller_save_spill_class == NO_REGS)
1564 {
1565 /* The class we will need depends on whether the machine
1566 supports the sum of two registers for an address; see
1567 find_address_reloads for details. */
1568
1569 caller_save_spill_class
1570 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1571 caller_save_group_size
1572 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1573 something_changed = 1;
1574 }
1575 }
1576
1577 /* See if anything that happened changes which eliminations are valid.
1578 For example, on the Sparc, whether or not the frame pointer can
1579 be eliminated can depend on what registers have been used. We need
1580 not check some conditions again (such as flag_omit_frame_pointer)
1581 since they can't have changed. */
1582
1583 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1584 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1585 #ifdef ELIMINABLE_REGS
1586 || ! CAN_ELIMINATE (ep->from, ep->to)
1587 #endif
1588 )
1589 ep->can_eliminate = 0;
1590
1591 /* Look for the case where we have discovered that we can't replace
1592 register A with register B and that means that we will now be
1593 trying to replace register A with register C. This means we can
1594 no longer replace register C with register B and we need to disable
1595 such an elimination, if it exists. This occurs often with A == ap,
1596 B == sp, and C == fp. */
1597
1598 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1599 {
1600 struct elim_table *op;
1601 register int new_to = -1;
1602
1603 if (! ep->can_eliminate && ep->can_eliminate_previous)
1604 {
1605 /* Find the current elimination for ep->from, if there is a
1606 new one. */
1607 for (op = reg_eliminate;
1608 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1609 if (op->from == ep->from && op->can_eliminate)
1610 {
1611 new_to = op->to;
1612 break;
1613 }
1614
1615 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1616 disable it. */
1617 for (op = reg_eliminate;
1618 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1619 if (op->from == new_to && op->to == ep->to)
1620 op->can_eliminate = 0;
1621 }
1622 }
1623
1624 /* See if any registers that we thought we could eliminate the previous
1625 time are no longer eliminable. If so, something has changed and we
1626 must spill the register. Also, recompute the number of eliminable
1627 registers and see if the frame pointer is needed; it is if there is
1628 no elimination of the frame pointer that we can perform. */
1629
1630 frame_pointer_needed = 1;
1631 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1632 {
1633 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1634 && ep->to != HARD_FRAME_POINTER_REGNUM)
1635 frame_pointer_needed = 0;
1636
1637 if (! ep->can_eliminate && ep->can_eliminate_previous)
1638 {
1639 ep->can_eliminate_previous = 0;
1640 spill_hard_reg (ep->from, global, dumpfile, 1);
1641 something_changed = 1;
1642 num_eliminable--;
1643 }
1644 }
1645
1646 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1647 /* If we didn't need a frame pointer last time, but we do now, spill
1648 the hard frame pointer. */
1649 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1650 {
1651 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1652 something_changed = 1;
1653 }
1654 #endif
1655
1656 /* If all needs are met, we win. */
1657
1658 for (i = 0; i < N_REG_CLASSES; i++)
1659 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1660 break;
1661 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1662 break;
1663
1664 /* Not all needs are met; must spill some hard regs. */
1665
1666 /* Put all registers spilled so far back in potential_reload_regs, but
1667 put them at the front, since we've already spilled most of the
1668 pseudos in them (we might have left some pseudos unspilled if they
1669 were in a block that didn't need any spill registers of a conflicting
1670 	 class).  We used to try to mark off the need for those registers,
1671 but doing so properly is very complex and reallocating them is the
1672 simpler approach. First, "pack" potential_reload_regs by pushing
1673 any nonnegative entries towards the end. That will leave room
1674 for the registers we already spilled.
1675
1676 Also, undo the marking of the spill registers from the last time
1677 	 around in FORBIDDEN_REGS since we will probably be allocating
1678 them again below.
1679
1680 ??? It is theoretically possible that we might end up not using one
1681 of our previously-spilled registers in this allocation, even though
1682 they are at the head of the list. It's not clear what to do about
1683 this, but it was no better before, when we marked off the needs met
1684 by the previously-spilled registers. With the current code, globals
1685 can be allocated into these registers, but locals cannot. */
1686
1687 if (n_spills)
1688 {
1689 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1690 if (potential_reload_regs[i] != -1)
1691 potential_reload_regs[j--] = potential_reload_regs[i];
1692
1693 for (i = 0; i < n_spills; i++)
1694 {
1695 potential_reload_regs[i] = spill_regs[i];
1696 spill_reg_order[spill_regs[i]] = -1;
1697 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1698 }
1699
1700 n_spills = 0;
1701 }
1702
1703 	 /* Now find more reload regs to satisfy the remaining need.
1704 Do it by ascending class number, since otherwise a reg
1705 might be spilled for a big class and might fail to count
1706 for a smaller class even though it belongs to that class.
1707
1708 Count spilled regs in `spills', and add entries to
1709 `spill_regs' and `spill_reg_order'.
1710
1711 ??? Note there is a problem here.
1712 When there is a need for a group in a high-numbered class,
1713 and also need for non-group regs that come from a lower class,
1714 the non-group regs are chosen first. If there aren't many regs,
1715 they might leave no room for a group.
1716
1717 This was happening on the 386. To fix it, we added the code
1718 that calls possible_group_p, so that the lower class won't
1719 break up the last possible group.
1720
1721 Really fixing the problem would require changes above
1722 in counting the regs already spilled, and in choose_reload_regs.
1723 It might be hard to avoid introducing bugs there. */
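	 /* Illustrative sketch of the problem above, on a hypothetical
	    four-register machine: say class A = {0,1,2,3} needs one group
	    of two and a smaller class needs two single regs.  If the
	    singles are taken first and happen to be regs 0 and 2, only 1
	    and 3 remain and no adjacent pair is left for the group;
	    possible_group_p steers the second single to reg 1 instead,
	    preserving the pair 2/3.  */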
1724
1725 CLEAR_HARD_REG_SET (counted_for_groups);
1726 CLEAR_HARD_REG_SET (counted_for_nongroups);
1727
1728 for (class = 0; class < N_REG_CLASSES; class++)
1729 {
1730 /* First get the groups of registers.
1731 If we got single registers first, we might fragment
1732 possible groups. */
1733 while (max_groups[class] > 0)
1734 {
1735 /* If any single spilled regs happen to form groups,
1736 count them now. Maybe we don't really need
1737 to spill another group. */
1738 count_possible_groups (group_size, group_mode, max_groups,
1739 class);
1740
1741 if (max_groups[class] <= 0)
1742 break;
1743
1744 /* Groups of size 2 (the only groups used on most machines)
1745 are treated specially. */
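	      /* For example, with hypothetical numbering: if reg 5 is
		 already a spill reg, both 4 and 5 are in CLASS, reg 4
		 satisfies HARD_REGNO_MODE_OK for group_mode[class], and
		 reg 5 is not already counted for a nongroup or another
		 group, then spilling reg 4 completes the pair 4/5 and one
		 group is counted off without spilling two new registers.  */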
1746 if (group_size[class] == 2)
1747 {
1748 /* First, look for a register that will complete a group. */
1749 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1750 {
1751 int other;
1752
1753 j = potential_reload_regs[i];
1754 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1755 &&
1756 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1757 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1758 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1759 && HARD_REGNO_MODE_OK (other, group_mode[class])
1760 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1761 other)
1762 /* We don't want one part of another group.
1763 We could get "two groups" that overlap! */
1764 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1765 ||
1766 (j < FIRST_PSEUDO_REGISTER - 1
1767 && (other = j + 1, spill_reg_order[other] >= 0)
1768 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1769 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1770 && HARD_REGNO_MODE_OK (j, group_mode[class])
1771 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1772 other)
1773 && ! TEST_HARD_REG_BIT (counted_for_groups,
1774 other))))
1775 {
1776 register enum reg_class *p;
1777
1778 /* We have found one that will complete a group,
1779 so count off one group as provided. */
1780 max_groups[class]--;
1781 p = reg_class_superclasses[class];
1782 while (*p != LIM_REG_CLASSES)
1783 {
1784 if (group_size [(int) *p] <= group_size [class])
1785 max_groups[(int) *p]--;
1786 p++;
1787 }
1788
1789 /* Indicate both these regs are part of a group. */
1790 SET_HARD_REG_BIT (counted_for_groups, j);
1791 SET_HARD_REG_BIT (counted_for_groups, other);
1792 break;
1793 }
1794 }
1795 /* We can't complete a group, so start one. */
1796 #ifdef SMALL_REGISTER_CLASSES
1797 /* Look for a pair neither of which is explicitly used. */
1798 if (SMALL_REGISTER_CLASSES && i == FIRST_PSEUDO_REGISTER)
1799 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1800 {
1801 int k;
1802 j = potential_reload_regs[i];
1803 /* Verify that J+1 is a potential reload reg. */
1804 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1805 if (potential_reload_regs[k] == j + 1)
1806 break;
1807 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1808 && k < FIRST_PSEUDO_REGISTER
1809 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1810 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1811 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1812 && HARD_REGNO_MODE_OK (j, group_mode[class])
1813 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1814 j + 1)
1815 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1816 /* Reject J at this stage
1817 if J+1 was explicitly used. */
1818 && ! regs_explicitly_used[j + 1])
1819 break;
1820 }
1821 #endif
1822 /* Now try any group at all
1823 whose registers are not in bad_spill_regs. */
1824 if (i == FIRST_PSEUDO_REGISTER)
1825 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1826 {
1827 int k;
1828 j = potential_reload_regs[i];
1829 /* Verify that J+1 is a potential reload reg. */
1830 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1831 if (potential_reload_regs[k] == j + 1)
1832 break;
1833 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1834 && k < FIRST_PSEUDO_REGISTER
1835 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1836 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1837 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1838 && HARD_REGNO_MODE_OK (j, group_mode[class])
1839 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1840 j + 1)
1841 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1842 break;
1843 }
1844
1845 /* I should be the index in potential_reload_regs
1846 of the new reload reg we have found. */
1847
1848 if (i >= FIRST_PSEUDO_REGISTER)
1849 {
1850 /* There are no groups left to spill. */
1851 spill_failure (max_groups_insn[class]);
1852 failure = 1;
1853 goto failed;
1854 }
1855 else
1856 something_changed
1857 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1858 global, dumpfile);
1859 }
1860 else
1861 {
1862 /* For groups of more than 2 registers,
1863 look for a sufficient sequence of unspilled registers,
1864 and spill them all at once. */
1865 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1866 {
1867 int k;
1868
1869 j = potential_reload_regs[i];
1870 if (j >= 0
1871 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1872 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1873 {
1874 /* Check each reg in the sequence. */
1875 for (k = 0; k < group_size[class]; k++)
1876 if (! (spill_reg_order[j + k] < 0
1877 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1878 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1879 break;
1880 /* We got a full sequence, so spill them all. */
1881 if (k == group_size[class])
1882 {
1883 register enum reg_class *p;
1884 for (k = 0; k < group_size[class]; k++)
1885 {
1886 int idx;
1887 SET_HARD_REG_BIT (counted_for_groups, j + k);
1888 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1889 if (potential_reload_regs[idx] == j + k)
1890 break;
1891 something_changed
1892 |= new_spill_reg (idx, class,
1893 max_needs, NULL_PTR,
1894 global, dumpfile);
1895 }
1896
1897 /* We have found one that will complete a group,
1898 so count off one group as provided. */
1899 max_groups[class]--;
1900 p = reg_class_superclasses[class];
1901 while (*p != LIM_REG_CLASSES)
1902 {
1903 if (group_size [(int) *p]
1904 <= group_size [class])
1905 max_groups[(int) *p]--;
1906 p++;
1907 }
1908 break;
1909 }
1910 }
1911 }
1912 /* We couldn't find any registers for this reload.
1913 Avoid going into an infinite loop. */
1914 if (i >= FIRST_PSEUDO_REGISTER)
1915 {
1916 /* There are no groups left. */
1917 spill_failure (max_groups_insn[class]);
1918 failure = 1;
1919 goto failed;
1920 }
1921 }
1922 }
1923
1924 /* Now similarly satisfy all need for single registers. */
1925
1926 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1927 {
1928 /* If we spilled enough regs, but they weren't counted
1929 against the non-group need, see if we can count them now.
1930 If so, we can avoid some actual spilling. */
1931 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1932 for (i = 0; i < n_spills; i++)
1933 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1934 spill_regs[i])
1935 && !TEST_HARD_REG_BIT (counted_for_groups,
1936 spill_regs[i])
1937 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1938 spill_regs[i])
1939 && max_nongroups[class] > 0)
1940 {
1941 register enum reg_class *p;
1942
1943 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1944 max_nongroups[class]--;
1945 p = reg_class_superclasses[class];
1946 while (*p != LIM_REG_CLASSES)
1947 max_nongroups[(int) *p++]--;
1948 }
1949 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1950 break;
1951
1952 /* Consider the potential reload regs that aren't
1953 yet in use as reload regs, in order of preference.
1954 Find the most preferred one that's in this class. */
1955
1956 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1957 if (potential_reload_regs[i] >= 0
1958 && TEST_HARD_REG_BIT (reg_class_contents[class],
1959 potential_reload_regs[i])
1960 /* If this reg will not be available for groups,
1961 pick one that does not foreclose possible groups.
1962 This is a kludge, and not very general,
1963 but it should be sufficient to make the 386 work,
1964 and the problem should not occur on machines with
1965 more registers. */
1966 && (max_nongroups[class] == 0
1967 || possible_group_p (potential_reload_regs[i], max_groups)))
1968 break;
1969
1970 /* If we couldn't get a register, try to get one even if we
1971 might foreclose possible groups. This may cause problems
1972 later, but that's better than aborting now, since it is
1973 possible that we will, in fact, be able to form the needed
1974 group even with this allocation. */
1975
1976 if (i >= FIRST_PSEUDO_REGISTER
1977 && (asm_noperands (max_needs[class] > 0
1978 ? max_needs_insn[class]
1979 : max_nongroups_insn[class])
1980 < 0))
1981 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1982 if (potential_reload_regs[i] >= 0
1983 && TEST_HARD_REG_BIT (reg_class_contents[class],
1984 potential_reload_regs[i]))
1985 break;
1986
1987 /* I should be the index in potential_reload_regs
1988 of the new reload reg we have found. */
1989
1990 if (i >= FIRST_PSEUDO_REGISTER)
1991 {
1992 /* There are no possible registers left to spill. */
1993 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1994 : max_nongroups_insn[class]);
1995 failure = 1;
1996 goto failed;
1997 }
1998 else
1999 something_changed
2000 |= new_spill_reg (i, class, max_needs, max_nongroups,
2001 global, dumpfile);
2002 }
2003 }
2004 }
2005
2006 /* If global-alloc was run, notify it of any register eliminations we have
2007 done. */
2008 if (global)
2009 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2010 if (ep->can_eliminate)
2011 mark_elimination (ep->from, ep->to);
2012
2013 /* Insert code to save and restore call-clobbered hard regs
2014 around calls.  Tell what mode to use so that we will process
2015 those insns in reload_as_needed if we have to. */
2016
2017 if (caller_save_needed)
2018 save_call_clobbered_regs (num_eliminable ? QImode
2019 : caller_save_spill_class != NO_REGS ? HImode
2020 : VOIDmode);
2021
2022 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
2023 If that insn didn't set the register (i.e., it copied the register to
2024 memory), just delete that insn instead of the equivalencing insn plus
2025 anything now dead. If we call delete_dead_insn on that insn, we may
2026 delete the insn that actually sets the register if the register dies
2027 there and that is incorrect. */
2028
2029 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2030 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
2031 && GET_CODE (reg_equiv_init[i]) != NOTE)
2032 {
2033 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
2034 delete_dead_insn (reg_equiv_init[i]);
2035 else
2036 {
2037 PUT_CODE (reg_equiv_init[i], NOTE);
2038 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
2039 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
2040 }
2041 }
2042
2043 /* Use the reload registers where necessary
2044 by generating move instructions to move the must-be-register
2045 values into or out of the reload registers. */
2046
2047 if (something_needs_reloads || something_needs_elimination
2048 || (caller_save_needed && num_eliminable)
2049 || caller_save_spill_class != NO_REGS)
2050 reload_as_needed (first, global);
2051
2052 /* If we were able to eliminate the frame pointer, show that it is no
2053 longer live at the start of any basic block.  If it is live by
2054 virtue of being in a pseudo, that pseudo will be marked live
2055 and hence the frame pointer will be known to be live via that
2056 pseudo. */
2057
2058 if (! frame_pointer_needed)
2059 for (i = 0; i < n_basic_blocks; i++)
2060 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
2061 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
2062 % REGSET_ELT_BITS));
2063
2064 /* Come here (with failure set nonzero) if we can't get enough spill regs
2065 and we decide not to abort about it. */
2066 failed:
2067
2068 reload_in_progress = 0;
2069
2070 /* Now eliminate all pseudo regs by modifying them into
2071 their equivalent memory references.
2072 The REG-rtx's for the pseudos are modified in place,
2073 so all insns that used to refer to them now refer to memory.
2074
2075 For a reg that has a reg_equiv_address, all those insns
2076 were changed by reloading so that no insns refer to it any longer;
2077 but the DECL_RTL of a variable decl may refer to it,
2078 and if so this causes the debugging info to mention the variable. */
2079
2080 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2081 {
2082 rtx addr = 0;
2083 int in_struct = 0;
2084 if (reg_equiv_mem[i])
2085 {
2086 addr = XEXP (reg_equiv_mem[i], 0);
2087 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
2088 }
2089 if (reg_equiv_address[i])
2090 addr = reg_equiv_address[i];
2091 if (addr)
2092 {
2093 if (reg_renumber[i] < 0)
2094 {
2095 rtx reg = regno_reg_rtx[i];
2096 XEXP (reg, 0) = addr;
2097 REG_USERVAR_P (reg) = 0;
2098 MEM_IN_STRUCT_P (reg) = in_struct;
2099 PUT_CODE (reg, MEM);
2100 }
2101 else if (reg_equiv_mem[i])
2102 XEXP (reg_equiv_mem[i], 0) = addr;
2103 }
2104 }
2105
2106 #ifdef PRESERVE_DEATH_INFO_REGNO_P
2107 /* Make a pass over all the insns and remove death notes for things that
2108 are no longer registers or no longer die in the insn (e.g., an input
2109 and output pseudo being tied). */
2110
2111 for (insn = first; insn; insn = NEXT_INSN (insn))
2112 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2113 {
2114 rtx note, next;
2115
2116 for (note = REG_NOTES (insn); note; note = next)
2117 {
2118 next = XEXP (note, 1);
2119 if (REG_NOTE_KIND (note) == REG_DEAD
2120 && (GET_CODE (XEXP (note, 0)) != REG
2121 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2122 remove_note (insn, note);
2123 }
2124 }
2125 #endif
2126
2127 /* Indicate that we no longer have known memory locations or constants. */
2128 reg_equiv_constant = 0;
2129 reg_equiv_memory_loc = 0;
2130
2131 if (scratch_list)
2132 free (scratch_list);
2133 scratch_list = 0;
2134 if (scratch_block)
2135 free (scratch_block);
2136 scratch_block = 0;
2137
2138 CLEAR_HARD_REG_SET (used_spill_regs);
2139 for (i = 0; i < n_spills; i++)
2140 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
2141
2142 return failure;
2143 }
2144 \f
2145 /* Nonzero if, after spilling reg REGNO for non-groups,
2146 it will still be possible to find a group if we still need one. */
2147
2148 static int
2149 possible_group_p (regno, max_groups)
2150 int regno;
2151 int *max_groups;
2152 {
2153 int i;
2154 int class = (int) NO_REGS;
2155
2156 for (i = 0; i < (int) N_REG_CLASSES; i++)
2157 if (max_groups[i] > 0)
2158 {
2159 class = i;
2160 break;
2161 }
2162
2163 if (class == (int) NO_REGS)
2164 return 1;
2165
2166 /* Consider each pair of consecutive registers. */
2167 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2168 {
2169 /* Ignore pairs that include reg REGNO. */
2170 if (i == regno || i + 1 == regno)
2171 continue;
2172
2173 /* Ignore pairs that are outside the class that needs the group.
2174 ??? Here we fail to handle the case where two different classes
2175 independently need groups. But this never happens with our
2176 current machine descriptions. */
2177 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2178 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2179 continue;
2180
2181 /* A pair of consecutive regs we can still spill does the trick. */
2182 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2183 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2184 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2185 return 1;
2186
2187 /* A pair of one already spilled and one we can spill does it
2188 provided the one already spilled is not otherwise reserved. */
2189 if (spill_reg_order[i] < 0
2190 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2191 && spill_reg_order[i + 1] >= 0
2192 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2193 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2194 return 1;
2195 if (spill_reg_order[i + 1] < 0
2196 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2197 && spill_reg_order[i] >= 0
2198 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2199 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2200 return 1;
2201 }
2202
2203 return 0;
2204 }
2205 \f
2206 /* Count any groups of CLASS that can be formed from the registers recently
2207 spilled. */
2208
2209 static void
2210 count_possible_groups (group_size, group_mode, max_groups, class)
2211 int *group_size;
2212 enum machine_mode *group_mode;
2213 int *max_groups;
2214 int class;
2215 {
2216 HARD_REG_SET new;
2217 int i, j;
2218
2219 /* Now find all consecutive groups of spilled registers
2220 and mark each group off against the need for such groups.
2221 But don't count them against ordinary need, yet. */
2222
2223 if (group_size[class] == 0)
2224 return;
2225
2226 CLEAR_HARD_REG_SET (new);
2227
2228 /* Make a mask of all the regs that are spill regs in class CLASS. */
2229 for (i = 0; i < n_spills; i++)
2230 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2231 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2232 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2233 SET_HARD_REG_BIT (new, spill_regs[i]);
2234
2235 /* Find each consecutive group of them. */
2236 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2237 if (TEST_HARD_REG_BIT (new, i)
2238 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2239 && HARD_REGNO_MODE_OK (i, group_mode[class]))
2240 {
2241 for (j = 1; j < group_size[class]; j++)
2242 if (! TEST_HARD_REG_BIT (new, i + j))
2243 break;
2244
2245 if (j == group_size[class])
2246 {
2247 /* We found a group. Mark it off against this class's need for
2248 groups, and against each superclass too. */
2249 register enum reg_class *p;
2250
2251 max_groups[class]--;
2252 p = reg_class_superclasses[class];
2253 while (*p != LIM_REG_CLASSES)
2254 {
2255 if (group_size [(int) *p] <= group_size [class])
2256 max_groups[(int) *p]--;
2257 p++;
2258 }
2259
2260 /* Don't count these registers again. */
2261 for (j = 0; j < group_size[class]; j++)
2262 SET_HARD_REG_BIT (counted_for_groups, i + j);
2263 }
2264
2265 /* Skip to the last reg in this group. When i is incremented above,
2266 it will then point to the first reg of the next possible group. */
2267 i += j - 1;
2268 }
2269 }
2270 \f
2271 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2272 another mode that needs to be reloaded for the same register class CLASS.
2273 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2274 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2275
2276 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2277 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2278 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2279 causes unnecessary failures on machines requiring alignment of register
2280 groups when the two modes are different sizes, because the larger mode has
2281 more strict alignment rules than the smaller mode. */
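/* Illustrative sketch on a hypothetical target where only even-numbered
   registers accept DImode but every register accepts SImode: asking
   modes_equiv_for_class_p (DImode, SImode, class) succeeds, since each
   reg that allows DImode also allows SImode, whereas the removed reverse
   test would have failed because the odd registers allow SImode but not
   DImode.  */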
2282
2283 static int
2284 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2285 enum machine_mode allocate_mode, other_mode;
2286 enum reg_class class;
2287 {
2288 register int regno;
2289 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2290 {
2291 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2292 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2293 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2294 return 0;
2295 }
2296 return 1;
2297 }
2298
2299 /* Handle the failure to find a register to spill.
2300 INSN should be one of the insns which needed this particular spill reg. */
2301
2302 static void
2303 spill_failure (insn)
2304 rtx insn;
2305 {
2306 if (asm_noperands (PATTERN (insn)) >= 0)
2307 error_for_asm (insn, "`asm' needs too many reloads");
2308 else
2309 fatal_insn ("Unable to find a register to spill.", insn);
2310 }
2311
2312 /* Add a new register to the tables of available spill-registers
2313 (as well as spilling all pseudos allocated to the register).
2314 I is the index of this register in potential_reload_regs.
2315 CLASS is the regclass whose need is being satisfied.
2316 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2317 so that this register can count off against them.
2318 MAX_NONGROUPS is 0 if this register is part of a group.
2319 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2320
2321 static int
2322 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2323 int i;
2324 int class;
2325 int *max_needs;
2326 int *max_nongroups;
2327 int global;
2328 FILE *dumpfile;
2329 {
2330 register enum reg_class *p;
2331 int val;
2332 int regno = potential_reload_regs[i];
2333
2334 if (i >= FIRST_PSEUDO_REGISTER)
2335 abort (); /* Caller failed to find any register. */
2336
2337 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2338 fatal ("fixed or forbidden register was spilled.\n\
2339 This may be due to a compiler bug or to impossible asm\n\
2340 statements or clauses.");
2341
2342 /* Make reg REGNO an additional reload reg. */
2343
2344 potential_reload_regs[i] = -1;
2345 spill_regs[n_spills] = regno;
2346 spill_reg_order[regno] = n_spills;
2347 if (dumpfile)
2348 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2349
2350 /* Clear off the needs we just satisfied. */
2351
2352 max_needs[class]--;
2353 p = reg_class_superclasses[class];
2354 while (*p != LIM_REG_CLASSES)
2355 max_needs[(int) *p++]--;
2356
2357 if (max_nongroups && max_nongroups[class] > 0)
2358 {
2359 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2360 max_nongroups[class]--;
2361 p = reg_class_superclasses[class];
2362 while (*p != LIM_REG_CLASSES)
2363 max_nongroups[(int) *p++]--;
2364 }
2365
2366 /* Spill every pseudo reg that was allocated to this reg
2367 or to something that overlaps this reg. */
2368
2369 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2370
2371 /* If there are some registers still to eliminate and this register
2372 wasn't ever used before, additional stack space may have to be
2373 allocated to store this register. Thus, we may have changed the offset
2374 between the stack and frame pointers, so mark that something has changed.
2375 (If new pseudos were spilled, thus requiring more space, VAL would have
2376 been set non-zero by the call to spill_hard_reg above since additional
2377 reloads may be needed in that case.)
2378
2379 One might think that we need only set VAL to 1 if this is a call-used
2380 register. However, the set of registers that must be saved by the
2381 prologue is not identical to the call-used set. For example, the
2382 register used by the call insn for the return PC is a call-used register,
2383 but must be saved by the prologue. */
2384 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2385 val = 1;
2386
2387 regs_ever_live[spill_regs[n_spills]] = 1;
2388 n_spills++;
2389
2390 return val;
2391 }
2392 \f
2393 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2394 data that is dead in INSN. */
2395
2396 static void
2397 delete_dead_insn (insn)
2398 rtx insn;
2399 {
2400 rtx prev = prev_real_insn (insn);
2401 rtx prev_dest;
2402
2403 /* If the previous insn sets a register that dies in our insn, delete it
2404 too. */
2405 if (prev && GET_CODE (PATTERN (prev)) == SET
2406 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2407 && reg_mentioned_p (prev_dest, PATTERN (insn))
2408 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2409 delete_dead_insn (prev);
2410
2411 PUT_CODE (insn, NOTE);
2412 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2413 NOTE_SOURCE_FILE (insn) = 0;
2414 }
2415
2416 /* Modify the home of pseudo-reg I.
2417 The new home is present in reg_renumber[I].
2418
2419 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2420 or it may be -1, meaning there is none or it is not relevant.
2421 This is used so that all pseudos spilled from a given hard reg
2422 can share one stack slot. */
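/* For example, with hypothetical pseudo numbers: if pseudos 70 and 75
   are both spilled from hard reg 3, the first call allocates
   spill_stack_slot[3]; the second call reuses that slot provided it is
   large enough in both inherent and total size, and otherwise allocates
   a bigger slot and records it in its place.  */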
2423
2424 static void
2425 alter_reg (i, from_reg)
2426 register int i;
2427 int from_reg;
2428 {
2429 /* When outputting an inline function, this can happen
2430 for a reg that isn't actually used. */
2431 if (regno_reg_rtx[i] == 0)
2432 return;
2433
2434 /* If the reg got changed to a MEM at rtl-generation time,
2435 ignore it. */
2436 if (GET_CODE (regno_reg_rtx[i]) != REG)
2437 return;
2438
2439 /* Modify the reg-rtx to contain the new hard reg
2440 number or else to contain its pseudo reg number. */
2441 REGNO (regno_reg_rtx[i])
2442 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2443
2444 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2445 allocate a stack slot for it. */
2446
2447 if (reg_renumber[i] < 0
2448 && reg_n_refs[i] > 0
2449 && reg_equiv_constant[i] == 0
2450 && reg_equiv_memory_loc[i] == 0)
2451 {
2452 register rtx x;
2453 int inherent_size = PSEUDO_REGNO_BYTES (i);
2454 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2455 int adjust = 0;
2456
2457 /* Each pseudo reg has an inherent size which comes from its own mode,
2458 and a total size which provides room for paradoxical subregs
2459 which refer to the pseudo reg in wider modes.
2460
2461 We can use a slot already allocated if it provides both
2462 enough inherent space and enough total space.
2463 Otherwise, we allocate a new slot, making sure that it has no less
2464 inherent space, and no less total space, than the previous slot. */
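	 /* Worked example, assuming 4-byte SImode and 8-byte DImode: an
	    SImode pseudo that is also referenced through a paradoxical
	    DImode subreg has inherent_size == 4 but reg_max_ref_width,
	    and hence total_size, equal to 8, so its slot must be 8 bytes
	    even though the value only needs 4 in its natural mode.  */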
2465 if (from_reg == -1)
2466 {
2467 /* No known place to spill from => no slot to reuse. */
2468 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
2469 inherent_size == total_size ? 0 : -1);
2470 if (BYTES_BIG_ENDIAN)
2471 /* Cancel the big-endian correction done in assign_stack_local.
2472 Get the address of the beginning of the slot.
2473 This is so we can do a big-endian correction unconditionally
2474 below. */
2475 adjust = inherent_size - total_size;
2476
2477 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2478 }
2479 /* Reuse a stack slot if possible. */
2480 else if (spill_stack_slot[from_reg] != 0
2481 && spill_stack_slot_width[from_reg] >= total_size
2482 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2483 >= inherent_size))
2484 x = spill_stack_slot[from_reg];
2485 /* Allocate a bigger slot. */
2486 else
2487 {
2488 /* Compute maximum size needed, both for inherent size
2489 and for total size. */
2490 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2491 rtx stack_slot;
2492 if (spill_stack_slot[from_reg])
2493 {
2494 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2495 > inherent_size)
2496 mode = GET_MODE (spill_stack_slot[from_reg]);
2497 if (spill_stack_slot_width[from_reg] > total_size)
2498 total_size = spill_stack_slot_width[from_reg];
2499 }
2500 /* Make a slot with that size. */
2501 x = assign_stack_local (mode, total_size,
2502 inherent_size == total_size ? 0 : -1);
2503 stack_slot = x;
2504 if (BYTES_BIG_ENDIAN)
2505 {
2506 /* Cancel the big-endian correction done in assign_stack_local.
2507 Get the address of the beginning of the slot.
2508 This is so we can do a big-endian correction unconditionally
2509 below. */
2510 adjust = GET_MODE_SIZE (mode) - total_size;
2511 if (adjust)
2512 stack_slot = gen_rtx (MEM, mode_for_size (total_size
2513 * BITS_PER_UNIT,
2514 MODE_INT, 1),
2515 plus_constant (XEXP (x, 0), adjust));
2516 }
2517 spill_stack_slot[from_reg] = stack_slot;
2518 spill_stack_slot_width[from_reg] = total_size;
2519 }
2520
2521 /* On a big endian machine, the "address" of the slot
2522 is the address of the low part that fits its inherent mode. */
2523 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2524 adjust += (total_size - inherent_size);
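      /* Worked example of the ADJUST arithmetic (big-endian target,
	 inherent_size == 4, total_size == 8): on the fresh-slot path
	 above, adjust was set to 4 - 8 = -4 to cancel assign_stack_local's
	 correction, and the line above then adds 8 - 4 = 4, leaving the
	 address unchanged; on the reused-slot path adjust starts at 0 and
	 becomes 4, addressing the low part that fits the pseudo's
	 inherent mode.  */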
2525
2526 /* If we have any adjustment to make, or if the stack slot is the
2527 wrong mode, make a new stack slot. */
2528 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2529 {
2530 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2531 plus_constant (XEXP (x, 0), adjust));
2532 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2533 }
2534
2535 /* Save the stack slot for later. */
2536 reg_equiv_memory_loc[i] = x;
2537 }
2538 }
2539
2540 /* Mark the slots in regs_ever_live for the hard regs
2541 used by pseudo-reg number REGNO. */
2542
2543 void
2544 mark_home_live (regno)
2545 int regno;
2546 {
2547 register int i, lim;
2548 i = reg_renumber[regno];
2549 if (i < 0)
2550 return;
2551 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2552 while (i < lim)
2553 regs_ever_live[i++] = 1;
2554 }
2555
2556 /* Mark the registers used in SCRATCH as being live. */
2557
2558 static void
2559 mark_scratch_live (scratch)
2560 rtx scratch;
2561 {
2562 register int i;
2563 int regno = REGNO (scratch);
2564 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2565
2566 for (i = regno; i < lim; i++)
2567 regs_ever_live[i] = 1;
2568 }
2569 \f
2570 /* This function handles the tracking of elimination offsets around branches.
2571
2572 X is a piece of RTL being scanned.
2573
2574 INSN is the insn that it came from, if any.
2575
2576 INITIAL_P is non-zero if we are to set the offset to be the initial
2577 offset and zero if we are setting the offset of the label to be the
2578 current offset. */
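/* For example: if a label is first recorded while some elimination
   offset is 16 and is later reached by a path where that offset is 24,
   the recorded and current offsets disagree, so the final arm below
   disables that elimination; no single replacement constant would be
   correct on both paths.  (The offsets are hypothetical.)  */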
2579
2580 static void
2581 set_label_offsets (x, insn, initial_p)
2582 rtx x;
2583 rtx insn;
2584 int initial_p;
2585 {
2586 enum rtx_code code = GET_CODE (x);
2587 rtx tem;
2588 int i;
2589 struct elim_table *p;
2590
2591 switch (code)
2592 {
2593 case LABEL_REF:
2594 if (LABEL_REF_NONLOCAL_P (x))
2595 return;
2596
2597 x = XEXP (x, 0);
2598
2599 /* ... fall through ... */
2600
2601 case CODE_LABEL:
2602 /* If we know nothing about this label, set the desired offsets. Note
2603 that this sets the offset at a label to be the offset before a label
2604 if we don't know anything about the label. This is not correct for
2605 the label after a BARRIER, but is the best guess we can make. If
2606 we guessed wrong, we will suppress an elimination that might have
2607 been possible had we been able to guess correctly. */
2608
2609 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2610 {
2611 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2612 offsets_at[CODE_LABEL_NUMBER (x)][i]
2613 = (initial_p ? reg_eliminate[i].initial_offset
2614 : reg_eliminate[i].offset);
2615 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2616 }
2617
2618 /* Otherwise, if this is the definition of a label and it is
2619 preceded by a BARRIER, set our offsets to the known offset of
2620 that label. */
2621
2622 else if (x == insn
2623 && (tem = prev_nonnote_insn (insn)) != 0
2624 && GET_CODE (tem) == BARRIER)
2625 {
2626 num_not_at_initial_offset = 0;
2627 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2628 {
2629 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2630 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2631 if (reg_eliminate[i].can_eliminate
2632 && (reg_eliminate[i].offset
2633 != reg_eliminate[i].initial_offset))
2634 num_not_at_initial_offset++;
2635 }
2636 }
2637
2638 else
2639 /* If neither of the above cases is true, compare each offset
2640 with those previously recorded and suppress any eliminations
2641 where the offsets disagree. */
2642
2643 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2644 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2645 != (initial_p ? reg_eliminate[i].initial_offset
2646 : reg_eliminate[i].offset))
2647 reg_eliminate[i].can_eliminate = 0;
2648
2649 return;
2650
2651 case JUMP_INSN:
2652 set_label_offsets (PATTERN (insn), insn, initial_p);
2653
2654 /* ... fall through ... */
2655
2656 case INSN:
2657 case CALL_INSN:
2658 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2659 and hence must have all eliminations at their initial offsets. */
2660 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2661 if (REG_NOTE_KIND (tem) == REG_LABEL)
2662 set_label_offsets (XEXP (tem, 0), insn, 1);
2663 return;
2664
2665 case ADDR_VEC:
2666 case ADDR_DIFF_VEC:
2667 /* Each of the labels in the address vector must be at their initial
2668 offsets.  We want the first field for ADDR_VEC and the second
2669 field for ADDR_DIFF_VEC. */
2670
2671 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2672 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2673 insn, initial_p);
2674 return;
2675
2676 case SET:
2677 /* We only care about setting PC. If the source is not RETURN,
2678 IF_THEN_ELSE, or a label, disable any eliminations not at
2679 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2680 isn't one of those possibilities. For branches to a label,
2681 call ourselves recursively.
2682
2683 Note that this can disable elimination unnecessarily when we have
2684 a non-local goto since it will look like a non-constant jump to
2685 someplace in the current function. This isn't a significant
2686 problem since such jumps will normally be when all elimination
2687 pairs are back to their initial offsets. */
2688
2689 if (SET_DEST (x) != pc_rtx)
2690 return;
2691
2692 switch (GET_CODE (SET_SRC (x)))
2693 {
2694 case PC:
2695 case RETURN:
2696 return;
2697
2698 case LABEL_REF:
2699 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2700 return;
2701
2702 case IF_THEN_ELSE:
2703 tem = XEXP (SET_SRC (x), 1);
2704 if (GET_CODE (tem) == LABEL_REF)
2705 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2706 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2707 break;
2708
2709 tem = XEXP (SET_SRC (x), 2);
2710 if (GET_CODE (tem) == LABEL_REF)
2711 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2712 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2713 break;
2714 return;
2715 }
2716
2717 /* If we reach here, all eliminations must be at their initial
2718 offset because we are doing a jump to a variable address. */
2719 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2720 if (p->offset != p->initial_offset)
2721 p->can_eliminate = 0;
2722 }
2723 }
2724 \f
2725 /* Used for communication between the next two functions to properly share
2726 the vector for an ASM_OPERANDS. */
2727
2728 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2729
2730 /* Scan X and replace any eliminable registers (such as fp) with a
2731 replacement (such as sp), plus an offset.
2732
2733 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2734 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2735 MEM, we are allowed to replace a sum of a register and the constant zero
2736 with the register, which we cannot do outside a MEM. In addition, we need
2737 to record the fact that a register is referenced outside a MEM.
2738
2739 If INSN is an insn, it is the insn containing X. If we replace a REG
2740 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2741 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2742 the REG is being modified.
2743
2744 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2745 That's used when we eliminate in expressions stored in notes.
2746 This means, do not set ref_outside_mem even if the reference
2747 is outside of MEMs.
2748
2749 If we see a modification to a register we know about, take the
2750 appropriate action (see case SET, below).
2751
2752 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2753 replacements done assuming all offsets are at their initial values. If
2754 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2755 encounter, return the actual location so that find_reloads will do
2756 the proper thing. */
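/* Worked example, with hypothetical offsets: for an fp -> sp
   elimination whose previous_offset is 16, a bare (reg fp) becomes
   (plus (reg sp) (const_int 16)) via the REG case below, and
   (mem:SI (plus (reg fp) (const_int 8))) becomes
   (mem:SI (plus (reg sp) (const_int 24))) via the MEM and PLUS cases.  */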
2757
2758 rtx
2759 eliminate_regs (x, mem_mode, insn)
2760 rtx x;
2761 enum machine_mode mem_mode;
2762 rtx insn;
2763 {
2764 enum rtx_code code = GET_CODE (x);
2765 struct elim_table *ep;
2766 int regno;
2767 rtx new;
2768 int i, j;
2769 char *fmt;
2770 int copied = 0;
2771
2772 switch (code)
2773 {
2774 case CONST_INT:
2775 case CONST_DOUBLE:
2776 case CONST:
2777 case SYMBOL_REF:
2778 case CODE_LABEL:
2779 case PC:
2780 case CC0:
2781 case ASM_INPUT:
2782 case ADDR_VEC:
2783 case ADDR_DIFF_VEC:
2784 case RETURN:
2785 return x;
2786
2787 case REG:
2788 regno = REGNO (x);
2789
2790 /* First handle the case where we encounter a bare register that
2791 is eliminable. Replace it with a PLUS. */
2792 if (regno < FIRST_PSEUDO_REGISTER)
2793 {
2794 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2795 ep++)
2796 if (ep->from_rtx == x && ep->can_eliminate)
2797 {
2798 if (! mem_mode
2799 /* Refs inside notes don't count for this purpose. */
2800 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2801 || GET_CODE (insn) == INSN_LIST)))
2802 ep->ref_outside_mem = 1;
2803 return plus_constant (ep->to_rtx, ep->previous_offset);
2804 }
2805
2806 }
2807 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2808 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2809 {
2810 /* In this case, find_reloads would attempt to either use an
2811 incorrect address (if something is not at its initial offset)
2812 or substitute a replaced address into an insn (which loses
2813 if the offset is changed by some later action). So we simply
2814 return the replaced stack slot (assuming it is changed by
2815 elimination) and ignore the fact that this is actually a
2816 reference to the pseudo. Ensure we make a copy of the
2817 address in case it is shared. */
2818 new = eliminate_regs (reg_equiv_memory_loc[regno],
2819 mem_mode, insn);
2820 if (new != reg_equiv_memory_loc[regno])
2821 {
2822 cannot_omit_stores[regno] = 1;
2823 return copy_rtx (new);
2824 }
2825 }
2826 return x;
2827
2828 case PLUS:
2829 /* If this is the sum of an eliminable register and a constant, rework
2830 the sum. */
2831 if (GET_CODE (XEXP (x, 0)) == REG
2832 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2833 && CONSTANT_P (XEXP (x, 1)))
2834 {
2835 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2836 ep++)
2837 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2838 {
2839 if (! mem_mode
2840 /* Refs inside notes don't count for this purpose. */
2841 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2842 || GET_CODE (insn) == INSN_LIST)))
2843 ep->ref_outside_mem = 1;
2844
2845 /* The only time we want to replace a PLUS with a REG (this
2846 occurs when the constant operand of the PLUS is the negative
2847 of the offset) is when we are inside a MEM. We won't want
2848 to do so at other times because that would change the
2849 structure of the insn in a way that reload can't handle.
2850 We special-case the commonest situation in
2851 eliminate_regs_in_insn, so just replace a PLUS with a
2852 PLUS here, unless inside a MEM. */
2853 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2854 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2855 return ep->to_rtx;
2856 else
2857 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2858 plus_constant (XEXP (x, 1),
2859 ep->previous_offset));
2860 }
2861
2862 /* If the register is not eliminable, we are done since the other
2863 operand is a constant. */
2864 return x;
2865 }
2866
2867 /* If this is part of an address, we want to bring any constant to the
2868 outermost PLUS. We will do this by doing register replacement in
2869 our operands and seeing if a constant shows up in one of them.
2870
2871 We assume here this is part of an address (or a "load address" insn)
2872 since an eliminable register is not likely to appear in any other
2873 context.
2874
2875 If we have (plus (eliminable) (reg)), we want to produce
2876 (plus (plus (replacement) (reg)) (const)). If this was part of a
2877 normal add insn, (plus (replacement) (reg)) will be pushed as a
2878 reload. This is the desired action. */
2879
2880 {
2881 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2882 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2883
2884 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2885 {
2886 /* If one side is a PLUS and the other side is a pseudo that
2887 didn't get a hard register but has a reg_equiv_constant,
2888 we must replace the constant here since it may no longer
2889 be in the position of any operand. */
2890 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2891 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2892 && reg_renumber[REGNO (new1)] < 0
2893 && reg_equiv_constant != 0
2894 && reg_equiv_constant[REGNO (new1)] != 0)
2895 new1 = reg_equiv_constant[REGNO (new1)];
2896 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2897 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2898 && reg_renumber[REGNO (new0)] < 0
2899 && reg_equiv_constant[REGNO (new0)] != 0)
2900 new0 = reg_equiv_constant[REGNO (new0)];
2901
2902 new = form_sum (new0, new1);
2903
2904 /* As above, if we are not inside a MEM we do not want to
2905 turn a PLUS into something else. We might try to do so here
2906 for an addition of 0 if we aren't optimizing. */
2907 if (! mem_mode && GET_CODE (new) != PLUS)
2908 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2909 else
2910 return new;
2911 }
2912 }
2913 return x;
2914
2915 case MINUS:
2916 /* If we have (minus (eliminable) (reg)), we want to produce
2917 (plus (minus (replacement) (reg)) (const)). The main reason being
2918 to be consistent with what is done for PLUS. find_reloads_address
2919 assumes that we do this. */
2920 {
2921 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2922 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2923
2924 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2925 {
2926 if (GET_CODE (new0) == PLUS)
2927 return gen_rtx (PLUS, GET_MODE (x),
2928 gen_rtx (MINUS, GET_MODE (x),
2929 XEXP (new0, 0), new1),
2930 XEXP (new0, 1));
2931 else
2932 return gen_rtx (MINUS, GET_MODE (x), new0, new1);
2933 }
2934 }
2935 return x;
2936
2937 case MULT:
2938 /* If this is the product of an eliminable register and a
2939 constant, apply the distribute law and move the constant out
2940 so that we have (plus (mult ..) ..). This is needed in order
2941 to keep load-address insns valid. This case is pathological.
2942 We ignore the possibility of overflow here. */
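      /* For example, with a hypothetical offset: under an fp -> sp
	 elimination whose previous_offset is 16,
	 (mult (reg fp) (const_int 4)) becomes
	 (plus (mult (reg sp) (const_int 4)) (const_int 64)); the
	 elimination offset is scaled by the multiplier.  */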
2943 if (GET_CODE (XEXP (x, 0)) == REG
2944 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2945 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2946 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2947 ep++)
2948 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2949 {
2950 if (! mem_mode
2951 /* Refs inside notes don't count for this purpose. */
2952 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2953 || GET_CODE (insn) == INSN_LIST)))
2954 ep->ref_outside_mem = 1;
2955
2956 return
2957 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2958 ep->previous_offset * INTVAL (XEXP (x, 1)));
2959 }
2960
2961 /* ... fall through ... */
2962
2963 case CALL:
2964 case COMPARE:
2965 case DIV: case UDIV:
2966 case MOD: case UMOD:
2967 case AND: case IOR: case XOR:
2968 case ROTATERT: case ROTATE:
2969 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2970 case NE: case EQ:
2971 case GE: case GT: case GEU: case GTU:
2972 case LE: case LT: case LEU: case LTU:
2973 {
2974 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2975 rtx new1
2976 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2977
2978 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2979 return gen_rtx (code, GET_MODE (x), new0, new1);
2980 }
2981 return x;
2982
2983 case EXPR_LIST:
2984 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2985 if (XEXP (x, 0))
2986 {
2987 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2988 if (new != XEXP (x, 0))
2989 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2990 }
2991
2992 /* ... fall through ... */
2993
2994 case INSN_LIST:
2995 /* Now do eliminations in the rest of the chain. If this was
2996 an EXPR_LIST, this might result in allocating more memory than is
2997 strictly needed, but it simplifies the code. */
2998 if (XEXP (x, 1))
2999 {
3000 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
3001 if (new != XEXP (x, 1))
3002 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
3003 }
3004 return x;
3005
3006 case PRE_INC:
3007 case POST_INC:
3008 case PRE_DEC:
3009 case POST_DEC:
3010 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3011 if (ep->to_rtx == XEXP (x, 0))
3012 {
3013 int size = GET_MODE_SIZE (mem_mode);
3014
3015 /* If more bytes than MEM_MODE are pushed, account for them. */
3016 #ifdef PUSH_ROUNDING
3017 if (ep->to_rtx == stack_pointer_rtx)
3018 size = PUSH_ROUNDING (size);
3019 #endif
3020 if (code == PRE_DEC || code == POST_DEC)
3021 ep->offset += size;
3022 else
3023 ep->offset -= size;
3024 }
3025
3026 /* Fall through to generic unary operation case. */
3027 case STRICT_LOW_PART:
3028 case NEG: case NOT:
3029 case SIGN_EXTEND: case ZERO_EXTEND:
3030 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3031 case FLOAT: case FIX:
3032 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3033 case ABS:
3034 case SQRT:
3035 case FFS:
3036 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3037 if (new != XEXP (x, 0))
3038 return gen_rtx (code, GET_MODE (x), new);
3039 return x;
3040
3041 case SUBREG:
3042 /* Similar to above processing, but preserve SUBREG_WORD.
3043 Convert (subreg (mem)) to (mem) if not paradoxical.
3044 Also, if we have a non-paradoxical (subreg (pseudo)) and the
3045 pseudo didn't get a hard reg, we must replace this with the
3046 eliminated version of the memory location because push_reloads
3047 may do the replacement in certain circumstances. */
3048 if (GET_CODE (SUBREG_REG (x)) == REG
3049 && (GET_MODE_SIZE (GET_MODE (x))
3050 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3051 && reg_equiv_memory_loc != 0
3052 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3053 {
3054 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
3055 mem_mode, insn);
3056
3057 /* If we didn't change anything, we must retain the pseudo. */
3058 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
3059 new = SUBREG_REG (x);
3060 else
3061 {
3062 /* Otherwise, ensure NEW isn't shared in case we have to reload
3063 it. */
3064 new = copy_rtx (new);
3065
3066 /* In this case, we must show that the pseudo is used in this
3067 insn so that delete_output_reload will do the right thing. */
3068 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
3069 && GET_CODE (insn) != INSN_LIST)
3070 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
3071 insn);
3072 }
3073 }
3074 else
3075 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
3076
3077 if (new != XEXP (x, 0))
3078 {
3079 if (GET_CODE (new) == MEM
3080 && (GET_MODE_SIZE (GET_MODE (x))
3081 <= GET_MODE_SIZE (GET_MODE (new)))
3082 #ifdef LOAD_EXTEND_OP
3083 /* On these machines we will be reloading what is
3084 inside the SUBREG if it originally was a pseudo and
3085 the inner and outer modes are both a word or
3086 smaller. So leave the SUBREG then. */
3087 && ! (GET_CODE (SUBREG_REG (x)) == REG
3088 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
3089 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD
3090 && (GET_MODE_SIZE (GET_MODE (x))
3091 > GET_MODE_SIZE (GET_MODE (new)))
3092 && INTEGRAL_MODE_P (GET_MODE (new))
3093 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
3094 #endif
3095 )
3096 {
3097 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
3098 enum machine_mode mode = GET_MODE (x);
3099
3100 if (BYTES_BIG_ENDIAN)
3101 offset += (MIN (UNITS_PER_WORD,
3102 GET_MODE_SIZE (GET_MODE (new)))
3103 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
3104
3105 PUT_MODE (new, mode);
3106 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
3107 return new;
3108 }
3109 else
3110 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
3111 }
3112
3113 return x;
3114
3115 case USE:
3116 /* If using a register that is the source of an elimination we still
3117 think can be performed, note it cannot be performed since we don't
3118 know how this register is used. */
3119 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3120 if (ep->from_rtx == XEXP (x, 0))
3121 ep->can_eliminate = 0;
3122
3123 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3124 if (new != XEXP (x, 0))
3125 return gen_rtx (code, GET_MODE (x), new);
3126 return x;
3127
3128 case CLOBBER:
3129 /* If clobbering a register that is the replacement register for an
3130 elimination we still think can be performed, note that it cannot
3131 be performed. Otherwise, we need not be concerned about it. */
3132 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3133 if (ep->to_rtx == XEXP (x, 0))
3134 ep->can_eliminate = 0;
3135
3136 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3137 if (new != XEXP (x, 0))
3138 return gen_rtx (code, GET_MODE (x), new);
3139 return x;
3140
3141 case ASM_OPERANDS:
3142 {
3143 rtx *temp_vec;
3144 /* Properly handle sharing input and constraint vectors. */
3145 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3146 {
3147 /* When we come to a new vector not seen before,
3148 scan all its elements; keep the old vector if none
3149 of them changes; otherwise, make a copy. */
3150 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3151 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3152 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3153 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
3154 mem_mode, insn);
3155
3156 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3157 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3158 break;
3159
3160 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3161 new_asm_operands_vec = old_asm_operands_vec;
3162 else
3163 new_asm_operands_vec
3164 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3165 }
3166
3167 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3168 if (new_asm_operands_vec == old_asm_operands_vec)
3169 return x;
3170
3171 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3172 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3173 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
3174 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3175 ASM_OPERANDS_SOURCE_FILE (x),
3176 ASM_OPERANDS_SOURCE_LINE (x));
3177 new->volatil = x->volatil;
3178 return new;
3179 }
3180
3181 case SET:
3182 /* Check for setting a register that we know about. */
3183 if (GET_CODE (SET_DEST (x)) == REG)
3184 {
3185 /* See if this is setting the replacement register for an
3186 elimination.
3187
3188 If DEST is the hard frame pointer, we do nothing because we
3189 assume that all assignments to the frame pointer are for
3190 non-local gotos and are being done at a time when they are valid
3191 and do not disturb anything else. Some machines want to
3192 eliminate a fake argument pointer (or even a fake frame pointer)
3193 with either the real frame or the stack pointer. Assignments to
3194 the hard frame pointer must not prevent this elimination. */
3195
3196 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3197 ep++)
3198 if (ep->to_rtx == SET_DEST (x)
3199 && SET_DEST (x) != hard_frame_pointer_rtx)
3200 {
3201 /* If it is being incremented, adjust the offset. Otherwise,
3202 this elimination can't be done. */
3203 rtx src = SET_SRC (x);
3204
3205 if (GET_CODE (src) == PLUS
3206 && XEXP (src, 0) == SET_DEST (x)
3207 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3208 ep->offset -= INTVAL (XEXP (src, 1));
3209 else
3210 ep->can_eliminate = 0;
3211 }
3212
3213 	  /* Now check to see if we are assigning to a register that can be
3214 eliminated. If so, it must be as part of a PARALLEL, since we
3215 will not have been called if this is a single SET. So indicate
3216 that we can no longer eliminate this reg. */
3217 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3218 ep++)
3219 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3220 ep->can_eliminate = 0;
3221 }
3222
3223 /* Now avoid the loop below in this common case. */
3224 {
3225 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3226 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3227
3228 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3229 write a CLOBBER insn. */
3230 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3231 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3232 && GET_CODE (insn) != INSN_LIST)
3233 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3234
3235 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3236 return gen_rtx (SET, VOIDmode, new0, new1);
3237 }
3238
3239 return x;
3240
3241 case MEM:
3242 /* Our only special processing is to pass the mode of the MEM to our
3243 recursive call and copy the flags. While we are here, handle this
3244 case more efficiently. */
3245 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3246 if (new != XEXP (x, 0))
3247 {
3248 new = gen_rtx (MEM, GET_MODE (x), new);
3249 new->volatil = x->volatil;
3250 new->unchanging = x->unchanging;
3251 new->in_struct = x->in_struct;
3252 return new;
3253 }
3254 else
3255 return x;
3256 }
3257
3258 /* Process each of our operands recursively. If any have changed, make a
3259 copy of the rtx. */
3260 fmt = GET_RTX_FORMAT (code);
3261 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3262 {
3263 if (*fmt == 'e')
3264 {
3265 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3266 if (new != XEXP (x, i) && ! copied)
3267 {
3268 rtx new_x = rtx_alloc (code);
3269 bcopy ((char *) x, (char *) new_x,
3270 (sizeof (*new_x) - sizeof (new_x->fld)
3271 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3272 x = new_x;
3273 copied = 1;
3274 }
3275 XEXP (x, i) = new;
3276 }
3277 else if (*fmt == 'E')
3278 {
3279 int copied_vec = 0;
3280 for (j = 0; j < XVECLEN (x, i); j++)
3281 {
3282 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3283 if (new != XVECEXP (x, i, j) && ! copied_vec)
3284 {
3285 rtvec new_v = gen_rtvec_vv (XVECLEN (x, i),
3286 XVEC (x, i)->elem);
3287 if (! copied)
3288 {
3289 rtx new_x = rtx_alloc (code);
3290 bcopy ((char *) x, (char *) new_x,
3291 (sizeof (*new_x) - sizeof (new_x->fld)
3292 + (sizeof (new_x->fld[0])
3293 * GET_RTX_LENGTH (code))));
3294 x = new_x;
3295 copied = 1;
3296 }
3297 XVEC (x, i) = new_v;
3298 copied_vec = 1;
3299 }
3300 XVECEXP (x, i, j) = new;
3301 }
3302 }
3303 }
3304
3305 return x;
3306 }
3307 \f
3308 /* Scan INSN and eliminate all eliminable registers in it.
3309
3310 If REPLACE is nonzero, do the replacement destructively. Also
3311    delete the insn as dead if it is setting an eliminable register.
3312
3313 If REPLACE is zero, do all our allocations in reload_obstack.
3314
3315 If no eliminations were done and this insn doesn't require any elimination
3316 processing (these are not identical conditions: it might be updating sp,
3317 but not referencing fp; this needs to be seen during reload_as_needed so
3318 that the offset between fp and sp can be taken into consideration), zero
3319 is returned. Otherwise, 1 is returned. */
3320
3321 static int
3322 eliminate_regs_in_insn (insn, replace)
3323 rtx insn;
3324 int replace;
3325 {
3326 rtx old_body = PATTERN (insn);
3327 rtx old_set = single_set (insn);
3328 rtx new_body;
3329 int val = 0;
3330 struct elim_table *ep;
3331
3332 if (! replace)
3333 push_obstacks (&reload_obstack, &reload_obstack);
3334
3335 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3336 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3337 {
3338 /* Check for setting an eliminable register. */
3339 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3340 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3341 {
3342 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3343 /* If this is setting the frame pointer register to the
3344 hardware frame pointer register and this is an elimination
3345 that will be done (tested above), this insn is really
3346 adjusting the frame pointer downward to compensate for
3347 the adjustment done before a nonlocal goto. */
3348 if (ep->from == FRAME_POINTER_REGNUM
3349 && ep->to == HARD_FRAME_POINTER_REGNUM)
3350 {
3351 rtx src = SET_SRC (old_set);
3352 int offset, ok = 0;
3353 rtx prev_insn, prev_set;
3354
3355 if (src == ep->to_rtx)
3356 offset = 0, ok = 1;
3357 else if (GET_CODE (src) == PLUS
3358 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3359 offset = INTVAL (XEXP (src, 0)), ok = 1;
3360 else if ((prev_insn = prev_nonnote_insn (insn)) != 0
3361 && (prev_set = single_set (prev_insn)) != 0
3362 && rtx_equal_p (SET_DEST (prev_set), src))
3363 {
3364 src = SET_SRC (prev_set);
3365 if (src == ep->to_rtx)
3366 offset = 0, ok = 1;
3367 else if (GET_CODE (src) == PLUS
3368 && GET_CODE (XEXP (src, 0)) == CONST_INT
3369 && XEXP (src, 1) == ep->to_rtx)
3370 offset = INTVAL (XEXP (src, 0)), ok = 1;
3371 else if (GET_CODE (src) == PLUS
3372 && GET_CODE (XEXP (src, 1)) == CONST_INT
3373 && XEXP (src, 0) == ep->to_rtx)
3374 offset = INTVAL (XEXP (src, 1)), ok = 1;
3375 }
3376
3377 if (ok)
3378 {
3379 if (replace)
3380 {
3381 rtx src
3382 = plus_constant (ep->to_rtx, offset - ep->offset);
3383
3384 /* First see if this insn remains valid when we
3385 make the change. If not, keep the INSN_CODE
3386 		     the same and let reload fix it up. */
3387 validate_change (insn, &SET_SRC (old_set), src, 1);
3388 validate_change (insn, &SET_DEST (old_set),
3389 ep->to_rtx, 1);
3390 if (! apply_change_group ())
3391 {
3392 SET_SRC (old_set) = src;
3393 SET_DEST (old_set) = ep->to_rtx;
3394 }
3395 }
3396
3397 val = 1;
3398 goto done;
3399 }
3400 }
3401 #endif
3402
3403 /* In this case this insn isn't serving a useful purpose. We
3404 will delete it in reload_as_needed once we know that this
3405 elimination is, in fact, being done.
3406
3407 If REPLACE isn't set, we can't delete this insn, but needn't
3408 process it since it won't be used unless something changes. */
3409 if (replace)
3410 delete_dead_insn (insn);
3411 val = 1;
3412 goto done;
3413 }
3414
3415 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3416 in the insn is the negative of the offset in FROM. Substitute
3417 (set (reg) (reg to)) for the insn and change its code.
3418
3419 	 We have to do this here, rather than in eliminate_regs, so that we can
3420 change the insn code. */
3421
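      /* For example, if the current offset for eliminating FROM into TO
	 is 16, an insn of the form
	    (set (reg R) (plus (reg FROM) (const_int -16)))
	 is rewritten right here as
	    (set (reg R) (reg TO))
	 (the register names and offset are illustrative only).  */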
3422 if (GET_CODE (SET_SRC (old_set)) == PLUS
3423 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3424 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3425 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3426 ep++)
3427 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3428 && ep->can_eliminate)
3429 {
3430 /* We must stop at the first elimination that will be used.
3431 If this one would replace the PLUS with a REG, do it
3432 now. Otherwise, quit the loop and let eliminate_regs
3433 do its normal replacement. */
3434 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3435 {
3436 /* We assume here that we don't need a PARALLEL of
3437 any CLOBBERs for this assignment. There's not
3438 much we can do if we do need it. */
3439 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3440 SET_DEST (old_set), ep->to_rtx);
3441 INSN_CODE (insn) = -1;
3442 val = 1;
3443 goto done;
3444 }
3445
3446 break;
3447 }
3448 }
3449
3450 old_asm_operands_vec = 0;
3451
3452 /* Replace the body of this insn with a substituted form. If we changed
3453 something, return non-zero.
3454
3455 If we are replacing a body that was a (set X (plus Y Z)), try to
3456 re-recognize the insn. We do this in case we had a simple addition
3457 but now can do this as a load-address. This saves an insn in this
3458 common case. */
3459
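  /* E.g., a body of (set (reg R) (plus (reg fp) (const_int 8))) might
     become (set (reg R) (plus (reg sp) (const_int 24))) after
     elimination, which many targets can recognize as a single
     load-address insn (the registers and offsets are illustrative).  */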
3460 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3461 if (new_body != old_body)
3462 {
3463 /* If we aren't replacing things permanently and we changed something,
3464 make another copy to ensure that all the RTL is new. Otherwise
3465 	 things can go wrong if find_reloads swaps commutative operands
3466 and one is inside RTL that has been copied while the other is not. */
3467
3468 /* Don't copy an asm_operands because (1) there's no need and (2)
3469 copy_rtx can't do it properly when there are multiple outputs. */
3470 if (! replace && asm_noperands (old_body) < 0)
3471 new_body = copy_rtx (new_body);
3472
3473 /* If we had a move insn but now we don't, rerecognize it. This will
3474 cause spurious re-recognition if the old move had a PARALLEL since
3475 the new one still will, but we can't call single_set without
3476 having put NEW_BODY into the insn and the re-recognition won't
3477 hurt in this rare case. */
3478 if (old_set != 0
3479 && ((GET_CODE (SET_SRC (old_set)) == REG
3480 && (GET_CODE (new_body) != SET
3481 || GET_CODE (SET_SRC (new_body)) != REG))
3482 /* If this was a load from or store to memory, compare
3483 the MEM in recog_operand to the one in the insn. If they
3484 are not equal, then rerecognize the insn. */
3485 || (old_set != 0
3486 && ((GET_CODE (SET_SRC (old_set)) == MEM
3487 && SET_SRC (old_set) != recog_operand[1])
3488 || (GET_CODE (SET_DEST (old_set)) == MEM
3489 && SET_DEST (old_set) != recog_operand[0])))
3490 /* If this was an add insn before, rerecognize. */
3491 || GET_CODE (SET_SRC (old_set)) == PLUS))
3492 {
3493 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3494 /* If recognition fails, store the new body anyway.
3495 It's normal to have recognition failures here
3496 due to bizarre memory addresses; reloading will fix them. */
3497 PATTERN (insn) = new_body;
3498 }
3499 else
3500 PATTERN (insn) = new_body;
3501
3502 val = 1;
3503 }
3504
3505 /* Loop through all elimination pairs. See if any have changed and
3506 recalculate the number not at initial offset.
3507
3508 Compute the maximum offset (minimum offset if the stack does not
3509 grow downward) for each elimination pair.
3510
3511      We also detect cases where register elimination cannot be done,
3512 namely, if a register would be both changed and referenced outside a MEM
3513 in the resulting insn since such an insn is often undefined and, even if
3514 not, we cannot know what meaning will be given to it. Note that it is
3515 valid to have a register used in an address in an insn that changes it
3516 (presumably with a pre- or post-increment or decrement).
3517
3518 If anything changes, return nonzero. */
3519
3520 num_not_at_initial_offset = 0;
3521 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3522 {
3523 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3524 ep->can_eliminate = 0;
3525
3526 ep->ref_outside_mem = 0;
3527
3528 if (ep->previous_offset != ep->offset)
3529 val = 1;
3530
3531 ep->previous_offset = ep->offset;
3532 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3533 num_not_at_initial_offset++;
3534
3535 #ifdef STACK_GROWS_DOWNWARD
3536 ep->max_offset = MAX (ep->max_offset, ep->offset);
3537 #else
3538 ep->max_offset = MIN (ep->max_offset, ep->offset);
3539 #endif
3540 }
3541
3542 done:
3543 /* If we changed something, perform elimination in REG_NOTES. This is
3544 needed even when REPLACE is zero because a REG_DEAD note might refer
3545 to a register that we eliminate and could cause a different number
3546 of spill registers to be needed in the final reload pass than in
3547 the pre-passes. */
3548 if (val && REG_NOTES (insn) != 0)
3549 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3550
3551 if (! replace)
3552 pop_obstacks ();
3553
3554 return val;
3555 }
3556
3557 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3558 replacement we currently believe is valid, mark it as not eliminable if X
3559 modifies DEST in any way other than by adding a constant integer to it.
3560
3561    If DEST is the hard frame pointer, we do nothing because we assume that
3562 all assignments to the hard frame pointer are nonlocal gotos and are being
3563 done at a time when they are valid and do not disturb anything else.
3564 Some machines want to eliminate a fake argument pointer with either the
3565 frame or stack pointer. Assignments to the hard frame pointer must not
3566 prevent this elimination.
3567
3568 Called via note_stores from reload before starting its passes to scan
3569 the insns of the function. */
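/* For example, (set (reg sp) (plus (reg sp) (const_int 16))) leaves any
   elimination into sp alone, whereas (set (reg sp) (reg R)) marks every
   elimination whose replacement register is sp as not eliminable (the
   register names here are illustrative only).  */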
3570
3571 static void
3572 mark_not_eliminable (dest, x)
3573 rtx dest;
3574 rtx x;
3575 {
3576 register int i;
3577
3578 /* A SUBREG of a hard register here is just changing its mode. We should
3579 not see a SUBREG of an eliminable hard register, but check just in
3580 case. */
3581 if (GET_CODE (dest) == SUBREG)
3582 dest = SUBREG_REG (dest);
3583
3584 if (dest == hard_frame_pointer_rtx)
3585 return;
3586
3587 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3588 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3589 && (GET_CODE (x) != SET
3590 || GET_CODE (SET_SRC (x)) != PLUS
3591 || XEXP (SET_SRC (x), 0) != dest
3592 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3593 {
3594 reg_eliminate[i].can_eliminate_previous
3595 = reg_eliminate[i].can_eliminate = 0;
3596 num_eliminable--;
3597 }
3598 }
3599 \f
3600 /* Kick all pseudos out of hard register REGNO.
3601 If GLOBAL is nonzero, try to find someplace else to put them.
3602 If DUMPFILE is nonzero, log actions taken on that file.
3603
3604 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3605    because we found we can't eliminate some register.  In that case, no pseudos
3606 are allowed to be in the register, even if they are only in a block that
3607 doesn't require spill registers, unlike the case when we are spilling this
3608 hard reg to produce another spill register.
3609
3610 Return nonzero if any pseudos needed to be kicked out. */
3611
3612 static int
3613 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3614 register int regno;
3615 int global;
3616 FILE *dumpfile;
3617 int cant_eliminate;
3618 {
3619 enum reg_class class = REGNO_REG_CLASS (regno);
3620 int something_changed = 0;
3621 register int i;
3622
3623 SET_HARD_REG_BIT (forbidden_regs, regno);
3624
3625 if (cant_eliminate)
3626 regs_ever_live[regno] = 1;
3627
3628 /* Spill every pseudo reg that was allocated to this reg
3629 or to something that overlaps this reg. */
3630
3631 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3632 if (reg_renumber[i] >= 0
3633 && reg_renumber[i] <= regno
3634 && (reg_renumber[i]
3635 + HARD_REGNO_NREGS (reg_renumber[i],
3636 PSEUDO_REGNO_MODE (i))
3637 > regno))
3638 {
3639 /* If this register belongs solely to a basic block which needed no
3640 spilling of any class that this register is contained in,
3641 leave it be, unless we are spilling this register because
3642 it was a hard register that can't be eliminated. */
3643
3644 if (! cant_eliminate
3645 && basic_block_needs[0]
3646 && reg_basic_block[i] >= 0
3647 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3648 {
3649 enum reg_class *p;
3650
3651 for (p = reg_class_superclasses[(int) class];
3652 *p != LIM_REG_CLASSES; p++)
3653 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3654 break;
3655
3656 if (*p == LIM_REG_CLASSES)
3657 continue;
3658 }
3659
3660 /* Mark it as no longer having a hard register home. */
3661 reg_renumber[i] = -1;
3662 /* We will need to scan everything again. */
3663 something_changed = 1;
3664 if (global)
3665 retry_global_alloc (i, forbidden_regs);
3666
3667 alter_reg (i, regno);
3668 if (dumpfile)
3669 {
3670 if (reg_renumber[i] == -1)
3671 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3672 else
3673 fprintf (dumpfile, " Register %d now in %d.\n\n",
3674 i, reg_renumber[i]);
3675 }
3676 }
3677 for (i = 0; i < scratch_list_length; i++)
3678 {
3679 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3680 {
3681 if (! cant_eliminate && basic_block_needs[0]
3682 && ! basic_block_needs[(int) class][scratch_block[i]])
3683 {
3684 enum reg_class *p;
3685
3686 for (p = reg_class_superclasses[(int) class];
3687 *p != LIM_REG_CLASSES; p++)
3688 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3689 break;
3690
3691 if (*p == LIM_REG_CLASSES)
3692 continue;
3693 }
3694 PUT_CODE (scratch_list[i], SCRATCH);
3695 scratch_list[i] = 0;
3696 something_changed = 1;
3697 continue;
3698 }
3699 }
3700
3701 return something_changed;
3702 }
3703 \f
3704 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3705 Also mark any hard registers used to store user variables as
3706 forbidden from being used for spill registers. */
3707
3708 static void
3709 scan_paradoxical_subregs (x)
3710 register rtx x;
3711 {
3712 register int i;
3713 register char *fmt;
3714 register enum rtx_code code = GET_CODE (x);
3715
3716 switch (code)
3717 {
3718 case REG:
3719 #ifdef SMALL_REGISTER_CLASSES
3720 if (SMALL_REGISTER_CLASSES
3721 && REGNO (x) < FIRST_PSEUDO_REGISTER
3722 && REG_USERVAR_P (x))
3723 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3724 #endif
3725 return;
3726
3727 case CONST_INT:
3728 case CONST:
3729 case SYMBOL_REF:
3730 case LABEL_REF:
3731 case CONST_DOUBLE:
3732 case CC0:
3733 case PC:
3734 case USE:
3735 case CLOBBER:
3736 return;
3737
3738 case SUBREG:
3739 if (GET_CODE (SUBREG_REG (x)) == REG
3740 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3741 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3742 = GET_MODE_SIZE (GET_MODE (x));
3743 return;
3744 }
3745
3746 fmt = GET_RTX_FORMAT (code);
3747 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3748 {
3749 if (fmt[i] == 'e')
3750 scan_paradoxical_subregs (XEXP (x, i));
3751 else if (fmt[i] == 'E')
3752 {
3753 register int j;
3754 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3755 scan_paradoxical_subregs (XVECEXP (x, i, j));
3756 }
3757 }
3758 }
3759 \f
3760 static int
3761 hard_reg_use_compare (p1p, p2p)
3762 const GENERIC_PTR p1p;
3763 const GENERIC_PTR p2p;
3764 {
3765 struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p,
3766 *p2 = (struct hard_reg_n_uses *)p2p;
3767 int tem = p1->uses - p2->uses;
3768 if (tem != 0) return tem;
3769 /* If regs are equally good, sort by regno,
3770 so that the results of qsort leave nothing to chance. */
3771 return p1->regno - p2->regno;
3772 }
3773
3774 /* Choose the order to consider regs for use as reload registers
3775 based on how much trouble would be caused by spilling one.
3776 Store them in order of decreasing preference in potential_reload_regs. */
3777
3778 static void
3779 order_regs_for_reload (global)
3780 int global;
3781 {
3782 register int i;
3783 register int o = 0;
3784 int large = 0;
3785
3786 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3787
3788 CLEAR_HARD_REG_SET (bad_spill_regs);
3789
3790 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3791 potential_reload_regs[i] = -1;
3792
3793 /* Count number of uses of each hard reg by pseudo regs allocated to it
3794 and then order them by decreasing use. */
3795
3796 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3797 {
3798 hard_reg_n_uses[i].uses = 0;
3799 hard_reg_n_uses[i].regno = i;
3800 }
3801
3802 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3803 {
3804 int regno = reg_renumber[i];
3805 if (regno >= 0)
3806 {
3807 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3808 while (regno < lim)
3809 {
3810 /* If allocated by local-alloc, show more uses since
3811 we're not going to be able to reallocate it, but
3812 we might if allocated by global alloc. */
3813 if (global && reg_allocno[i] < 0)
3814 hard_reg_n_uses[regno].uses += (reg_n_refs[i] + 1) / 2;
3815
3816 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3817 }
3818 }
3819 large += reg_n_refs[i];
3820 }
3821
3822 /* Now fixed registers (which cannot safely be used for reloading)
3823 get a very high use count so they will be considered least desirable.
3824 Registers used explicitly in the rtl code are almost as bad. */
3825
3826 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3827 {
3828 if (fixed_regs[i])
3829 {
3830 hard_reg_n_uses[i].uses += 2 * large + 2;
3831 SET_HARD_REG_BIT (bad_spill_regs, i);
3832 }
3833 else if (regs_explicitly_used[i])
3834 {
3835 hard_reg_n_uses[i].uses += large + 1;
3836 /* ??? We are doing this here because of the potential that
3837 bad code may be generated if a register explicitly used in
3838 an insn was used as a spill register for that insn. But
3839 	     not using these as spill registers may lose on some machines.
3840 We'll have to see how this works out. */
3841 #ifdef SMALL_REGISTER_CLASSES
3842 if (! SMALL_REGISTER_CLASSES)
3843 #endif
3844 SET_HARD_REG_BIT (bad_spill_regs, i);
3845 }
3846 }
3847 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3848 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3849
3850 #ifdef ELIMINABLE_REGS
3851 /* If registers other than the frame pointer are eliminable, mark them as
3852 poor choices. */
3853 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3854 {
3855 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3856 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3857 }
3858 #endif
3859
3860 /* Prefer registers not so far used, for use in temporary loading.
3861 Among them, if REG_ALLOC_ORDER is defined, use that order.
3862 Otherwise, prefer registers not preserved by calls. */
3863
3864 #ifdef REG_ALLOC_ORDER
3865 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3866 {
3867 int regno = reg_alloc_order[i];
3868
3869 if (hard_reg_n_uses[regno].uses == 0)
3870 potential_reload_regs[o++] = regno;
3871 }
3872 #else
3873 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3874 {
3875 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3876 potential_reload_regs[o++] = i;
3877 }
3878 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3879 {
3880 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3881 potential_reload_regs[o++] = i;
3882 }
3883 #endif
3884
3885 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3886 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3887
3888 /* Now add the regs that are already used,
3889 preferring those used less often. The fixed and otherwise forbidden
3890 registers will be at the end of this list. */
3891
3892 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3893 if (hard_reg_n_uses[i].uses != 0)
3894 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3895 }
3896 \f
3897 /* Used in reload_as_needed to sort the spilled regs. */
3898
3899 static int
3900 compare_spill_regs (r1p, r2p)
3901 const GENERIC_PTR r1p;
3902 const GENERIC_PTR r2p;
3903 {
3904 short r1 = *(short *)r1p, r2 = *(short *)r2p;
3905 return r1 - r2;
3906 }
3907
3908 /* Reload pseudo-registers into hard regs around each insn as needed.
3909    Additional register load insns are output before the insn that needs them,
3910 and perhaps store insns after insns that modify the reloaded pseudo reg.
3911
3912 reg_last_reload_reg and reg_reloaded_contents keep track of
3913 which registers are already available in reload registers.
3914 We update these for the reloads that we perform,
3915 as the insns are scanned. */
3916
3917 static void
3918 reload_as_needed (first, live_known)
3919 rtx first;
3920 int live_known;
3921 {
3922 register rtx insn;
3923 register int i;
3924 int this_block = 0;
3925 rtx x;
3926 rtx after_call = 0;
3927
3928 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3929 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
3930 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3931 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
3932 reg_has_output_reload = (char *) alloca (max_regno);
3933 for (i = 0; i < n_spills; i++)
3934 {
3935 reg_reloaded_contents[i] = -1;
3936 reg_reloaded_insn[i] = 0;
3937 }
3938
3939 /* Reset all offsets on eliminable registers to their initial values. */
3940 #ifdef ELIMINABLE_REGS
3941 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3942 {
3943 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3944 reg_eliminate[i].initial_offset);
3945 reg_eliminate[i].previous_offset
3946 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3947 }
3948 #else
3949 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3950 reg_eliminate[0].previous_offset
3951 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3952 #endif
3953
3954 num_not_at_initial_offset = 0;
3955
3956 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
3957 pack registers with group needs. */
3958 if (n_spills > 1)
3959 {
3960 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
3961 for (i = 0; i < n_spills; i++)
3962 spill_reg_order[spill_regs[i]] = i;
3963 }
3964
3965 for (insn = first; insn;)
3966 {
3967 register rtx next = NEXT_INSN (insn);
3968
3969 /* Notice when we move to a new basic block. */
3970 if (live_known && this_block + 1 < n_basic_blocks
3971 && insn == basic_block_head[this_block+1])
3972 ++this_block;
3973
3974 /* If we pass a label, copy the offsets from the label information
3975 into the current offsets of each elimination. */
3976 if (GET_CODE (insn) == CODE_LABEL)
3977 {
3978 num_not_at_initial_offset = 0;
3979 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3980 {
3981 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3982 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3983 if (reg_eliminate[i].can_eliminate
3984 && (reg_eliminate[i].offset
3985 != reg_eliminate[i].initial_offset))
3986 num_not_at_initial_offset++;
3987 }
3988 }
3989
3990 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3991 {
3992 rtx avoid_return_reg = 0;
3993 rtx oldpat = PATTERN (insn);
3994
3995 #ifdef SMALL_REGISTER_CLASSES
3996 /* Set avoid_return_reg if this is an insn
3997 that might use the value of a function call. */
3998 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
3999 {
4000 if (GET_CODE (PATTERN (insn)) == SET)
4001 after_call = SET_DEST (PATTERN (insn));
4002 else if (GET_CODE (PATTERN (insn)) == PARALLEL
4003 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
4004 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
4005 else
4006 after_call = 0;
4007 }
4008 else if (SMALL_REGISTER_CLASSES
4009 && after_call != 0
4010 && !(GET_CODE (PATTERN (insn)) == SET
4011 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
4012 {
4013 if (reg_referenced_p (after_call, PATTERN (insn)))
4014 avoid_return_reg = after_call;
4015 after_call = 0;
4016 }
4017 #endif /* SMALL_REGISTER_CLASSES */
4018
4019 	  /* If this is a USE or CLOBBER of a MEM, ensure that any
4020 references to eliminable registers have been removed. */
4021
4022 if ((GET_CODE (PATTERN (insn)) == USE
4023 || GET_CODE (PATTERN (insn)) == CLOBBER)
4024 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
4025 XEXP (XEXP (PATTERN (insn), 0), 0)
4026 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4027 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
4028
4029 /* If we need to do register elimination processing, do so.
4030 This might delete the insn, in which case we are done. */
4031 if (num_eliminable && GET_MODE (insn) == QImode)
4032 {
4033 eliminate_regs_in_insn (insn, 1);
4034 if (GET_CODE (insn) == NOTE)
4035 {
4036 insn = next;
4037 continue;
4038 }
4039 }
4040
4041 if (GET_MODE (insn) == VOIDmode)
4042 n_reloads = 0;
4043 /* First find the pseudo regs that must be reloaded for this insn.
4044 This info is returned in the tables reload_... (see reload.h).
4045 Also modify the body of INSN by substituting RELOAD
4046 rtx's for those pseudo regs. */
4047 else
4048 {
4049 bzero (reg_has_output_reload, max_regno);
4050 CLEAR_HARD_REG_SET (reg_is_output_reload);
4051
4052 find_reloads (insn, 1, spill_indirect_levels, live_known,
4053 spill_reg_order);
4054 }
4055
4056 if (n_reloads > 0)
4057 {
4058 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
4059 rtx p;
4060 int class;
4061
4062 /* If this block has not had spilling done for a
4063 		 particular class and we have any non-optionals that need a
4064 spill reg in that class, abort. */
4065
4066 for (class = 0; class < N_REG_CLASSES; class++)
4067 if (basic_block_needs[class] != 0
4068 && basic_block_needs[class][this_block] == 0)
4069 for (i = 0; i < n_reloads; i++)
4070 if (class == (int) reload_reg_class[i]
4071 && reload_reg_rtx[i] == 0
4072 && ! reload_optional[i]
4073 && (reload_in[i] != 0 || reload_out[i] != 0
4074 || reload_secondary_p[i] != 0))
4075 fatal_insn ("Non-optional registers need a spill register", insn);
4076
4077 /* Now compute which reload regs to reload them into. Perhaps
4078 reusing reload regs from previous insns, or else output
4079 load insns to reload them. Maybe output store insns too.
4080 Record the choices of reload reg in reload_reg_rtx. */
4081 choose_reload_regs (insn, avoid_return_reg);
4082
4083 #ifdef SMALL_REGISTER_CLASSES
4084 /* Merge any reloads that we didn't combine for fear of
4085 increasing the number of spill registers needed but now
4086 discover can be safely merged. */
4087 if (SMALL_REGISTER_CLASSES)
4088 merge_assigned_reloads (insn);
4089 #endif
4090
4091 /* Generate the insns to reload operands into or out of
4092 their reload regs. */
4093 emit_reload_insns (insn);
4094
4095 /* Substitute the chosen reload regs from reload_reg_rtx
4096 into the insn's body (or perhaps into the bodies of other
4097 		 load and store insns that we just made for reloading
4098 and that we moved the structure into). */
4099 subst_reloads ();
4100
4101 /* If this was an ASM, make sure that all the reload insns
4102 we have generated are valid. If not, give an error
4103 and delete them. */
4104
4105 if (asm_noperands (PATTERN (insn)) >= 0)
4106 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4107 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
4108 && (recog_memoized (p) < 0
4109 || (insn_extract (p),
4110 ! constrain_operands (INSN_CODE (p), 1))))
4111 {
4112 error_for_asm (insn,
4113 "`asm' operand requires impossible reload");
4114 PUT_CODE (p, NOTE);
4115 NOTE_SOURCE_FILE (p) = 0;
4116 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
4117 }
4118 }
4119 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4120 is no longer validly lying around to save a future reload.
4121 Note that this does not detect pseudos that were reloaded
4122 for this insn in order to be stored in
4123 (obeying register constraints). That is correct; such reload
4124 registers ARE still valid. */
4125 note_stores (oldpat, forget_old_reloads_1);
4126
4127 /* There may have been CLOBBER insns placed after INSN. So scan
4128 between INSN and NEXT and use them to forget old reloads. */
4129 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
4130 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
4131 note_stores (PATTERN (x), forget_old_reloads_1);
4132
4133 #ifdef AUTO_INC_DEC
4134 /* Likewise for regs altered by auto-increment in this insn.
4135 But note that the reg-notes are not changed by reloading:
4136 they still contain the pseudo-regs, not the spill regs. */
4137 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4138 if (REG_NOTE_KIND (x) == REG_INC)
4139 {
4140 /* See if this pseudo reg was reloaded in this insn.
4141 If so, its last-reload info is still valid
4142 because it is based on this insn's reload. */
4143 for (i = 0; i < n_reloads; i++)
4144 if (reload_out[i] == XEXP (x, 0))
4145 break;
4146
4147 if (i == n_reloads)
4148 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
4149 }
4150 #endif
4151 }
4152 /* A reload reg's contents are unknown after a label. */
4153 if (GET_CODE (insn) == CODE_LABEL)
4154 for (i = 0; i < n_spills; i++)
4155 {
4156 reg_reloaded_contents[i] = -1;
4157 reg_reloaded_insn[i] = 0;
4158 }
4159
4160 /* Don't assume a reload reg is still good after a call insn
4161 if it is a call-used reg. */
4162 else if (GET_CODE (insn) == CALL_INSN)
4163 for (i = 0; i < n_spills; i++)
4164 if (call_used_regs[spill_regs[i]])
4165 {
4166 reg_reloaded_contents[i] = -1;
4167 reg_reloaded_insn[i] = 0;
4168 }
4169
4170 /* In case registers overlap, allow certain insns to invalidate
4171 particular hard registers. */
4172
4173 #ifdef INSN_CLOBBERS_REGNO_P
4174 for (i = 0 ; i < n_spills ; i++)
4175 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
4176 {
4177 reg_reloaded_contents[i] = -1;
4178 reg_reloaded_insn[i] = 0;
4179 }
4180 #endif
4181
4182 insn = next;
4183
4184 #ifdef USE_C_ALLOCA
4185 alloca (0);
4186 #endif
4187 }
4188 }
4189
4190 /* Discard all record of any value reloaded from X,
4191 or reloaded in X from someplace else;
4192 unless X is an output reload reg of the current insn.
4193
4194 X may be a hard reg (the reload reg)
4195 or it may be a pseudo reg that was reloaded from. */
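/* For instance, if spill reg 3 currently holds a copy of pseudo 105, a
   store into reg 3 discards that record, and a store into pseudo 105
   itself discards the record that its value is lying around in a reload
   reg, unless the store is the output reload of the current insn, which
   is what establishes such a copy in the first place.  (The register
   numbers are illustrative only.)  */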
4196
4197 static void
4198 forget_old_reloads_1 (x, ignored)
4199 rtx x;
4200 rtx ignored;
4201 {
4202 register int regno;
4203 int nr;
4204 int offset = 0;
4205
4206 /* note_stores does give us subregs of hard regs. */
4207 while (GET_CODE (x) == SUBREG)
4208 {
4209 offset += SUBREG_WORD (x);
4210 x = SUBREG_REG (x);
4211 }
4212
4213 if (GET_CODE (x) != REG)
4214 return;
4215
4216 regno = REGNO (x) + offset;
4217
4218 if (regno >= FIRST_PSEUDO_REGISTER)
4219 nr = 1;
4220 else
4221 {
4222 int i;
4223 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4224 /* Storing into a spilled-reg invalidates its contents.
4225 This can happen if a block-local pseudo is allocated to that reg
4226 and it wasn't spilled because this block's total need is 0.
4227 Then some insn might have an optional reload and use this reg. */
4228 for (i = 0; i < nr; i++)
4229 if (spill_reg_order[regno + i] >= 0
4230 /* But don't do this if the reg actually serves as an output
4231 reload reg in the current instruction. */
4232 && (n_reloads == 0
4233 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4234 {
4235 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4236 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4237 }
4238 }
4239
4240 /* Since value of X has changed,
4241 forget any value previously copied from it. */
4242
4243 while (nr-- > 0)
4244 /* But don't forget a copy if this is the output reload
4245 that establishes the copy's validity. */
4246 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4247 reg_last_reload_reg[regno + nr] = 0;
4248 }
4249 \f
4250 /* For each reload, the mode of the reload register. */
4251 static enum machine_mode reload_mode[MAX_RELOADS];
4252
4253 /* For each reload, the largest number of registers it will require. */
4254 static int reload_nregs[MAX_RELOADS];
4255
4256 /* Comparison function for qsort to decide which of two reloads
4257 should be handled first. *P1 and *P2 are the reload numbers. */
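/* For example, a required reload whose class contains just one hard
   register sorts ahead of an optional GENERAL_REGS reload, so the
   scarcer resource is claimed first (the class names are illustrative).  */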
4258
4259 static int
4260 reload_reg_class_lower (r1p, r2p)
4261 const GENERIC_PTR r1p;
4262 const GENERIC_PTR r2p;
4263 {
4264 register int r1 = *(short *)r1p, r2 = *(short *)r2p;
4265 register int t;
4266
4267 /* Consider required reloads before optional ones. */
4268 t = reload_optional[r1] - reload_optional[r2];
4269 if (t != 0)
4270 return t;
4271
4272 /* Count all solitary classes before non-solitary ones. */
4273 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4274 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4275 if (t != 0)
4276 return t;
4277
4278 /* Aside from solitaires, consider all multi-reg groups first. */
4279 t = reload_nregs[r2] - reload_nregs[r1];
4280 if (t != 0)
4281 return t;
4282
4283 /* Consider reloads in order of increasing reg-class number. */
4284 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4285 if (t != 0)
4286 return t;
4287
4288 /* If reloads are equally urgent, sort by reload number,
4289 so that the results of qsort leave nothing to chance. */
4290 return r1 - r2;
4291 }
4292 \f
4293 /* The following HARD_REG_SETs indicate when each hard register is
4294 used for a reload of various parts of the current insn. */
4295
4296 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4297 static HARD_REG_SET reload_reg_used;
4298 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4299 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4300 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4301 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4302 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4303 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4304 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4305 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4306 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4307 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4308 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4309 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4310 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4311 static HARD_REG_SET reload_reg_used_in_op_addr;
4312 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4313 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4314 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4315 static HARD_REG_SET reload_reg_used_in_insn;
4316 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4317 static HARD_REG_SET reload_reg_used_in_other_addr;
4318
4319 /* If reg is in use as a reload reg for any sort of reload. */
4320 static HARD_REG_SET reload_reg_used_at_all;
4321
4322 /* If reg is in use as an inherited reload.  We just mark the first register
4323 in the group. */
4324 static HARD_REG_SET reload_reg_used_for_inherit;
4325
4326 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4327 TYPE. MODE is used to indicate how many consecutive regs are
4328 actually used. */
4329
4330 static void
4331 mark_reload_reg_in_use (regno, opnum, type, mode)
4332 int regno;
4333 int opnum;
4334 enum reload_type type;
4335 enum machine_mode mode;
4336 {
4337 int nregs = HARD_REGNO_NREGS (regno, mode);
4338 int i;
4339
4340 for (i = regno; i < nregs + regno; i++)
4341 {
4342 switch (type)
4343 {
4344 case RELOAD_OTHER:
4345 SET_HARD_REG_BIT (reload_reg_used, i);
4346 break;
4347
4348 case RELOAD_FOR_INPUT_ADDRESS:
4349 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4350 break;
4351
4352 case RELOAD_FOR_INPADDR_ADDRESS:
4353 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4354 break;
4355
4356 case RELOAD_FOR_OUTPUT_ADDRESS:
4357 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4358 break;
4359
4360 case RELOAD_FOR_OUTADDR_ADDRESS:
4361 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4362 break;
4363
4364 case RELOAD_FOR_OPERAND_ADDRESS:
4365 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4366 break;
4367
4368 case RELOAD_FOR_OPADDR_ADDR:
4369 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4370 break;
4371
4372 case RELOAD_FOR_OTHER_ADDRESS:
4373 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4374 break;
4375
4376 case RELOAD_FOR_INPUT:
4377 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4378 break;
4379
4380 case RELOAD_FOR_OUTPUT:
4381 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4382 break;
4383
4384 case RELOAD_FOR_INSN:
4385 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4386 break;
4387 }
4388
4389 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4390 }
4391 }
4392
4393 /* Similarly, but show REGNO is no longer in use for a reload. */
4394
4395 static void
4396 clear_reload_reg_in_use (regno, opnum, type, mode)
4397 int regno;
4398 int opnum;
4399 enum reload_type type;
4400 enum machine_mode mode;
4401 {
4402 int nregs = HARD_REGNO_NREGS (regno, mode);
4403 int i;
4404
4405 for (i = regno; i < nregs + regno; i++)
4406 {
4407 switch (type)
4408 {
4409 case RELOAD_OTHER:
4410 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4411 break;
4412
4413 case RELOAD_FOR_INPUT_ADDRESS:
4414 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4415 break;
4416
4417 case RELOAD_FOR_INPADDR_ADDRESS:
4418 CLEAR_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4419 break;
4420
4421 case RELOAD_FOR_OUTPUT_ADDRESS:
4422 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4423 break;
4424
4425 case RELOAD_FOR_OUTADDR_ADDRESS:
4426 CLEAR_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4427 break;
4428
4429 case RELOAD_FOR_OPERAND_ADDRESS:
4430 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4431 break;
4432
4433 case RELOAD_FOR_OPADDR_ADDR:
4434 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4435 break;
4436
4437 case RELOAD_FOR_OTHER_ADDRESS:
4438 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4439 break;
4440
4441 case RELOAD_FOR_INPUT:
4442 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4443 break;
4444
4445 case RELOAD_FOR_OUTPUT:
4446 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4447 break;
4448
4449 case RELOAD_FOR_INSN:
4450 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4451 break;
4452 }
4453 }
4454 }
4455
4456 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4457 specified by OPNUM and TYPE. */
4458
4459 static int
4460 reload_reg_free_p (regno, opnum, type)
4461 int regno;
4462 int opnum;
4463 enum reload_type type;
4464 {
4465 int i;
4466
4467 /* In use for a RELOAD_OTHER means it's not available for anything. */
4468 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
4469 return 0;
4470
4471 switch (type)
4472 {
4473 case RELOAD_OTHER:
4474 /* In use for anything means we can't use it for RELOAD_OTHER. */
4475 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4476 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4477 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4478 return 0;
4479
4480 for (i = 0; i < reload_n_operands; i++)
4481 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4482 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4483 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4484 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4485 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4486 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4487 return 0;
4488
4489 return 1;
4490
4491 case RELOAD_FOR_INPUT:
4492 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4493 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4494 return 0;
4495
4496 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4497 return 0;
4498
4499 /* If it is used for some other input, can't use it. */
4500 for (i = 0; i < reload_n_operands; i++)
4501 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4502 return 0;
4503
4504 /* If it is used in a later operand's address, can't use it. */
4505 for (i = opnum + 1; i < reload_n_operands; i++)
4506 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4507 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4508 return 0;
4509
4510 return 1;
4511
4512 case RELOAD_FOR_INPUT_ADDRESS:
4513 /* Can't use a register if it is used for an input address for this
4514 operand or used as an input in an earlier one. */
4515 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4516 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4517 return 0;
4518
4519 for (i = 0; i < opnum; i++)
4520 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4521 return 0;
4522
4523 return 1;
4524
4525 case RELOAD_FOR_INPADDR_ADDRESS:
4526       /* Can't use a register if it is used for the address of an input
4527 	 address for this operand, or used as an input in an earlier
4528 	 one. */
4529 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4530 return 0;
4531
4532 for (i = 0; i < opnum; i++)
4533 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4534 return 0;
4535
4536 return 1;
4537
4538 case RELOAD_FOR_OUTPUT_ADDRESS:
4539 /* Can't use a register if it is used for an output address for this
4540 operand or used as an output in this or a later operand. */
4541 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4542 return 0;
4543
4544 for (i = opnum; i < reload_n_operands; i++)
4545 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4546 return 0;
4547
4548 return 1;
4549
4550 case RELOAD_FOR_OUTADDR_ADDRESS:
4551       /* Can't use a register if it is used for the address of an output
4552 	 address for this operand, or used as an output in this or a
4553 	 later operand. */
4554 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4555 return 0;
4556
4557 for (i = opnum; i < reload_n_operands; i++)
4558 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4559 return 0;
4560
4561 return 1;
4562
4563 case RELOAD_FOR_OPERAND_ADDRESS:
4564 for (i = 0; i < reload_n_operands; i++)
4565 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4566 return 0;
4567
4568 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4569 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4570
4571 case RELOAD_FOR_OPADDR_ADDR:
4572 for (i = 0; i < reload_n_operands; i++)
4573 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4574 return 0;
4575
4576 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4577
4578 case RELOAD_FOR_OUTPUT:
4579 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4580 outputs, or an operand address for this or an earlier output. */
4581 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4582 return 0;
4583
4584 for (i = 0; i < reload_n_operands; i++)
4585 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4586 return 0;
4587
4588 for (i = 0; i <= opnum; i++)
4589 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4590 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4591 return 0;
4592
4593 return 1;
4594
4595 case RELOAD_FOR_INSN:
4596 for (i = 0; i < reload_n_operands; i++)
4597 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4598 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4599 return 0;
4600
4601 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4602 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4603
4604 case RELOAD_FOR_OTHER_ADDRESS:
4605 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4606 }
4607 abort ();
4608 }
4609
4610 /* Return 1 if the value in reload reg REGNO, as used by a reload
4611 needed for the part of the insn specified by OPNUM and TYPE,
4612 is not in use for a reload in any prior part of the insn.
4613
4614 We can assume that the reload reg was already tested for availability
4615 at the time it is needed, and we should not check this again,
4616 in case the reg has already been marked in use. */
4617
4618 static int
4619 reload_reg_free_before_p (regno, opnum, type)
4620 int regno;
4621 int opnum;
4622 enum reload_type type;
4623 {
4624 int i;
4625
4626 switch (type)
4627 {
4628 case RELOAD_FOR_OTHER_ADDRESS:
4629 /* These always come first. */
4630 return 1;
4631
4632 case RELOAD_OTHER:
4633 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4634
4635 /* If this use is for part of the insn,
4636        check that the reg is not in use for any prior part.  It is tempting
4637        to try to do this by falling through from objects that occur
4638 later in the insn to ones that occur earlier, but that will not
4639 correctly take into account the fact that here we MUST ignore
4640 things that would prevent the register from being allocated in
4641 the first place, since we know that it was allocated. */
4642
4643 case RELOAD_FOR_OUTPUT_ADDRESS:
4644 case RELOAD_FOR_OUTADDR_ADDRESS:
4645 /* Earlier reloads are for earlier outputs or their addresses,
4646 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4647 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4648 	 RELOAD_OTHER).  */
4649 for (i = 0; i < opnum; i++)
4650 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4651 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4652 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4653 return 0;
4654
4655 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4656 return 0;
4657
4658 for (i = 0; i < reload_n_operands; i++)
4659 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4660 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4661 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4662 return 0;
4663
4664 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4665 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4666 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4667
4668 case RELOAD_FOR_OUTPUT:
4669 /* This can't be used in the output address for this operand and
4670 anything that can't be used for it, except that we've already
4671 tested for RELOAD_FOR_INSN objects. */
4672
4673 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno)
4674 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4675 return 0;
4676
4677 for (i = 0; i < opnum; i++)
4678 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4679 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4680 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4681 return 0;
4682
4683 for (i = 0; i < reload_n_operands; i++)
4684 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4685 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4686 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4687 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4688 return 0;
4689
4690 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4691
4692 case RELOAD_FOR_OPERAND_ADDRESS:
4693 case RELOAD_FOR_OPADDR_ADDR:
4694 case RELOAD_FOR_INSN:
4695 /* These can't conflict with inputs, or each other, so all we have to
4696 test is input addresses and the addresses of OTHER items. */
4697
4698 for (i = 0; i < reload_n_operands; i++)
4699 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4700 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4701 return 0;
4702
4703 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4704
4705 case RELOAD_FOR_INPUT:
4706 /* The only things earlier are the address for this and
4707 earlier inputs, other inputs (which we know we don't conflict
4708 with), and addresses of RELOAD_OTHER objects. */
4709
4710 for (i = 0; i <= opnum; i++)
4711 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4712 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4713 return 0;
4714
4715 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4716
4717 case RELOAD_FOR_INPUT_ADDRESS:
4718 case RELOAD_FOR_INPADDR_ADDRESS:
4719 /* Similarly, all we have to check is for use in earlier inputs'
4720 addresses. */
4721 for (i = 0; i < opnum; i++)
4722 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4723 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4724 return 0;
4725
4726 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4727 }
4728 abort ();
4729 }
4730
4731 /* Return 1 if the value in reload reg REGNO, as used by a reload
4732 needed for the part of the insn specified by OPNUM and TYPE,
4733 is still available in REGNO at the end of the insn.
4734
4735 We can assume that the reload reg was already tested for availability
4736 at the time it is needed, and we should not check this again,
4737 in case the reg has already been marked in use. */
4738
4739 static int
4740 reload_reg_reaches_end_p (regno, opnum, type)
4741 int regno;
4742 int opnum;
4743 enum reload_type type;
4744 {
4745 int i;
4746
4747 switch (type)
4748 {
4749 case RELOAD_OTHER:
4750 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4751 its value must reach the end. */
4752 return 1;
4753
4754 /* If this use is for part of the insn,
4755        its value reaches the end if no subsequent part uses the same register.
4756 Just like the above function, don't try to do this with lots
4757 of fallthroughs. */
4758
4759 case RELOAD_FOR_OTHER_ADDRESS:
4760 /* Here we check for everything else, since these don't conflict
4761 with anything else and everything comes later. */
4762
4763 for (i = 0; i < reload_n_operands; i++)
4764 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4765 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4766 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4767 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4768 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4769 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4770 return 0;
4771
4772 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4773 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4774 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4775
4776 case RELOAD_FOR_INPUT_ADDRESS:
4777 case RELOAD_FOR_INPADDR_ADDRESS:
4778 /* Similar, except that we check only for this and subsequent inputs
4779 and the address of only subsequent inputs and we do not need
4780 to check for RELOAD_OTHER objects since they are known not to
4781 conflict. */
4782
4783 for (i = opnum; i < reload_n_operands; i++)
4784 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4785 return 0;
4786
4787 for (i = opnum + 1; i < reload_n_operands; i++)
4788 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4789 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4790 return 0;
4791
4792 for (i = 0; i < reload_n_operands; i++)
4793 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4794 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4795 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4796 return 0;
4797
4798 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4799 return 0;
4800
4801 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4802 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4803
4804 case RELOAD_FOR_INPUT:
4805 /* Similar to input address, except we start at the next operand for
4806 both input and input address and we do not check for
4807 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4808 would conflict. */
4809
4810 for (i = opnum + 1; i < reload_n_operands; i++)
4811 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4812 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4813 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4814 return 0;
4815
4816 /* ... fall through ... */
4817
4818 case RELOAD_FOR_OPERAND_ADDRESS:
4819 /* Check outputs and their addresses. */
4820
4821 for (i = 0; i < reload_n_operands; i++)
4822 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4823 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4824 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4825 return 0;
4826
4827 return 1;
4828
4829 case RELOAD_FOR_OPADDR_ADDR:
4830 for (i = 0; i < reload_n_operands; i++)
4831 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4832 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4833 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4834 return 0;
4835
4836 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4837 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4838
4839 case RELOAD_FOR_INSN:
4840 /* These conflict with other outputs with RELOAD_OTHER. So
4841 we need only check for output addresses. */
4842
4843 opnum = -1;
4844
4845 /* ... fall through ... */
4846
4847 case RELOAD_FOR_OUTPUT:
4848 case RELOAD_FOR_OUTPUT_ADDRESS:
4849 case RELOAD_FOR_OUTADDR_ADDRESS:
4850 /* We already know these can't conflict with a later output. So the
4851 only thing to check are later output addresses. */
4852 for (i = opnum + 1; i < reload_n_operands; i++)
4853 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4854 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4855 return 0;
4856
4857 return 1;
4858 }
4859
4860 abort ();
4861 }
4862 \f
4863 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4864 Return 0 otherwise.
4865
4866 This function uses the same algorithm as reload_reg_free_p above. */
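/* Worked example (illustrative): a RELOAD_FOR_INPUT reload for operand 0
conflicts with a RELOAD_FOR_INPUT_ADDRESS reload for operand 1, because
operand 1's address is computed after operand 0 has been loaded; it does
not conflict with a RELOAD_FOR_INPUT_ADDRESS reload for operand 0, whose
register is free again once operand 0's address has been formed. */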
4867
4868 static int
4869 reloads_conflict (r1, r2)
4870 int r1, r2;
4871 {
4872 enum reload_type r1_type = reload_when_needed[r1];
4873 enum reload_type r2_type = reload_when_needed[r2];
4874 int r1_opnum = reload_opnum[r1];
4875 int r2_opnum = reload_opnum[r2];
4876
4877 /* RELOAD_OTHER conflicts with everything. */
4878 if (r2_type == RELOAD_OTHER)
4879 return 1;
4880
4881 /* Otherwise, check conflicts differently for each type. */
4882
4883 switch (r1_type)
4884 {
4885 case RELOAD_FOR_INPUT:
4886 return (r2_type == RELOAD_FOR_INSN
4887 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4888 || r2_type == RELOAD_FOR_OPADDR_ADDR
4889 || r2_type == RELOAD_FOR_INPUT
4890 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4891 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4892 && r2_opnum > r1_opnum));
4893
4894 case RELOAD_FOR_INPUT_ADDRESS:
4895 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4896 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4897
4898 case RELOAD_FOR_INPADDR_ADDRESS:
4899 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4900 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4901
4902 case RELOAD_FOR_OUTPUT_ADDRESS:
4903 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4904 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4905
4906 case RELOAD_FOR_OUTADDR_ADDRESS:
4907 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4908 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4909
4910 case RELOAD_FOR_OPERAND_ADDRESS:
4911 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4912 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4913
4914 case RELOAD_FOR_OPADDR_ADDR:
4915 return (r2_type == RELOAD_FOR_INPUT
4916 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4917
4918 case RELOAD_FOR_OUTPUT:
4919 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4920 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4921 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
4922 && r2_opnum >= r1_opnum));
4923
4924 case RELOAD_FOR_INSN:
4925 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4926 || r2_type == RELOAD_FOR_INSN
4927 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4928
4929 case RELOAD_FOR_OTHER_ADDRESS:
4930 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4931
4932 case RELOAD_OTHER:
4933 return 1;
4934
4935 default:
4936 abort ();
4937 }
4938 }
4939 \f
4940 /* Vector of reload-numbers showing the order in which the reloads should
4941 be processed. */
4942 short reload_order[MAX_RELOADS];
4943
4944 /* Indexed by reload number, 1 if incoming value
4945 inherited from previous insns. */
4946 char reload_inherited[MAX_RELOADS];
4947
4948 /* For an inherited reload, this is the insn the reload was inherited from,
4949 if we know it. Otherwise, this is 0. */
4950 rtx reload_inheritance_insn[MAX_RELOADS];
4951
4952 /* If non-zero, this is a place to get the value of the reload,
4953 rather than using reload_in. */
4954 rtx reload_override_in[MAX_RELOADS];
4955
4956 /* For each reload, the index in spill_regs of the spill register used,
4957 or -1 if we did not need one of the spill registers for this reload. */
4958 int reload_spill_index[MAX_RELOADS];
4959
4960 /* Find a spill register to use as a reload register for reload R.
4961 LAST_RELOAD is non-zero if this is the last reload for the insn being
4962 processed.
4963
4964 Set reload_reg_rtx[R] to the register allocated.
4965
4966 If NOERROR is nonzero, we return 1 if successful,
4967 or 0 if we couldn't find a spill reg and we didn't change anything. */
4968
4969 static int
4970 allocate_reload_reg (r, insn, last_reload, noerror)
4971 int r;
4972 rtx insn;
4973 int last_reload;
4974 int noerror;
4975 {
4976 int i;
4977 int pass;
4978 int count;
4979 rtx new;
4980 int regno;
4981
4982 /* If we put this reload ahead, thinking it is a group,
4983 then insist on finding a group. Otherwise we can grab a
4984 reg that some other reload needs.
4985 (That can happen when we have a 68000 DATA_OR_FP_REG
4986 which is a group of data regs or one fp reg.)
4987 We need not be so restrictive if there are no more reloads
4988 for this insn.
4989
4990 ??? Really it would be nicer to have smarter handling
4991 for that kind of reg class, where a problem like this is normal.
4992 Perhaps those classes should be avoided for reloading
4993 by use of more alternatives. */
4994
4995 int force_group = reload_nregs[r] > 1 && ! last_reload;
4996
4997 /* If we want a single register and haven't yet found one,
4998 take any reg in the right class and not in use.
4999 If we want a consecutive group, here is where we look for it.
5000
5001 We use two passes so we can first look for reload regs to
5002 reuse, which are already in use for other reloads in this insn,
5003 and only then use additional registers.
5004 I think that maximizing reuse is needed to make sure we don't
5005 run out of reload regs. Suppose we have three reloads, and
5006 reloads A and B can share regs. These need two regs.
5007 Suppose A and B are given different regs.
5008 That leaves none for C. */
5009 for (pass = 0; pass < 2; pass++)
5010 {
5011 /* I is the index in spill_regs.
5012 We advance it round-robin between insns to use all spill regs
5013 equally, so that inherited reloads have a chance
5014 of leapfrogging each other. Don't do this, however, when we have
5015 group needs and failure would be fatal; if we only have a relatively
5016 small number of spill registers, and more than one of them has
5017 group needs, then by starting in the middle, we may end up
5018 allocating the first one in such a way that we are not left with
5019 sufficient groups to handle the rest. */
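/* Illustrative scenario (hypothetical register numbers): with spill regs
{2, 3, 4, 5} and two reloads that each need a two-register group, starting
the scan in the middle could allocate {3, 4} to the first group, leaving
only 2 and 5, which form no consecutive pair for the second. Scanning from
the start allocates {2, 3} and leaves {4, 5} available. */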
5020
5021 if (noerror || ! force_group)
5022 i = last_spill_reg;
5023 else
5024 i = -1;
5025
5026 for (count = 0; count < n_spills; count++)
5027 {
5028 int class = (int) reload_reg_class[r];
5029
5030 i = (i + 1) % n_spills;
5031
5032 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
5033 reload_when_needed[r])
5034 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
5035 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5036 /* Look first for regs to share, then for unshared. But
5037 don't share regs used for inherited reloads; they are
5038 the ones we want to preserve. */
5039 && (pass
5040 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5041 spill_regs[i])
5042 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5043 spill_regs[i]))))
5044 {
5045 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5046 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5047 (on 68000) got us two FP regs. If NR is 1,
5048 we would reject both of them. */
5049 if (force_group)
5050 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
5051 /* If we need only one reg, we have already won. */
5052 if (nr == 1)
5053 {
5054 /* But reject a single reg if we demand a group. */
5055 if (force_group)
5056 continue;
5057 break;
5058 }
5059 /* Otherwise check that as many consecutive regs as we need
5060 are available here.
5061 Also, don't use for a group registers that are
5062 needed for nongroups. */
5063 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
5064 while (nr > 1)
5065 {
5066 regno = spill_regs[i] + nr - 1;
5067 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5068 && spill_reg_order[regno] >= 0
5069 && reload_reg_free_p (regno, reload_opnum[r],
5070 reload_when_needed[r])
5071 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
5072 regno)))
5073 break;
5074 nr--;
5075 }
5076 if (nr == 1)
5077 break;
5078 }
5079 }
5080
5081 /* If we found something on pass 1, omit pass 2. */
5082 if (count < n_spills)
5083 break;
5084 }
5085
5086 /* We should have found a spill register by now. */
5087 if (count == n_spills)
5088 {
5089 if (noerror)
5090 return 0;
5091 goto failure;
5092 }
5093
5094 /* I is the index in SPILL_REG_RTX of the reload register we are to
5095 allocate. Get an rtx for it and find its register number. */
5096
5097 new = spill_reg_rtx[i];
5098
5099 if (new == 0 || GET_MODE (new) != reload_mode[r])
5100 spill_reg_rtx[i] = new
5101 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
5102
5103 regno = true_regnum (new);
5104
5105 /* Detect when the reload reg can't hold the reload mode.
5106 This used to be one `if', but the Sequent compiler can't handle that. */
5107 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5108 {
5109 enum machine_mode test_mode = VOIDmode;
5110 if (reload_in[r])
5111 test_mode = GET_MODE (reload_in[r]);
5112 /* If reload_in[r] has VOIDmode, it means we will load it
5113 in whatever mode the reload reg has: to wit, reload_mode[r].
5114 We have already tested that for validity. */
5115 /* Aside from that, we need to test that the expressions
5116 to reload from or into have modes which are valid for this
5117 reload register. Otherwise the reload insns would be invalid. */
5118 if (! (reload_in[r] != 0 && test_mode != VOIDmode
5119 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5120 if (! (reload_out[r] != 0
5121 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
5122 {
5123 /* The reg is OK. */
5124 last_spill_reg = i;
5125
5126 /* Mark as in use for this insn the reload regs we use
5127 for this. */
5128 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
5129 reload_when_needed[r], reload_mode[r]);
5130
5131 reload_reg_rtx[r] = new;
5132 reload_spill_index[r] = i;
5133 return 1;
5134 }
5135 }
5136
5137 /* The reg is not OK. */
5138 if (noerror)
5139 return 0;
5140
5141 failure:
5142 if (asm_noperands (PATTERN (insn)) < 0)
5143 /* It's the compiler's fault. */
5144 fatal_insn ("Could not find a spill register", insn);
5145
5146 /* It's the user's fault; the operand's mode and constraint
5147 don't match. Disable this reload so we don't crash in final. */
5148 error_for_asm (insn,
5149 "`asm' operand constraint incompatible with operand size");
5150 reload_in[r] = 0;
5151 reload_out[r] = 0;
5152 reload_reg_rtx[r] = 0;
5153 reload_optional[r] = 1;
5154 reload_secondary_p[r] = 1;
5155
5156 return 1;
5157 }
5158 \f
5159 /* Assign hard reg targets for the pseudo-registers we must reload
5160 into hard regs for this insn.
5161 Also output the instructions to copy them in and out of the hard regs.
5162
5163 For machines with register classes, we are responsible for
5164 finding a reload reg in the proper class. */
5165
5166 static void
5167 choose_reload_regs (insn, avoid_return_reg)
5168 rtx insn;
5169 rtx avoid_return_reg;
5170 {
5171 register int i, j;
5172 int max_group_size = 1;
5173 enum reg_class group_class = NO_REGS;
5174 int inheritance;
5175
5176 rtx save_reload_reg_rtx[MAX_RELOADS];
5177 char save_reload_inherited[MAX_RELOADS];
5178 rtx save_reload_inheritance_insn[MAX_RELOADS];
5179 rtx save_reload_override_in[MAX_RELOADS];
5180 int save_reload_spill_index[MAX_RELOADS];
5181 HARD_REG_SET save_reload_reg_used;
5182 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
5183 HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
5184 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
5185 HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
5186 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5187 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
5188 HARD_REG_SET save_reload_reg_used_in_op_addr;
5189 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
5190 HARD_REG_SET save_reload_reg_used_in_insn;
5191 HARD_REG_SET save_reload_reg_used_in_other_addr;
5192 HARD_REG_SET save_reload_reg_used_at_all;
5193
5194 bzero (reload_inherited, MAX_RELOADS);
5195 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
5196 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
5197
5198 CLEAR_HARD_REG_SET (reload_reg_used);
5199 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5200 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5201 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5202 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5203 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5204
5205 for (i = 0; i < reload_n_operands; i++)
5206 {
5207 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5208 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5209 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5210 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5211 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5212 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5213 }
5214
5215 #ifdef SMALL_REGISTER_CLASSES
5216 /* Don't bother with avoiding the return reg
5217 if we have no mandatory reload that could use it. */
5218 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5219 {
5220 int do_avoid = 0;
5221 int regno = REGNO (avoid_return_reg);
5222 int nregs
5223 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5224 int r;
5225
5226 for (r = regno; r < regno + nregs; r++)
5227 if (spill_reg_order[r] >= 0)
5228 for (j = 0; j < n_reloads; j++)
5229 if (!reload_optional[j] && reload_reg_rtx[j] == 0
5230 && (reload_in[j] != 0 || reload_out[j] != 0
5231 || reload_secondary_p[j])
5232 &&
5233 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
5234 do_avoid = 1;
5235 if (!do_avoid)
5236 avoid_return_reg = 0;
5237 }
5238 #endif /* SMALL_REGISTER_CLASSES */
5239
5240 #if 0 /* Not needed, now that we can always retry without inheritance. */
5241 /* See if we have more mandatory reloads than spill regs.
5242 If so, then we cannot risk optimizations that could prevent
5243 reloads from sharing one spill register.
5244
5245 Since we will try finding a better register than reload_reg_rtx
5246 unless it is equal to reload_in or reload_out, count such reloads. */
5247
5248 {
5249 int tem = 0;
5250 #ifdef SMALL_REGISTER_CLASSES
5251 if (SMALL_REGISTER_CLASSES)
5252 tem = (avoid_return_reg != 0);
5253 #endif
5254 for (j = 0; j < n_reloads; j++)
5255 if (! reload_optional[j]
5256 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5257 && (reload_reg_rtx[j] == 0
5258 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5259 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5260 tem++;
5261 if (tem > n_spills)
5262 must_reuse = 1;
5263 }
5264 #endif
5265
5266 #ifdef SMALL_REGISTER_CLASSES
5267 /* Don't use the subroutine call return reg for a reload
5268 if we are supposed to avoid it. */
5269 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5270 {
5271 int regno = REGNO (avoid_return_reg);
5272 int nregs
5273 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5274 int r;
5275
5276 for (r = regno; r < regno + nregs; r++)
5277 if (spill_reg_order[r] >= 0)
5278 SET_HARD_REG_BIT (reload_reg_used, r);
5279 }
5280 #endif /* SMALL_REGISTER_CLASSES */
5281
5282 /* In order to be certain of getting the registers we need,
5283 we must sort the reloads into order of increasing register class.
5284 Then our grabbing of reload registers will parallel the process
5285 that provided the reload registers.
5286
5287 Also note whether any of the reloads wants a consecutive group of regs.
5288 If so, record the maximum size of the group desired and what
5289 register class contains all the groups needed by this insn. */
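/* For example (illustrative classes): if one reload accepts only FP_REGS
and another accepts the larger GENERAL_OR_FP_REGS, the FP_REGS reload is
processed first, so the wider reload cannot grab the last fp register that
the narrower one needed. */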
5290
5291 for (j = 0; j < n_reloads; j++)
5292 {
5293 reload_order[j] = j;
5294 reload_spill_index[j] = -1;
5295
5296 reload_mode[j]
5297 = (reload_inmode[j] == VOIDmode
5298 || (GET_MODE_SIZE (reload_outmode[j])
5299 > GET_MODE_SIZE (reload_inmode[j])))
5300 ? reload_outmode[j] : reload_inmode[j];
5301
5302 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5303
5304 if (reload_nregs[j] > 1)
5305 {
5306 max_group_size = MAX (reload_nregs[j], max_group_size);
5307 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5308 }
5309
5310 /* If we have already decided to use a certain register,
5311 don't use it in another way. */
5312 if (reload_reg_rtx[j])
5313 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5314 reload_when_needed[j], reload_mode[j]);
5315 }
5316
5317 if (n_reloads > 1)
5318 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5319
5320 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5321 sizeof reload_reg_rtx);
5322 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5323 bcopy ((char *) reload_inheritance_insn,
5324 (char *) save_reload_inheritance_insn,
5325 sizeof reload_inheritance_insn);
5326 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5327 sizeof reload_override_in);
5328 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5329 sizeof reload_spill_index);
5330 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5331 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5332 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5333 reload_reg_used_in_op_addr);
5334
5335 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5336 reload_reg_used_in_op_addr_reload);
5337
5338 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5339 reload_reg_used_in_insn);
5340 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5341 reload_reg_used_in_other_addr);
5342
5343 for (i = 0; i < reload_n_operands; i++)
5344 {
5345 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5346 reload_reg_used_in_output[i]);
5347 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5348 reload_reg_used_in_input[i]);
5349 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5350 reload_reg_used_in_input_addr[i]);
5351 COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
5352 reload_reg_used_in_inpaddr_addr[i]);
5353 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5354 reload_reg_used_in_output_addr[i]);
5355 COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
5356 reload_reg_used_in_outaddr_addr[i]);
5357 }
5358
5359 /* If -O, try first with inheritance, then turning it off.
5360 If not -O, don't do inheritance.
5361 Using inheritance when not optimizing leads to paradoxes
5362 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5363 because one side of the comparison might be inherited. */
5364
5365 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5366 {
5367 /* Process the reloads in order of preference just found.
5368 Beyond this point, subregs can be found in reload_reg_rtx.
5369
5370 This used to look for an existing reloaded home for all
5371 of the reloads, and only then perform any new reloads.
5372 But that could lose if the reloads were done out of reg-class order
5373 because a later reload with a looser constraint might have an old
5374 home in a register needed by an earlier reload with a tighter constraint.
5375
5376 To solve this, we make two passes over the reloads, in the order
5377 described above. In the first pass we try to inherit a reload
5378 from a previous insn. If there is a later reload that needs a
5379 class that is a proper subset of the class being processed, we must
5380 also allocate a spill register during the first pass.
5381
5382 Then make a second pass over the reloads to allocate any reloads
5383 that haven't been given registers yet. */
5384
5385 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5386
5387 for (j = 0; j < n_reloads; j++)
5388 {
5389 register int r = reload_order[j];
5390
5391 /* Ignore reloads that got marked inoperative. */
5392 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5393 continue;
5394
5395 /* If find_reloads chose to use reload_in or reload_out as a reload
5396 register, we don't need to choose one. Otherwise, try even if it found
5397 one, since we might save an insn if we find the value lying around. */
5398 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5399 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5400 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5401 continue;
5402
5403 #if 0 /* No longer needed for correct operation.
5404 It might give better code, or might not; worth an experiment? */
5405 /* If this is an optional reload, we can't inherit from earlier insns
5406 until we are sure that any non-optional reloads have been allocated.
5407 The following code takes advantage of the fact that optional reloads
5408 are at the end of reload_order. */
5409 if (reload_optional[r] != 0)
5410 for (i = 0; i < j; i++)
5411 if ((reload_out[reload_order[i]] != 0
5412 || reload_in[reload_order[i]] != 0
5413 || reload_secondary_p[reload_order[i]])
5414 && ! reload_optional[reload_order[i]]
5415 && reload_reg_rtx[reload_order[i]] == 0)
5416 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5417 #endif
5418
5419 /* First see if this pseudo is already available as reloaded
5420 for a previous insn. We cannot try to inherit for reloads
5421 that are smaller than the maximum number of registers needed
5422 for groups unless the register we would allocate cannot be used
5423 for the groups.
5424
5425 We could check here to see if this is a secondary reload for
5426 an object that is already in a register of the desired class.
5427 This would avoid the need for the secondary reload register.
5428 But this is complex because we can't easily determine what
5429 objects might want to be loaded via this reload. So let a register
5430 be allocated here. In `emit_reload_insns' we suppress one of the
5431 loads in the case described above. */
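/* Inheritance, by example (illustrative): if a previous insn reloaded
pseudo 70 into spill reg 2 and reg_reloaded_contents still records that
reg 2 holds pseudo 70, a reload of pseudo 70 here can simply reuse reg 2,
provided reg 2 is free for this reload's opnum/type and wide enough. */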
5432
5433 if (inheritance)
5434 {
5435 register int regno = -1;
5436 enum machine_mode mode;
5437
5438 if (reload_in[r] == 0)
5439 ;
5440 else if (GET_CODE (reload_in[r]) == REG)
5441 {
5442 regno = REGNO (reload_in[r]);
5443 mode = GET_MODE (reload_in[r]);
5444 }
5445 else if (GET_CODE (reload_in_reg[r]) == REG)
5446 {
5447 regno = REGNO (reload_in_reg[r]);
5448 mode = GET_MODE (reload_in_reg[r]);
5449 }
5450 #if 0
5451 /* This won't work, since REGNO can be a pseudo reg number.
5452 Also, it takes much more hair to keep track of all the things
5453 that can invalidate an inherited reload of part of a pseudoreg. */
5454 else if (GET_CODE (reload_in[r]) == SUBREG
5455 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5456 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5457 #endif
5458
5459 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5460 {
5461 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5462
5463 if (reg_reloaded_contents[i] == regno
5464 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5465 >= GET_MODE_SIZE (mode))
5466 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5467 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5468 spill_regs[i])
5469 && (reload_nregs[r] == max_group_size
5470 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5471 spill_regs[i]))
5472 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5473 reload_when_needed[r])
5474 && reload_reg_free_before_p (spill_regs[i],
5475 reload_opnum[r],
5476 reload_when_needed[r]))
5477 {
5478 /* If a group is needed, verify that all the subsequent
5479 registers still have their values intact. */
5480 int nr
5481 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5482 int k;
5483
5484 for (k = 1; k < nr; k++)
5485 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5486 != regno)
5487 break;
5488
5489 if (k == nr)
5490 {
5491 int i1;
5492
5493 /* We found a register that contains the
5494 value we need. If this register is the
5495 same as an `earlyclobber' operand of the
5496 current insn, just mark it as a place to
5497 reload from since we can't use it as the
5498 reload register itself. */
5499
5500 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5501 if (reg_overlap_mentioned_for_reload_p
5502 (reg_last_reload_reg[regno],
5503 reload_earlyclobbers[i1]))
5504 break;
5505
5506 if (i1 != n_earlyclobbers
5507 /* Don't really use the inherited spill reg
5508 if we need it wider than we've got it. */
5509 || (GET_MODE_SIZE (reload_mode[r])
5510 > GET_MODE_SIZE (mode)))
5511 reload_override_in[r] = reg_last_reload_reg[regno];
5512 else
5513 {
5514 int k;
5515 /* We can use this as a reload reg. */
5516 /* Mark the register as in use for this part of
5517 the insn. */
5518 mark_reload_reg_in_use (spill_regs[i],
5519 reload_opnum[r],
5520 reload_when_needed[r],
5521 reload_mode[r]);
5522 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5523 reload_inherited[r] = 1;
5524 reload_inheritance_insn[r]
5525 = reg_reloaded_insn[i];
5526 reload_spill_index[r] = i;
5527 for (k = 0; k < nr; k++)
5528 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5529 spill_regs[i + k]);
5530 }
5531 }
5532 }
5533 }
5534 }
5535
5536 /* Here's another way to see if the value is already lying around. */
5537 if (inheritance
5538 && reload_in[r] != 0
5539 && ! reload_inherited[r]
5540 && reload_out[r] == 0
5541 && (CONSTANT_P (reload_in[r])
5542 || GET_CODE (reload_in[r]) == PLUS
5543 || GET_CODE (reload_in[r]) == REG
5544 || GET_CODE (reload_in[r]) == MEM)
5545 && (reload_nregs[r] == max_group_size
5546 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5547 {
5548 register rtx equiv
5549 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5550 -1, NULL_PTR, 0, reload_mode[r]);
5551 int regno;
5552
5553 if (equiv != 0)
5554 {
5555 if (GET_CODE (equiv) == REG)
5556 regno = REGNO (equiv);
5557 else if (GET_CODE (equiv) == SUBREG)
5558 {
5559 /* This must be a SUBREG of a hard register.
5560 Make a new REG since this might be used in an
5561 address and not all machines support SUBREGs
5562 there. */
5563 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5564 equiv = gen_rtx (REG, reload_mode[r], regno);
5565 }
5566 else
5567 abort ();
5568 }
5569
5570 /* If we found a spill reg, reject it unless it is free
5571 and of the desired class. */
5572 if (equiv != 0
5573 && ((spill_reg_order[regno] >= 0
5574 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5575 reload_when_needed[r]))
5576 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5577 regno)))
5578 equiv = 0;
5579
5580 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5581 equiv = 0;
5582
5583 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5584 equiv = 0;
5585
5586 /* We found a register that contains the value we need.
5587 If this register is the same as an `earlyclobber' operand
5588 of the current insn, just mark it as a place to reload from
5589 since we can't use it as the reload register itself. */
5590
5591 if (equiv != 0)
5592 for (i = 0; i < n_earlyclobbers; i++)
5593 if (reg_overlap_mentioned_for_reload_p (equiv,
5594 reload_earlyclobbers[i]))
5595 {
5596 reload_override_in[r] = equiv;
5597 equiv = 0;
5598 break;
5599 }
5600
5601 /* JRV: If the equiv register we have found is
5602 explicitly clobbered in the current insn, mark but
5603 don't use, as above. */
5604
5605 if (equiv != 0 && regno_clobbered_p (regno, insn))
5606 {
5607 reload_override_in[r] = equiv;
5608 equiv = 0;
5609 }
5610
5611 /* If we found an equivalent reg, say no code need be generated
5612 to load it, and use it as our reload reg. */
5613 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5614 {
5615 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5616 int k;
5617 reload_reg_rtx[r] = equiv;
5618 reload_inherited[r] = 1;
5619
5620 /* If any of the hard registers in EQUIV are spill
5621 registers, mark them as in use for this insn. */
5622 for (k = 0; k < nr; k++)
5623 {
5624 i = spill_reg_order[regno + k];
5625 if (i >= 0)
5626 {
5627 mark_reload_reg_in_use (regno, reload_opnum[r],
5628 reload_when_needed[r],
5629 reload_mode[r]);
5630 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5631 regno + k);
5632 }
5633 }
5634 }
5635 }
5636
5637 /* If we found a register to use already, or if this is an optional
5638 reload, we are done. */
5639 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5640 continue;
5641
5642 #if 0 /* No longer needed for correct operation. Might or might not
5643 give better code on the average. Want to experiment? */
5644
5645 /* See if there is a later reload that has a class different from our
5646 class that intersects our class or that requires less register
5647 than our reload. If so, we must allocate a register to this
5648 reload now, since that reload might inherit a previous reload
5649 and take the only available register in our class. Don't do this
5650 for optional reloads since they will force all previous reloads
5651 to be allocated. Also don't do this for reloads that have been
5652 turned off. */
5653
5654 for (i = j + 1; i < n_reloads; i++)
5655 {
5656 int s = reload_order[i];
5657
5658 if ((reload_in[s] == 0 && reload_out[s] == 0
5659 && ! reload_secondary_p[s])
5660 || reload_optional[s])
5661 continue;
5662
5663 if ((reload_reg_class[s] != reload_reg_class[r]
5664 && reg_classes_intersect_p (reload_reg_class[r],
5665 reload_reg_class[s]))
5666 || reload_nregs[s] < reload_nregs[r])
5667 break;
5668 }
5669
5670 if (i == n_reloads)
5671 continue;
5672
5673 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5674 #endif
5675 }
5676
5677 /* Now allocate reload registers for anything non-optional that
5678 didn't get one yet. */
5679 for (j = 0; j < n_reloads; j++)
5680 {
5681 register int r = reload_order[j];
5682
5683 /* Ignore reloads that got marked inoperative. */
5684 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5685 continue;
5686
5687 /* Skip reloads that already have a register allocated or are
5688 optional. */
5689 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5690 continue;
5691
5692 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5693 break;
5694 }
5695
5696 /* If that loop got all the way, we have won. */
5697 if (j == n_reloads)
5698 break;
5699
5700 fail:
5701 /* Loop around and try without any inheritance. */
5702 /* First undo everything done by the failed attempt
5703 to allocate with inheritance. */
5704 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5705 sizeof reload_reg_rtx);
5706 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5707 sizeof reload_inherited);
5708 bcopy ((char *) save_reload_inheritance_insn,
5709 (char *) reload_inheritance_insn,
5710 sizeof reload_inheritance_insn);
5711 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5712 sizeof reload_override_in);
5713 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5714 sizeof reload_spill_index);
5715 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5716 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5717 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5718 save_reload_reg_used_in_op_addr);
5719 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5720 save_reload_reg_used_in_op_addr_reload);
5721 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5722 save_reload_reg_used_in_insn);
5723 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5724 save_reload_reg_used_in_other_addr);
5725
5726 for (i = 0; i < reload_n_operands; i++)
5727 {
5728 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5729 save_reload_reg_used_in_input[i]);
5730 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5731 save_reload_reg_used_in_output[i]);
5732 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5733 save_reload_reg_used_in_input_addr[i]);
5734 COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
5735 save_reload_reg_used_in_inpaddr_addr[i]);
5736 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5737 save_reload_reg_used_in_output_addr[i]);
5738 COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
5739 save_reload_reg_used_in_outaddr_addr[i]);
5740 }
5741 }
5742
5743 /* If we thought we could inherit a reload, because it seemed that
5744 nothing else wanted the same reload register earlier in the insn,
5745 verify that assumption, now that all reloads have been assigned. */
5746
5747 for (j = 0; j < n_reloads; j++)
5748 {
5749 register int r = reload_order[j];
5750
5751 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5752 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5753 reload_opnum[r],
5754 reload_when_needed[r]))
5755 reload_inherited[r] = 0;
5756
5757 /* If we found a better place to reload from,
5758 validate it in the same fashion, if it is a reload reg. */
5759 if (reload_override_in[r]
5760 && (GET_CODE (reload_override_in[r]) == REG
5761 || GET_CODE (reload_override_in[r]) == SUBREG))
5762 {
5763 int regno = true_regnum (reload_override_in[r]);
5764 if (spill_reg_order[regno] >= 0
5765 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5766 reload_when_needed[r]))
5767 reload_override_in[r] = 0;
5768 }
5769 }
5770
5771 /* Now that reload_override_in is known valid,
5772 actually override reload_in. */
5773 for (j = 0; j < n_reloads; j++)
5774 if (reload_override_in[j])
5775 reload_in[j] = reload_override_in[j];
5776
5777 /* If this reload won't be done because it has been cancelled or is
5778 optional and not inherited, clear reload_reg_rtx so other
5779 routines (such as subst_reloads) don't get confused. */
5780 for (j = 0; j < n_reloads; j++)
5781 if (reload_reg_rtx[j] != 0
5782 && ((reload_optional[j] && ! reload_inherited[j])
5783 || (reload_in[j] == 0 && reload_out[j] == 0
5784 && ! reload_secondary_p[j])))
5785 {
5786 int regno = true_regnum (reload_reg_rtx[j]);
5787
5788 if (spill_reg_order[regno] >= 0)
5789 clear_reload_reg_in_use (regno, reload_opnum[j],
5790 reload_when_needed[j], reload_mode[j]);
5791 reload_reg_rtx[j] = 0;
5792 }
5793
5794 /* Record which pseudos and which spill regs have output reloads. */
5795 for (j = 0; j < n_reloads; j++)
5796 {
5797 register int r = reload_order[j];
5798
5799 i = reload_spill_index[r];
5800
5801 /* I is nonneg if this reload used one of the spill regs.
5802 If reload_reg_rtx[r] is 0, this is an optional reload
5803 that we opted to ignore. */
5804 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5805 && reload_reg_rtx[r] != 0)
5806 {
5807 register int nregno = REGNO (reload_out[r]);
5808 int nr = 1;
5809
5810 if (nregno < FIRST_PSEUDO_REGISTER)
5811 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5812
5813 while (--nr >= 0)
5814 reg_has_output_reload[nregno + nr] = 1;
5815
5816 if (i >= 0)
5817 {
5818 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5819 while (--nr >= 0)
5820 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5821 }
5822
5823 if (reload_when_needed[r] != RELOAD_OTHER
5824 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5825 && reload_when_needed[r] != RELOAD_FOR_INSN)
5826 abort ();
5827 }
5828 }
5829 }
5830 \f
5831 /* If SMALL_REGISTER_CLASSES is defined, we may not have merged two
5832 reloads of the same item for fear that we might not have enough reload
5833 registers. However, normally they will get the same reload register
5834 and hence actually need not be loaded twice.
5835
5836 Here we check for the most common case of this phenomenon: when we have
5837 a number of reloads for the same object, each of which were allocated
5838 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5839 reload, and is not modified in the insn itself. If we find such,
5840 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5841 This will not increase the number of spill registers needed and will
5842 prevent redundant code. */
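/* Concrete case (illustrative): two RELOAD_FOR_INPUT_ADDRESS reloads of
the same pseudo, for operands 1 and 2, that were both assigned hard reg 3
and neither of which is modified by the insn, are merged: the first becomes
RELOAD_OTHER and the second is cancelled, so the value is loaded once. */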
5843
5844 #ifdef SMALL_REGISTER_CLASSES
5845
5846 static void
5847 merge_assigned_reloads (insn)
5848 rtx insn;
5849 {
5850 int i, j;
5851
5852 /* Scan all the reloads looking for ones that only load values and
5853 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5854 assigned and not modified by INSN. */
5855
5856 for (i = 0; i < n_reloads; i++)
5857 {
5858 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5859 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5860 || reg_set_p (reload_reg_rtx[i], insn))
5861 continue;
5862
5863 /* Look at all other reloads. Ensure that the only use of this
5864 reload_reg_rtx is in a reload that just loads the same value
5865 as we do. Note that any secondary reloads must be of the identical
5866 class since the values, modes, and result registers are the
5867 same, so we need not do anything with any secondary reloads. */
5868
5869 for (j = 0; j < n_reloads; j++)
5870 {
5871 if (i == j || reload_reg_rtx[j] == 0
5872 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5873 reload_reg_rtx[i]))
5874 continue;
5875
5876 /* If the reload regs aren't exactly the same (e.g., different modes)
5877 or if the values are different, we can't merge anything with this
5878 reload register. */
5879
5880 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5881 || reload_out[j] != 0 || reload_in[j] == 0
5882 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5883 break;
5884 }
5885
5886 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5887 we, in fact, found any matching reloads. */
5888
5889 if (j == n_reloads)
5890 {
5891 for (j = 0; j < n_reloads; j++)
5892 if (i != j && reload_reg_rtx[j] != 0
5893 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5894 {
5895 reload_when_needed[i] = RELOAD_OTHER;
5896 reload_in[j] = 0;
5897 transfer_replacements (i, j);
5898 }
5899
5900 /* If this is now RELOAD_OTHER, look for any reloads that load
5901 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5902 if they were for inputs, RELOAD_OTHER for outputs. Note that
5903 this test is equivalent to looking for reloads for this operand
5904 number. */
5905
5906 if (reload_when_needed[i] == RELOAD_OTHER)
5907 for (j = 0; j < n_reloads; j++)
5908 if (reload_in[j] != 0
5909 && reload_when_needed[j] != RELOAD_OTHER
5910 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5911 reload_in[i]))
5912 reload_when_needed[j]
5913 = ((reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
5914 || reload_when_needed[j] == RELOAD_FOR_INPADDR_ADDRESS)
5915 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
5916 }
5917 }
5918 }
5919 #endif /* SMALL_REGISTER_CLASSES */
5920 \f
5921 /* Output insns to reload values in and out of the chosen reload regs. */
5922
5923 static void
5924 emit_reload_insns (insn)
5925 rtx insn;
5926 {
5927 register int j;
5928 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5929 rtx other_input_address_reload_insns = 0;
5930 rtx other_input_reload_insns = 0;
5931 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5932 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
5933 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5934 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5935 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
5936 rtx operand_reload_insns = 0;
5937 rtx other_operand_reload_insns = 0;
5938 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
5939 rtx following_insn = NEXT_INSN (insn);
5940 rtx before_insn = insn;
5941 int special;
5942 /* Values to be put in spill_reg_store are put here first. */
5943 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5944
5945 for (j = 0; j < reload_n_operands; j++)
5946 input_reload_insns[j] = input_address_reload_insns[j]
5947 = inpaddr_address_reload_insns[j]
5948 = output_reload_insns[j] = output_address_reload_insns[j]
5949 = outaddr_address_reload_insns[j]
5950 = other_output_reload_insns[j] = 0;
5951
5952 /* Now output the instructions to copy the data into and out of the
5953 reload registers. Do these in the order that the reloads were reported,
5954 since reloads of base and index registers precede reloads of operands
5955 and the operands may need the base and index registers reloaded. */
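/* For instance (illustrative): for an input operand (mem (plus (reg 70)
(const_int 4))), the RELOAD_FOR_INPUT_ADDRESS reload that brings pseudo 70
into a base register is emitted ahead of the RELOAD_FOR_INPUT reload of the
memory operand itself, which then uses that base register. */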
5956
5957 for (j = 0; j < n_reloads; j++)
5958 {
5959 register rtx old;
5960 rtx oldequiv_reg = 0;
5961 rtx this_reload_insn = 0;
5962
5963 if (reload_spill_index[j] >= 0)
5964 new_spill_reg_store[reload_spill_index[j]] = 0;
5965
5966 old = reload_in[j];
5967 if (old != 0 && ! reload_inherited[j]
5968 && ! rtx_equal_p (reload_reg_rtx[j], old)
5969 && reload_reg_rtx[j] != 0)
5970 {
5971 register rtx reloadreg = reload_reg_rtx[j];
5972 rtx oldequiv = 0;
5973 enum machine_mode mode;
5974 rtx *where;
5975
5976 /* Determine the mode to reload in.
5977 This is very tricky because we have three to choose from.
5978 There is the mode the insn operand wants (reload_inmode[J]).
5979 There is the mode of the reload register RELOADREG.
5980 There is the intrinsic mode of the operand, which we could find
5981 by stripping some SUBREGs.
5982 It turns out that RELOADREG's mode is irrelevant:
5983 we can change that arbitrarily.
5984
5985 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5986 then the reload reg may not support QImode moves, so use SImode.
5987 If foo is in memory due to spilling a pseudo reg, this is safe,
5988 because the QImode value is in the least significant part of a
5989 slot big enough for a SImode. If foo is some other sort of
5990 memory reference, then it is impossible to reload this case,
5991 so previous passes had better make sure this never happens.
5992
5993 Then consider a one-word union which has SImode and one of its
5994 members is a float, being fetched as (SUBREG:SF union:SI).
5995 We must fetch that as SFmode because we could be loading into
5996 a float-only register. In this case OLD's mode is correct.
5997
5998 Consider an immediate integer: it has VOIDmode. Here we need
5999 to get a mode from something else.
6000
6001 In some cases, there is a fourth mode, the operand's
6002 containing mode. If the insn specifies a containing mode for
6003 this operand, it overrides all others.
6004
6005 I am not sure whether the algorithm here is always right,
6006 but it does the right things in those cases. */
6007
6008 mode = GET_MODE (old);
6009 if (mode == VOIDmode)
6010 mode = reload_inmode[j];
6011
6012 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6013 /* If we need a secondary register for this operation, see if
6014 the value is already in a register in that class. Don't
6015 do this if the secondary register will be used as a scratch
6016 register. */
6017
6018 if (reload_secondary_in_reload[j] >= 0
6019 && reload_secondary_in_icode[j] == CODE_FOR_nothing
6020 && optimize)
6021 oldequiv
6022 = find_equiv_reg (old, insn,
6023 reload_reg_class[reload_secondary_in_reload[j]],
6024 -1, NULL_PTR, 0, mode);
6025 #endif
6026
6027 /* If reloading from memory, see if there is a register
6028 that already holds the same value. If so, reload from there.
6029 We can pass 0 as the reload_reg_p argument because
6030 any other reload has either already been emitted,
6031 in which case find_equiv_reg will see the reload-insn,
6032 or has yet to be emitted, in which case it doesn't matter
6033 because we will use this equiv reg right away. */
6034
6035 if (oldequiv == 0 && optimize
6036 && (GET_CODE (old) == MEM
6037 || (GET_CODE (old) == REG
6038 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6039 && reg_renumber[REGNO (old)] < 0)))
6040 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
6041 -1, NULL_PTR, 0, mode);
6042
6043 if (oldequiv)
6044 {
6045 int regno = true_regnum (oldequiv);
6046
6047 /* If OLDEQUIV is a spill register, don't use it for this
6048 if any other reload needs it at an earlier stage of this insn
6049 or at this stage. */
6050 if (spill_reg_order[regno] >= 0
6051 && (! reload_reg_free_p (regno, reload_opnum[j],
6052 reload_when_needed[j])
6053 || ! reload_reg_free_before_p (regno, reload_opnum[j],
6054 reload_when_needed[j])))
6055 oldequiv = 0;
6056
6057 /* If OLDEQUIV is not a spill register,
6058 don't use it if any other reload wants it. */
6059 if (spill_reg_order[regno] < 0)
6060 {
6061 int k;
6062 for (k = 0; k < n_reloads; k++)
6063 if (reload_reg_rtx[k] != 0 && k != j
6064 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
6065 oldequiv))
6066 {
6067 oldequiv = 0;
6068 break;
6069 }
6070 }
6071
6072 /* If it is no cheaper to copy from OLDEQUIV into the
6073 reload register than it would be to move from memory,
6074 don't use it. Likewise, if we need a secondary register
6075 or memory. */
6076
6077 if (oldequiv != 0
6078 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
6079 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
6080 reload_reg_class[j])
6081 >= MEMORY_MOVE_COST (mode)))
6082 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6083 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6084 mode, oldequiv)
6085 != NO_REGS)
6086 #endif
6087 #ifdef SECONDARY_MEMORY_NEEDED
6088 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
6089 REGNO_REG_CLASS (regno),
6090 mode)
6091 #endif
6092 ))
6093 oldequiv = 0;
6094 }
6095
6096 if (oldequiv == 0)
6097 oldequiv = old;
6098 else if (GET_CODE (oldequiv) == REG)
6099 oldequiv_reg = oldequiv;
6100 else if (GET_CODE (oldequiv) == SUBREG)
6101 oldequiv_reg = SUBREG_REG (oldequiv);
6102
6103 /* If we are reloading from a register that was recently stored in
6104 with an output-reload, see if we can prove there was
6105 actually no need to store the old value in it. */
6106
6107 if (optimize && GET_CODE (oldequiv) == REG
6108 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6109 && spill_reg_order[REGNO (oldequiv)] >= 0
6110 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
6111 && find_reg_note (insn, REG_DEAD, reload_in[j])
6112 /* This is unsafe if operand occurs more than once in current
6113 insn. Perhaps some occurrences weren't reloaded. */
6114 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6115 delete_output_reload
6116 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
6117
6118 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6119 then load RELOADREG from OLDEQUIV. Note that we cannot use
6120 gen_lowpart_common since it can do the wrong thing when
6121 RELOADREG has a multi-word mode. Note that RELOADREG
6122 must always be a REG here. */
6123
6124 if (GET_MODE (reloadreg) != mode)
6125 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6126 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6127 oldequiv = SUBREG_REG (oldequiv);
6128 if (GET_MODE (oldequiv) != VOIDmode
6129 && mode != GET_MODE (oldequiv))
6130 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
6131
6132 /* Switch to the right place to emit the reload insns. */
6133 switch (reload_when_needed[j])
6134 {
6135 case RELOAD_OTHER:
6136 where = &other_input_reload_insns;
6137 break;
6138 case RELOAD_FOR_INPUT:
6139 where = &input_reload_insns[reload_opnum[j]];
6140 break;
6141 case RELOAD_FOR_INPUT_ADDRESS:
6142 where = &input_address_reload_insns[reload_opnum[j]];
6143 break;
6144 case RELOAD_FOR_INPADDR_ADDRESS:
6145 where = &inpaddr_address_reload_insns[reload_opnum[j]];
6146 break;
6147 case RELOAD_FOR_OUTPUT_ADDRESS:
6148 where = &output_address_reload_insns[reload_opnum[j]];
6149 break;
6150 case RELOAD_FOR_OUTADDR_ADDRESS:
6151 where = &outaddr_address_reload_insns[reload_opnum[j]];
6152 break;
6153 case RELOAD_FOR_OPERAND_ADDRESS:
6154 where = &operand_reload_insns;
6155 break;
6156 case RELOAD_FOR_OPADDR_ADDR:
6157 where = &other_operand_reload_insns;
6158 break;
6159 case RELOAD_FOR_OTHER_ADDRESS:
6160 where = &other_input_address_reload_insns;
6161 break;
6162 default:
6163 abort ();
6164 }
6165
6166 push_to_sequence (*where);
6167 special = 0;
6168
6169 /* Auto-increment addresses must be reloaded in a special way. */
6170 if (GET_CODE (oldequiv) == POST_INC
6171 || GET_CODE (oldequiv) == POST_DEC
6172 || GET_CODE (oldequiv) == PRE_INC
6173 || GET_CODE (oldequiv) == PRE_DEC)
6174 {
6175 /* We are not going to bother supporting the case where an
6176 incremented register can't be copied directly from
6177 OLDEQUIV since this seems highly unlikely. */
6178 if (reload_secondary_in_reload[j] >= 0)
6179 abort ();
6180 /* Prevent normal processing of this reload. */
6181 special = 1;
6182 /* Output a special code sequence for this case. */
6183 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
6184 }
6185
6186 /* If we are reloading a pseudo-register that was set by the previous
6187 insn, see if we can get rid of that pseudo-register entirely
6188 by redirecting the previous insn into our reload register. */
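/* Sketch of the transformation (illustrative): if the previous insn is
(set (reg 70) (plus ...)), reg 70 dies in this insn, and this reload is its
only use here, the previous insn is rewritten to set the reload register
directly and the separate copy into RELOADREG is skipped. */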
6189
6190 else if (optimize && GET_CODE (old) == REG
6191 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6192 && dead_or_set_p (insn, old)
6193 /* This is unsafe if some other reload
6194 uses the same reg first. */
6195 && reload_reg_free_before_p (REGNO (reloadreg),
6196 reload_opnum[j],
6197 reload_when_needed[j]))
6198 {
6199 rtx temp = PREV_INSN (insn);
6200 while (temp && GET_CODE (temp) == NOTE)
6201 temp = PREV_INSN (temp);
6202 if (temp
6203 && GET_CODE (temp) == INSN
6204 && GET_CODE (PATTERN (temp)) == SET
6205 && SET_DEST (PATTERN (temp)) == old
6206 /* Make sure we can access insn_operand_constraint. */
6207 && asm_noperands (PATTERN (temp)) < 0
6208 /* This is unsafe if prev insn rejects our reload reg. */
6209 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
6210 reloadreg)
6211 /* This is unsafe if operand occurs more than once in current
6212 insn. Perhaps some occurrences aren't reloaded. */
6213 && count_occurrences (PATTERN (insn), old) == 1
6214 /* Don't risk splitting a matching pair of operands. */
6215 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6216 {
6217 /* Store into the reload register instead of the pseudo. */
6218 SET_DEST (PATTERN (temp)) = reloadreg;
6219 /* If these are the only uses of the pseudo reg,
6220 pretend for GDB it lives in the reload reg we used. */
6221 if (reg_n_deaths[REGNO (old)] == 1
6222 && reg_n_sets[REGNO (old)] == 1)
6223 {
6224 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
6225 alter_reg (REGNO (old), -1);
6226 }
6227 special = 1;
6228 }
6229 }
6230
6231 /* We can't do that, so output an insn to load RELOADREG. */
6232
6233 if (! special)
6234 {
6235 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6236 rtx second_reload_reg = 0;
6237 enum insn_code icode;
6238
6239 /* If we have a secondary reload, pick up the secondary register
6240 and icode, if any. If OLDEQUIV and OLD are different or
6241 if this is an in-out reload, recompute whether or not we
6242 still need a secondary register and what the icode should
6243 be. If we still need a secondary register and the class or
6244 icode is different, go back to reloading from OLD if using
6245 OLDEQUIV means that we got the wrong type of register. We
6246 cannot have different class or icode due to an in-out reload
6247 because we don't make such reloads when both the input and
6248 output need secondary reload registers. */
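/* Typical use, sketched (illustrative): on a target whose fp registers
cannot be loaded directly from memory, SECONDARY_INPUT_RELOAD_CLASS
returns GENERAL_REGS and the value is moved memory -> intermediate general
reg -> fp reload reg; with an icode, the secondary reg instead serves as a
scratch for a single special-purpose load pattern. */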
6249
6250 if (reload_secondary_in_reload[j] >= 0)
6251 {
6252 int secondary_reload = reload_secondary_in_reload[j];
6253 rtx real_oldequiv = oldequiv;
6254 rtx real_old = old;
6255
6256 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6257 and similarly for OLD.
6258 See comments in get_secondary_reload in reload.c. */
6259 if (GET_CODE (oldequiv) == REG
6260 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6261 && reg_equiv_mem[REGNO (oldequiv)] != 0)
6262 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6263
6264 if (GET_CODE (old) == REG
6265 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6266 && reg_equiv_mem[REGNO (old)] != 0)
6267 real_old = reg_equiv_mem[REGNO (old)];
6268
6269 second_reload_reg = reload_reg_rtx[secondary_reload];
6270 icode = reload_secondary_in_icode[j];
6271
6272 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6273 || (reload_in[j] != 0 && reload_out[j] != 0))
6274 {
6275 enum reg_class new_class
6276 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6277 mode, real_oldequiv);
6278
6279 if (new_class == NO_REGS)
6280 second_reload_reg = 0;
6281 else
6282 {
6283 enum insn_code new_icode;
6284 enum machine_mode new_mode;
6285
6286 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6287 REGNO (second_reload_reg)))
6288 oldequiv = old, real_oldequiv = real_old;
6289 else
6290 {
6291 new_icode = reload_in_optab[(int) mode];
6292 if (new_icode != CODE_FOR_nothing
6293 && ((insn_operand_predicate[(int) new_icode][0]
6294 && ! ((*insn_operand_predicate[(int) new_icode][0])
6295 (reloadreg, mode)))
6296 || (insn_operand_predicate[(int) new_icode][1]
6297 && ! ((*insn_operand_predicate[(int) new_icode][1])
6298 (real_oldequiv, mode)))))
6299 new_icode = CODE_FOR_nothing;
6300
6301 if (new_icode == CODE_FOR_nothing)
6302 new_mode = mode;
6303 else
6304 new_mode = insn_operand_mode[(int) new_icode][2];
6305
6306 if (GET_MODE (second_reload_reg) != new_mode)
6307 {
6308 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6309 new_mode))
6310 oldequiv = old, real_oldequiv = real_old;
6311 else
6312 second_reload_reg
6313 = gen_rtx (REG, new_mode,
6314 REGNO (second_reload_reg));
6315 }
6316 }
6317 }
6318 }
6319
6320 /* If we still need a secondary reload register, check
6321 to see if it is being used as a scratch or intermediate
6322 register and generate code appropriately. If we need
6323 a scratch register, use REAL_OLDEQUIV since the form of
6324 the insn may depend on the actual address if it is
6325 a MEM. */
6326
6327 if (second_reload_reg)
6328 {
6329 if (icode != CODE_FOR_nothing)
6330 {
6331 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6332 second_reload_reg));
6333 special = 1;
6334 }
6335 else
6336 {
6337 /* See if we need a scratch register to load the
6338 intermediate register (a tertiary reload). */
6339 enum insn_code tertiary_icode
6340 = reload_secondary_in_icode[secondary_reload];
6341
6342 if (tertiary_icode != CODE_FOR_nothing)
6343 {
6344 rtx third_reload_reg
6345 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6346
6347 emit_insn ((GEN_FCN (tertiary_icode)
6348 (second_reload_reg, real_oldequiv,
6349 third_reload_reg)));
6350 }
6351 else
6352 gen_reload (second_reload_reg, oldequiv,
6353 reload_opnum[j],
6354 reload_when_needed[j]);
6355
6356 oldequiv = second_reload_reg;
6357 }
6358 }
6359 }
6360 #endif
6361
6362 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6363 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6364 reload_when_needed[j]);
6365
6366 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6367 /* We may have to make a REG_DEAD note for the secondary reload
6368 register in the insns we just made. Find the last insn that
6369 mentioned the register. */
6370 if (! special && second_reload_reg
6371 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6372 {
6373 rtx prev;
6374
6375 for (prev = get_last_insn (); prev;
6376 prev = PREV_INSN (prev))
6377 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6378 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6379 PATTERN (prev)))
6380 {
6381 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6382 second_reload_reg,
6383 REG_NOTES (prev));
6384 break;
6385 }
6386 }
6387 #endif
6388 }
6389
6390 this_reload_insn = get_last_insn ();
6391 /* End this sequence. */
6392 *where = get_insns ();
6393 end_sequence ();
6394 }
6395
6396 /* Add a note saying the input reload reg
6397 dies in this insn, if anyone cares. */
6398 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6399 if (old != 0
6400 && reload_reg_rtx[j] != old
6401 && reload_reg_rtx[j] != 0
6402 && reload_out[j] == 0
6403 && ! reload_inherited[j]
6404 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6405 {
6406 register rtx reloadreg = reload_reg_rtx[j];
6407
6408 #if 0
6409 /* We can't abort here because we need to support this for sched.c.
6410 It's not terrible to miss a REG_DEAD note, but we should try
6411 to figure out how to do this correctly. */
6412 /* The code below is incorrect for address-only reloads. */
6413 if (reload_when_needed[j] != RELOAD_OTHER
6414 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6415 abort ();
6416 #endif
6417
6418 /* Add a death note to this insn, for an input reload. */
6419
6420 if ((reload_when_needed[j] == RELOAD_OTHER
6421 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6422 && ! dead_or_set_p (insn, reloadreg))
6423 REG_NOTES (insn)
6424 = gen_rtx (EXPR_LIST, REG_DEAD,
6425 reloadreg, REG_NOTES (insn));
6426 }
6427
6428 /* When we inherit a reload, the last marked death of the reload reg
6429 may no longer really be a death. */
6430 if (reload_reg_rtx[j] != 0
6431 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6432 && reload_inherited[j])
6433 {
6434 /* Handle inheriting an output reload.
6435 Remove the death note from the output reload insn. */
6436 if (reload_spill_index[j] >= 0
6437 && GET_CODE (reload_in[j]) == REG
6438 && spill_reg_store[reload_spill_index[j]] != 0
6439 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6440 REG_DEAD, REGNO (reload_reg_rtx[j])))
6441 remove_death (REGNO (reload_reg_rtx[j]),
6442 spill_reg_store[reload_spill_index[j]]);
6443 /* Likewise for input reloads that were inherited. */
6444 else if (reload_spill_index[j] >= 0
6445 && GET_CODE (reload_in[j]) == REG
6446 && spill_reg_store[reload_spill_index[j]] == 0
6447 && reload_inheritance_insn[j] != 0
6448 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6449 REGNO (reload_reg_rtx[j])))
6450 remove_death (REGNO (reload_reg_rtx[j]),
6451 reload_inheritance_insn[j]);
6452 else
6453 {
6454 rtx prev;
6455
6456 /* We got this register from find_equiv_reg.
6457 Search back for its last death note and get rid of it.
6458 But don't search back too far.
6459 Don't go past a place where this reg is set,
6460 since a death note before that remains valid. */
6461 for (prev = PREV_INSN (insn);
6462 prev && GET_CODE (prev) != CODE_LABEL;
6463 prev = PREV_INSN (prev))
6464 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6465 && dead_or_set_p (prev, reload_reg_rtx[j]))
6466 {
6467 if (find_regno_note (prev, REG_DEAD,
6468 REGNO (reload_reg_rtx[j])))
6469 remove_death (REGNO (reload_reg_rtx[j]), prev);
6470 break;
6471 }
6472 }
6473 }
6474
6475 /* We might have used find_equiv_reg above to choose an alternate
6476 place from which to reload. If so, and it died, we need to remove
6477 that death and move it to one of the insns we just made. */
6478
6479 if (oldequiv_reg != 0
6480 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6481 {
6482 rtx prev, prev1;
6483
6484 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6485 prev = PREV_INSN (prev))
6486 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6487 && dead_or_set_p (prev, oldequiv_reg))
6488 {
6489 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6490 {
6491 for (prev1 = this_reload_insn;
6492 prev1; prev1 = PREV_INSN (prev1))
6493 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
6494 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6495 PATTERN (prev1)))
6496 {
6497 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6498 oldequiv_reg,
6499 REG_NOTES (prev1));
6500 break;
6501 }
6502 remove_death (REGNO (oldequiv_reg), prev);
6503 }
6504 break;
6505 }
6506 }
6507 #endif
6508
6509 /* If we are reloading a register that was recently stored into
6510 by an output reload, see if we can prove there was
6511 actually no need to store the old value in it. */
6512
6513 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6514 && reload_in[j] != 0
6515 && GET_CODE (reload_in[j]) == REG
6516 #if 0
6517 /* There doesn't seem to be any reason to restrict this to pseudos
6518 and doing so loses in the case where we are copying from a
6519 register of the wrong class. */
6520 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6521 #endif
6522 && spill_reg_store[reload_spill_index[j]] != 0
6523 /* This is unsafe if some other reload uses the same reg first. */
6524 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6525 reload_opnum[j], reload_when_needed[j])
6526 && dead_or_set_p (insn, reload_in[j])
6527 /* This is unsafe if operand occurs more than once in current
6528 insn. Perhaps some occurrences weren't reloaded. */
6529 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6530 delete_output_reload (insn, j,
6531 spill_reg_store[reload_spill_index[j]]);
6532
6533 /* Input-reloading is done. Now do output-reloading,
6534 storing the value from the reload-register after the main insn
6535 if reload_out[j] is nonzero.
6536
6537 ??? At some point we need to support handling output reloads of
6538 JUMP_INSNs or insns that set cc0. */
6539 old = reload_out[j];
6540 if (old != 0
6541 && reload_reg_rtx[j] != old
6542 && reload_reg_rtx[j] != 0)
6543 {
6544 register rtx reloadreg = reload_reg_rtx[j];
6545 register rtx second_reloadreg = 0;
6546 rtx note, p;
6547 enum machine_mode mode;
6548 int special = 0;
6549
6550 /* An output operand that dies right away does need a reload,
6551 but need not be copied from it. Show the new location in the
6552 REG_UNUSED note. */
6553 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6554 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6555 {
6556 XEXP (note, 0) = reload_reg_rtx[j];
6557 continue;
6558 }
6559 /* Likewise for a SUBREG of an operand that dies. */
6560 else if (GET_CODE (old) == SUBREG
6561 && GET_CODE (SUBREG_REG (old)) == REG
6562 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6563 SUBREG_REG (old))))
6564 {
6565 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6566 reload_reg_rtx[j]);
6567 continue;
6568 }
6569 else if (GET_CODE (old) == SCRATCH)
6570 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6571 but we don't want to make an output reload. */
6572 continue;
6573
6574 #if 0
6575 /* Strip off of OLD any size-increasing SUBREGs such as
6576 (SUBREG:SI foo:QI 0). */
6577
6578 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6579 && (GET_MODE_SIZE (GET_MODE (old))
6580 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6581 old = SUBREG_REG (old);
6582 #endif
6583
6584 /* If this is a JUMP_INSN, we can't support output reloads yet. */
6585 if (GET_CODE (insn) == JUMP_INSN)
6586 abort ();
6587
6588 if (reload_when_needed[j] == RELOAD_OTHER)
6589 start_sequence ();
6590 else
6591 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6592
6593 /* Determine the mode to reload in.
6594 See comments above (for input reloading). */
6595
6596 mode = GET_MODE (old);
6597 if (mode == VOIDmode)
6598 {
6599 /* VOIDmode should never happen for an output. */
6600 if (asm_noperands (PATTERN (insn)) < 0)
6601 /* It's the compiler's fault. */
6602 fatal_insn ("VOIDmode on an output", insn);
6603 error_for_asm (insn, "output operand is constant in `asm'");
6604 /* Prevent crash--use something we know is valid. */
6605 mode = word_mode;
6606 old = gen_rtx (REG, mode, REGNO (reloadreg));
6607 }
6608
6609 if (GET_MODE (reloadreg) != mode)
6610 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6611
6612 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6613
6614 /* If we need two reload regs, set RELOADREG to the intermediate
6615 one, since it will be stored into OLD. We might need a secondary
6616 register only for an input reload, so check again here. */
6617
6618 if (reload_secondary_out_reload[j] >= 0)
6619 {
6620 rtx real_old = old;
6621
6622 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6623 && reg_equiv_mem[REGNO (old)] != 0)
6624 real_old = reg_equiv_mem[REGNO (old)];
6625
6626 if (SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6627 mode, real_old)
6628 != NO_REGS)
6629 {
6630 second_reloadreg = reloadreg;
6631 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6632
6633 /* See if RELOADREG is to be used as a scratch register
6634 or as an intermediate register. */
6635 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6636 {
6637 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6638 (real_old, second_reloadreg, reloadreg)));
6639 special = 1;
6640 }
6641 else
6642 {
6643 /* See if we need both a scratch and intermediate reload
6644 register. */
6645
6646 int secondary_reload = reload_secondary_out_reload[j];
6647 enum insn_code tertiary_icode
6648 = reload_secondary_out_icode[secondary_reload];
6649
6650 if (GET_MODE (reloadreg) != mode)
6651 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6652
6653 if (tertiary_icode != CODE_FOR_nothing)
6654 {
6655 rtx third_reloadreg
6656 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6657 rtx tem;
6658
6659 /* Copy the primary reload reg to the secondary reload reg
6660 (note that the two have been swapped above), then copy the
6661 secondary reload reg to OLD using our insn. */
6662
6663 /* If REAL_OLD is a paradoxical SUBREG, remove it
6664 and try to put the opposite SUBREG on
6665 RELOADREG. */
6666 if (GET_CODE (real_old) == SUBREG
6667 && (GET_MODE_SIZE (GET_MODE (real_old))
6668 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6669 && 0 != (tem = gen_lowpart_common
6670 (GET_MODE (SUBREG_REG (real_old)),
6671 reloadreg)))
6672 real_old = SUBREG_REG (real_old), reloadreg = tem;
6673
6674 gen_reload (reloadreg, second_reloadreg,
6675 reload_opnum[j], reload_when_needed[j]);
6676 emit_insn ((GEN_FCN (tertiary_icode)
6677 (real_old, reloadreg, third_reloadreg)));
6678 special = 1;
6679 }
6680
6681 else
6682 /* Copy between the reload regs here and then to
6683 OUT later. */
6684
6685 gen_reload (reloadreg, second_reloadreg,
6686 reload_opnum[j], reload_when_needed[j]);
6687 }
6688 }
6689 }
6690 #endif
6691
6692 /* Output the last reload insn. */
6693 if (! special)
6694 gen_reload (old, reloadreg, reload_opnum[j],
6695 reload_when_needed[j]);
6696
6697 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6698 /* If final will look at death notes for this reg,
6699 put one on the last output-reload insn to use it. Similarly
6700 for any secondary register. */
6701 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6702 for (p = get_last_insn (); p; p = PREV_INSN (p))
6703 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6704 && reg_overlap_mentioned_for_reload_p (reloadreg,
6705 PATTERN (p)))
6706 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6707 reloadreg, REG_NOTES (p));
6708
6709 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6710 if (! special && second_reloadreg
6711 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6712 for (p = get_last_insn (); p; p = PREV_INSN (p))
6713 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6714 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6715 PATTERN (p)))
6716 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6717 second_reloadreg, REG_NOTES (p));
6718 #endif
6719 #endif
6720 /* Look at all insns we emitted, just to be safe. */
6721 for (p = get_insns (); p; p = NEXT_INSN (p))
6722 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6723 {
6724 /* If this output reload doesn't come from a spill reg,
6725 clear any memory of reloaded copies of the pseudo reg.
6726 If this output reload comes from a spill reg,
6727 reg_has_output_reload will make this do nothing. */
6728 note_stores (PATTERN (p), forget_old_reloads_1);
6729
6730 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6731 && reload_spill_index[j] >= 0)
6732 new_spill_reg_store[reload_spill_index[j]] = p;
6733 }
6734
6735 if (reload_when_needed[j] == RELOAD_OTHER)
6736 {
6737 emit_insns (other_output_reload_insns[reload_opnum[j]]);
6738 other_output_reload_insns[reload_opnum[j]] = get_insns ();
6739 }
6740 else
6741 output_reload_insns[reload_opnum[j]] = get_insns ();
6742
6743 end_sequence ();
6744 }
6745 }
6746
6747 /* Now write all the insns we made for reloads in the order expected by
6748 the allocation functions. Prior to the insn being reloaded, we write
6749 the following reloads:
6750
6751 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6752
6753 RELOAD_OTHER reloads.
6754
6755 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
6756 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
6757 RELOAD_FOR_INPUT reload for the operand.
6758
6759 RELOAD_FOR_OPADDR_ADDRS reloads.
6760
6761 RELOAD_FOR_OPERAND_ADDRESS reloads.
6762
6763 After the insn being reloaded, we write the following:
6764
6765 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
6766 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
6767 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
6768 reloads for the operand. The RELOAD_OTHER output reloads are
6769 output in descending order by reload number. */
6770
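/* Worked example (illustrative only): for an insn whose operand 0 needs
   an input reload with a reloaded address and also an output reload,
   the calls below lay out the stream as

       ... RELOAD_FOR_INPADDR_ADDRESS / RELOAD_FOR_INPUT_ADDRESS insns ...
       ... the RELOAD_FOR_INPUT reload for operand 0 ...
       ... any operand address reloads ...
       INSN
       ... RELOAD_FOR_OUTADDR_ADDRESS / RELOAD_FOR_OUTPUT_ADDRESS insns ...
       ... the RELOAD_FOR_OUTPUT reload for operand 0 ...
       ... any RELOAD_OTHER output reloads for operand 0 ...  */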
6771 emit_insns_before (other_input_address_reload_insns, before_insn);
6772 emit_insns_before (other_input_reload_insns, before_insn);
6773
6774 for (j = 0; j < reload_n_operands; j++)
6775 {
6776 emit_insns_before (inpaddr_address_reload_insns[j], before_insn);
6777 emit_insns_before (input_address_reload_insns[j], before_insn);
6778 emit_insns_before (input_reload_insns[j], before_insn);
6779 }
6780
6781 emit_insns_before (other_operand_reload_insns, before_insn);
6782 emit_insns_before (operand_reload_insns, before_insn);
6783
6784 for (j = 0; j < reload_n_operands; j++)
6785 {
6786 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
6787 emit_insns_before (output_address_reload_insns[j], following_insn);
6788 emit_insns_before (output_reload_insns[j], following_insn);
6789 emit_insns_before (other_output_reload_insns[j], following_insn);
6790 }
6791
6792 /* Move death notes from INSN
6793 to output-operand-address and output reload insns. */
6794 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6795 {
6796 rtx insn1;
6797 /* Loop over those insns, last ones first. */
6798 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6799 insn1 = PREV_INSN (insn1))
6800 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6801 {
6802 rtx source = SET_SRC (PATTERN (insn1));
6803 rtx dest = SET_DEST (PATTERN (insn1));
6804
6805 /* The note we will examine next. */
6806 rtx reg_notes = REG_NOTES (insn);
6807 /* The place that pointed to this note. */
6808 rtx *prev_reg_note = &REG_NOTES (insn);
6809
6810 /* If the note is for something used in the source of this
6811 reload insn, or in the output address, move the note. */
6812 while (reg_notes)
6813 {
6814 rtx next_reg_notes = XEXP (reg_notes, 1);
6815 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6816 && GET_CODE (XEXP (reg_notes, 0)) == REG
6817 && ((GET_CODE (dest) != REG
6818 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6819 dest))
6820 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6821 source)))
6822 {
6823 *prev_reg_note = next_reg_notes;
6824 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6825 REG_NOTES (insn1) = reg_notes;
6826 }
6827 else
6828 prev_reg_note = &XEXP (reg_notes, 1);
6829
6830 reg_notes = next_reg_notes;
6831 }
6832 }
6833 }
6834 #endif
6835
6836 /* For all the spill regs newly reloaded in this instruction,
6837 record what they were reloaded from, so subsequent instructions
6838 can inherit the reloads.
6839
6840 Update spill_reg_store for the reloads of this insn.
6841 Copy the elements that were updated in the loop above. */
6842
6843 for (j = 0; j < n_reloads; j++)
6844 {
6845 register int r = reload_order[j];
6846 register int i = reload_spill_index[r];
6847
6848 /* I is nonneg if this reload used one of the spill regs.
6849 If reload_reg_rtx[r] is 0, this is an optional reload
6850 that we opted to ignore. */
6851
6852 if (i >= 0 && reload_reg_rtx[r] != 0)
6853 {
6854 int nr
6855 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6856 int k;
6857 int part_reaches_end = 0;
6858 int all_reaches_end = 1;
6859
6860 /* For a multi register reload, we need to check if all or part
6861 of the value lives to the end. */
6862 for (k = 0; k < nr; k++)
6863 {
6864 if (reload_reg_reaches_end_p (spill_regs[i] + k, reload_opnum[r],
6865 reload_when_needed[r]))
6866 part_reaches_end = 1;
6867 else
6868 all_reaches_end = 0;
6869 }
6870
6871 /* Ignore reloads that don't reach the end of the insn in
6872 their entirety. */
6873 if (all_reaches_end)
6874 {
6875 /* First, clear out memory of what used to be in this spill reg.
6876 If consecutive registers are used, clear them all. */
6877
6878 for (k = 0; k < nr; k++)
6879 {
6880 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6881 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6882 }
6883
6884 /* Maybe the spill reg contains a copy of reload_out. */
6885 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6886 {
6887 register int nregno = REGNO (reload_out[r]);
6888 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6889 : HARD_REGNO_NREGS (nregno,
6890 GET_MODE (reload_reg_rtx[r])));
6891
6892 spill_reg_store[i] = new_spill_reg_store[i];
6893 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6894
6895 /* If NREGNO is a hard register, it may occupy more than
6896 one register. If it does, say what is in the
6897 rest of the registers assuming that both registers
6898 agree on how many words the object takes. If not,
6899 invalidate the subsequent registers. */
6900
6901 if (nregno < FIRST_PSEUDO_REGISTER)
6902 for (k = 1; k < nnr; k++)
6903 reg_last_reload_reg[nregno + k]
6904 = (nr == nnr
6905 ? gen_rtx (REG,
6906 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6907 REGNO (reload_reg_rtx[r]) + k)
6908 : 0);
6909
6910 /* Now do the inverse operation. */
6911 for (k = 0; k < nr; k++)
6912 {
6913 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6914 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6915 ? nregno
6916 : nregno + k);
6917 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6918 }
6919 }
6920
6921 /* Maybe the spill reg contains a copy of reload_in. Only do
6922 something if there will not be an output reload for
6923 the register being reloaded. */
6924 else if (reload_out[r] == 0
6925 && reload_in[r] != 0
6926 && ((GET_CODE (reload_in[r]) == REG
6927 && ! reg_has_output_reload[REGNO (reload_in[r])])
6928 || (GET_CODE (reload_in_reg[r]) == REG
6929 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
6930 {
6931 register int nregno;
6932 int nnr;
6933
6934 if (GET_CODE (reload_in[r]) == REG)
6935 nregno = REGNO (reload_in[r]);
6936 else
6937 nregno = REGNO (reload_in_reg[r]);
6938
6939 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6940 : HARD_REGNO_NREGS (nregno,
6941 GET_MODE (reload_reg_rtx[r])));
6942
6943 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6944
6945 if (nregno < FIRST_PSEUDO_REGISTER)
6946 for (k = 1; k < nnr; k++)
6947 reg_last_reload_reg[nregno + k]
6948 = (nr == nnr
6949 ? gen_rtx (REG,
6950 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6951 REGNO (reload_reg_rtx[r]) + k)
6952 : 0);
6953
6954 /* Unless we inherited this reload, show we haven't
6955 recently done a store. */
6956 if (! reload_inherited[r])
6957 spill_reg_store[i] = 0;
6958
6959 for (k = 0; k < nr; k++)
6960 {
6961 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6962 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6963 ? nregno
6964 : nregno + k);
6965 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6966 = insn;
6967 }
6968 }
6969 }
6970
6971 /* However, if part of the reload reaches the end, then we must
6972 invalidate the old info for the part that survives to the end. */
6973 else if (part_reaches_end)
6974 {
6975 for (k = 0; k < nr; k++)
6976 if (reload_reg_reaches_end_p (spill_regs[i] + k,
6977 reload_opnum[r],
6978 reload_when_needed[r]))
6979 {
6980 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6981 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6982 }
6983 }
6984 }
6985
6986 /* The following if-statement was #if 0'd in 1.34 (or before...).
6987 It's reenabled in 1.35 because supposedly nothing else
6988 deals with this problem. */
6989
6990 /* If a register gets output-reloaded from a non-spill register,
6991 that invalidates any previous reloaded copy of it.
6992 But forget_old_reloads_1 won't get to see it, because
6993 it thinks only about the original insn. So invalidate it here. */
6994 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6995 {
6996 register int nregno = REGNO (reload_out[r]);
6997 if (nregno >= FIRST_PSEUDO_REGISTER)
6998 reg_last_reload_reg[nregno] = 0;
6999 else
7000 {
7001 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
7002
7003 while (num_regs-- > 0)
7004 reg_last_reload_reg[nregno + num_regs] = 0;
7005 }
7006 }
7007 }
7008 }
7009 \f
7010 /* Emit code to perform a reload from IN (which may be a reload register) to
7011 OUT (which may also be a reload register). IN or OUT is from operand
7012 OPNUM with reload type TYPE.
7013
7014 Returns first insn emitted. */
7015
7016 rtx
7017 gen_reload (out, in, opnum, type)
7018 rtx out;
7019 rtx in;
7020 int opnum;
7021 enum reload_type type;
7022 {
7023 rtx last = get_last_insn ();
7024 rtx tem;
7025
7026 /* If IN is a paradoxical SUBREG, remove it and try to put the
7027 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7028 if (GET_CODE (in) == SUBREG
7029 && (GET_MODE_SIZE (GET_MODE (in))
7030 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7031 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7032 in = SUBREG_REG (in), out = tem;
7033 else if (GET_CODE (out) == SUBREG
7034 && (GET_MODE_SIZE (GET_MODE (out))
7035 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7036 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7037 out = SUBREG_REG (out), in = tem;
7038
7039 /* How to do this reload can get quite tricky. Normally, we are being
7040 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7041 register that didn't get a hard register. In that case we can just
7042 call emit_move_insn.
7043
7044 We can also be asked to reload a PLUS that adds a register or a MEM to
7045 another register, constant or MEM. This can occur during frame pointer
7046 elimination and while reloading addresses. This case is handled by
7047 trying to emit a single insn to perform the add. If it is not valid,
7048 we use a two insn sequence.
7049
7050 Finally, we could be called to handle an 'o' constraint by putting
7051 an address into a register. In that case, we first try to do this
7052 with a named pattern of "reload_load_address". If no such pattern
7053 exists, we just emit a SET insn and hope for the best (it will normally
7054 be valid on machines that use 'o').
7055
7056 This entire process is made complex because reload will never
7057 process the insns we generate here and so we must ensure that
7058 they will fit their constraints and also by the fact that parts of
7059 IN might be being reloaded separately and replaced with spill registers.
7060 Because of this, we are, in some sense, just guessing the right approach
7061 here. The one listed above seems to work.
7062
7063 ??? At some point, this whole thing needs to be rethought. */
7064
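/* Illustrative sketch, not tied to any particular target: reloading
   IN = (plus:SI (reg:SI FP) (const_int 8)) into OUT = (reg:SI R)
   first tries the single insn

       (set (reg:SI R) (plus:SI (reg:SI FP) (const_int 8)))

   and keeps it if recog and constrain_operands accept it.  Failing
   that, the fallback below emits roughly

       (set (reg:SI R) (const_int 8))
       (set (reg:SI R) (plus:SI (reg:SI R) (reg:SI FP)))

   via a recursive gen_reload of the constant followed by
   gen_add2_insn.  */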
7065 if (GET_CODE (in) == PLUS
7066 && (GET_CODE (XEXP (in, 0)) == REG
7067 || GET_CODE (XEXP (in, 0)) == SUBREG
7068 || GET_CODE (XEXP (in, 0)) == MEM)
7069 && (GET_CODE (XEXP (in, 1)) == REG
7070 || GET_CODE (XEXP (in, 1)) == SUBREG
7071 || CONSTANT_P (XEXP (in, 1))
7072 || GET_CODE (XEXP (in, 1)) == MEM))
7073 {
7074 /* We need to compute the sum of a register or a MEM and another
7075 register, constant, or MEM, and put it into the reload
7076 register. The best possible way of doing this is if the machine
7077 has a three-operand ADD insn that accepts the required operands.
7078
7079 The simplest approach is to try to generate such an insn and see if it
7080 is recognized and matches its constraints. If so, it can be used.
7081
7082 It might be better not to actually emit the insn unless it is valid,
7083 but we need to pass the insn as an operand to `recog' and
7084 `insn_extract' and it is simpler to emit and then delete the insn if
7085 not valid than to dummy things up. */
7086
7087 rtx op0, op1, tem, insn;
7088 int code;
7089
7090 op0 = find_replacement (&XEXP (in, 0));
7091 op1 = find_replacement (&XEXP (in, 1));
7092
7093 /* Since constraint checking is strict, commutativity won't be
7094 checked, so we need to do that here to avoid spurious failure
7095 if the add instruction is two-address and the second operand
7096 of the add is the same as the reload reg, which is frequently
7097 the case. If the insn would be A = B + A, rearrange it so
7098 it will be A = A + B as constrain_operands expects. */
7099
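/* For instance (illustrative), if OUT is (reg:SI 3) and IN is
   (plus:SI (reg:SI 7) (reg:SI 3)), the operands are swapped so that
   the insn tried is (set (reg:SI 3) (plus:SI (reg:SI 3) (reg:SI 7))),
   the "A = A + B" form a two-address add can match.  */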
7100 if (GET_CODE (XEXP (in, 1)) == REG
7101 && REGNO (out) == REGNO (XEXP (in, 1)))
7102 tem = op0, op0 = op1, op1 = tem;
7103
7104 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7105 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
7106
7107 insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
7108 code = recog_memoized (insn);
7109
7110 if (code >= 0)
7111 {
7112 insn_extract (insn);
7113 /* We want constrain_operands to treat this insn strictly in
7114 its validity determination, i.e., the way it would after reload
7115 has completed. */
7116 if (constrain_operands (code, 1))
7117 return insn;
7118 }
7119
7120 delete_insns_since (last);
7121
7122 /* If that failed, we must use a conservative two-insn sequence:
7123 use a move to copy the constant, MEM, or pseudo register to the
7124 reload register, since "move" can handle an arbitrary operand
7125 where add, in general, cannot. Then add the registers.
7126
7127 If there is another way to do this for a specific machine, a
7128 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7129 we emit below. */
7130
7131 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
7132 || (GET_CODE (op1) == REG
7133 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
7134 tem = op0, op0 = op1, op1 = tem;
7135
7136 gen_reload (out, op0, opnum, type);
7137
7138 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7139 This fixes a problem on the 32K where the stack pointer cannot
7140 be used as an operand of an add insn. */
7141
7142 if (rtx_equal_p (op0, op1))
7143 op1 = out;
7144
7145 insn = emit_insn (gen_add2_insn (out, op1));
7146
7147 /* If that failed, copy the address register to the reload register.
7148 Then add the constant to the reload register. */
7149
7150 code = recog_memoized (insn);
7151
7152 if (code >= 0)
7153 {
7154 insn_extract (insn);
7155 /* We want constrain_operands to treat this insn strictly in
7156 its validity determination, i.e., the way it would after reload
7157 has completed. */
7158 if (constrain_operands (code, 1))
7159 return insn;
7160 }
7161
7162 delete_insns_since (last);
7163
7164 gen_reload (out, op1, opnum, type);
7165 emit_insn (gen_add2_insn (out, op0));
7166 }
7167
7168 #ifdef SECONDARY_MEMORY_NEEDED
7169 /* If we need a memory location to do the move, do it that way. */
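/* Illustrative case: on a target where, say, copies between floating
   point and general registers must pass through memory, the two
   gen_reload calls below emit a store of IN into the secondary memory
   slot followed by a load from that slot into OUT.  */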
7170 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
7171 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
7172 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
7173 REGNO_REG_CLASS (REGNO (out)),
7174 GET_MODE (out)))
7175 {
7176 /* Get the memory to use and rewrite both registers to its mode. */
7177 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7178
7179 if (GET_MODE (loc) != GET_MODE (out))
7180 out = gen_rtx (REG, GET_MODE (loc), REGNO (out));
7181
7182 if (GET_MODE (loc) != GET_MODE (in))
7183 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
7184
7185 gen_reload (loc, in, opnum, type);
7186 gen_reload (out, loc, opnum, type);
7187 }
7188 #endif
7189
7190 /* If IN is a simple operand, use gen_move_insn. */
7191 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
7192 emit_insn (gen_move_insn (out, in));
7193
7194 #ifdef HAVE_reload_load_address
7195 else if (HAVE_reload_load_address)
7196 emit_insn (gen_reload_load_address (out, in));
7197 #endif
7198
7199 /* Otherwise, just write (set OUT IN) and hope for the best. */
7200 else
7201 emit_insn (gen_rtx (SET, VOIDmode, out, in));
7202
7203 /* Return the first insn emitted.
7204 We cannot just return get_last_insn, because there may have
7205 been multiple instructions emitted. Also note that gen_move_insn may
7206 emit more than one insn itself, so we cannot assume that there is one
7207 insn emitted per emit_insn_before call. */
7208
7209 return last ? NEXT_INSN (last) : get_insns ();
7210 }
7211 \f
7212 /* Delete a previously made output-reload
7213 whose result we now believe is not needed.
7214 First we double-check.
7215
7216 INSN is the insn now being processed.
7217 OUTPUT_RELOAD_INSN is the insn of the output reload.
7218 J is the reload-number for this insn. */
7219
7220 static void
7221 delete_output_reload (insn, j, output_reload_insn)
7222 rtx insn;
7223 int j;
7224 rtx output_reload_insn;
7225 {
7226 register rtx i1;
7227
7228 /* Get the raw pseudo-register referred to. */
7229
7230 rtx reg = reload_in[j];
7231 while (GET_CODE (reg) == SUBREG)
7232 reg = SUBREG_REG (reg);
7233
7234 /* If the pseudo-reg we are reloading is no longer referenced
7235 anywhere between the store into it and here,
7236 and no jumps or labels intervene, then the value can get
7237 here through the reload reg alone.
7238 Otherwise, give up--return. */
7239 for (i1 = NEXT_INSN (output_reload_insn);
7240 i1 != insn; i1 = NEXT_INSN (i1))
7241 {
7242 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7243 return;
7244 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7245 && reg_mentioned_p (reg, PATTERN (i1)))
7246 return;
7247 }
7248
7249 if (cannot_omit_stores[REGNO (reg)])
7250 return;
7251
7252 /* If this insn will store in the pseudo again,
7253 the previous store can be removed. */
7254 if (reload_out[j] == reload_in[j])
7255 delete_insn (output_reload_insn);
7256
7257 /* See if the pseudo reg has been completely replaced
7258 with reload regs. If so, delete the store insn
7259 and forget we had a stack slot for the pseudo. */
7260 else if (reg_n_deaths[REGNO (reg)] == 1
7261 && reg_basic_block[REGNO (reg)] >= 0
7262 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7263 {
7264 rtx i2;
7265
7266 /* We know that it was used only between here
7267 and the beginning of the current basic block.
7268 (We also know that the last use before INSN was
7269 the output reload we are thinking of deleting, but never mind that.)
7270 Search that range; see if any ref remains. */
7271 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7272 {
7273 rtx set = single_set (i2);
7274
7275 /* Uses which just store in the pseudo don't count,
7276 since if they are the only uses, they are dead. */
7277 if (set != 0 && SET_DEST (set) == reg)
7278 continue;
7279 if (GET_CODE (i2) == CODE_LABEL
7280 || GET_CODE (i2) == JUMP_INSN)
7281 break;
7282 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7283 && reg_mentioned_p (reg, PATTERN (i2)))
7284 /* Some other ref remains;
7285 we can't do anything. */
7286 return;
7287 }
7288
7289 /* Delete the now-dead stores into this pseudo. */
7290 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7291 {
7292 rtx set = single_set (i2);
7293
7294 if (set != 0 && SET_DEST (set) == reg)
7295 {
7296 /* This might be a basic block head,
7297 thus don't use delete_insn. */
7298 PUT_CODE (i2, NOTE);
7299 NOTE_SOURCE_FILE (i2) = 0;
7300 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
7301 }
7302 if (GET_CODE (i2) == CODE_LABEL
7303 || GET_CODE (i2) == JUMP_INSN)
7304 break;
7305 }
7306
7307 /* For the debugging info,
7308 say the pseudo lives in this reload reg. */
7309 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
7310 alter_reg (REGNO (reg), -1);
7311 }
7312 }
7313 \f
7314 /* Output reload-insns to reload VALUE into RELOADREG.
7315 VALUE is an autoincrement or autodecrement RTX whose operand
7316 is a register or memory location;
7317 so reloading involves incrementing that location.
7318
7319 INC_AMOUNT is the number to increment or decrement by (always positive).
7320 This cannot be deduced from VALUE. */
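/* For example (illustrative), reloading (post_inc:SI (reg:SI 6)) with
   INC_AMOUNT 4 into (reg:SI 3) copies (reg 6) into (reg 3) for use as
   the address and then arranges for (reg 6) to end up 4 larger, while
   for (pre_dec:SI (reg:SI 6)) the location is decremented first and
   the new value is what ends up in (reg 3).  */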
7321
7322 static void
7323 inc_for_reload (reloadreg, value, inc_amount)
7324 rtx reloadreg;
7325 rtx value;
7326 int inc_amount;
7327 {
7328 /* REG or MEM to be copied and incremented. */
7329 rtx incloc = XEXP (value, 0);
7330 /* Nonzero if increment after copying. */
7331 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
7332 rtx last;
7333 rtx inc;
7334 rtx add_insn;
7335 int code;
7336
7337 /* No hard register is equivalent to this register after
7338 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7339 we could inc/dec that register as well (maybe even using it for
7340 the source), but I'm not sure it's worth worrying about. */
7341 if (GET_CODE (incloc) == REG)
7342 reg_last_reload_reg[REGNO (incloc)] = 0;
7343
7344 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7345 inc_amount = - inc_amount;
7346
7347 inc = GEN_INT (inc_amount);
7348
7349 /* If this is post-increment, first copy the location to the reload reg. */
7350 if (post)
7351 emit_insn (gen_move_insn (reloadreg, incloc));
7352
7353 /* See if we can directly increment INCLOC. Use a method similar to that
7354 in gen_reload. */
7355
7356 last = get_last_insn ();
7357 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
7358 gen_rtx (PLUS, GET_MODE (incloc),
7359 incloc, inc)));
7360
7361 code = recog_memoized (add_insn);
7362 if (code >= 0)
7363 {
7364 insn_extract (add_insn);
7365 if (constrain_operands (code, 1))
7366 {
7367 /* If this is a pre-increment and we have incremented the value
7368 where it lives, copy the incremented value to RELOADREG to
7369 be used as an address. */
7370
7371 if (! post)
7372 emit_insn (gen_move_insn (reloadreg, incloc));
7373
7374 return;
7375 }
7376 }
7377
7378 delete_insns_since (last);
7379
7380 /* If we couldn't do the increment directly, we must increment in RELOADREG.
7381 The way we do this depends on whether this is pre- or post-increment.
7382 For pre-increment, copy INCLOC to the reload register, increment it
7383 there, then save back. */
7384
7385 if (! post)
7386 {
7387 emit_insn (gen_move_insn (reloadreg, incloc));
7388 emit_insn (gen_add2_insn (reloadreg, inc));
7389 emit_insn (gen_move_insn (incloc, reloadreg));
7390 }
7391 else
7392 {
7393 /* Postincrement.
7394 Because this might be a jump insn or a compare, and because RELOADREG
7395 may not be available after the insn in an input reload, we must do
7396 the incrementation before the insn being reloaded for.
7397
7398 We have already copied INCLOC to RELOADREG. Increment the copy in
7399 RELOADREG, save that back, then decrement RELOADREG so it has
7400 the original value. */
7401
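/* Roughly (illustrative), with INCLOC = (reg 6), RELOADREG = (reg 3),
   INC_AMOUNT = 4, and (reg 3) already holding a copy of (reg 6), this
   emits something like

       (set (reg 3) (plus (reg 3) (const_int 4)))
       (set (reg 6) (reg 3))
       (set (reg 3) (plus (reg 3) (const_int -4)))

   leaving (reg 3) with the pre-increment value for the insn to use.  */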
7402 emit_insn (gen_add2_insn (reloadreg, inc));
7403 emit_insn (gen_move_insn (incloc, reloadreg));
7404 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7405 }
7406
7407 return;
7408 }
7409 \f
7410 /* Return 1 if we are certain that the constraint-string STRING allows
7411 the hard register REG. Return 0 if we can't be sure of this. */
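/* For example (illustrative), given a hard register in GENERAL_REGS,
   the constraint string "g,r" yields 1 because every alternative
   accepts a general register, while "r,m" yields 0 because the second
   alternative does not.  */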
7412
7413 static int
7414 constraint_accepts_reg_p (string, reg)
7415 char *string;
7416 rtx reg;
7417 {
7418 int value = 0;
7419 int regno = true_regnum (reg);
7420 int c;
7421
7422 /* Initialize for first alternative. */
7423 value = 0;
7424 /* Check that each alternative contains `g' or `r'. */
7425 while (1)
7426 switch (c = *string++)
7427 {
7428 case 0:
7429 /* If an alternative lacks `g' or `r', we lose. */
7430 return value;
7431 case ',':
7432 /* If an alternative lacks `g' or `r', we lose. */
7433 if (value == 0)
7434 return 0;
7435 /* Initialize for next alternative. */
7436 value = 0;
7437 break;
7438 case 'g':
7439 case 'r':
7440 /* Any general reg wins for this alternative. */
7441 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7442 value = 1;
7443 break;
7444 default:
7445 /* Any reg in specified class wins for this alternative. */
7446 {
7447 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7448
7449 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7450 value = 1;
7451 }
7452 }
7453 }
7454 \f
7455 /* Return the number of places FIND appears within X, but don't count
7456 an occurrence if some SET_DEST is FIND. */
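/* For example (illustrative, relying on the usual sharing of REG
   rtx's), in (set (reg 4) (plus (reg 4) (reg 5))) the register
   (reg 4) is counted once: the SET_DEST occurrence is skipped and only
   the use inside the PLUS counts.  */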
7457
7458 static int
7459 count_occurrences (x, find)
7460 register rtx x, find;
7461 {
7462 register int i, j;
7463 register enum rtx_code code;
7464 register char *format_ptr;
7465 int count;
7466
7467 if (x == find)
7468 return 1;
7469 if (x == 0)
7470 return 0;
7471
7472 code = GET_CODE (x);
7473
7474 switch (code)
7475 {
7476 case REG:
7477 case QUEUED:
7478 case CONST_INT:
7479 case CONST_DOUBLE:
7480 case SYMBOL_REF:
7481 case CODE_LABEL:
7482 case PC:
7483 case CC0:
7484 return 0;
7485
7486 case SET:
7487 if (SET_DEST (x) == find)
7488 return count_occurrences (SET_SRC (x), find);
7489 break;
7490 }
7491
7492 format_ptr = GET_RTX_FORMAT (code);
7493 count = 0;
7494
7495 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7496 {
7497 switch (*format_ptr++)
7498 {
7499 case 'e':
7500 count += count_occurrences (XEXP (x, i), find);
7501 break;
7502
7503 case 'E':
7504 if (XVEC (x, i) != NULL)
7505 {
7506 for (j = 0; j < XVECLEN (x, i); j++)
7507 count += count_occurrences (XVECEXP (x, i, j), find);
7508 }
7509 break;
7510 }
7511 }
7512 return count;
7513 }