(reload): Add IN_ADDR to IN_ADDR_ADDR when computing needs since they conflict.
[gcc.git] / gcc / reload1.c
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92-6, 1997 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include <stdio.h>
23 #include "config.h"
24 #include "rtl.h"
25 #include "obstack.h"
26 #include "insn-config.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
29 #include "flags.h"
30 #include "expr.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "reload.h"
34 #include "recog.h"
35 #include "basic-block.h"
36 #include "output.h"
37 #include "real.h"
38
39 /* This file contains the reload pass of the compiler, which is
40 run after register allocation has been done. It checks that
41 each insn is valid (operands required to be in registers really
42 are in registers of the proper class) and fixes up invalid ones
43 by copying values temporarily into registers for the insns
44 that need them.
45
46 The results of register allocation are described by the vector
47 reg_renumber; the insns still contain pseudo regs, but reg_renumber
48 can be used to find which hard reg, if any, a pseudo reg is in.
49
50 The technique we always use is to free up a few hard regs that are
51 called ``reload regs'', and for each place where a pseudo reg
52 must be in a hard reg, copy it temporarily into one of the reload regs.
53
54 All the pseudos that were formerly allocated to the hard regs that
55 are now in use as reload regs must be ``spilled''. This means
56 that they go to other hard regs, or to stack slots if no other
57 available hard regs can be found. Spilling can invalidate more
58 insns, requiring additional need for reloads, so we must keep checking
59 until the process stabilizes.
60
61 For machines with different classes of registers, we must keep track
62 of the register class needed for each reload, and make sure that
63 we allocate enough reload registers of each class.
64
65 The file reload.c contains the code that checks one insn for
66 validity and reports the reloads that it needs. This file
67 is in charge of scanning the entire rtl code, accumulating the
68 reload needs, spilling, assigning reload registers to use for
69 fixing up each insn, and generating the new insns to copy values
70 into the reload registers. */
71
72
73 #ifndef REGISTER_MOVE_COST
74 #define REGISTER_MOVE_COST(x, y) 2
75 #endif
76
77 #ifndef MEMORY_MOVE_COST
78 #define MEMORY_MOVE_COST(x) 4
79 #endif
80 \f
81 /* During reload_as_needed, element N contains a REG rtx for the hard reg
82 into which reg N has been reloaded (perhaps for a previous insn). */
83 static rtx *reg_last_reload_reg;
84
85 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
86 for an output reload that stores into reg N. */
87 static char *reg_has_output_reload;
88
89 /* Indicates which hard regs are reload-registers for an output reload
90 in the current insn. */
91 static HARD_REG_SET reg_is_output_reload;
92
93 /* Element N is the constant value to which pseudo reg N is equivalent,
94 or zero if pseudo reg N is not equivalent to a constant.
95 find_reloads looks at this in order to replace pseudo reg N
96 with the constant it stands for. */
97 rtx *reg_equiv_constant;
98
99 /* Element N is a memory location to which pseudo reg N is equivalent,
100 prior to any register elimination (such as frame pointer to stack
101 pointer). Depending on whether or not it is a valid address, this value
102 is transferred to either reg_equiv_address or reg_equiv_mem. */
103 rtx *reg_equiv_memory_loc;
104
105 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
106 This is used when the address is not valid as a memory address
107 (because its displacement is too big for the machine.) */
108 rtx *reg_equiv_address;
109
110 /* Element N is the memory slot to which pseudo reg N is equivalent,
111 or zero if pseudo reg N is not equivalent to a memory slot. */
112 rtx *reg_equiv_mem;
113
114 /* Widest width in which each pseudo reg is referred to (via subreg). */
115 static int *reg_max_ref_width;
116
117 /* Element N is the insn that initialized reg N from its equivalent
118 constant or memory slot. */
119 static rtx *reg_equiv_init;
120
121 /* During reload_as_needed, element N contains the last pseudo regno
122 reloaded into the Nth reload register. This vector is in parallel
123 with spill_regs. If that pseudo reg occupied more than one register,
124 reg_reloaded_contents points to that pseudo for each spill register in
125 use; all of these must remain set for an inheritance to occur. */
126 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
127
128 /* During reload_as_needed, element N contains the insn for which
129 the Nth reload register was last used. This vector is in parallel
130 with spill_regs, and its contents are significant only when
131 reg_reloaded_contents is significant. */
132 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
133
134 /* Number of spill-regs so far; number of valid elements of spill_regs. */
135 static int n_spills;
136
137 /* In parallel with spill_regs, contains REG rtx's for those regs.
138 Holds the last rtx used for any given reg, or 0 if it has never
139 been used for spilling yet. This rtx is reused, provided it has
140 the proper mode. */
141 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
142
143 /* In parallel with spill_regs, contains nonzero for a spill reg
144 that was stored after the last time it was used.
145 The precise value is the insn generated to do the store. */
146 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
147
148 /* This table is the inverse mapping of spill_regs:
149 indexed by hard reg number,
150 it contains the position of that reg in spill_regs,
151 or -1 for something that is not in spill_regs. */
152 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
153
154 /* This reg set indicates registers that may not be used for retrying global
155 allocation. The registers that may not be used include all spill registers
156 and the frame pointer (if we are using one). */
157 HARD_REG_SET forbidden_regs;
158
159 /* This reg set indicates registers that are not good for spill registers.
160 They will not be used to complete groups of spill registers. This includes
161 all fixed registers, registers that may be eliminated, and, if
162 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
163
164 (spill_reg_order prevents these registers from being used to start a
165 group.) */
166 static HARD_REG_SET bad_spill_regs;
167
168 /* Describes order of use of registers for reloading
169 of spilled pseudo-registers. `spills' is the number of
170 elements that are actually valid; new ones are added at the end. */
171 static short spill_regs[FIRST_PSEUDO_REGISTER];
172
173 /* This reg set indicates those registers that have been used a spill
174 registers. This information is used in reorg.c, to help figure out
175 what registers are live at any point. It is assumed that all spill_regs
176 are dead at every CODE_LABEL. */
177
178 HARD_REG_SET used_spill_regs;
179
180 /* Index of last register assigned as a spill register. We allocate in
181 a round-robin fashion. */
182
183 static int last_spill_reg;
184
185 /* Describes order of preference for putting regs into spill_regs.
186 Contains the numbers of all the hard regs, in order most preferred first.
187 This order is different for each function.
188 It is set up by order_regs_for_reload.
189 Empty elements at the end contain -1. */
190 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
191
192 /* 1 for a hard register that appears explicitly in the rtl
193 (for example, function value registers, special registers
194 used by insns, structure value pointer registers). */
195 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
196
197 /* Indicates if a register was counted against the need for
198 groups. 0 means it can count against max_nongroup instead. */
199 static HARD_REG_SET counted_for_groups;
200
201 /* Indicates if a register was counted against the need for
202 non-groups. 0 means it can become part of a new group.
203 During choose_reload_regs, 1 here means don't use this reg
204 as part of a group, even if it seems to be otherwise ok. */
205 static HARD_REG_SET counted_for_nongroups;
206
207 /* Indexed by pseudo reg number N,
208 says may not delete stores into the real (memory) home of pseudo N.
209 This is set if we already substituted a memory equivalent in some uses,
210 which happens when we have to eliminate the fp from it. */
211 static char *cannot_omit_stores;
212
213 /* Nonzero if indirect addressing is supported on the machine; this means
214 that spilling (REG n) does not require reloading it into a register in
215 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
216 value indicates the level of indirect addressing supported, e.g., two
217 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
218 a hard register. */
219
220 static char spill_indirect_levels;
221
222 /* Nonzero if indirect addressing is supported when the innermost MEM is
223 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
224 which these are valid is the same as spill_indirect_levels, above. */
225
226 char indirect_symref_ok;
227
228 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
229
230 char double_reg_address_ok;
231
232 /* Record the stack slot for each spilled hard register. */
233
234 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
235
236 /* Width allocated so far for that stack slot. */
237
238 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
239
240 /* Indexed by register class and basic block number, nonzero if there is
241 any need for a spill register of that class in that basic block.
242 The pointer is 0 if we did stupid allocation and don't know
243 the structure of basic blocks. */
244
245 char *basic_block_needs[N_REG_CLASSES];
246
247 /* First uid used by insns created by reload in this function.
248 Used in find_equiv_reg. */
249 int reload_first_uid;
250
251 /* Flag set by local-alloc or global-alloc if anything is live in
252 a call-clobbered reg across calls. */
253
254 int caller_save_needed;
255
256 /* The register class to use for a base register when reloading an
257 address. This is normally BASE_REG_CLASS, but it may be different
258 when using SMALL_REGISTER_CLASSES and passing parameters in
259 registers. */
260 enum reg_class reload_address_base_reg_class;
261
262 /* The register class to use for an index register when reloading an
263 address. This is normally INDEX_REG_CLASS, but it may be different
264 when using SMALL_REGISTER_CLASSES and passing parameters in
265 registers. */
266 enum reg_class reload_address_index_reg_class;
267
268 /* Set to 1 while reload_as_needed is operating.
269 Required by some machines to handle any generated moves differently. */
270
271 int reload_in_progress = 0;
272
273 /* These arrays record the insn_code of insns that may be needed to
274 perform input and output reloads of special objects. They provide a
275 place to pass a scratch register. */
276
277 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
278 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
279
280 /* This obstack is used for allocation of rtl during register elimination.
281 The allocated storage can be freed once find_reloads has processed the
282 insn. */
283
284 struct obstack reload_obstack;
285 char *reload_firstobj;
286
287 #define obstack_chunk_alloc xmalloc
288 #define obstack_chunk_free free
289
290 /* List of labels that must never be deleted. */
291 extern rtx forced_labels;
292
293 /* Allocation number table from global register allocation. */
294 extern int *reg_allocno;
295 \f
296 /* This structure is used to record information about register eliminations.
297 Each array entry describes one possible way of eliminating a register
298 in favor of another. If there is more than one way of eliminating a
299 particular register, the most preferred should be specified first. */
300
/* NOTE: only the FROM and TO fields receive static initializers here
   (from ELIMINABLE_REGS or the frame-pointer default below).  The
   can_eliminate flags and the from_rtx/to_rtx fields are filled in at
   the start of each call to reload (); the offset fields are maintained
   during the elimination scan.  */
301 static struct elim_table
302 {
303 int from; /* Register number to be eliminated. */
304 int to; /* Register number used as replacement. */
305 int initial_offset; /* Initial difference between values. */
306 int can_eliminate; /* Non-zero if this elimination can be done. */
307 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
308 insns made by reload. */
309 int offset; /* Current offset between the two regs. */
310 int max_offset; /* Maximum offset between the two regs. */
311 int previous_offset; /* Offset at end of previous insn. */
312 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
313 rtx from_rtx; /* REG rtx for the register to be eliminated.
314 We cannot simply compare the number since
315 we might then spuriously replace a hard
316 register corresponding to a pseudo
317 assigned to the reg to be eliminated. */
318 rtx to_rtx; /* REG rtx for the replacement. */
319 } reg_eliminate[] =
320
321 /* If a set of eliminable registers was specified, define the table from it.
322 Otherwise, default to the normal case of the frame pointer being
323 replaced by the stack pointer. */
324
325 #ifdef ELIMINABLE_REGS
326 ELIMINABLE_REGS;
327 #else
328 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
329 #endif
330
331 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
332
333 /* Record the number of pending eliminations that have an offset not equal
334 to their initial offset. If non-zero, we use a new copy of each
335 replacement result in any insns encountered. */
336 static int num_not_at_initial_offset;
337
338 /* Count the number of registers that we may be able to eliminate. */
339 static int num_eliminable;
340
341 /* For each label, we record the offset of each elimination. If we reach
342 a label by more than one path and an offset differs, we cannot do the
343 elimination. This information is indexed by the number of the label.
344 The first table is an array of flags that records whether we have yet
345 encountered a label and the second table is an array of arrays, one
346 entry in the latter array for each elimination. */
347
348 static char *offsets_known_at;
349 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
350
351 /* Number of labels in the current function. */
352
353 static int num_labels;
354
355 struct hard_reg_n_uses { int regno; int uses; };
356 \f
357 static int possible_group_p PROTO((int, int *));
358 static void count_possible_groups PROTO((int *, enum machine_mode *,
359 int *, int));
360 static int modes_equiv_for_class_p PROTO((enum machine_mode,
361 enum machine_mode,
362 enum reg_class));
363 static void spill_failure PROTO((rtx));
364 static int new_spill_reg PROTO((int, int, int *, int *, int,
365 FILE *));
366 static void delete_dead_insn PROTO((rtx));
367 static void alter_reg PROTO((int, int));
368 static void mark_scratch_live PROTO((rtx));
369 static void set_label_offsets PROTO((rtx, rtx, int));
370 static int eliminate_regs_in_insn PROTO((rtx, int));
371 static void mark_not_eliminable PROTO((rtx, rtx));
372 static int spill_hard_reg PROTO((int, int, FILE *, int));
373 static void scan_paradoxical_subregs PROTO((rtx));
374 static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR));
375 static void order_regs_for_reload PROTO((int));
376 static int compare_spill_regs PROTO((const GENERIC_PTR, const GENERIC_PTR));
377 static void reload_as_needed PROTO((rtx, int));
378 static void forget_old_reloads_1 PROTO((rtx, rtx));
379 static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR));
380 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
381 enum machine_mode));
382 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
383 enum machine_mode));
384 static int reload_reg_free_p PROTO((int, int, enum reload_type));
385 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
386 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
387 static int reloads_conflict PROTO((int, int));
388 static int allocate_reload_reg PROTO((int, rtx, int, int));
389 static void choose_reload_regs PROTO((rtx, rtx));
390 static void merge_assigned_reloads PROTO((rtx));
391 static void emit_reload_insns PROTO((rtx));
392 static void delete_output_reload PROTO((rtx, int, rtx));
393 static void inc_for_reload PROTO((rtx, rtx, int));
394 static int constraint_accepts_reg_p PROTO((char *, rtx));
395 static int count_occurrences PROTO((rtx, rtx));
396 static void reload_cse_invalidate_regno PROTO((int, enum machine_mode, int));
397 static int reload_cse_mem_conflict_p PROTO((rtx, rtx));
398 static void reload_cse_invalidate_mem PROTO((rtx));
399 static void reload_cse_invalidate_rtx PROTO((rtx, rtx));
400 static int reload_cse_regno_equal_p PROTO((int, rtx, enum machine_mode));
401 static int reload_cse_noop_set_p PROTO((rtx, rtx));
402 static void reload_cse_simplify_set PROTO((rtx, rtx));
403 static void reload_cse_check_clobber PROTO((rtx, rtx));
404 static void reload_cse_record_set PROTO((rtx, rtx));
405 \f
406 /* Initialize the reload pass once per compilation. */
407
408 void
409 init_reload ()
410 {
411 register int i;
412
413 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
414 Set spill_indirect_levels to the number of levels such addressing is
415 permitted, zero if it is not permitted at all. */
416
417 register rtx tem
418 = gen_rtx (MEM, Pmode,
419 gen_rtx (PLUS, Pmode,
420 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
421 GEN_INT (4)));
422 spill_indirect_levels = 0;
423
/* Each trip through this loop wraps TEM in one more (MEM ...); stop as
   soon as the result is no longer a valid memory address.  The count of
   successful wrappings is the supported level of indirection. */
424 while (memory_address_p (QImode, tem))
425 {
426 spill_indirect_levels++;
427 tem = gen_rtx (MEM, Pmode, tem);
428 }
429
430 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
431
/* "foo" is a dummy symbol name; only the shape of the rtx matters for
   the address-validity test. */
432 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
433 indirect_symref_ok = memory_address_p (QImode, tem);
434
435 /* See if reg+reg is a valid (and offsettable) address. */
436
437 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
438 {
439 tem = gen_rtx (PLUS, Pmode,
440 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
441 gen_rtx (REG, Pmode, i));
442 /* This way, we make sure that reg+reg is an offsettable address. */
443 tem = plus_constant (tem, 4);
444
/* One hard register that works in combination with the frame pointer
   is enough to set double_reg_address_ok. */
445 if (memory_address_p (QImode, tem))
446 {
447 double_reg_address_ok = 1;
448 break;
449 }
450 }
451
452 /* Initialize obstack for our rtl allocation. */
453 gcc_obstack_init (&reload_obstack);
454 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
455
456 /* Decide which register class should be used when reloading
457 addresses. If we are using SMALL_REGISTER_CLASSES, and any
458 parameters are passed in registers, then we do not want to use
459 those registers when reloading an address. Otherwise, if a
460 function argument needs a reload, we may wind up clobbering
461 another argument to the function which was already computed. If
462 we find a subset class which simply avoids those registers, we
463 use it instead. ??? It would be better to only use the
464 restricted class when we actually are loading function arguments,
465 but that is hard to determine. */
466 reload_address_base_reg_class = BASE_REG_CLASS;
467 reload_address_index_reg_class = INDEX_REG_CLASS;
468 #ifdef SMALL_REGISTER_CLASSES
469 if (SMALL_REGISTER_CLASSES)
470 {
471 int regno;
472 HARD_REG_SET base, index;
473 enum reg_class *p;
474
/* Start from the full base/index class contents and knock out every
   hard register used to pass function arguments. */
475 COPY_HARD_REG_SET (base, reg_class_contents[BASE_REG_CLASS]);
476 COPY_HARD_REG_SET (index, reg_class_contents[INDEX_REG_CLASS]);
477 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
478 {
479 if (FUNCTION_ARG_REGNO_P (regno))
480 {
481 CLEAR_HARD_REG_BIT (base, regno);
482 CLEAR_HARD_REG_BIT (index, regno);
483 }
484 }
485
/* GO_IF_HARD_REG_EQUAL branches to its label argument when the two
   register sets are identical.  So: if no argument register was
   removed from BASE, keep BASE_REG_CLASS (jump to baseok).
   Otherwise scan the subclasses of BASE_REG_CLASS for one whose
   contents exactly match the reduced set; the `continue' is reached
   only when the sets differ, and a hit jumps to `usebase' inside the
   loop body to record the subclass. */
486 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[BASE_REG_CLASS],
487 baseok);
488 for (p = reg_class_subclasses[BASE_REG_CLASS];
489 *p != LIM_REG_CLASSES;
490 p++)
491 {
492 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[*p], usebase);
493 continue;
494 usebase:
495 reload_address_base_reg_class = *p;
496 break;
497 }
498 baseok:;
499
/* Likewise for the index register class. */
500 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[INDEX_REG_CLASS],
501 indexok);
502 for (p = reg_class_subclasses[INDEX_REG_CLASS];
503 *p != LIM_REG_CLASSES;
504 p++)
505 {
506 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[*p], useindex);
507 continue;
508 useindex:
509 reload_address_index_reg_class = *p;
510 break;
511 }
512 indexok:;
513 }
514 #endif /* SMALL_REGISTER_CLASSES */
515 }
516
517 /* Main entry point for the reload pass.
518
519 FIRST is the first insn of the function being compiled.
520
521 GLOBAL nonzero means we were called from global_alloc
522 and should attempt to reallocate any pseudoregs that we
523 displace from hard regs we will use for reloads.
524 If GLOBAL is zero, we do not have enough information to do that,
525 so any pseudo reg that is spilled must go to the stack.
526
527 DUMPFILE is the global-reg debugging dump file stream, or 0.
528 If it is nonzero, messages are written to it to describe
529 which registers are seized as reload regs, which pseudo regs
530 are spilled from them, and where the pseudo regs are reallocated to.
531
532 Return value is nonzero if reload failed
533 and we must not do any more for this function. */
534
535 int
536 reload (first, global, dumpfile)
537 rtx first;
538 int global;
539 FILE *dumpfile;
540 {
541 register int class;
542 register int i, j, k;
543 register rtx insn;
544 register struct elim_table *ep;
545
546 /* The two pointers used to track the true location of the memory used
547 for label offsets. */
548 char *real_known_ptr = NULL_PTR;
549 int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
550
551 int something_changed;
552 int something_needs_reloads;
553 int something_needs_elimination;
554 int new_basic_block_needs;
555 enum reg_class caller_save_spill_class = NO_REGS;
556 int caller_save_group_size = 1;
557
558 /* Nonzero means we couldn't get enough spill regs. */
559 int failure = 0;
560
561 /* The basic block number currently being processed for INSN. */
562 int this_block;
563
564 /* Make sure even insns with volatile mem refs are recognizable. */
565 init_recog ();
566
567 /* Enable find_equiv_reg to distinguish insns made by reload. */
568 reload_first_uid = get_max_uid ();
569
570 for (i = 0; i < N_REG_CLASSES; i++)
571 basic_block_needs[i] = 0;
572
573 #ifdef SECONDARY_MEMORY_NEEDED
574 /* Initialize the secondary memory table. */
575 clear_secondary_mem ();
576 #endif
577
578 /* Remember which hard regs appear explicitly
579 before we merge into `regs_ever_live' the ones in which
580 pseudo regs have been allocated. */
581 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
582
583 /* We don't have a stack slot for any spill reg yet. */
584 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
585 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
586
587 /* Initialize the save area information for caller-save, in case some
588 are needed. */
589 init_save_areas ();
590
591 /* Compute which hard registers are now in use
592 as homes for pseudo registers.
593 This is done here rather than (eg) in global_alloc
594 because this point is reached even if not optimizing. */
595 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
596 mark_home_live (i);
597
598 /* A function that receives a nonlocal goto must save all call-saved
599 registers. */
600 if (current_function_has_nonlocal_label)
601 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
602 {
603 if (! call_used_regs[i] && ! fixed_regs[i])
604 regs_ever_live[i] = 1;
605 }
606
607 for (i = 0; i < scratch_list_length; i++)
608 if (scratch_list[i])
609 mark_scratch_live (scratch_list[i]);
610
611 /* Make sure that the last insn in the chain
612 is not something that needs reloading. */
613 emit_note (NULL_PTR, NOTE_INSN_DELETED);
614
615 /* Find all the pseudo registers that didn't get hard regs
616 but do have known equivalent constants or memory slots.
617 These include parameters (known equivalent to parameter slots)
618 and cse'd or loop-moved constant memory addresses.
619
620 Record constant equivalents in reg_equiv_constant
621 so they will be substituted by find_reloads.
622 Record memory equivalents in reg_mem_equiv so they can
623 be substituted eventually by altering the REG-rtx's. */
624
625 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
626 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
627 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
628 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
629 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
630 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
631 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
632 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
633 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
634 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
635 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
636 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
637 cannot_omit_stores = (char *) alloca (max_regno);
638 bzero (cannot_omit_stores, max_regno);
639
640 #ifdef SMALL_REGISTER_CLASSES
641 if (SMALL_REGISTER_CLASSES)
642 CLEAR_HARD_REG_SET (forbidden_regs);
643 #endif
644
645 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
646 Also find all paradoxical subregs and find largest such for each pseudo.
647 On machines with small register classes, record hard registers that
648 are used for user variables. These can never be used for spills.
649 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
650 caller-saved registers must be marked live. */
651
652 for (insn = first; insn; insn = NEXT_INSN (insn))
653 {
654 rtx set = single_set (insn);
655
656 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
657 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
658 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
659 if (! call_used_regs[i])
660 regs_ever_live[i] = 1;
661
662 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
663 {
664 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
665 if (note
666 #ifdef LEGITIMATE_PIC_OPERAND_P
667 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
668 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
669 #endif
670 )
671 {
672 rtx x = XEXP (note, 0);
673 i = REGNO (SET_DEST (set));
674 if (i > LAST_VIRTUAL_REGISTER)
675 {
676 if (GET_CODE (x) == MEM)
677 reg_equiv_memory_loc[i] = x;
678 else if (CONSTANT_P (x))
679 {
680 if (LEGITIMATE_CONSTANT_P (x))
681 reg_equiv_constant[i] = x;
682 else
683 reg_equiv_memory_loc[i]
684 = force_const_mem (GET_MODE (SET_DEST (set)), x);
685 }
686 else
687 continue;
688
689 /* If this register is being made equivalent to a MEM
690 and the MEM is not SET_SRC, the equivalencing insn
691 is one with the MEM as a SET_DEST and it occurs later.
692 So don't mark this insn now. */
693 if (GET_CODE (x) != MEM
694 || rtx_equal_p (SET_SRC (set), x))
695 reg_equiv_init[i] = insn;
696 }
697 }
698 }
699
700 /* If this insn is setting a MEM from a register equivalent to it,
701 this is the equivalencing insn. */
702 else if (set && GET_CODE (SET_DEST (set)) == MEM
703 && GET_CODE (SET_SRC (set)) == REG
704 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
705 && rtx_equal_p (SET_DEST (set),
706 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
707 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
708
709 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
710 scan_paradoxical_subregs (PATTERN (insn));
711 }
712
713 /* Does this function require a frame pointer? */
714
715 frame_pointer_needed = (! flag_omit_frame_pointer
716 #ifdef EXIT_IGNORE_STACK
717 /* ?? If EXIT_IGNORE_STACK is set, we will not save
718 and restore sp for alloca. So we can't eliminate
719 the frame pointer in that case. At some point,
720 we should improve this by emitting the
721 sp-adjusting insns for this case. */
722 || (current_function_calls_alloca
723 && EXIT_IGNORE_STACK)
724 #endif
725 || FRAME_POINTER_REQUIRED);
726
727 num_eliminable = 0;
728
729 /* Initialize the table of registers to eliminate. The way we do this
730 depends on how the eliminable registers were defined. */
731 #ifdef ELIMINABLE_REGS
732 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
733 {
734 ep->can_eliminate = ep->can_eliminate_previous
735 = (CAN_ELIMINATE (ep->from, ep->to)
736 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
737 }
738 #else
739 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
740 = ! frame_pointer_needed;
741 #endif
742
743 /* Count the number of eliminable registers and build the FROM and TO
744 REG rtx's. Note that code in gen_rtx will cause, e.g.,
745 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
746 We depend on this. */
747 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
748 {
749 num_eliminable += ep->can_eliminate;
750 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
751 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
752 }
753
754 num_labels = max_label_num () - get_first_label_num ();
755
756 /* Allocate the tables used to store offset information at labels. */
757 /* We used to use alloca here, but the size of what it would try to
758 allocate would occasionally cause it to exceed the stack limit and
759 cause a core dump. */
760 real_known_ptr = xmalloc (num_labels);
761 real_at_ptr
762 = (int (*)[NUM_ELIMINABLE_REGS])
763 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
764
765 offsets_known_at = real_known_ptr - get_first_label_num ();
766 offsets_at
767 = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());
768
769 /* Alter each pseudo-reg rtx to contain its hard reg number.
770 Assign stack slots to the pseudos that lack hard regs or equivalents.
771 Do not touch virtual registers. */
772
773 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
774 alter_reg (i, -1);
775
776 /* If we have some registers we think can be eliminated, scan all insns to
777 see if there is an insn that sets one of these registers to something
778 other than itself plus a constant. If so, the register cannot be
779 eliminated. Doing this scan here eliminates an extra pass through the
780 main reload loop in the most common case where register elimination
781 cannot be done. */
782 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
783 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
784 || GET_CODE (insn) == CALL_INSN)
785 note_stores (PATTERN (insn), mark_not_eliminable);
786
787 #ifndef REGISTER_CONSTRAINTS
788 /* If all the pseudo regs have hard regs,
789 except for those that are never referenced,
790 we know that no reloads are needed. */
791 /* But that is not true if there are register constraints, since
792 in that case some pseudos might be in the wrong kind of hard reg. */
793
794 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
795 if (reg_renumber[i] == -1 && REG_N_REFS (i) != 0)
796 break;
797
798 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
799 {
800 free (real_known_ptr);
801 free (real_at_ptr);
802 return;
803 }
804 #endif
805
806 /* Compute the order of preference for hard registers to spill.
807 Store them by decreasing preference in potential_reload_regs. */
808
809 order_regs_for_reload (global);
810
811 /* So far, no hard regs have been spilled. */
812 n_spills = 0;
813 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
814 spill_reg_order[i] = -1;
815
816 /* Initialize to -1, which means take the first spill register. */
817 last_spill_reg = -1;
818
819 /* On most machines, we can't use any register explicitly used in the
820 rtl as a spill register. But on some, we have to. Those will have
821 taken care to keep the life of hard regs as short as possible. */
822
823 #ifdef SMALL_REGISTER_CLASSES
824 if (! SMALL_REGISTER_CLASSES)
825 #endif
826 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
827
828 /* Spill any hard regs that we know we can't eliminate. */
829 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
830 if (! ep->can_eliminate)
831 spill_hard_reg (ep->from, global, dumpfile, 1);
832
833 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
834 if (frame_pointer_needed)
835 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
836 #endif
837
838 if (global)
839 for (i = 0; i < N_REG_CLASSES; i++)
840 {
841 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
842 bzero (basic_block_needs[i], n_basic_blocks);
843 }
844
845 /* From now on, we need to emit any moves without making new pseudos. */
846 reload_in_progress = 1;
847
848 /* This loop scans the entire function each go-round
849 and repeats until one repetition spills no additional hard regs. */
850
851 /* This flag is set when a pseudo reg is spilled,
852 to require another pass. Note that getting an additional reload
853 reg does not necessarily imply any pseudo reg was spilled;
854 sometimes we find a reload reg that no pseudo reg was allocated in. */
855 something_changed = 1;
856 /* This flag is set if there are any insns that require reloading. */
857 something_needs_reloads = 0;
858 /* This flag is set if there are any insns that require register
859 eliminations. */
860 something_needs_elimination = 0;
861 while (something_changed)
862 {
863 rtx after_call = 0;
864
865 /* For each class, number of reload regs needed in that class.
866 This is the maximum over all insns of the needs in that class
867 of the individual insn. */
868 int max_needs[N_REG_CLASSES];
869 /* For each class, size of group of consecutive regs
870 that is needed for the reloads of this class. */
871 int group_size[N_REG_CLASSES];
872 /* For each class, max number of consecutive groups needed.
873 (Each group contains group_size[CLASS] consecutive registers.) */
874 int max_groups[N_REG_CLASSES];
875 /* For each class, max number needed of regs that don't belong
876 to any of the groups. */
877 int max_nongroups[N_REG_CLASSES];
878 /* For each class, the machine mode which requires consecutive
879 groups of regs of that class.
880 If two different modes ever require groups of one class,
881 they must be the same size and equally restrictive for that class,
882 otherwise we can't handle the complexity. */
883 enum machine_mode group_mode[N_REG_CLASSES];
884 /* Record the insn where each maximum need is first found. */
885 rtx max_needs_insn[N_REG_CLASSES];
886 rtx max_groups_insn[N_REG_CLASSES];
887 rtx max_nongroups_insn[N_REG_CLASSES];
888 rtx x;
889 HOST_WIDE_INT starting_frame_size;
890 int previous_frame_pointer_needed = frame_pointer_needed;
891 static char *reg_class_names[] = REG_CLASS_NAMES;
892
893 something_changed = 0;
894 bzero ((char *) max_needs, sizeof max_needs);
895 bzero ((char *) max_groups, sizeof max_groups);
896 bzero ((char *) max_nongroups, sizeof max_nongroups);
897 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
898 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
899 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
900 bzero ((char *) group_size, sizeof group_size);
901 for (i = 0; i < N_REG_CLASSES; i++)
902 group_mode[i] = VOIDmode;
903
904 /* Keep track of which basic blocks are needing the reloads. */
905 this_block = 0;
906
907 /* Remember whether any element of basic_block_needs
908 changes from 0 to 1 in this pass. */
909 new_basic_block_needs = 0;
910
911 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done
912 here because the stack size may be a part of the offset computation
913 for register elimination, and there might have been new stack slots
914 created in the last iteration of this loop. */
915 assign_stack_local (BLKmode, 0, 0);
916
917 starting_frame_size = get_frame_size ();
918
919 /* Reset all offsets on eliminable registers to their initial values. */
920 #ifdef ELIMINABLE_REGS
921 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
922 {
923 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
924 ep->previous_offset = ep->offset
925 = ep->max_offset = ep->initial_offset;
926 }
927 #else
928 #ifdef INITIAL_FRAME_POINTER_OFFSET
929 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
930 #else
931 if (!FRAME_POINTER_REQUIRED)
932 abort ();
933 reg_eliminate[0].initial_offset = 0;
934 #endif
935 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
936 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
937 #endif
938
939 num_not_at_initial_offset = 0;
940
941 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
942
943 /* Set a known offset for each forced label to be at the initial offset
944 of each elimination. We do this because we assume that all
945 computed jumps occur from a location where each elimination is
946 at its initial offset. */
947
948 for (x = forced_labels; x; x = XEXP (x, 1))
949 if (XEXP (x, 0))
950 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
951
952 /* For each pseudo register that has an equivalent location defined,
953 try to eliminate any eliminable registers (such as the frame pointer)
954 assuming initial offsets for the replacement register, which
955 is the normal case.
956
957 If the resulting location is directly addressable, substitute
958 the MEM we just got directly for the old REG.
959
960 If it is not addressable but is a constant or the sum of a hard reg
961 and constant, it is probably not addressable because the constant is
962 out of range, in that case record the address; we will generate
963 hairy code to compute the address in a register each time it is
964 needed. Similarly if it is a hard register, but one that is not
965 valid as an address register.
966
967 If the location is not addressable, but does not have one of the
968 above forms, assign a stack slot. We have to do this to avoid the
969 potential of producing lots of reloads if, e.g., a location involves
970 a pseudo that didn't get a hard register and has an equivalent memory
971 location that also involves a pseudo that didn't get a hard register.
972
973 Perhaps at some point we will improve reload_when_needed handling
974 so this problem goes away. But that's very hairy. */
975
976 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
977 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
978 {
979 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX, 0);
980
981 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
982 XEXP (x, 0)))
983 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
984 else if (CONSTANT_P (XEXP (x, 0))
985 || (GET_CODE (XEXP (x, 0)) == REG
986 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
987 || (GET_CODE (XEXP (x, 0)) == PLUS
988 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
989 && (REGNO (XEXP (XEXP (x, 0), 0))
990 < FIRST_PSEUDO_REGISTER)
991 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
992 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
993 else
994 {
995 /* Make a new stack slot. Then indicate that something
996 changed so we go back and recompute offsets for
997 eliminable registers because the allocation of memory
998 below might change some offset. reg_equiv_{mem,address}
999 will be set up for this pseudo on the next pass around
1000 the loop. */
1001 reg_equiv_memory_loc[i] = 0;
1002 reg_equiv_init[i] = 0;
1003 alter_reg (i, -1);
1004 something_changed = 1;
1005 }
1006 }
1007
1008 /* If we allocated another pseudo to the stack, redo elimination
1009 bookkeeping. */
1010 if (something_changed)
1011 continue;
1012
1013 /* If caller-saves needs a group, initialize the group to include
1014 the size and mode required for caller-saves. */
1015
1016 if (caller_save_group_size > 1)
1017 {
1018 group_mode[(int) caller_save_spill_class] = Pmode;
1019 group_size[(int) caller_save_spill_class] = caller_save_group_size;
1020 }
1021
1022 /* Compute the most additional registers needed by any instruction.
1023 Collect information separately for each class of regs. */
1024
1025 for (insn = first; insn; insn = NEXT_INSN (insn))
1026 {
1027 if (global && this_block + 1 < n_basic_blocks
1028 && insn == basic_block_head[this_block+1])
1029 ++this_block;
1030
1031 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
1032 might include REG_LABEL), we need to see what effects this
1033 has on the known offsets at labels. */
1034
1035 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1036 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1037 && REG_NOTES (insn) != 0))
1038 set_label_offsets (insn, insn, 0);
1039
1040 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1041 {
1042 /* Nonzero means don't use a reload reg that overlaps
1043 the place where a function value can be returned. */
1044 rtx avoid_return_reg = 0;
1045
1046 rtx old_body = PATTERN (insn);
1047 int old_code = INSN_CODE (insn);
1048 rtx old_notes = REG_NOTES (insn);
1049 int did_elimination = 0;
1050
1051 /* To compute the number of reload registers of each class
1052 needed for an insn, we must simulate what choose_reload_regs
1053 can do. We do this by splitting an insn into an "input" and
1054 an "output" part. RELOAD_OTHER reloads are used in both.
1055 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
1056 which must be live over the entire input section of reloads,
1057 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1058 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
1059 inputs.
1060
1061 The registers needed for output are RELOAD_OTHER and
1062 RELOAD_FOR_OUTPUT, which are live for the entire output
1063 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
1064 reloads for each operand.
1065
1066 The total number of registers needed is the maximum of the
1067 inputs and outputs. */
1068
1069 struct needs
1070 {
1071 /* [0] is normal, [1] is nongroup. */
1072 int regs[2][N_REG_CLASSES];
1073 int groups[N_REG_CLASSES];
1074 };
1075
1076 /* Each `struct needs' corresponds to one RELOAD_... type. */
1077 struct {
1078 struct needs other;
1079 struct needs input;
1080 struct needs output;
1081 struct needs insn;
1082 struct needs other_addr;
1083 struct needs op_addr;
1084 struct needs op_addr_reload;
1085 struct needs in_addr[MAX_RECOG_OPERANDS];
1086 struct needs in_addr_addr[MAX_RECOG_OPERANDS];
1087 struct needs out_addr[MAX_RECOG_OPERANDS];
1088 struct needs out_addr_addr[MAX_RECOG_OPERANDS];
1089 } insn_needs;
1090
1091 /* If needed, eliminate any eliminable registers. */
1092 if (num_eliminable)
1093 did_elimination = eliminate_regs_in_insn (insn, 0);
1094
1095 #ifdef SMALL_REGISTER_CLASSES
1096 /* Set avoid_return_reg if this is an insn
1097 that might use the value of a function call. */
1098 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
1099 {
1100 if (GET_CODE (PATTERN (insn)) == SET)
1101 after_call = SET_DEST (PATTERN (insn));
1102 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1103 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1104 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1105 else
1106 after_call = 0;
1107 }
1108 else if (SMALL_REGISTER_CLASSES
1109 && after_call != 0
1110 && !(GET_CODE (PATTERN (insn)) == SET
1111 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1112 {
1113 if (reg_referenced_p (after_call, PATTERN (insn)))
1114 avoid_return_reg = after_call;
1115 after_call = 0;
1116 }
1117 #endif /* SMALL_REGISTER_CLASSES */
1118
1119 /* Analyze the instruction. */
1120 find_reloads (insn, 0, spill_indirect_levels, global,
1121 spill_reg_order);
1122
1123 /* Remember for later shortcuts which insns had any reloads or
1124 register eliminations.
1125
1126 One might think that it would be worthwhile to mark insns
1127 that need register replacements but not reloads, but this is
1128 not safe because find_reloads may do some manipulation of
1129 the insn (such as swapping commutative operands), which would
1130 be lost when we restore the old pattern after register
1131 replacement. So the actions of find_reloads must be redone in
1132 subsequent passes or in reload_as_needed.
1133
1134 However, it is safe to mark insns that need reloads
1135 but not register replacement. */
1136
1137 PUT_MODE (insn, (did_elimination ? QImode
1138 : n_reloads ? HImode
1139 : GET_MODE (insn) == DImode ? DImode
1140 : VOIDmode));
1141
1142 /* Discard any register replacements done. */
1143 if (did_elimination)
1144 {
1145 obstack_free (&reload_obstack, reload_firstobj);
1146 PATTERN (insn) = old_body;
1147 INSN_CODE (insn) = old_code;
1148 REG_NOTES (insn) = old_notes;
1149 something_needs_elimination = 1;
1150 }
1151
1152 /* If this insn has no reloads, we need not do anything except
1153 in the case of a CALL_INSN when we have caller-saves and
1154 caller-save needs reloads. */
1155
1156 if (n_reloads == 0
1157 && ! (GET_CODE (insn) == CALL_INSN
1158 && caller_save_spill_class != NO_REGS))
1159 continue;
1160
1161 something_needs_reloads = 1;
1162 bzero ((char *) &insn_needs, sizeof insn_needs);
1163
1164 /* Count each reload once in every class
1165 containing the reload's own class. */
1166
1167 for (i = 0; i < n_reloads; i++)
1168 {
1169 register enum reg_class *p;
1170 enum reg_class class = reload_reg_class[i];
1171 int size;
1172 enum machine_mode mode;
1173 int nongroup_need;
1174 struct needs *this_needs;
1175
1176 /* Don't count the dummy reloads, for which one of the
1177 regs mentioned in the insn can be used for reloading.
1178 Don't count optional reloads.
1179 Don't count reloads that got combined with others. */
1180 if (reload_reg_rtx[i] != 0
1181 || reload_optional[i] != 0
1182 || (reload_out[i] == 0 && reload_in[i] == 0
1183 && ! reload_secondary_p[i]))
1184 continue;
1185
1186 /* Show that a reload register of this class is needed
1187 in this basic block. We do not use insn_needs and
1188 insn_groups because they are overly conservative for
1189 this purpose. */
1190 if (global && ! basic_block_needs[(int) class][this_block])
1191 {
1192 basic_block_needs[(int) class][this_block] = 1;
1193 new_basic_block_needs = 1;
1194 }
1195
1196 mode = reload_inmode[i];
1197 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1198 mode = reload_outmode[i];
1199 size = CLASS_MAX_NREGS (class, mode);
1200
1201 /* If this class doesn't want a group, determine if we have
1202 a nongroup need or a regular need. We have a nongroup
1203 need if this reload conflicts with a group reload whose
1204 class intersects with this reload's class. */
1205
1206 nongroup_need = 0;
1207 if (size == 1)
1208 for (j = 0; j < n_reloads; j++)
1209 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1210 (GET_MODE_SIZE (reload_outmode[j])
1211 > GET_MODE_SIZE (reload_inmode[j]))
1212 ? reload_outmode[j]
1213 : reload_inmode[j])
1214 > 1)
1215 && (!reload_optional[j])
1216 && (reload_in[j] != 0 || reload_out[j] != 0
1217 || reload_secondary_p[j])
1218 && reloads_conflict (i, j)
1219 && reg_classes_intersect_p (class,
1220 reload_reg_class[j]))
1221 {
1222 nongroup_need = 1;
1223 break;
1224 }
1225
1226 /* Decide which time-of-use to count this reload for. */
1227 switch (reload_when_needed[i])
1228 {
1229 case RELOAD_OTHER:
1230 this_needs = &insn_needs.other;
1231 break;
1232 case RELOAD_FOR_INPUT:
1233 this_needs = &insn_needs.input;
1234 break;
1235 case RELOAD_FOR_OUTPUT:
1236 this_needs = &insn_needs.output;
1237 break;
1238 case RELOAD_FOR_INSN:
1239 this_needs = &insn_needs.insn;
1240 break;
1241 case RELOAD_FOR_OTHER_ADDRESS:
1242 this_needs = &insn_needs.other_addr;
1243 break;
1244 case RELOAD_FOR_INPUT_ADDRESS:
1245 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1246 break;
1247 case RELOAD_FOR_INPADDR_ADDRESS:
1248 this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
1249 break;
1250 case RELOAD_FOR_OUTPUT_ADDRESS:
1251 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1252 break;
1253 case RELOAD_FOR_OUTADDR_ADDRESS:
1254 this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
1255 break;
1256 case RELOAD_FOR_OPERAND_ADDRESS:
1257 this_needs = &insn_needs.op_addr;
1258 break;
1259 case RELOAD_FOR_OPADDR_ADDR:
1260 this_needs = &insn_needs.op_addr_reload;
1261 break;
1262 }
1263
1264 if (size > 1)
1265 {
1266 enum machine_mode other_mode, allocate_mode;
1267
1268 /* Count number of groups needed separately from
1269 number of individual regs needed. */
1270 this_needs->groups[(int) class]++;
1271 p = reg_class_superclasses[(int) class];
1272 while (*p != LIM_REG_CLASSES)
1273 this_needs->groups[(int) *p++]++;
1274
1275 /* Record size and mode of a group of this class. */
1276 /* If more than one size group is needed,
1277 make all groups the largest needed size. */
1278 if (group_size[(int) class] < size)
1279 {
1280 other_mode = group_mode[(int) class];
1281 allocate_mode = mode;
1282
1283 group_size[(int) class] = size;
1284 group_mode[(int) class] = mode;
1285 }
1286 else
1287 {
1288 other_mode = mode;
1289 allocate_mode = group_mode[(int) class];
1290 }
1291
1292 /* Crash if two dissimilar machine modes both need
1293 groups of consecutive regs of the same class. */
1294
1295 if (other_mode != VOIDmode && other_mode != allocate_mode
1296 && ! modes_equiv_for_class_p (allocate_mode,
1297 other_mode, class))
1298 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1299 insn);
1300 }
1301 else if (size == 1)
1302 {
1303 this_needs->regs[nongroup_need][(int) class] += 1;
1304 p = reg_class_superclasses[(int) class];
1305 while (*p != LIM_REG_CLASSES)
1306 this_needs->regs[nongroup_need][(int) *p++] += 1;
1307 }
1308 else
1309 abort ();
1310 }
1311
1312 /* All reloads have been counted for this insn;
1313 now merge the various times of use.
1314 This sets insn_needs, etc., to the maximum total number
1315 of registers needed at any point in this insn. */
1316
1317 for (i = 0; i < N_REG_CLASSES; i++)
1318 {
1319 int in_max, out_max;
1320
1321 /* Compute normal and nongroup needs. */
1322 for (j = 0; j <= 1; j++)
1323 {
1324 for (in_max = 0, out_max = 0, k = 0;
1325 k < reload_n_operands; k++)
1326 {
1327 in_max
1328 = MAX (in_max,
1329 (insn_needs.in_addr[k].regs[j][i]
1330 + insn_needs.in_addr_addr[k].regs[j][i]));
1331 out_max
1332 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1333 out_max
1334 = MAX (out_max,
1335 insn_needs.out_addr_addr[k].regs[j][i]);
1336 }
1337
1338 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1339 and operand addresses but not things used to reload
1340 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1341 don't conflict with things needed to reload inputs or
1342 outputs. */
1343
1344 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1345 insn_needs.op_addr_reload.regs[j][i]),
1346 in_max);
1347
1348 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1349
1350 insn_needs.input.regs[j][i]
1351 = MAX (insn_needs.input.regs[j][i]
1352 + insn_needs.op_addr.regs[j][i]
1353 + insn_needs.insn.regs[j][i],
1354 in_max + insn_needs.input.regs[j][i]);
1355
1356 insn_needs.output.regs[j][i] += out_max;
1357 insn_needs.other.regs[j][i]
1358 += MAX (MAX (insn_needs.input.regs[j][i],
1359 insn_needs.output.regs[j][i]),
1360 insn_needs.other_addr.regs[j][i]);
1361
1362 }
1363
1364 /* Now compute group needs. */
1365 for (in_max = 0, out_max = 0, j = 0;
1366 j < reload_n_operands; j++)
1367 {
1368 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1369 in_max = MAX (in_max,
1370 insn_needs.in_addr_addr[j].groups[i]);
1371 out_max
1372 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1373 out_max
1374 = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
1375 }
1376
1377 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1378 insn_needs.op_addr_reload.groups[i]),
1379 in_max);
1380 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1381
1382 insn_needs.input.groups[i]
1383 = MAX (insn_needs.input.groups[i]
1384 + insn_needs.op_addr.groups[i]
1385 + insn_needs.insn.groups[i],
1386 in_max + insn_needs.input.groups[i]);
1387
1388 insn_needs.output.groups[i] += out_max;
1389 insn_needs.other.groups[i]
1390 += MAX (MAX (insn_needs.input.groups[i],
1391 insn_needs.output.groups[i]),
1392 insn_needs.other_addr.groups[i]);
1393 }
1394
1395 /* If this is a CALL_INSN and caller-saves will need
1396 a spill register, act as if the spill register is
1397 needed for this insn. However, the spill register
1398 can be used by any reload of this insn, so we only
1399 need do something if no need for that class has
1400 been recorded.
1401
1402 The assumption that every CALL_INSN will trigger a
1403 caller-save is highly conservative, however, the number
1404 of cases where caller-saves will need a spill register but
1405 a block containing a CALL_INSN won't need a spill register
1406 of that class should be quite rare.
1407
1408 If a group is needed, the size and mode of the group will
1409 have been set up at the beginning of this loop. */
1410
1411 if (GET_CODE (insn) == CALL_INSN
1412 && caller_save_spill_class != NO_REGS)
1413 {
1414 /* See if this register would conflict with any reload
1415 that needs a group. */
1416 int nongroup_need = 0;
1417 int *caller_save_needs;
1418
1419 for (j = 0; j < n_reloads; j++)
1420 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1421 (GET_MODE_SIZE (reload_outmode[j])
1422 > GET_MODE_SIZE (reload_inmode[j]))
1423 ? reload_outmode[j]
1424 : reload_inmode[j])
1425 > 1)
1426 && reg_classes_intersect_p (caller_save_spill_class,
1427 reload_reg_class[j]))
1428 {
1429 nongroup_need = 1;
1430 break;
1431 }
1432
1433 caller_save_needs
1434 = (caller_save_group_size > 1
1435 ? insn_needs.other.groups
1436 : insn_needs.other.regs[nongroup_need]);
1437
1438 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1439 {
1440 register enum reg_class *p
1441 = reg_class_superclasses[(int) caller_save_spill_class];
1442
1443 caller_save_needs[(int) caller_save_spill_class]++;
1444
1445 while (*p != LIM_REG_CLASSES)
1446 caller_save_needs[(int) *p++] += 1;
1447 }
1448
1449 /* Show that this basic block will need a register of
1450 this class. */
1451
1452 if (global
1453 && ! (basic_block_needs[(int) caller_save_spill_class]
1454 [this_block]))
1455 {
1456 basic_block_needs[(int) caller_save_spill_class]
1457 [this_block] = 1;
1458 new_basic_block_needs = 1;
1459 }
1460 }
1461
1462 #ifdef SMALL_REGISTER_CLASSES
1463 /* If this insn stores the value of a function call,
1464 and that value is in a register that has been spilled,
1465 and if the insn needs a reload in a class
1466 that might use that register as the reload register,
1467 		     then add an extra need in that class.
1468 This makes sure we have a register available that does
1469 not overlap the return value. */
1470
1471 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
1472 {
1473 int regno = REGNO (avoid_return_reg);
1474 int nregs
1475 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1476 int r;
1477 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1478
1479 /* First compute the "basic needs", which counts a
1480 need only in the smallest class in which it
1481 is required. */
1482
1483 bcopy ((char *) insn_needs.other.regs[0],
1484 (char *) basic_needs, sizeof basic_needs);
1485 bcopy ((char *) insn_needs.other.groups,
1486 (char *) basic_groups, sizeof basic_groups);
1487
1488 for (i = 0; i < N_REG_CLASSES; i++)
1489 {
1490 enum reg_class *p;
1491
1492 if (basic_needs[i] >= 0)
1493 for (p = reg_class_superclasses[i];
1494 *p != LIM_REG_CLASSES; p++)
1495 basic_needs[(int) *p] -= basic_needs[i];
1496
1497 if (basic_groups[i] >= 0)
1498 for (p = reg_class_superclasses[i];
1499 *p != LIM_REG_CLASSES; p++)
1500 basic_groups[(int) *p] -= basic_groups[i];
1501 }
1502
1503 /* Now count extra regs if there might be a conflict with
1504 the return value register. */
1505
1506 for (r = regno; r < regno + nregs; r++)
1507 if (spill_reg_order[r] >= 0)
1508 for (i = 0; i < N_REG_CLASSES; i++)
1509 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1510 {
1511 if (basic_needs[i] > 0)
1512 {
1513 enum reg_class *p;
1514
1515 insn_needs.other.regs[0][i]++;
1516 p = reg_class_superclasses[i];
1517 while (*p != LIM_REG_CLASSES)
1518 insn_needs.other.regs[0][(int) *p++]++;
1519 }
1520 if (basic_groups[i] > 0)
1521 {
1522 enum reg_class *p;
1523
1524 insn_needs.other.groups[i]++;
1525 p = reg_class_superclasses[i];
1526 while (*p != LIM_REG_CLASSES)
1527 insn_needs.other.groups[(int) *p++]++;
1528 }
1529 }
1530 }
1531 #endif /* SMALL_REGISTER_CLASSES */
1532
1533 /* For each class, collect maximum need of any insn. */
1534
1535 for (i = 0; i < N_REG_CLASSES; i++)
1536 {
1537 if (max_needs[i] < insn_needs.other.regs[0][i])
1538 {
1539 max_needs[i] = insn_needs.other.regs[0][i];
1540 max_needs_insn[i] = insn;
1541 }
1542 if (max_groups[i] < insn_needs.other.groups[i])
1543 {
1544 max_groups[i] = insn_needs.other.groups[i];
1545 max_groups_insn[i] = insn;
1546 }
1547 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1548 {
1549 max_nongroups[i] = insn_needs.other.regs[1][i];
1550 max_nongroups_insn[i] = insn;
1551 }
1552 }
1553 }
1554 /* Note that there is a continue statement above. */
1555 }
1556
1557 /* If we allocated any new memory locations, make another pass
1558 since it might have changed elimination offsets. */
1559 if (starting_frame_size != get_frame_size ())
1560 something_changed = 1;
1561
1562 if (dumpfile)
1563 for (i = 0; i < N_REG_CLASSES; i++)
1564 {
1565 if (max_needs[i] > 0)
1566 fprintf (dumpfile,
1567 ";; Need %d reg%s of class %s (for insn %d).\n",
1568 max_needs[i], max_needs[i] == 1 ? "" : "s",
1569 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1570 if (max_nongroups[i] > 0)
1571 fprintf (dumpfile,
1572 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1573 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1574 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1575 if (max_groups[i] > 0)
1576 fprintf (dumpfile,
1577 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1578 max_groups[i], max_groups[i] == 1 ? "" : "s",
1579 mode_name[(int) group_mode[i]],
1580 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1581 }
1582
1583 /* If we have caller-saves, set up the save areas and see if caller-save
1584 will need a spill register. */
1585
1586 if (caller_save_needed)
1587 {
1588 /* Set the offsets for setup_save_areas. */
1589 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
1590 ep++)
1591 ep->previous_offset = ep->max_offset;
1592
1593 if ( ! setup_save_areas (&something_changed)
1594 && caller_save_spill_class == NO_REGS)
1595 {
1596 /* The class we will need depends on whether the machine
1597 supports the sum of two registers for an address; see
1598 find_address_reloads for details. */
1599
1600 caller_save_spill_class
1601 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1602 caller_save_group_size
1603 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1604 something_changed = 1;
1605 }
1606 }
1607
1608 /* See if anything that happened changes which eliminations are valid.
1609 For example, on the Sparc, whether or not the frame pointer can
1610 be eliminated can depend on what registers have been used. We need
1611 not check some conditions again (such as flag_omit_frame_pointer)
1612 since they can't have changed. */
1613
1614 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1615 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1616 #ifdef ELIMINABLE_REGS
1617 || ! CAN_ELIMINATE (ep->from, ep->to)
1618 #endif
1619 )
1620 ep->can_eliminate = 0;
1621
1622 /* Look for the case where we have discovered that we can't replace
1623 register A with register B and that means that we will now be
1624 trying to replace register A with register C. This means we can
1625 no longer replace register C with register B and we need to disable
1626 such an elimination, if it exists. This occurs often with A == ap,
1627 B == sp, and C == fp. */
1628
1629 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1630 {
1631 struct elim_table *op;
1632 register int new_to = -1;
1633
1634 if (! ep->can_eliminate && ep->can_eliminate_previous)
1635 {
1636 /* Find the current elimination for ep->from, if there is a
1637 new one. */
1638 for (op = reg_eliminate;
1639 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1640 if (op->from == ep->from && op->can_eliminate)
1641 {
1642 new_to = op->to;
1643 break;
1644 }
1645
1646 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1647 disable it. */
1648 for (op = reg_eliminate;
1649 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1650 if (op->from == new_to && op->to == ep->to)
1651 op->can_eliminate = 0;
1652 }
1653 }
1654
1655 /* See if any registers that we thought we could eliminate the previous
1656 time are no longer eliminable. If so, something has changed and we
1657 must spill the register. Also, recompute the number of eliminable
1658 registers and see if the frame pointer is needed; it is if there is
1659 no elimination of the frame pointer that we can perform. */
1660
1661 frame_pointer_needed = 1;
1662 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1663 {
1664 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1665 && ep->to != HARD_FRAME_POINTER_REGNUM)
1666 frame_pointer_needed = 0;
1667
1668 if (! ep->can_eliminate && ep->can_eliminate_previous)
1669 {
1670 ep->can_eliminate_previous = 0;
1671 spill_hard_reg (ep->from, global, dumpfile, 1);
1672 something_changed = 1;
1673 num_eliminable--;
1674 }
1675 }
1676
1677 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1678 /* If we didn't need a frame pointer last time, but we do now, spill
1679 the hard frame pointer. */
1680 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1681 {
1682 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1683 something_changed = 1;
1684 }
1685 #endif
1686
1687 /* If all needs are met, we win. */
1688
1689 for (i = 0; i < N_REG_CLASSES; i++)
1690 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1691 break;
1692 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1693 break;
1694
1695 /* Not all needs are met; must spill some hard regs. */
1696
1697 /* Put all registers spilled so far back in potential_reload_regs, but
1698 put them at the front, since we've already spilled most of the
1699 pseudos in them (we might have left some pseudos unspilled if they
1700 were in a block that didn't need any spill registers of a conflicting
1701 	 class.)  We used to try to mark off the need for those registers,
1702 but doing so properly is very complex and reallocating them is the
1703 simpler approach. First, "pack" potential_reload_regs by pushing
1704 any nonnegative entries towards the end. That will leave room
1705 for the registers we already spilled.
1706
1707 Also, undo the marking of the spill registers from the last time
1708 	 around in FORBIDDEN_REGS since we will probably be allocating
1709 them again below.
1710
1711 ??? It is theoretically possible that we might end up not using one
1712 of our previously-spilled registers in this allocation, even though
1713 they are at the head of the list. It's not clear what to do about
1714 this, but it was no better before, when we marked off the needs met
1715 by the previously-spilled registers. With the current code, globals
1716 can be allocated into these registers, but locals cannot. */
1717
1718 if (n_spills)
1719 {
1720 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1721 if (potential_reload_regs[i] != -1)
1722 potential_reload_regs[j--] = potential_reload_regs[i];
1723
1724 for (i = 0; i < n_spills; i++)
1725 {
1726 potential_reload_regs[i] = spill_regs[i];
1727 spill_reg_order[spill_regs[i]] = -1;
1728 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1729 }
1730
1731 n_spills = 0;
1732 }
1733
1734 /* Now find more reload regs to satisfy the remaining need
1735 Do it by ascending class number, since otherwise a reg
1736 might be spilled for a big class and might fail to count
1737 for a smaller class even though it belongs to that class.
1738
1739 Count spilled regs in `spills', and add entries to
1740 `spill_regs' and `spill_reg_order'.
1741
1742 ??? Note there is a problem here.
1743 When there is a need for a group in a high-numbered class,
1744 and also need for non-group regs that come from a lower class,
1745 the non-group regs are chosen first. If there aren't many regs,
1746 they might leave no room for a group.
1747
1748 This was happening on the 386. To fix it, we added the code
1749 that calls possible_group_p, so that the lower class won't
1750 break up the last possible group.
1751
1752 Really fixing the problem would require changes above
1753 in counting the regs already spilled, and in choose_reload_regs.
1754 It might be hard to avoid introducing bugs there. */
1755
1756 CLEAR_HARD_REG_SET (counted_for_groups);
1757 CLEAR_HARD_REG_SET (counted_for_nongroups);
1758
1759 for (class = 0; class < N_REG_CLASSES; class++)
1760 {
1761 /* First get the groups of registers.
1762 If we got single registers first, we might fragment
1763 possible groups. */
1764 while (max_groups[class] > 0)
1765 {
1766 /* If any single spilled regs happen to form groups,
1767 count them now. Maybe we don't really need
1768 to spill another group. */
1769 count_possible_groups (group_size, group_mode, max_groups,
1770 class);
1771
1772 if (max_groups[class] <= 0)
1773 break;
1774
1775 /* Groups of size 2 (the only groups used on most machines)
1776 are treated specially. */
1777 if (group_size[class] == 2)
1778 {
1779 /* First, look for a register that will complete a group. */
1780 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1781 {
1782 int other;
1783
1784 j = potential_reload_regs[i];
1785 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1786 &&
1787 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1788 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1789 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1790 && HARD_REGNO_MODE_OK (other, group_mode[class])
1791 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1792 other)
1793 /* We don't want one part of another group.
1794 We could get "two groups" that overlap! */
1795 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1796 ||
1797 (j < FIRST_PSEUDO_REGISTER - 1
1798 && (other = j + 1, spill_reg_order[other] >= 0)
1799 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1800 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1801 && HARD_REGNO_MODE_OK (j, group_mode[class])
1802 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1803 other)
1804 && ! TEST_HARD_REG_BIT (counted_for_groups,
1805 other))))
1806 {
1807 register enum reg_class *p;
1808
1809 /* We have found one that will complete a group,
1810 so count off one group as provided. */
1811 max_groups[class]--;
1812 p = reg_class_superclasses[class];
1813 while (*p != LIM_REG_CLASSES)
1814 {
1815 if (group_size [(int) *p] <= group_size [class])
1816 max_groups[(int) *p]--;
1817 p++;
1818 }
1819
1820 /* Indicate both these regs are part of a group. */
1821 SET_HARD_REG_BIT (counted_for_groups, j);
1822 SET_HARD_REG_BIT (counted_for_groups, other);
1823 break;
1824 }
1825 }
1826 /* We can't complete a group, so start one. */
1827 #ifdef SMALL_REGISTER_CLASSES
1828 /* Look for a pair neither of which is explicitly used. */
1829 if (SMALL_REGISTER_CLASSES && i == FIRST_PSEUDO_REGISTER)
1830 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1831 {
1832 int k;
1833 j = potential_reload_regs[i];
1834 /* Verify that J+1 is a potential reload reg. */
1835 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1836 if (potential_reload_regs[k] == j + 1)
1837 break;
1838 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1839 && k < FIRST_PSEUDO_REGISTER
1840 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1841 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1842 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1843 && HARD_REGNO_MODE_OK (j, group_mode[class])
1844 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1845 j + 1)
1846 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1847 /* Reject J at this stage
1848 if J+1 was explicitly used. */
1849 && ! regs_explicitly_used[j + 1])
1850 break;
1851 }
1852 #endif
1853 /* Now try any group at all
1854 whose registers are not in bad_spill_regs. */
1855 if (i == FIRST_PSEUDO_REGISTER)
1856 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1857 {
1858 int k;
1859 j = potential_reload_regs[i];
1860 /* Verify that J+1 is a potential reload reg. */
1861 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1862 if (potential_reload_regs[k] == j + 1)
1863 break;
1864 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1865 && k < FIRST_PSEUDO_REGISTER
1866 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1867 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1868 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1869 && HARD_REGNO_MODE_OK (j, group_mode[class])
1870 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1871 j + 1)
1872 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1873 break;
1874 }
1875
1876 /* I should be the index in potential_reload_regs
1877 of the new reload reg we have found. */
1878
1879 if (i >= FIRST_PSEUDO_REGISTER)
1880 {
1881 /* There are no groups left to spill. */
1882 spill_failure (max_groups_insn[class]);
1883 failure = 1;
1884 goto failed;
1885 }
1886 else
1887 something_changed
1888 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1889 global, dumpfile);
1890 }
1891 else
1892 {
1893 /* For groups of more than 2 registers,
1894 look for a sufficient sequence of unspilled registers,
1895 and spill them all at once. */
1896 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1897 {
1898 int k;
1899
1900 j = potential_reload_regs[i];
1901 if (j >= 0
1902 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1903 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1904 {
1905 /* Check each reg in the sequence. */
1906 for (k = 0; k < group_size[class]; k++)
1907 if (! (spill_reg_order[j + k] < 0
1908 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1909 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1910 break;
1911 /* We got a full sequence, so spill them all. */
1912 if (k == group_size[class])
1913 {
1914 register enum reg_class *p;
1915 for (k = 0; k < group_size[class]; k++)
1916 {
1917 int idx;
1918 SET_HARD_REG_BIT (counted_for_groups, j + k);
1919 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1920 if (potential_reload_regs[idx] == j + k)
1921 break;
1922 something_changed
1923 |= new_spill_reg (idx, class,
1924 max_needs, NULL_PTR,
1925 global, dumpfile);
1926 }
1927
1928 /* We have found one that will complete a group,
1929 so count off one group as provided. */
1930 max_groups[class]--;
1931 p = reg_class_superclasses[class];
1932 while (*p != LIM_REG_CLASSES)
1933 {
1934 if (group_size [(int) *p]
1935 <= group_size [class])
1936 max_groups[(int) *p]--;
1937 p++;
1938 }
1939 break;
1940 }
1941 }
1942 }
1943 /* We couldn't find any registers for this reload.
1944 Avoid going into an infinite loop. */
1945 if (i >= FIRST_PSEUDO_REGISTER)
1946 {
1947 /* There are no groups left. */
1948 spill_failure (max_groups_insn[class]);
1949 failure = 1;
1950 goto failed;
1951 }
1952 }
1953 }
1954
1955 /* Now similarly satisfy all need for single registers. */
1956
1957 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1958 {
1959 /* If we spilled enough regs, but they weren't counted
1960 against the non-group need, see if we can count them now.
1961 If so, we can avoid some actual spilling. */
1962 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1963 for (i = 0; i < n_spills; i++)
1964 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1965 spill_regs[i])
1966 && !TEST_HARD_REG_BIT (counted_for_groups,
1967 spill_regs[i])
1968 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1969 spill_regs[i])
1970 && max_nongroups[class] > 0)
1971 {
1972 register enum reg_class *p;
1973
1974 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1975 max_nongroups[class]--;
1976 p = reg_class_superclasses[class];
1977 while (*p != LIM_REG_CLASSES)
1978 max_nongroups[(int) *p++]--;
1979 }
1980 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1981 break;
1982
1983 /* Consider the potential reload regs that aren't
1984 yet in use as reload regs, in order of preference.
1985 Find the most preferred one that's in this class. */
1986
1987 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1988 if (potential_reload_regs[i] >= 0
1989 && TEST_HARD_REG_BIT (reg_class_contents[class],
1990 potential_reload_regs[i])
1991 /* If this reg will not be available for groups,
1992 pick one that does not foreclose possible groups.
1993 This is a kludge, and not very general,
1994 but it should be sufficient to make the 386 work,
1995 and the problem should not occur on machines with
1996 more registers. */
1997 && (max_nongroups[class] == 0
1998 || possible_group_p (potential_reload_regs[i], max_groups)))
1999 break;
2000
2001 /* If we couldn't get a register, try to get one even if we
2002 might foreclose possible groups. This may cause problems
2003 later, but that's better than aborting now, since it is
2004 possible that we will, in fact, be able to form the needed
2005 group even with this allocation. */
2006
2007 if (i >= FIRST_PSEUDO_REGISTER
2008 && (asm_noperands (max_needs[class] > 0
2009 ? max_needs_insn[class]
2010 : max_nongroups_insn[class])
2011 < 0))
2012 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2013 if (potential_reload_regs[i] >= 0
2014 && TEST_HARD_REG_BIT (reg_class_contents[class],
2015 potential_reload_regs[i]))
2016 break;
2017
2018 /* I should be the index in potential_reload_regs
2019 of the new reload reg we have found. */
2020
2021 if (i >= FIRST_PSEUDO_REGISTER)
2022 {
2023 /* There are no possible registers left to spill. */
2024 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
2025 : max_nongroups_insn[class]);
2026 failure = 1;
2027 goto failed;
2028 }
2029 else
2030 something_changed
2031 |= new_spill_reg (i, class, max_needs, max_nongroups,
2032 global, dumpfile);
2033 }
2034 }
2035 }
2036
2037 /* If global-alloc was run, notify it of any register eliminations we have
2038 done. */
2039 if (global)
2040 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2041 if (ep->can_eliminate)
2042 mark_elimination (ep->from, ep->to);
2043
2044 /* Insert code to save and restore call-clobbered hard regs
2045 around calls. Tell if what mode to use so that we will process
2046 those insns in reload_as_needed if we have to. */
2047
2048 if (caller_save_needed)
2049 save_call_clobbered_regs (num_eliminable ? QImode
2050 : caller_save_spill_class != NO_REGS ? HImode
2051 : VOIDmode);
2052
2053 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
2054 If that insn didn't set the register (i.e., it copied the register to
2055 memory), just delete that insn instead of the equivalencing insn plus
2056 anything now dead. If we call delete_dead_insn on that insn, we may
2057 delete the insn that actually sets the register if the register die
2058 there and that is incorrect. */
2059
2060 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2061 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
2062 && GET_CODE (reg_equiv_init[i]) != NOTE)
2063 {
2064 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
2065 delete_dead_insn (reg_equiv_init[i]);
2066 else
2067 {
2068 PUT_CODE (reg_equiv_init[i], NOTE);
2069 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
2070 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
2071 }
2072 }
2073
2074 /* Use the reload registers where necessary
2075 by generating move instructions to move the must-be-register
2076 values into or out of the reload registers. */
2077
2078 if (something_needs_reloads || something_needs_elimination
2079 || (caller_save_needed && num_eliminable)
2080 || caller_save_spill_class != NO_REGS)
2081 reload_as_needed (first, global);
2082
2083 /* If we were able to eliminate the frame pointer, show that it is no
2084 longer live at the start of any basic block. If it ls live by
2085 virtue of being in a pseudo, that pseudo will be marked live
2086 and hence the frame pointer will be known to be live via that
2087 pseudo. */
2088
2089 if (! frame_pointer_needed)
2090 for (i = 0; i < n_basic_blocks; i++)
2091 CLEAR_REGNO_REG_SET (basic_block_live_at_start[i],
2092 HARD_FRAME_POINTER_REGNUM);
2093
2094 /* Come here (with failure set nonzero) if we can't get enough spill regs
2095 and we decide not to abort about it. */
2096 failed:
2097
2098 reload_in_progress = 0;
2099
2100 /* Now eliminate all pseudo regs by modifying them into
2101 their equivalent memory references.
2102 The REG-rtx's for the pseudos are modified in place,
2103 so all insns that used to refer to them now refer to memory.
2104
2105 For a reg that has a reg_equiv_address, all those insns
2106 were changed by reloading so that no insns refer to it any longer;
2107 but the DECL_RTL of a variable decl may refer to it,
2108 and if so this causes the debugging info to mention the variable. */
2109
2110 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2111 {
2112 rtx addr = 0;
2113 int in_struct = 0;
2114 if (reg_equiv_mem[i])
2115 {
2116 addr = XEXP (reg_equiv_mem[i], 0);
2117 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
2118 }
2119 if (reg_equiv_address[i])
2120 addr = reg_equiv_address[i];
2121 if (addr)
2122 {
2123 if (reg_renumber[i] < 0)
2124 {
2125 rtx reg = regno_reg_rtx[i];
2126 XEXP (reg, 0) = addr;
2127 REG_USERVAR_P (reg) = 0;
2128 MEM_IN_STRUCT_P (reg) = in_struct;
2129 PUT_CODE (reg, MEM);
2130 }
2131 else if (reg_equiv_mem[i])
2132 XEXP (reg_equiv_mem[i], 0) = addr;
2133 }
2134 }
2135
2136 #ifdef PRESERVE_DEATH_INFO_REGNO_P
2137 /* Make a pass over all the insns and remove death notes for things that
2138 are no longer registers or no longer die in the insn (e.g., an input
2139 and output pseudo being tied). */
2140
2141 for (insn = first; insn; insn = NEXT_INSN (insn))
2142 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2143 {
2144 rtx note, next;
2145
2146 for (note = REG_NOTES (insn); note; note = next)
2147 {
2148 next = XEXP (note, 1);
2149 if (REG_NOTE_KIND (note) == REG_DEAD
2150 && (GET_CODE (XEXP (note, 0)) != REG
2151 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2152 remove_note (insn, note);
2153 }
2154 }
2155 #endif
2156
2157 /* If we are doing stack checking, give a warning if this function's
2158 frame size is larger than we expect. */
2159 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
2160 {
2161 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
2162
2163 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2164 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
2165 size += UNITS_PER_WORD;
2166
2167 if (size > STACK_CHECK_MAX_FRAME_SIZE)
2168 warning ("frame size too large for reliable stack checking");
2169 }
2170
2171 /* Indicate that we no longer have known memory locations or constants. */
2172 reg_equiv_constant = 0;
2173 reg_equiv_memory_loc = 0;
2174
2175 if (real_known_ptr)
2176 free (real_known_ptr);
2177 if (real_at_ptr)
2178 free (real_at_ptr);
2179
2180 if (scratch_list)
2181 free (scratch_list);
2182 scratch_list = 0;
2183 if (scratch_block)
2184 free (scratch_block);
2185 scratch_block = 0;
2186
2187 CLEAR_HARD_REG_SET (used_spill_regs);
2188 for (i = 0; i < n_spills; i++)
2189 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
2190
2191 return failure;
2192 }
2193 \f
2194 /* Nonzero if, after spilling reg REGNO for non-groups,
2195 it will still be possible to find a group if we still need one. */
2196
2197 static int
2198 possible_group_p (regno, max_groups)
2199 int regno;
2200 int *max_groups;
2201 {
2202 int i;
2203 int class = (int) NO_REGS;
2204
2205 for (i = 0; i < (int) N_REG_CLASSES; i++)
2206 if (max_groups[i] > 0)
2207 {
2208 class = i;
2209 break;
2210 }
2211
2212 if (class == (int) NO_REGS)
2213 return 1;
2214
2215 /* Consider each pair of consecutive registers. */
2216 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2217 {
2218 /* Ignore pairs that include reg REGNO. */
2219 if (i == regno || i + 1 == regno)
2220 continue;
2221
2222 /* Ignore pairs that are outside the class that needs the group.
2223 ??? Here we fail to handle the case where two different classes
2224 independently need groups. But this never happens with our
2225 current machine descriptions. */
2226 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2227 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2228 continue;
2229
2230 /* A pair of consecutive regs we can still spill does the trick. */
2231 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2232 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2233 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2234 return 1;
2235
2236 /* A pair of one already spilled and one we can spill does it
2237 provided the one already spilled is not otherwise reserved. */
2238 if (spill_reg_order[i] < 0
2239 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2240 && spill_reg_order[i + 1] >= 0
2241 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2242 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2243 return 1;
2244 if (spill_reg_order[i + 1] < 0
2245 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2246 && spill_reg_order[i] >= 0
2247 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2248 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2249 return 1;
2250 }
2251
2252 return 0;
2253 }
2254 \f
/* Count any groups of CLASS that can be formed from the registers recently
   spilled.

   GROUP_SIZE and GROUP_MODE are indexed by register class and give the
   number of consecutive registers a group in that class needs and the
   mode the whole group must be valid in.  MAX_GROUPS is the vector of
   outstanding group needs; each group found here is marked off against
   MAX_GROUPS for CLASS and for every superclass whose group size is no
   larger.  The registers consumed are recorded in counted_for_groups so
   they are not counted twice.  */

static void
count_possible_groups (group_size, group_mode, max_groups, class)
     int *group_size;
     enum machine_mode *group_mode;
     int *max_groups;
     int class;
{
  HARD_REG_SET new;
  int i, j;

  /* Now find all consecutive groups of spilled registers
     and mark each group off against the need for such groups.
     But don't count them against ordinary need, yet.  */

  /* A zero group size means CLASS needs no groups at all.  */
  if (group_size[class] == 0)
    return;

  CLEAR_HARD_REG_SET (new);

  /* Make a mask of all the regs that are spill regs in class I.
     Regs already reserved for a group or for a non-group need
     are excluded, since they cannot be counted again.  */
  for (i = 0; i < n_spills; i++)
    if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
	&& ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
	&& ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
      SET_HARD_REG_BIT (new, spill_regs[i]);

  /* Find each consecutive group of them.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
    if (TEST_HARD_REG_BIT (new, i)
	&& i + group_size[class] <= FIRST_PSEUDO_REGISTER
	&& HARD_REGNO_MODE_OK (i, group_mode[class]))
      {
	/* Check that all the remaining regs of the candidate group
	   are in the mask; J ends up as the count of regs verified.  */
	for (j = 1; j < group_size[class]; j++)
	  if (! TEST_HARD_REG_BIT (new, i + j))
	    break;

	if (j == group_size[class])
	  {
	    /* We found a group.  Mark it off against this class's need for
	       groups, and against each superclass too.  */
	    register enum reg_class *p;

	    max_groups[class]--;
	    p = reg_class_superclasses[class];
	    while (*p != LIM_REG_CLASSES)
	      {
		if (group_size [(int) *p] <= group_size [class])
		  max_groups[(int) *p]--;
		p++;
	      }

	    /* Don't count these registers again.  */
	    for (j = 0; j < group_size[class]; j++)
	      SET_HARD_REG_BIT (counted_for_groups, i + j);
	  }

	/* Skip to the last reg in this group.  When i is incremented above,
	   it will then point to the first reg of the next possible group.
	   (If the group failed, J indexes the first reg not in the mask,
	   so no group containing it could have started earlier either.)  */
	i += j - 1;
      }
}
2319 \f
2320 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2321 another mode that needs to be reloaded for the same register class CLASS.
2322 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2323 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2324
2325 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2326 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2327 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2328 causes unnecessary failures on machines requiring alignment of register
2329 groups when the two modes are different sizes, because the larger mode has
2330 more strict alignment rules than the smaller mode. */
2331
2332 static int
2333 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2334 enum machine_mode allocate_mode, other_mode;
2335 enum reg_class class;
2336 {
2337 register int regno;
2338 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2339 {
2340 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2341 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2342 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2343 return 0;
2344 }
2345 return 1;
2346 }
2347
2348 /* Handle the failure to find a register to spill.
2349 INSN should be one of the insns which needed this particular spill reg. */
2350
2351 static void
2352 spill_failure (insn)
2353 rtx insn;
2354 {
2355 if (asm_noperands (PATTERN (insn)) >= 0)
2356 error_for_asm (insn, "`asm' needs too many reloads");
2357 else
2358 fatal_insn ("Unable to find a register to spill.", insn);
2359 }
2360
2361 /* Add a new register to the tables of available spill-registers
2362 (as well as spilling all pseudos allocated to the register).
2363 I is the index of this register in potential_reload_regs.
2364 CLASS is the regclass whose need is being satisfied.
2365 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2366 so that this register can count off against them.
2367 MAX_NONGROUPS is 0 if this register is part of a group.
2368 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2369
2370 static int
2371 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2372 int i;
2373 int class;
2374 int *max_needs;
2375 int *max_nongroups;
2376 int global;
2377 FILE *dumpfile;
2378 {
2379 register enum reg_class *p;
2380 int val;
2381 int regno = potential_reload_regs[i];
2382
2383 if (i >= FIRST_PSEUDO_REGISTER)
2384 abort (); /* Caller failed to find any register. */
2385
2386 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2387 fatal ("fixed or forbidden register was spilled.\n\
2388 This may be due to a compiler bug or to impossible asm\n\
2389 statements or clauses.");
2390
2391 /* Make reg REGNO an additional reload reg. */
2392
2393 potential_reload_regs[i] = -1;
2394 spill_regs[n_spills] = regno;
2395 spill_reg_order[regno] = n_spills;
2396 if (dumpfile)
2397 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2398
2399 /* Clear off the needs we just satisfied. */
2400
2401 max_needs[class]--;
2402 p = reg_class_superclasses[class];
2403 while (*p != LIM_REG_CLASSES)
2404 max_needs[(int) *p++]--;
2405
2406 if (max_nongroups && max_nongroups[class] > 0)
2407 {
2408 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2409 max_nongroups[class]--;
2410 p = reg_class_superclasses[class];
2411 while (*p != LIM_REG_CLASSES)
2412 max_nongroups[(int) *p++]--;
2413 }
2414
2415 /* Spill every pseudo reg that was allocated to this reg
2416 or to something that overlaps this reg. */
2417
2418 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2419
2420 /* If there are some registers still to eliminate and this register
2421 wasn't ever used before, additional stack space may have to be
2422 allocated to store this register. Thus, we may have changed the offset
2423 between the stack and frame pointers, so mark that something has changed.
2424 (If new pseudos were spilled, thus requiring more space, VAL would have
2425 been set non-zero by the call to spill_hard_reg above since additional
2426 reloads may be needed in that case.
2427
2428 One might think that we need only set VAL to 1 if this is a call-used
2429 register. However, the set of registers that must be saved by the
2430 prologue is not identical to the call-used set. For example, the
2431 register used by the call insn for the return PC is a call-used register,
2432 but must be saved by the prologue. */
2433 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2434 val = 1;
2435
2436 regs_ever_live[spill_regs[n_spills]] = 1;
2437 n_spills++;
2438
2439 return val;
2440 }
2441 \f
2442 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2443 data that is dead in INSN. */
2444
2445 static void
2446 delete_dead_insn (insn)
2447 rtx insn;
2448 {
2449 rtx prev = prev_real_insn (insn);
2450 rtx prev_dest;
2451
2452 /* If the previous insn sets a register that dies in our insn, delete it
2453 too. */
2454 if (prev && GET_CODE (PATTERN (prev)) == SET
2455 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2456 && reg_mentioned_p (prev_dest, PATTERN (insn))
2457 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2458 delete_dead_insn (prev);
2459
2460 PUT_CODE (insn, NOTE);
2461 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2462 NOTE_SOURCE_FILE (insn) = 0;
2463 }
2464
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.

   If the pseudo got no hard reg and no equivalent, a stack slot is
   allocated (or reused) and recorded in reg_equiv_memory_loc[I].  */

static void
alter_reg (i, from_reg)
     register int i;
     int from_reg;
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (GET_CODE (regno_reg_rtx[i]) != REG)
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  REGNO (regno_reg_rtx[i])
    = reg_renumber[i] >= 0 ? reg_renumber[i] : i;

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && REG_N_REFS (i) > 0
      && reg_equiv_constant[i] == 0
      && reg_equiv_memory_loc[i] == 0)
    {
      register rtx x;
      int inherent_size = PSEUDO_REGNO_BYTES (i);
      int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      /* ADJUST accumulates a byte offset applied to the slot's address
	 to account for big-endian placement within a wider slot.  */
      int adjust = 0;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, then the previous slot.  */
      if (from_reg == -1)
	{
	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
				  inherent_size == total_size ? 0 : -1);
	  if (BYTES_BIG_ENDIAN)
	    /* Cancel the big-endian correction done in assign_stack_local.
	       Get the address of the beginning of the slot.
	       This is so we can do a big-endian correction unconditionally
	       below.  */
	    adjust = inherent_size - total_size;

	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
	}
      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size))
	x = spill_stack_slot[from_reg];
      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
	  rtx stack_slot;
	  /* Grow MODE and TOTAL_SIZE to cover the old shared slot too,
	     so the new slot can replace it for every user.  */
	  if (spill_stack_slot[from_reg])
	    {
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	    }
	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size,
				  inherent_size == total_size ? 0 : -1);
	  stack_slot = x;
	  if (BYTES_BIG_ENDIAN)
	    {
	      /* Cancel the big-endian correction done in assign_stack_local.
		 Get the address of the beginning of the slot.
		 This is so we can do a big-endian correction unconditionally
		 below.  */
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (adjust)
		stack_slot = gen_rtx (MEM, mode_for_size (total_size
							  * BITS_PER_UNIT,
							  MODE_INT, 1),
				      plus_constant (XEXP (x, 0), adjust));
	    }
	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
	adjust += (total_size - inherent_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
	{
	  x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
		       plus_constant (XEXP (x, 0), adjust));
	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
	}

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc[i] = x;
    }
}
2588
2589 /* Mark the slots in regs_ever_live for the hard regs
2590 used by pseudo-reg number REGNO. */
2591
2592 void
2593 mark_home_live (regno)
2594 int regno;
2595 {
2596 register int i, lim;
2597 i = reg_renumber[regno];
2598 if (i < 0)
2599 return;
2600 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2601 while (i < lim)
2602 regs_ever_live[i++] = 1;
2603 }
2604
2605 /* Mark the registers used in SCRATCH as being live. */
2606
2607 static void
2608 mark_scratch_live (scratch)
2609 rtx scratch;
2610 {
2611 register int i;
2612 int regno = REGNO (scratch);
2613 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2614
2615 for (i = regno; i < lim; i++)
2616 regs_ever_live[i] = 1;
2617 }
2618 \f
2619 /* This function handles the tracking of elimination offsets around branches.
2620
2621 X is a piece of RTL being scanned.
2622
2623 INSN is the insn that it came from, if any.
2624
2625 INITIAL_P is non-zero if we are to set the offset to be the initial
2626 offset and zero if we are setting the offset of the label to be the
2627 current offset. */
2628
static void
set_label_offsets (x, insn, initial_p)
     rtx x;
     rtx insn;
     int initial_p;
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      /* Non-local labels are always reached with all eliminations at their
	 initial offsets; nothing to record for them here.  */
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = XEXP (x, 0);

      /* ... fall through ... */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x)][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && GET_CODE (tem) == BARRIER)
	{
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (x)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x)][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_INSN:
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ... */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
	 and hence must have all eliminations at their initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the address vector must be at their initial
	 offsets.  We want the first field for ADDR_VEC and the second
	 field for ADDR_DIFF_VEC.  */

      for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Handle each label arm; a `break' here falls out of the switch
	     to the "jump to variable address" handling below.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
    }
}
2773 \f
/* Used for communication between the next two functions to properly share
   the vector for an ASM_OPERANDS.  */
2776
2777 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2778
2779 /* Scan X and replace any eliminable registers (such as fp) with a
2780 replacement (such as sp), plus an offset.
2781
2782 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2783 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2784 MEM, we are allowed to replace a sum of a register and the constant zero
2785 with the register, which we cannot do outside a MEM. In addition, we need
2786 to record the fact that a register is referenced outside a MEM.
2787
2788 If INSN is an insn, it is the insn containing X. If we replace a REG
2789 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2790 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2791 that the REG is being modified.
2792
2793 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2794 That's used when we eliminate in expressions stored in notes.
2795 This means, do not set ref_outside_mem even if the reference
2796 is outside of MEMs.
2797
2798 If we see a modification to a register we know about, take the
2799 appropriate action (see case SET, below).
2800
2801 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2802 replacements done assuming all offsets are at their initial values. If
2803 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2804 encounter, return the actual location so that find_reloads will do
2805 the proper thing. */
2806
2807 rtx
2808 eliminate_regs (x, mem_mode, insn, storing)
2809 rtx x;
2810 enum machine_mode mem_mode;
2811 rtx insn;
2812 int storing;
2813 {
2814 enum rtx_code code = GET_CODE (x);
2815 struct elim_table *ep;
2816 int regno;
2817 rtx new;
2818 int i, j;
2819 char *fmt;
2820 int copied = 0;
2821
2822 switch (code)
2823 {
2824 case CONST_INT:
2825 case CONST_DOUBLE:
2826 case CONST:
2827 case SYMBOL_REF:
2828 case CODE_LABEL:
2829 case PC:
2830 case CC0:
2831 case ASM_INPUT:
2832 case ADDR_VEC:
2833 case ADDR_DIFF_VEC:
2834 case RETURN:
2835 return x;
2836
2837 case REG:
2838 regno = REGNO (x);
2839
2840 /* First handle the case where we encounter a bare register that
2841 is eliminable. Replace it with a PLUS. */
2842 if (regno < FIRST_PSEUDO_REGISTER)
2843 {
2844 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2845 ep++)
2846 if (ep->from_rtx == x && ep->can_eliminate)
2847 {
2848 if (! mem_mode
2849 /* Refs inside notes don't count for this purpose. */
2850 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2851 || GET_CODE (insn) == INSN_LIST)))
2852 ep->ref_outside_mem = 1;
2853 return plus_constant (ep->to_rtx, ep->previous_offset);
2854 }
2855
2856 }
2857 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2858 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2859 {
2860 /* In this case, find_reloads would attempt to either use an
2861 incorrect address (if something is not at its initial offset)
2862 or substitute an replaced address into an insn (which loses
2863 if the offset is changed by some later action). So we simply
2864 return the replaced stack slot (assuming it is changed by
2865 elimination) and ignore the fact that this is actually a
2866 reference to the pseudo. Ensure we make a copy of the
2867 address in case it is shared. */
2868 new = eliminate_regs (reg_equiv_memory_loc[regno],
2869 mem_mode, insn, 0);
2870 if (new != reg_equiv_memory_loc[regno])
2871 {
2872 cannot_omit_stores[regno] = 1;
2873 return copy_rtx (new);
2874 }
2875 }
2876 return x;
2877
2878 case PLUS:
2879 /* If this is the sum of an eliminable register and a constant, rework
2880 the sum. */
2881 if (GET_CODE (XEXP (x, 0)) == REG
2882 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2883 && CONSTANT_P (XEXP (x, 1)))
2884 {
2885 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2886 ep++)
2887 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2888 {
2889 if (! mem_mode
2890 /* Refs inside notes don't count for this purpose. */
2891 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2892 || GET_CODE (insn) == INSN_LIST)))
2893 ep->ref_outside_mem = 1;
2894
2895 /* The only time we want to replace a PLUS with a REG (this
2896 occurs when the constant operand of the PLUS is the negative
2897 of the offset) is when we are inside a MEM. We won't want
2898 to do so at other times because that would change the
2899 structure of the insn in a way that reload can't handle.
2900 We special-case the commonest situation in
2901 eliminate_regs_in_insn, so just replace a PLUS with a
2902 PLUS here, unless inside a MEM. */
2903 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2904 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2905 return ep->to_rtx;
2906 else
2907 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2908 plus_constant (XEXP (x, 1),
2909 ep->previous_offset));
2910 }
2911
2912 /* If the register is not eliminable, we are done since the other
2913 operand is a constant. */
2914 return x;
2915 }
2916
2917 /* If this is part of an address, we want to bring any constant to the
2918 outermost PLUS. We will do this by doing register replacement in
2919 our operands and seeing if a constant shows up in one of them.
2920
2921 We assume here this is part of an address (or a "load address" insn)
2922 since an eliminable register is not likely to appear in any other
2923 context.
2924
2925 If we have (plus (eliminable) (reg)), we want to produce
2926 (plus (plus (replacement) (reg) (const))). If this was part of a
2927 normal add insn, (plus (replacement) (reg)) will be pushed as a
2928 reload. This is the desired action. */
2929
2930 {
2931 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
2932 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn, 0);
2933
2934 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2935 {
2936 /* If one side is a PLUS and the other side is a pseudo that
2937 didn't get a hard register but has a reg_equiv_constant,
2938 we must replace the constant here since it may no longer
2939 be in the position of any operand. */
2940 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2941 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2942 && reg_renumber[REGNO (new1)] < 0
2943 && reg_equiv_constant != 0
2944 && reg_equiv_constant[REGNO (new1)] != 0)
2945 new1 = reg_equiv_constant[REGNO (new1)];
2946 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2947 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2948 && reg_renumber[REGNO (new0)] < 0
2949 && reg_equiv_constant[REGNO (new0)] != 0)
2950 new0 = reg_equiv_constant[REGNO (new0)];
2951
2952 new = form_sum (new0, new1);
2953
2954 /* As above, if we are not inside a MEM we do not want to
2955 turn a PLUS into something else. We might try to do so here
2956 for an addition of 0 if we aren't optimizing. */
2957 if (! mem_mode && GET_CODE (new) != PLUS)
2958 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2959 else
2960 return new;
2961 }
2962 }
2963 return x;
2964
2965 case MULT:
2966 /* If this is the product of an eliminable register and a
2967 constant, apply the distribute law and move the constant out
2968 so that we have (plus (mult ..) ..). This is needed in order
2969 to keep load-address insns valid. This case is pathological.
2970 We ignore the possibility of overflow here. */
2971 if (GET_CODE (XEXP (x, 0)) == REG
2972 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2973 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2974 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2975 ep++)
2976 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2977 {
2978 if (! mem_mode
2979 /* Refs inside notes don't count for this purpose. */
2980 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2981 || GET_CODE (insn) == INSN_LIST)))
2982 ep->ref_outside_mem = 1;
2983
2984 return
2985 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2986 ep->previous_offset * INTVAL (XEXP (x, 1)));
2987 }
2988
2989 /* ... fall through ... */
2990
2991 case CALL:
2992 case COMPARE:
2993 case MINUS:
2994 case DIV: case UDIV:
2995 case MOD: case UMOD:
2996 case AND: case IOR: case XOR:
2997 case ROTATERT: case ROTATE:
2998 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2999 case NE: case EQ:
3000 case GE: case GT: case GEU: case GTU:
3001 case LE: case LT: case LEU: case LTU:
3002 {
3003 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3004 rtx new1
3005 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn, 0) : 0;
3006
3007 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
3008 return gen_rtx (code, GET_MODE (x), new0, new1);
3009 }
3010 return x;
3011
3012 case EXPR_LIST:
3013 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
3014 if (XEXP (x, 0))
3015 {
3016 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3017 if (new != XEXP (x, 0))
3018 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
3019 }
3020
3021 /* ... fall through ... */
3022
3023 case INSN_LIST:
3024 /* Now do eliminations in the rest of the chain. If this was
3025 an EXPR_LIST, this might result in allocating more memory than is
3026 strictly needed, but it simplifies the code. */
3027 if (XEXP (x, 1))
3028 {
3029 new = eliminate_regs (XEXP (x, 1), mem_mode, insn, 0);
3030 if (new != XEXP (x, 1))
3031 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
3032 }
3033 return x;
3034
3035 case PRE_INC:
3036 case POST_INC:
3037 case PRE_DEC:
3038 case POST_DEC:
3039 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3040 if (ep->to_rtx == XEXP (x, 0))
3041 {
3042 int size = GET_MODE_SIZE (mem_mode);
3043
3044 /* If more bytes than MEM_MODE are pushed, account for them. */
3045 #ifdef PUSH_ROUNDING
3046 if (ep->to_rtx == stack_pointer_rtx)
3047 size = PUSH_ROUNDING (size);
3048 #endif
3049 if (code == PRE_DEC || code == POST_DEC)
3050 ep->offset += size;
3051 else
3052 ep->offset -= size;
3053 }
3054
3055 /* Fall through to generic unary operation case. */
3056 case STRICT_LOW_PART:
3057 case NEG: case NOT:
3058 case SIGN_EXTEND: case ZERO_EXTEND:
3059 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3060 case FLOAT: case FIX:
3061 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3062 case ABS:
3063 case SQRT:
3064 case FFS:
3065 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3066 if (new != XEXP (x, 0))
3067 return gen_rtx (code, GET_MODE (x), new);
3068 return x;
3069
3070 case SUBREG:
3071 /* Similar to above processing, but preserve SUBREG_WORD.
3072 Convert (subreg (mem)) to (mem) if not paradoxical.
3073 Also, if we have a non-paradoxical (subreg (pseudo)) and the
3074 pseudo didn't get a hard reg, we must replace this with the
3075 eliminated version of the memory location because push_reloads
3076 may do the replacement in certain circumstances. */
3077 if (GET_CODE (SUBREG_REG (x)) == REG
3078 && (GET_MODE_SIZE (GET_MODE (x))
3079 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3080 && reg_equiv_memory_loc != 0
3081 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3082 {
3083 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
3084 mem_mode, insn, 0);
3085
3086 /* If we didn't change anything, we must retain the pseudo. */
3087 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
3088 new = SUBREG_REG (x);
3089 else
3090 {
3091 /* Otherwise, ensure NEW isn't shared in case we have to reload
3092 it. */
3093 new = copy_rtx (new);
3094
3095 /* In this case, we must show that the pseudo is used in this
3096 insn so that delete_output_reload will do the right thing. */
3097 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
3098 && GET_CODE (insn) != INSN_LIST)
3099 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
3100 insn);
3101 }
3102 }
3103 else
3104 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn, 0);
3105
3106 if (new != XEXP (x, 0))
3107 {
3108 int x_size = GET_MODE_SIZE (GET_MODE (x));
3109 int new_size = GET_MODE_SIZE (GET_MODE (new));
3110
3111 /* When asked to spill a partial word subreg, we need to go
3112 ahead and spill the whole thing against the possibility
3113 that we reload the whole reg and find garbage at the top. */
3114 if (storing
3115 && GET_CODE (new) == MEM
3116 && x_size < new_size
3117 && ((x_size + UNITS_PER_WORD-1) / UNITS_PER_WORD
3118 == (new_size + UNITS_PER_WORD-1) / UNITS_PER_WORD))
3119 return new;
3120 else if (GET_CODE (new) == MEM
3121 && x_size <= new_size
3122 #ifdef LOAD_EXTEND_OP
3123 /* On these machines we will be reloading what is
3124 inside the SUBREG if it originally was a pseudo and
3125 the inner and outer modes are both a word or
3126 smaller. So leave the SUBREG then. */
3127 && ! (GET_CODE (SUBREG_REG (x)) == REG
3128 && x_size <= UNITS_PER_WORD
3129 && new_size <= UNITS_PER_WORD
3130 && x_size > new_size
3131 && INTEGRAL_MODE_P (GET_MODE (new))
3132 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
3133 #endif
3134 )
3135 {
3136 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
3137 enum machine_mode mode = GET_MODE (x);
3138
3139 if (BYTES_BIG_ENDIAN)
3140 offset += (MIN (UNITS_PER_WORD,
3141 GET_MODE_SIZE (GET_MODE (new)))
3142 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
3143
3144 PUT_MODE (new, mode);
3145 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
3146 return new;
3147 }
3148 else
3149 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
3150 }
3151
3152 return x;
3153
3154 case USE:
3155 /* If using a register that is the source of an eliminate we still
3156 think can be performed, note it cannot be performed since we don't
3157 know how this register is used. */
3158 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3159 if (ep->from_rtx == XEXP (x, 0))
3160 ep->can_eliminate = 0;
3161
3162 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3163 if (new != XEXP (x, 0))
3164 return gen_rtx (code, GET_MODE (x), new);
3165 return x;
3166
3167 case CLOBBER:
3168 /* If clobbering a register that is the replacement register for an
3169 elimination we still think can be performed, note that it cannot
3170 be performed. Otherwise, we need not be concerned about it. */
3171 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3172 if (ep->to_rtx == XEXP (x, 0))
3173 ep->can_eliminate = 0;
3174
3175 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3176 if (new != XEXP (x, 0))
3177 return gen_rtx (code, GET_MODE (x), new);
3178 return x;
3179
3180 case ASM_OPERANDS:
3181 {
3182 rtx *temp_vec;
3183 /* Properly handle sharing input and constraint vectors. */
3184 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3185 {
3186 /* When we come to a new vector not seen before,
3187 scan all its elements; keep the old vector if none
3188 of them changes; otherwise, make a copy. */
3189 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3190 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3191 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3192 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
3193 mem_mode, insn, 0);
3194
3195 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3196 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3197 break;
3198
3199 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3200 new_asm_operands_vec = old_asm_operands_vec;
3201 else
3202 new_asm_operands_vec
3203 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3204 }
3205
3206 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3207 if (new_asm_operands_vec == old_asm_operands_vec)
3208 return x;
3209
3210 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3211 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3212 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
3213 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3214 ASM_OPERANDS_SOURCE_FILE (x),
3215 ASM_OPERANDS_SOURCE_LINE (x));
3216 new->volatil = x->volatil;
3217 return new;
3218 }
3219
3220 case SET:
3221 /* Check for setting a register that we know about. */
3222 if (GET_CODE (SET_DEST (x)) == REG)
3223 {
3224 /* See if this is setting the replacement register for an
3225 elimination.
3226
3227 If DEST is the hard frame pointer, we do nothing because we
3228 assume that all assignments to the frame pointer are for
3229 non-local gotos and are being done at a time when they are valid
3230 and do not disturb anything else. Some machines want to
3231 eliminate a fake argument pointer (or even a fake frame pointer)
3232 with either the real frame or the stack pointer. Assignments to
3233 the hard frame pointer must not prevent this elimination. */
3234
3235 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3236 ep++)
3237 if (ep->to_rtx == SET_DEST (x)
3238 && SET_DEST (x) != hard_frame_pointer_rtx)
3239 {
3240 /* If it is being incremented, adjust the offset. Otherwise,
3241 this elimination can't be done. */
3242 rtx src = SET_SRC (x);
3243
3244 if (GET_CODE (src) == PLUS
3245 && XEXP (src, 0) == SET_DEST (x)
3246 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3247 ep->offset -= INTVAL (XEXP (src, 1));
3248 else
3249 ep->can_eliminate = 0;
3250 }
3251
3252 /* Now check to see we are assigning to a register that can be
3253 eliminated. If so, it must be as part of a PARALLEL, since we
3254 will not have been called if this is a single SET. So indicate
3255 that we can no longer eliminate this reg. */
3256 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3257 ep++)
3258 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3259 ep->can_eliminate = 0;
3260 }
3261
3262 /* Now avoid the loop below in this common case. */
3263 {
3264 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn, 1);
3265 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn, 0);
3266
3267 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3268 write a CLOBBER insn. */
3269 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3270 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3271 && GET_CODE (insn) != INSN_LIST)
3272 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3273
3274 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3275 return gen_rtx (SET, VOIDmode, new0, new1);
3276 }
3277
3278 return x;
3279
3280 case MEM:
3281 /* Our only special processing is to pass the mode of the MEM to our
3282 recursive call and copy the flags. While we are here, handle this
3283 case more efficiently. */
3284 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn, 0);
3285 if (new != XEXP (x, 0))
3286 {
3287 new = gen_rtx (MEM, GET_MODE (x), new);
3288 new->volatil = x->volatil;
3289 new->unchanging = x->unchanging;
3290 new->in_struct = x->in_struct;
3291 return new;
3292 }
3293 else
3294 return x;
3295 }
3296
3297 /* Process each of our operands recursively. If any have changed, make a
3298 copy of the rtx. */
3299 fmt = GET_RTX_FORMAT (code);
3300 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3301 {
3302 if (*fmt == 'e')
3303 {
3304 new = eliminate_regs (XEXP (x, i), mem_mode, insn, 0);
3305 if (new != XEXP (x, i) && ! copied)
3306 {
3307 rtx new_x = rtx_alloc (code);
3308 bcopy ((char *) x, (char *) new_x,
3309 (sizeof (*new_x) - sizeof (new_x->fld)
3310 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3311 x = new_x;
3312 copied = 1;
3313 }
3314 XEXP (x, i) = new;
3315 }
3316 else if (*fmt == 'E')
3317 {
3318 int copied_vec = 0;
3319 for (j = 0; j < XVECLEN (x, i); j++)
3320 {
3321 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn, 0);
3322 if (new != XVECEXP (x, i, j) && ! copied_vec)
3323 {
3324 rtvec new_v = gen_rtvec_vv (XVECLEN (x, i),
3325 XVEC (x, i)->elem);
3326 if (! copied)
3327 {
3328 rtx new_x = rtx_alloc (code);
3329 bcopy ((char *) x, (char *) new_x,
3330 (sizeof (*new_x) - sizeof (new_x->fld)
3331 + (sizeof (new_x->fld[0])
3332 * GET_RTX_LENGTH (code))));
3333 x = new_x;
3334 copied = 1;
3335 }
3336 XVEC (x, i) = new_v;
3337 copied_vec = 1;
3338 }
3339 XVECEXP (x, i, j) = new;
3340 }
3341 }
3342 }
3343
3344 return x;
3345 }
3346 \f
3347 /* Scan INSN and eliminate all eliminable registers in it.
3348
3349 If REPLACE is nonzero, do the replacement destructively. Also
3350 delete the insn as dead it if it is setting an eliminable register.
3351
3352 If REPLACE is zero, do all our allocations in reload_obstack.
3353
3354 If no eliminations were done and this insn doesn't require any elimination
3355 processing (these are not identical conditions: it might be updating sp,
3356 but not referencing fp; this needs to be seen during reload_as_needed so
3357 that the offset between fp and sp can be taken into consideration), zero
3358 is returned. Otherwise, 1 is returned. */
3359
static int
eliminate_regs_in_insn (insn, replace)
     rtx insn;
     int replace;
{
  rtx old_body = PATTERN (insn);
  rtx old_set = single_set (insn);
  rtx new_body;
  int val = 0;			/* Return value: nonzero if anything changed.  */
  struct elim_table *ep;

  /* Allocations made while REPLACE is zero must be discardable, so they
     go on reload_obstack.  */
  if (! replace)
    push_obstacks (&reload_obstack, &reload_obstack);

  if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  {
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	    /* If this is setting the frame pointer register to the
	       hardware frame pointer register and this is an elimination
	       that will be done (tested above), this insn is really
	       adjusting the frame pointer downward to compensate for
	       the adjustment done before a nonlocal goto.  */
	    if (ep->from == FRAME_POINTER_REGNUM
		&& ep->to == HARD_FRAME_POINTER_REGNUM)
	      {
		rtx src = SET_SRC (old_set);
		int offset, ok = 0;
		rtx prev_insn, prev_set;

		/* Recognize the source as TO, TO plus a constant, or a
		   copy of such a value set by the previous insn.  */
		if (src == ep->to_rtx)
		  offset = 0, ok = 1;
		else if (GET_CODE (src) == PLUS
			 && GET_CODE (XEXP (src, 0)) == CONST_INT)
		  offset = INTVAL (XEXP (src, 0)), ok = 1;
		else if ((prev_insn = prev_nonnote_insn (insn)) != 0
			 && (prev_set = single_set (prev_insn)) != 0
			 && rtx_equal_p (SET_DEST (prev_set), src))
		  {
		    src = SET_SRC (prev_set);
		    if (src == ep->to_rtx)
		      offset = 0, ok = 1;
		    else if (GET_CODE (src) == PLUS
			     && GET_CODE (XEXP (src, 0)) == CONST_INT
			     && XEXP (src, 1) == ep->to_rtx)
		      offset = INTVAL (XEXP (src, 0)), ok = 1;
		    else if (GET_CODE (src) == PLUS
			     && GET_CODE (XEXP (src, 1)) == CONST_INT
			     && XEXP (src, 0) == ep->to_rtx)
		      offset = INTVAL (XEXP (src, 1)), ok = 1;
		  }

		if (ok)
		  {
		    if (replace)
		      {
			rtx src
			  = plus_constant (ep->to_rtx, offset - ep->offset);

			/* First see if this insn remains valid when we
			   make the change.  If not, keep the INSN_CODE
			   the same and let reload fix it up.  */
			validate_change (insn, &SET_SRC (old_set), src, 1);
			validate_change (insn, &SET_DEST (old_set),
					 ep->to_rtx, 1);
			if (! apply_change_group ())
			  {
			    SET_SRC (old_set) = src;
			    SET_DEST (old_set) = ep->to_rtx;
			  }
		      }

		    val = 1;
		    goto done;
		  }
	      }
#endif

	    /* In this case this insn isn't serving a useful purpose.  We
	       will delete it in reload_as_needed once we know that this
	       elimination is, in fact, being done.

	       If REPLACE isn't set, we can't delete this insn, but needn't
	       process it since it won't be used unless something changes.  */
	    if (replace)
	      delete_dead_insn (insn);
	    val = 1;
	    goto done;
	  }

      /* Check for (set (reg) (plus (reg from) (offset))) where the offset
	 in the insn is the negative of the offset in FROM.  Substitute
	 (set (reg) (reg to)) for the insn and change its code.

	 We have to do this here, rather than in eliminate_regs, so that we
	 can change the insn code.  */

      if (GET_CODE (SET_SRC (old_set)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
	  && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
	      && ep->can_eliminate)
	    {
	      /* We must stop at the first elimination that will be used.
		 If this one would replace the PLUS with a REG, do it
		 now.  Otherwise, quit the loop and let eliminate_regs
		 do its normal replacement.  */
	      if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
		{
		  /* We assume here that we don't need a PARALLEL of
		     any CLOBBERs for this assignment.  There's not
		     much we can do if we do need it.  */
		  PATTERN (insn) = gen_rtx (SET, VOIDmode,
					    SET_DEST (old_set), ep->to_rtx);
		  INSN_CODE (insn) = -1;
		  val = 1;
		  goto done;
		}

	      break;
	    }
    }

  old_asm_operands_vec = 0;

  /* Replace the body of this insn with a substituted form.  If we changed
     something, return non-zero.

     If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.  */

  new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX, 0);
  if (new_body != old_body)
    {
      /* If we aren't replacing things permanently and we changed something,
	 make another copy to ensure that all the RTL is new.  Otherwise
	 things can go wrong if find_reloads swaps commutative operands
	 and one is inside RTL that has been copied while the other is not.  */

      /* Don't copy an asm_operands because (1) there's no need and (2)
	 copy_rtx can't do it properly when there are multiple outputs.  */
      if (! replace && asm_noperands (old_body) < 0)
	new_body = copy_rtx (new_body);

      /* If we had a move insn but now we don't, rerecognize it.  This will
	 cause spurious re-recognition if the old move had a PARALLEL since
	 the new one still will, but we can't call single_set without
	 having put NEW_BODY into the insn and the re-recognition won't
	 hurt in this rare case.  */
      if (old_set != 0
	  && ((GET_CODE (SET_SRC (old_set)) == REG
	       && (GET_CODE (new_body) != SET
		   || GET_CODE (SET_SRC (new_body)) != REG))
	      /* If this was a load from or store to memory, compare
		 the MEM in recog_operand to the one in the insn.  If they
		 are not equal, then rerecognize the insn.  */
	      || (old_set != 0
		  && ((GET_CODE (SET_SRC (old_set)) == MEM
		       && SET_SRC (old_set) != recog_operand[1])
		      || (GET_CODE (SET_DEST (old_set)) == MEM
			  && SET_DEST (old_set) != recog_operand[0])))
	      /* If this was an add insn before, rerecognize.  */
	      || GET_CODE (SET_SRC (old_set)) == PLUS))
	{
	  if (! validate_change (insn, &PATTERN (insn), new_body, 0))
	    /* If recognition fails, store the new body anyway.
	       It's normal to have recognition failures here
	       due to bizarre memory addresses; reloading will fix them.  */
	    PATTERN (insn) = new_body;
	}
      else
	PATTERN (insn) = new_body;

      val = 1;
    }

  /* Loop through all elimination pairs.  See if any have changed and
     recalculate the number not at initial offset.

     Compute the maximum offset (minimum offset if the stack does not
     grow downward) for each elimination pair.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */

  num_not_at_initial_offset = 0;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;

      if (ep->previous_offset != ep->offset)
	val = 1;

      ep->previous_offset = ep->offset;
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
	num_not_at_initial_offset++;

#ifdef STACK_GROWS_DOWNWARD
      ep->max_offset = MAX (ep->max_offset, ep->offset);
#else
      ep->max_offset = MIN (ep->max_offset, ep->offset);
#endif
    }

 done:
  /* If we changed something, perform elimination in REG_NOTES.  This is
     needed even when REPLACE is zero because a REG_DEAD note might refer
     to a register that we eliminate and could cause a different number
     of spill registers to be needed in the final reload pass than in
     the pre-passes.  */
  if (val && REG_NOTES (insn) != 0)
    REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn), 0);

  if (! replace)
    pop_obstacks ();

  return val;
}
3595
3596 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3597 replacement we currently believe is valid, mark it as not eliminable if X
3598 modifies DEST in any way other than by adding a constant integer to it.
3599
   If DEST is the hard frame pointer, we do nothing because we assume that
   all assignments to the hard frame pointer are nonlocal gotos and are being
   done at a time when they are valid and do not disturb anything else.
3603 Some machines want to eliminate a fake argument pointer with either the
3604 frame or stack pointer. Assignments to the hard frame pointer must not
3605 prevent this elimination.
3606
3607 Called via note_stores from reload before starting its passes to scan
3608 the insns of the function. */
3609
3610 static void
3611 mark_not_eliminable (dest, x)
3612 rtx dest;
3613 rtx x;
3614 {
3615 register int i;
3616
3617 /* A SUBREG of a hard register here is just changing its mode. We should
3618 not see a SUBREG of an eliminable hard register, but check just in
3619 case. */
3620 if (GET_CODE (dest) == SUBREG)
3621 dest = SUBREG_REG (dest);
3622
3623 if (dest == hard_frame_pointer_rtx)
3624 return;
3625
3626 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3627 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3628 && (GET_CODE (x) != SET
3629 || GET_CODE (SET_SRC (x)) != PLUS
3630 || XEXP (SET_SRC (x), 0) != dest
3631 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3632 {
3633 reg_eliminate[i].can_eliminate_previous
3634 = reg_eliminate[i].can_eliminate = 0;
3635 num_eliminable--;
3636 }
3637 }
3638 \f
3639 /* Kick all pseudos out of hard register REGNO.
3640 If GLOBAL is nonzero, try to find someplace else to put them.
3641 If DUMPFILE is nonzero, log actions taken on that file.
3642
3643 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In that case, no pseudos
3645 are allowed to be in the register, even if they are only in a block that
3646 doesn't require spill registers, unlike the case when we are spilling this
3647 hard reg to produce another spill register.
3648
3649 Return nonzero if any pseudos needed to be kicked out. */
3650
static int
spill_hard_reg (regno, global, dumpfile, cant_eliminate)
     register int regno;
     int global;
     FILE *dumpfile;
     int cant_eliminate;
{
  enum reg_class class = REGNO_REG_CLASS (regno);
  int something_changed = 0;
  register int i;

  /* Never consider this register as a spill candidate again.  */
  SET_HARD_REG_BIT (forbidden_regs, regno);

  /* A register that could not be eliminated must be treated as live
     everywhere in the function.  */
  if (cant_eliminate)
    regs_ever_live[regno] = 1;

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] >= 0
	&& reg_renumber[i] <= regno
	&& (reg_renumber[i]
	    + HARD_REGNO_NREGS (reg_renumber[i],
				PSEUDO_REGNO_MODE (i))
	    > regno))
      {
	/* If this register belongs solely to a basic block which needed no
	   spilling of any class that this register is contained in,
	   leave it be, unless we are spilling this register because
	   it was a hard register that can't be eliminated.   */

	if (! cant_eliminate
	    && basic_block_needs[0]
	    && REG_BASIC_BLOCK (i) >= 0
	    && basic_block_needs[(int) class][REG_BASIC_BLOCK (i)] == 0)
	  {
	    enum reg_class *p;

	    /* A need recorded for any superclass of CLASS could also be
	       satisfied from this register, so check those too.  */
	    for (p = reg_class_superclasses[(int) class];
		 *p != LIM_REG_CLASSES; p++)
	      if (basic_block_needs[(int) *p][REG_BASIC_BLOCK (i)] > 0)
		break;

	    /* No class containing this register is needed in the block,
	       so the pseudo may keep its home.  */
	    if (*p == LIM_REG_CLASSES)
	      continue;
	  }

	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	/* We will need to scan everything again.  */
	something_changed = 1;
	if (global)
	  retry_global_alloc (i, forbidden_regs);

	alter_reg (i, regno);
	if (dumpfile)
	  {
	    if (reg_renumber[i] == -1)
	      fprintf (dumpfile, " Register %d now on stack.\n\n", i);
	    else
	      fprintf (dumpfile, " Register %d now in %d.\n\n",
		       i, reg_renumber[i]);
	  }
      }
  /* Likewise give up any SCRATCH rtx that was allocated to this register,
     subject to the same per-block exemption as above.  */
  for (i = 0; i < scratch_list_length; i++)
    {
      if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
	{
	  if (! cant_eliminate && basic_block_needs[0]
	      && ! basic_block_needs[(int) class][scratch_block[i]])
	    {
	      enum reg_class *p;

	      for (p = reg_class_superclasses[(int) class];
		   *p != LIM_REG_CLASSES; p++)
		if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
		  break;

	      if (*p == LIM_REG_CLASSES)
		continue;
	    }
	  /* Turn the rtx back into an unallocated SCRATCH.  */
	  PUT_CODE (scratch_list[i], SCRATCH);
	  scratch_list[i] = 0;
	  something_changed = 1;
	  continue;
	}
    }

  return something_changed;
}
3742 \f
3743 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3744 Also mark any hard registers used to store user variables as
3745 forbidden from being used for spill registers. */
3746
3747 static void
3748 scan_paradoxical_subregs (x)
3749 register rtx x;
3750 {
3751 register int i;
3752 register char *fmt;
3753 register enum rtx_code code = GET_CODE (x);
3754
3755 switch (code)
3756 {
3757 case REG:
3758 #ifdef SMALL_REGISTER_CLASSES
3759 if (SMALL_REGISTER_CLASSES
3760 && REGNO (x) < FIRST_PSEUDO_REGISTER
3761 && REG_USERVAR_P (x))
3762 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3763 #endif
3764 return;
3765
3766 case CONST_INT:
3767 case CONST:
3768 case SYMBOL_REF:
3769 case LABEL_REF:
3770 case CONST_DOUBLE:
3771 case CC0:
3772 case PC:
3773 case USE:
3774 case CLOBBER:
3775 return;
3776
3777 case SUBREG:
3778 if (GET_CODE (SUBREG_REG (x)) == REG
3779 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3780 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3781 = GET_MODE_SIZE (GET_MODE (x));
3782 return;
3783 }
3784
3785 fmt = GET_RTX_FORMAT (code);
3786 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3787 {
3788 if (fmt[i] == 'e')
3789 scan_paradoxical_subregs (XEXP (x, i));
3790 else if (fmt[i] == 'E')
3791 {
3792 register int j;
3793 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3794 scan_paradoxical_subregs (XVECEXP (x, i, j));
3795 }
3796 }
3797 }
3798 \f
3799 static int
3800 hard_reg_use_compare (p1p, p2p)
3801 const GENERIC_PTR p1p;
3802 const GENERIC_PTR p2p;
3803 {
3804 struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p,
3805 *p2 = (struct hard_reg_n_uses *)p2p;
3806 int tem = p1->uses - p2->uses;
3807 if (tem != 0) return tem;
3808 /* If regs are equally good, sort by regno,
3809 so that the results of qsort leave nothing to chance. */
3810 return p1->regno - p2->regno;
3811 }
3812
3813 /* Choose the order to consider regs for use as reload registers
3814 based on how much trouble would be caused by spilling one.
3815 Store them in order of decreasing preference in potential_reload_regs. */
3816
static void
order_regs_for_reload (global)
     int global;
{
  register int i;
  register int o = 0;
  int large = 0;

  struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];

  CLEAR_HARD_REG_SET (bad_spill_regs);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    potential_reload_regs[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      hard_reg_n_uses[i].uses = 0;
      hard_reg_n_uses[i].regno = i;
    }

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (regno >= 0)
	{
	  int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
	  while (regno < lim)
	    {
	      /* If allocated by local-alloc, show more uses since
		 we're not going to be able to reallocate it, but
		 we might if allocated by global alloc.  */
	      if (global && reg_allocno[i] < 0)
		hard_reg_n_uses[regno].uses += (REG_N_REFS (i) + 1) / 2;

	      hard_reg_n_uses[regno++].uses += REG_N_REFS (i);
	    }
	}
      /* LARGE accumulates the total reference count of every pseudo,
	 allocated or not; it serves below as a weight that exceeds any
	 real use count.  */
      large += REG_N_REFS (i);
    }

  /* Now fixed registers (which cannot safely be used for reloading)
     get a very high use count so they will be considered least desirable.
     Registers used explicitly in the rtl code are almost as bad.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (fixed_regs[i])
	{
	  hard_reg_n_uses[i].uses += 2 * large + 2;
	  SET_HARD_REG_BIT (bad_spill_regs, i);
	}
      else if (regs_explicitly_used[i])
	{
	  hard_reg_n_uses[i].uses += large + 1;
	  /* ??? We are doing this here because of the potential that
	     bad code may be generated if a register explicitly used in
	     an insn was used as a spill register for that insn.  But
	     not using these as spill registers may lose on some machine.
	     We'll have to see how this works out.  */
#ifdef SMALL_REGISTER_CLASSES
	  if (! SMALL_REGISTER_CLASSES)
#endif
	    SET_HARD_REG_BIT (bad_spill_regs, i);
	}
    }
  /* The hard frame pointer is as undesirable as a fixed register.  */
  hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
  SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);

#ifdef ELIMINABLE_REGS
  /* If registers other than the frame pointer are eliminable, mark them as
     poor choices.  */
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
      SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
    }
#endif

  /* Prefer registers not so far used, for use in temporary loading.
     Among them, if REG_ALLOC_ORDER is defined, use that order.
     Otherwise, prefer registers not preserved by calls.  */

#ifdef REG_ALLOC_ORDER
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int regno = reg_alloc_order[i];

      if (hard_reg_n_uses[regno].uses == 0)
	potential_reload_regs[o++] = regno;
    }
#else
  /* Unused call-clobbered registers first, then unused call-saved ones.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
#endif

  qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
	 sizeof hard_reg_n_uses[0], hard_reg_use_compare);

  /* Now add the regs that are already used,
     preferring those used less often.  The fixed and otherwise forbidden
     registers will be at the end of this list.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (hard_reg_n_uses[i].uses != 0)
      potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
}
3935 \f
3936 /* Used in reload_as_needed to sort the spilled regs. */
3937
3938 static int
3939 compare_spill_regs (r1p, r2p)
3940 const GENERIC_PTR r1p;
3941 const GENERIC_PTR r2p;
3942 {
3943 short r1 = *(short *)r1p, r2 = *(short *)r2p;
3944 return r1 - r2;
3945 }
3946
3947 /* Reload pseudo-registers into hard regs around each insn as needed.
3948 Additional register load insns are output before the insn that needs it
3949 and perhaps store insns after insns that modify the reloaded pseudo reg.
3950
3951 reg_last_reload_reg and reg_reloaded_contents keep track of
3952 which registers are already available in reload registers.
3953 We update these for the reloads that we perform,
3954 as the insns are scanned. */
3955
static void
reload_as_needed (first, live_known)
     rtx first;
     int live_known;
{
  register rtx insn;
  register int i;
  int this_block = 0;
  rtx x;
  rtx after_call = 0;

  /* Clear out all per-register reload tracking state.  */
  bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
  bzero ((char *) spill_reg_store, sizeof spill_reg_store);
  reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
  reg_has_output_reload = (char *) alloca (max_regno);
  for (i = 0; i < n_spills; i++)
    {
      reg_reloaded_contents[i] = -1;
      reg_reloaded_insn[i] = 0;
    }

  /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
				  reg_eliminate[i].initial_offset);
      reg_eliminate[i].previous_offset
	= reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
    }
#else
  INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
  reg_eliminate[0].previous_offset
    = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

  num_not_at_initial_offset = 0;

  /* Order the spilled regs, so that allocate_reload_regs can guarantee to
     pack registers with group needs.  */
  if (n_spills > 1)
    {
      qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
      for (i = 0; i < n_spills; i++)
	spill_reg_order[spill_regs[i]] = i;
    }

  for (insn = first; insn;)
    {
      register rtx next = NEXT_INSN (insn);

      /* Notice when we move to a new basic block.  */
      if (live_known && this_block + 1 < n_basic_blocks
	  && insn == basic_block_head[this_block+1])
	++this_block;

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.  */
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (insn)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  rtx avoid_return_reg = 0;
	  /* Remember the original pattern; reloads may rewrite it below,
	     but forget_old_reloads_1 must see the stores as written.  */
	  rtx oldpat = PATTERN (insn);

#ifdef SMALL_REGISTER_CLASSES
	  /* Set avoid_return_reg if this is an insn
	     that might use the value of a function call.  */
	  if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
	    {
	      if (GET_CODE (PATTERN (insn)) == SET)
		after_call = SET_DEST (PATTERN (insn));
	      else if (GET_CODE (PATTERN (insn)) == PARALLEL
		       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
	      else
		after_call = 0;
	    }
	  else if (SMALL_REGISTER_CLASSES
		   && after_call != 0
		   && !(GET_CODE (PATTERN (insn)) == SET
			&& SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
	    {
	      if (reg_referenced_p (after_call, PATTERN (insn)))
		avoid_return_reg = after_call;
	      after_call = 0;
	    }
#endif /* SMALL_REGISTER_CLASSES */

	  /* If this is a USE and CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)),
				NULL_RTX, 0);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.
	     NOTE(review): the insn's mode looks to be used here as a flag
	     set by an earlier pass (QImode = needs elimination/reloads,
	     VOIDmode = needs neither) -- confirm against the pre-pass.  */
	  if (num_eliminable && GET_MODE (insn) == QImode)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      if (GET_CODE (insn) == NOTE)
		{
		  insn = next;
		  continue;
		}
	    }

	  if (GET_MODE (insn) == VOIDmode)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      bzero (reg_has_output_reload, max_regno);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (n_reloads > 0)
	    {
	      rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
	      rtx p;
	      int class;

	      /* If this block has not had spilling done for a
		 particular class and we have any non-optionals that need a
		 spill reg in that class, abort.  */

	      for (class = 0; class < N_REG_CLASSES; class++)
		if (basic_block_needs[class] != 0
		    && basic_block_needs[class][this_block] == 0)
		  for (i = 0; i < n_reloads; i++)
		    if (class == (int) reload_reg_class[i]
			&& reload_reg_rtx[i] == 0
			&& ! reload_optional[i]
			&& (reload_in[i] != 0 || reload_out[i] != 0
			    || reload_secondary_p[i] != 0))
		      fatal_insn ("Non-optional registers need a spill register", insn);

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (insn, avoid_return_reg);

#ifdef SMALL_REGISTER_CLASSES
	      /* Merge any reloads that we didn't combine for fear of
		 increasing the number of spill registers needed but now
		 discover can be safely merged.  */
	      if (SMALL_REGISTER_CLASSES)
		merge_assigned_reloads (insn);
#endif

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (insn);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insn that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads ();

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */

	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
		  if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
		      && (recog_memoized (p) < 0
			  || (insn_extract (p),
			      ! constrain_operands (INSN_CODE (p), 1))))
		    {
		      error_for_asm (insn,
				     "`asm' operand requires impossible reload");
		      /* Delete the offending reload insn by turning it
			 into a NOTE.  */
		      PUT_CODE (p, NOTE);
		      NOTE_SOURCE_FILE (p) = 0;
		      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
		    }
	    }
	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.  */
	  note_stores (oldpat, forget_old_reloads_1);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
	    if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (PATTERN (x), forget_old_reloads_1);

#ifdef AUTO_INC_DEC
	  /* Likewise for regs altered by auto-increment in this insn.
	     But note that the reg-notes are not changed by reloading:
	     they still contain the pseudo-regs, not the spill regs.  */
	  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (reload_out[i] == XEXP (x, 0))
		    break;

		if (i == n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
	      }
#endif
	}
      /* A reload reg's contents are unknown after a label.  */
      if (GET_CODE (insn) == CODE_LABEL)
	for (i = 0; i < n_spills; i++)
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg.  */
      else if (GET_CODE (insn) == CALL_INSN)
	for (i = 0; i < n_spills; i++)
	  if (call_used_regs[spill_regs[i]])
	    {
	      reg_reloaded_contents[i] = -1;
	      reg_reloaded_insn[i] = 0;
	    }

      /* In case registers overlap, allow certain insns to invalidate
	 particular hard registers.  */

#ifdef INSN_CLOBBERS_REGNO_P
      for (i = 0 ; i < n_spills ; i++)
	if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }
#endif

      insn = next;

#ifdef USE_C_ALLOCA
      /* Free the storage obtained by the C alloca emulation above.  */
      alloca (0);
#endif
    }
}
4229
4230 /* Discard all record of any value reloaded from X,
4231 or reloaded in X from someplace else;
4232 unless X is an output reload reg of the current insn.
4233
4234 X may be a hard reg (the reload reg)
4235 or it may be a pseudo reg that was reloaded from. */
4236
4237 static void
4238 forget_old_reloads_1 (x, ignored)
4239 rtx x;
4240 rtx ignored;
4241 {
4242 register int regno;
4243 int nr;
4244 int offset = 0;
4245
4246 /* note_stores does give us subregs of hard regs. */
4247 while (GET_CODE (x) == SUBREG)
4248 {
4249 offset += SUBREG_WORD (x);
4250 x = SUBREG_REG (x);
4251 }
4252
4253 if (GET_CODE (x) != REG)
4254 return;
4255
4256 regno = REGNO (x) + offset;
4257
4258 if (regno >= FIRST_PSEUDO_REGISTER)
4259 nr = 1;
4260 else
4261 {
4262 int i;
4263 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4264 /* Storing into a spilled-reg invalidates its contents.
4265 This can happen if a block-local pseudo is allocated to that reg
4266 and it wasn't spilled because this block's total need is 0.
4267 Then some insn might have an optional reload and use this reg. */
4268 for (i = 0; i < nr; i++)
4269 if (spill_reg_order[regno + i] >= 0
4270 /* But don't do this if the reg actually serves as an output
4271 reload reg in the current instruction. */
4272 && (n_reloads == 0
4273 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4274 {
4275 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4276 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4277 }
4278 }
4279
4280 /* Since value of X has changed,
4281 forget any value previously copied from it. */
4282
4283 while (nr-- > 0)
4284 /* But don't forget a copy if this is the output reload
4285 that establishes the copy's validity. */
4286 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4287 reg_last_reload_reg[regno + nr] = 0;
4288 }
4289 \f
4290 /* For each reload, the mode of the reload register. */
4291 static enum machine_mode reload_mode[MAX_RELOADS];
4292
4293 /* For each reload, the largest number of registers it will require. */
4294 static int reload_nregs[MAX_RELOADS];
4295
4296 /* Comparison function for qsort to decide which of two reloads
4297 should be handled first. *P1 and *P2 are the reload numbers. */
4298
4299 static int
4300 reload_reg_class_lower (r1p, r2p)
4301 const GENERIC_PTR r1p;
4302 const GENERIC_PTR r2p;
4303 {
4304 register int r1 = *(short *)r1p, r2 = *(short *)r2p;
4305 register int t;
4306
4307 /* Consider required reloads before optional ones. */
4308 t = reload_optional[r1] - reload_optional[r2];
4309 if (t != 0)
4310 return t;
4311
4312 /* Count all solitary classes before non-solitary ones. */
4313 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4314 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4315 if (t != 0)
4316 return t;
4317
4318 /* Aside from solitaires, consider all multi-reg groups first. */
4319 t = reload_nregs[r2] - reload_nregs[r1];
4320 if (t != 0)
4321 return t;
4322
4323 /* Consider reloads in order of increasing reg-class number. */
4324 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4325 if (t != 0)
4326 return t;
4327
4328 /* If reloads are equally urgent, sort by reload number,
4329 so that the results of qsort leave nothing to chance. */
4330 return r1 - r2;
4331 }
4332 \f
4333 /* The following HARD_REG_SETs indicate when each hard register is
4334 used for a reload of various parts of the current insn. */
4335
4336 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4337 static HARD_REG_SET reload_reg_used;
4338 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4339 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4340 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4341 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4342 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4343 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4344 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4345 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4346 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4347 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4348 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4349 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4350 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4351 static HARD_REG_SET reload_reg_used_in_op_addr;
4352 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4353 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4354 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4355 static HARD_REG_SET reload_reg_used_in_insn;
4356 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4357 static HARD_REG_SET reload_reg_used_in_other_addr;
4358
4359 /* If reg is in use as a reload reg for any sort of reload. */
4360 static HARD_REG_SET reload_reg_used_at_all;
4361
4362 /* If reg is use as an inherited reload. We just mark the first register
4363 in the group. */
4364 static HARD_REG_SET reload_reg_used_for_inherit;
4365
4366 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4367 TYPE. MODE is used to indicate how many consecutive regs are
4368 actually used. */
4369
4370 static void
4371 mark_reload_reg_in_use (regno, opnum, type, mode)
4372 int regno;
4373 int opnum;
4374 enum reload_type type;
4375 enum machine_mode mode;
4376 {
4377 int nregs = HARD_REGNO_NREGS (regno, mode);
4378 int i;
4379
4380 for (i = regno; i < nregs + regno; i++)
4381 {
4382 switch (type)
4383 {
4384 case RELOAD_OTHER:
4385 SET_HARD_REG_BIT (reload_reg_used, i);
4386 break;
4387
4388 case RELOAD_FOR_INPUT_ADDRESS:
4389 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4390 break;
4391
4392 case RELOAD_FOR_INPADDR_ADDRESS:
4393 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4394 break;
4395
4396 case RELOAD_FOR_OUTPUT_ADDRESS:
4397 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4398 break;
4399
4400 case RELOAD_FOR_OUTADDR_ADDRESS:
4401 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4402 break;
4403
4404 case RELOAD_FOR_OPERAND_ADDRESS:
4405 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4406 break;
4407
4408 case RELOAD_FOR_OPADDR_ADDR:
4409 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4410 break;
4411
4412 case RELOAD_FOR_OTHER_ADDRESS:
4413 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4414 break;
4415
4416 case RELOAD_FOR_INPUT:
4417 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4418 break;
4419
4420 case RELOAD_FOR_OUTPUT:
4421 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4422 break;
4423
4424 case RELOAD_FOR_INSN:
4425 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4426 break;
4427 }
4428
4429 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4430 }
4431 }
4432
4433 /* Similarly, but show REGNO is no longer in use for a reload. */
4434
4435 static void
4436 clear_reload_reg_in_use (regno, opnum, type, mode)
4437 int regno;
4438 int opnum;
4439 enum reload_type type;
4440 enum machine_mode mode;
4441 {
4442 int nregs = HARD_REGNO_NREGS (regno, mode);
4443 int i;
4444
4445 for (i = regno; i < nregs + regno; i++)
4446 {
4447 switch (type)
4448 {
4449 case RELOAD_OTHER:
4450 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4451 break;
4452
4453 case RELOAD_FOR_INPUT_ADDRESS:
4454 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4455 break;
4456
4457 case RELOAD_FOR_INPADDR_ADDRESS:
4458 CLEAR_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4459 break;
4460
4461 case RELOAD_FOR_OUTPUT_ADDRESS:
4462 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4463 break;
4464
4465 case RELOAD_FOR_OUTADDR_ADDRESS:
4466 CLEAR_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4467 break;
4468
4469 case RELOAD_FOR_OPERAND_ADDRESS:
4470 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4471 break;
4472
4473 case RELOAD_FOR_OPADDR_ADDR:
4474 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4475 break;
4476
4477 case RELOAD_FOR_OTHER_ADDRESS:
4478 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4479 break;
4480
4481 case RELOAD_FOR_INPUT:
4482 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4483 break;
4484
4485 case RELOAD_FOR_OUTPUT:
4486 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4487 break;
4488
4489 case RELOAD_FOR_INSN:
4490 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4491 break;
4492 }
4493 }
4494 }
4495
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.

   Each reload_type can conflict with only a subset of the
   reload_reg_used_* HARD_REG_SETs maintained for the current insn;
   every case below tests exactly the sets whose reloads could be live
   at the same time as a reload of the given sort.  Keep these cases
   in sync with reloads_conflict, which encodes the same pairwise
   conflict relation symbolically.  */

static int
reload_reg_free_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything.  */
  if (TEST_HARD_REG_BIT (reload_reg_used, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      /* Inputs conflict with anything claimed for the insn body or for
	 operand addresses (and their address reloads).  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Can't use a register if it is used for an input address
	 address for this operand or used as an input in an earlier
	 one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Can't use a register if it is used for an output address
	 address for this operand or used as an output in this or a
	 later operand.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Conflicts with all inputs and with anything already claimed for
	 the insn body or another operand-address reload.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Conflicts with all inputs and with other RELOAD_FOR_OPADDR_ADDR
	 reloads.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      /* Conflicts with every input and output, and with other insn or
	 operand-address reloads.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }
  /* Unknown reload_type.  */
  abort ();
}
4649
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is not in use for a reload in any prior part of the insn.

   "Prior" refers to the fixed order in which the parts of an insn's
   reload sequence are emitted: RELOAD_FOR_OTHER_ADDRESS first, then
   input addresses and inputs in operand order, then operand-address
   reloads and RELOAD_FOR_INSN, then outputs and their addresses.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_free_before_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* These always come first.  */
      return 1;

    case RELOAD_OTHER:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

      /* If this use is for part of the insn,
	 check the reg is not in use for any prior part.  It is tempting
	 to try to do this by falling through from objects that occur
	 later in the insn to ones that occur earlier, but that will not
	 correctly take into account the fact that here we MUST ignore
	 things that would prevent the register from being allocated in
	 the first place, since we know that it was allocated.  */

    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Earlier reloads are for earlier outputs or their addresses,
	 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
	 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
	 RELOAD_OTHER).  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OUTPUT:
      /* This can't be used in the output address for this operand and
	 anything that can't be used for it, except that we've already
	 tested for RELOAD_FOR_INSN objects.  */

      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Earlier reloads include RELOAD_FOR_OPADDR_ADDR reloads.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* ... fall through ... */

    case RELOAD_FOR_OPADDR_ADDR:
    case RELOAD_FOR_INSN:
      /* These can't conflict with inputs, or each other, so all we have to
	 test is input addresses and the addresses of OTHER items.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT:
      /* The only things earlier are the address for this and
	 earlier inputs, other inputs (which we know we don't conflict
	 with), and addresses of RELOAD_OTHER objects.  */

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similarly, all we have to check is for use in earlier inputs'
	 addresses.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }
  /* Unknown reload_type.  */
  abort ();
}
4776
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is still available in REGNO at the end of the insn.

   That is: no reload for a *later* part of the insn's reload sequence
   also uses REGNO, which would clobber the value before the insn ends.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ... */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPADDR_ADDR:
      /* Like RELOAD_FOR_OPERAND_ADDRESS, but the insn body and operand
	 address reloads also come later and must be checked.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      /* Setting OPNUM to -1 makes the shared loop below scan every
	 operand's output addresses.  */
      opnum = -1;

      /* ... fall through ... */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;
    }

  /* Unknown reload_type.  */
  abort ();
}
4908 \f
4909 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4910 Return 0 otherwise.
4911
4912 This function uses the same algorithm as reload_reg_free_p above. */
4913
4914 static int
4915 reloads_conflict (r1, r2)
4916 int r1, r2;
4917 {
4918 enum reload_type r1_type = reload_when_needed[r1];
4919 enum reload_type r2_type = reload_when_needed[r2];
4920 int r1_opnum = reload_opnum[r1];
4921 int r2_opnum = reload_opnum[r2];
4922
4923 /* RELOAD_OTHER conflicts with everything. */
4924 if (r2_type == RELOAD_OTHER)
4925 return 1;
4926
4927 /* Otherwise, check conflicts differently for each type. */
4928
4929 switch (r1_type)
4930 {
4931 case RELOAD_FOR_INPUT:
4932 return (r2_type == RELOAD_FOR_INSN
4933 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4934 || r2_type == RELOAD_FOR_OPADDR_ADDR
4935 || r2_type == RELOAD_FOR_INPUT
4936 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4937 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4938 && r2_opnum > r1_opnum));
4939
4940 case RELOAD_FOR_INPUT_ADDRESS:
4941 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4942 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4943
4944 case RELOAD_FOR_INPADDR_ADDRESS:
4945 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4946 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4947
4948 case RELOAD_FOR_OUTPUT_ADDRESS:
4949 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4950 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4951
4952 case RELOAD_FOR_OUTADDR_ADDRESS:
4953 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4954 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4955
4956 case RELOAD_FOR_OPERAND_ADDRESS:
4957 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4958 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4959
4960 case RELOAD_FOR_OPADDR_ADDR:
4961 return (r2_type == RELOAD_FOR_INPUT
4962 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4963
4964 case RELOAD_FOR_OUTPUT:
4965 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4966 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4967 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
4968 && r2_opnum >= r1_opnum));
4969
4970 case RELOAD_FOR_INSN:
4971 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4972 || r2_type == RELOAD_FOR_INSN
4973 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4974
4975 case RELOAD_FOR_OTHER_ADDRESS:
4976 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4977
4978 case RELOAD_OTHER:
4979 return 1;
4980
4981 default:
4982 abort ();
4983 }
4984 }
4985 \f
/* Vector of reload-numbers showing the order in which the reloads should
   be processed (filled in by choose_reload_regs via qsort).  */
short reload_order[MAX_RELOADS];

/* Indexed by reload number, 1 if incoming value
   inherited from previous insns.  */
char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
rtx reload_inheritance_insn[MAX_RELOADS];

/* If non-zero, this is a place to get the value of the reload,
   rather than using reload_in.  */
rtx reload_override_in[MAX_RELOADS];

/* For each reload, the index in spill_regs of the spill register used,
   or -1 if we did not need one of the spill registers for this reload.  */
int reload_spill_index[MAX_RELOADS];
5005
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is non-zero if this is the last reload for the insn being
   processed.

   Set reload_reg_rtx[R] to the register allocated.

   If NOERROR is nonzero, we return 1 if successful,
   or 0 if we couldn't find a spill reg and we didn't change anything.
   If NOERROR is zero, a failure on a non-asm insn is fatal; on an asm
   insn the reload is disabled instead and 1 is returned.  */

static int
allocate_reload_reg (r, insn, last_reload, noerror)
     int r;
     rtx insn;
     int last_reload;
     int noerror;
{
  int i;
  int pass;
  int count;
  rtx new;
  int regno;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = reload_nregs[r] > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use two passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers.
     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 2; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  Don't do this, however, when we have
	 group needs and failure would be fatal; if we only have a relatively
	 small number of spill registers, and more than one of them has
	 group needs, then by starting in the middle, we may end up
	 allocating the first one in such a way that we are not left with
	 sufficient groups to handle the rest.  */

      if (noerror || ! force_group)
	i = last_spill_reg;
      else
	i = -1;

      /* COUNT doubles as the loop bound and, after the loop, as the
	 success flag: count == n_spills means no register was found.  */
      for (count = 0; count < n_spills; count++)
	{
	  int class = (int) reload_reg_class[r];

	  i = (i + 1) % n_spills;

	  if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
				 reload_when_needed[r])
	      && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
	      && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 spill_regs[i])
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      spill_regs[i]))))
	    {
	      int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.
		 Also, don't use for a group registers that are
		 needed for nongroups.  */
	      if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
		while (nr > 1)
		  {
		    regno = spill_regs[i] + nr - 1;
		    if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
			  && spill_reg_order[regno] >= 0
			  && reload_reg_free_p (regno, reload_opnum[r],
						reload_when_needed[r])
			  && ! TEST_HARD_REG_BIT (counted_for_nongroups,
						  regno)))
		      break;
		    nr--;
		  }
	      /* NR counts down to 1 only if every reg in the group
		 checked out.  */
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on pass 1, omit pass 2.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count == n_spills)
    {
      if (noerror)
	return 0;
      goto failure;
    }

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  new = spill_reg_rtx[i];

  if (new == 0 || GET_MODE (new) != reload_mode[r])
    spill_reg_rtx[i] = new
      = gen_rtx (REG, reload_mode[r], spill_regs[i]);

  regno = true_regnum (new);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
    {
      enum machine_mode test_mode = VOIDmode;
      if (reload_in[r])
	test_mode = GET_MODE (reload_in[r]);
      /* If reload_in[r] has VOIDmode, it means we will load it
	 in whatever mode the reload reg has: to wit, reload_mode[r].
	 We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
	 to reload from or into have modes which are valid for this
	 reload register.  Otherwise the reload insns would be invalid.  */
      if (! (reload_in[r] != 0 && test_mode != VOIDmode
	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
	if (! (reload_out[r] != 0
	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
	  {
	    /* The reg is OK.  */
	    last_spill_reg = i;

	    /* Mark as in use for this insn the reload regs we use
	       for this.  */
	    mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
				    reload_when_needed[r], reload_mode[r]);

	    reload_reg_rtx[r] = new;
	    reload_spill_index[r] = i;
	    return 1;
	  }
    }

  /* The reg is not OK.  */
  if (noerror)
    return 0;

 failure:
  if (asm_noperands (PATTERN (insn)) < 0)
    /* It's the compiler's fault.  */
    fatal_insn ("Could not find a spill register", insn);

  /* It's the user's fault; the operand's mode and constraint
     don't match.  Disable this reload so we don't crash in final.  */
  error_for_asm (insn,
		 "`asm' operand constraint incompatible with operand size");
  reload_in[r] = 0;
  reload_out[r] = 0;
  reload_reg_rtx[r] = 0;
  reload_optional[r] = 1;
  reload_secondary_p[r] = 1;

  return 1;
}
5204 \f
5205 /* Assign hard reg targets for the pseudo-registers we must reload
5206 into hard regs for this insn.
5207 Also output the instructions to copy them in and out of the hard regs.
5208
5209 For machines with register classes, we are responsible for
5210 finding a reload reg in the proper class. */
5211
5212 static void
5213 choose_reload_regs (insn, avoid_return_reg)
5214 rtx insn;
5215 rtx avoid_return_reg;
5216 {
5217 register int i, j;
5218 int max_group_size = 1;
5219 enum reg_class group_class = NO_REGS;
5220 int inheritance;
5221
5222 rtx save_reload_reg_rtx[MAX_RELOADS];
5223 char save_reload_inherited[MAX_RELOADS];
5224 rtx save_reload_inheritance_insn[MAX_RELOADS];
5225 rtx save_reload_override_in[MAX_RELOADS];
5226 int save_reload_spill_index[MAX_RELOADS];
5227 HARD_REG_SET save_reload_reg_used;
5228 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
5229 HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
5230 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
5231 HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
5232 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5233 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
5234 HARD_REG_SET save_reload_reg_used_in_op_addr;
5235 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
5236 HARD_REG_SET save_reload_reg_used_in_insn;
5237 HARD_REG_SET save_reload_reg_used_in_other_addr;
5238 HARD_REG_SET save_reload_reg_used_at_all;
5239
5240 bzero (reload_inherited, MAX_RELOADS);
5241 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
5242 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
5243
5244 CLEAR_HARD_REG_SET (reload_reg_used);
5245 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5246 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5247 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5248 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5249 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5250
5251 for (i = 0; i < reload_n_operands; i++)
5252 {
5253 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5254 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5255 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5256 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5257 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5258 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5259 }
5260
5261 #ifdef SMALL_REGISTER_CLASSES
5262 /* Don't bother with avoiding the return reg
5263 if we have no mandatory reload that could use it. */
5264 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5265 {
5266 int do_avoid = 0;
5267 int regno = REGNO (avoid_return_reg);
5268 int nregs
5269 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5270 int r;
5271
5272 for (r = regno; r < regno + nregs; r++)
5273 if (spill_reg_order[r] >= 0)
5274 for (j = 0; j < n_reloads; j++)
5275 if (!reload_optional[j] && reload_reg_rtx[j] == 0
5276 && (reload_in[j] != 0 || reload_out[j] != 0
5277 || reload_secondary_p[j])
5278 &&
5279 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
5280 do_avoid = 1;
5281 if (!do_avoid)
5282 avoid_return_reg = 0;
5283 }
5284 #endif /* SMALL_REGISTER_CLASSES */
5285
5286 #if 0 /* Not needed, now that we can always retry without inheritance. */
5287 /* See if we have more mandatory reloads than spill regs.
5288 If so, then we cannot risk optimizations that could prevent
5289 reloads from sharing one spill register.
5290
5291 Since we will try finding a better register than reload_reg_rtx
5292 unless it is equal to reload_in or reload_out, count such reloads. */
5293
5294 {
5295 int tem = 0;
5296 #ifdef SMALL_REGISTER_CLASSES
5297 if (SMALL_REGISTER_CLASSES)
5298 tem = (avoid_return_reg != 0);
5299 #endif
5300 for (j = 0; j < n_reloads; j++)
5301 if (! reload_optional[j]
5302 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5303 && (reload_reg_rtx[j] == 0
5304 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5305 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5306 tem++;
5307 if (tem > n_spills)
5308 must_reuse = 1;
5309 }
5310 #endif
5311
5312 #ifdef SMALL_REGISTER_CLASSES
5313 /* Don't use the subroutine call return reg for a reload
5314 if we are supposed to avoid it. */
5315 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5316 {
5317 int regno = REGNO (avoid_return_reg);
5318 int nregs
5319 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5320 int r;
5321
5322 for (r = regno; r < regno + nregs; r++)
5323 if (spill_reg_order[r] >= 0)
5324 SET_HARD_REG_BIT (reload_reg_used, r);
5325 }
5326 #endif /* SMALL_REGISTER_CLASSES */
5327
5328 /* In order to be certain of getting the registers we need,
5329 we must sort the reloads into order of increasing register class.
5330 Then our grabbing of reload registers will parallel the process
5331 that provided the reload registers.
5332
5333 Also note whether any of the reloads wants a consecutive group of regs.
5334 If so, record the maximum size of the group desired and what
5335 register class contains all the groups needed by this insn. */
5336
5337 for (j = 0; j < n_reloads; j++)
5338 {
5339 reload_order[j] = j;
5340 reload_spill_index[j] = -1;
5341
5342 reload_mode[j]
5343 = (reload_inmode[j] == VOIDmode
5344 || (GET_MODE_SIZE (reload_outmode[j])
5345 > GET_MODE_SIZE (reload_inmode[j])))
5346 ? reload_outmode[j] : reload_inmode[j];
5347
5348 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5349
5350 if (reload_nregs[j] > 1)
5351 {
5352 max_group_size = MAX (reload_nregs[j], max_group_size);
5353 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5354 }
5355
5356 /* If we have already decided to use a certain register,
5357 don't use it in another way. */
5358 if (reload_reg_rtx[j])
5359 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5360 reload_when_needed[j], reload_mode[j]);
5361 }
5362
5363 if (n_reloads > 1)
5364 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5365
5366 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5367 sizeof reload_reg_rtx);
5368 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5369 bcopy ((char *) reload_inheritance_insn,
5370 (char *) save_reload_inheritance_insn,
5371 sizeof reload_inheritance_insn);
5372 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5373 sizeof reload_override_in);
5374 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5375 sizeof reload_spill_index);
5376 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5377 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5378 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5379 reload_reg_used_in_op_addr);
5380
5381 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5382 reload_reg_used_in_op_addr_reload);
5383
5384 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5385 reload_reg_used_in_insn);
5386 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5387 reload_reg_used_in_other_addr);
5388
5389 for (i = 0; i < reload_n_operands; i++)
5390 {
5391 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5392 reload_reg_used_in_output[i]);
5393 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5394 reload_reg_used_in_input[i]);
5395 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5396 reload_reg_used_in_input_addr[i]);
5397 COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
5398 reload_reg_used_in_inpaddr_addr[i]);
5399 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5400 reload_reg_used_in_output_addr[i]);
5401 COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
5402 reload_reg_used_in_outaddr_addr[i]);
5403 }
5404
5405 /* If -O, try first with inheritance, then turning it off.
5406 If not -O, don't do inheritance.
5407 Using inheritance when not optimizing leads to paradoxes
5408 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5409 because one side of the comparison might be inherited. */
5410
5411 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5412 {
5413 /* Process the reloads in order of preference just found.
5414 Beyond this point, subregs can be found in reload_reg_rtx.
5415
5416 This used to look for an existing reloaded home for all
5417 of the reloads, and only then perform any new reloads.
5418 But that could lose if the reloads were done out of reg-class order
5419 because a later reload with a looser constraint might have an old
5420 home in a register needed by an earlier reload with a tighter constraint.
5421
5422 To solve this, we make two passes over the reloads, in the order
5423 described above. In the first pass we try to inherit a reload
5424 from a previous insn. If there is a later reload that needs a
5425 class that is a proper subset of the class being processed, we must
5426 also allocate a spill register during the first pass.
5427
5428 Then make a second pass over the reloads to allocate any reloads
5429 that haven't been given registers yet. */
5430
5431 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5432
5433 for (j = 0; j < n_reloads; j++)
5434 {
5435 register int r = reload_order[j];
5436
5437 /* Ignore reloads that got marked inoperative. */
5438 if (reload_out[r] == 0 && reload_in[r] == 0
5439 && ! reload_secondary_p[r])
5440 continue;
5441
5442 /* If find_reloads chose a to use reload_in or reload_out as a reload
5443 register, we don't need to chose one. Otherwise, try even if it
5444 found one since we might save an insn if we find the value lying
5445 around. */
5446 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5447 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5448 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5449 continue;
5450
5451 #if 0 /* No longer needed for correct operation.
5452 It might give better code, or might not; worth an experiment? */
5453 /* If this is an optional reload, we can't inherit from earlier insns
5454 until we are sure that any non-optional reloads have been allocated.
5455 The following code takes advantage of the fact that optional reloads
5456 are at the end of reload_order. */
5457 if (reload_optional[r] != 0)
5458 for (i = 0; i < j; i++)
5459 if ((reload_out[reload_order[i]] != 0
5460 || reload_in[reload_order[i]] != 0
5461 || reload_secondary_p[reload_order[i]])
5462 && ! reload_optional[reload_order[i]]
5463 && reload_reg_rtx[reload_order[i]] == 0)
5464 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5465 #endif
5466
5467 /* First see if this pseudo is already available as reloaded
5468 for a previous insn. We cannot try to inherit for reloads
5469 that are smaller than the maximum number of registers needed
5470 for groups unless the register we would allocate cannot be used
5471 for the groups.
5472
5473 We could check here to see if this is a secondary reload for
5474 an object that is already in a register of the desired class.
5475 This would avoid the need for the secondary reload register.
5476 But this is complex because we can't easily determine what
5477 objects might want to be loaded via this reload. So let a
5478 register be allocated here. In `emit_reload_insns' we suppress
5479 one of the loads in the case described above. */
5480
5481 if (inheritance)
5482 {
5483 register int regno = -1;
5484 enum machine_mode mode;
5485
5486 if (reload_in[r] == 0)
5487 ;
5488 else if (GET_CODE (reload_in[r]) == REG)
5489 {
5490 regno = REGNO (reload_in[r]);
5491 mode = GET_MODE (reload_in[r]);
5492 }
5493 else if (GET_CODE (reload_in_reg[r]) == REG)
5494 {
5495 regno = REGNO (reload_in_reg[r]);
5496 mode = GET_MODE (reload_in_reg[r]);
5497 }
5498 #if 0
5499 /* This won't work, since REGNO can be a pseudo reg number.
5500 Also, it takes much more hair to keep track of all the things
5501 that can invalidate an inherited reload of part of a pseudoreg. */
5502 else if (GET_CODE (reload_in[r]) == SUBREG
5503 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5504 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5505 #endif
5506
5507 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5508 {
5509 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5510
5511 if (reg_reloaded_contents[i] == regno
5512 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5513 >= GET_MODE_SIZE (mode))
5514 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5515 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5516 spill_regs[i])
5517 && (reload_nregs[r] == max_group_size
5518 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5519 spill_regs[i]))
5520 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5521 reload_when_needed[r])
5522 && reload_reg_free_before_p (spill_regs[i],
5523 reload_opnum[r],
5524 reload_when_needed[r]))
5525 {
5526 /* If a group is needed, verify that all the subsequent
5527 registers still have their values intact. */
5528 int nr
5529 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5530 int k;
5531
5532 for (k = 1; k < nr; k++)
5533 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5534 != regno)
5535 break;
5536
5537 if (k == nr)
5538 {
5539 int i1;
5540
5541 /* We found a register that contains the
5542 value we need. If this register is the
5543 same as an `earlyclobber' operand of the
5544 current insn, just mark it as a place to
5545 reload from since we can't use it as the
5546 reload register itself. */
5547
5548 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5549 if (reg_overlap_mentioned_for_reload_p
5550 (reg_last_reload_reg[regno],
5551 reload_earlyclobbers[i1]))
5552 break;
5553
5554 if (i1 != n_earlyclobbers
5555 /* Don't really use the inherited spill reg
5556 if we need it wider than we've got it. */
5557 || (GET_MODE_SIZE (reload_mode[r])
5558 > GET_MODE_SIZE (mode)))
5559 reload_override_in[r] = reg_last_reload_reg[regno];
5560 else
5561 {
5562 int k;
5563 /* We can use this as a reload reg. */
5564 /* Mark the register as in use for this part of
5565 the insn. */
5566 mark_reload_reg_in_use (spill_regs[i],
5567 reload_opnum[r],
5568 reload_when_needed[r],
5569 reload_mode[r]);
5570 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5571 reload_inherited[r] = 1;
5572 reload_inheritance_insn[r]
5573 = reg_reloaded_insn[i];
5574 reload_spill_index[r] = i;
5575 for (k = 0; k < nr; k++)
5576 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5577 spill_regs[i + k]);
5578 }
5579 }
5580 }
5581 }
5582 }
5583
5584 /* Here's another way to see if the value is already lying around. */
5585 if (inheritance
5586 && reload_in[r] != 0
5587 && ! reload_inherited[r]
5588 && reload_out[r] == 0
5589 && (CONSTANT_P (reload_in[r])
5590 || GET_CODE (reload_in[r]) == PLUS
5591 || GET_CODE (reload_in[r]) == REG
5592 || GET_CODE (reload_in[r]) == MEM)
5593 && (reload_nregs[r] == max_group_size
5594 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5595 {
5596 register rtx equiv
5597 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5598 -1, NULL_PTR, 0, reload_mode[r]);
5599 int regno;
5600
5601 if (equiv != 0)
5602 {
5603 if (GET_CODE (equiv) == REG)
5604 regno = REGNO (equiv);
5605 else if (GET_CODE (equiv) == SUBREG)
5606 {
5607 /* This must be a SUBREG of a hard register.
5608 Make a new REG since this might be used in an
5609 address and not all machines support SUBREGs
5610 there. */
5611 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5612 equiv = gen_rtx (REG, reload_mode[r], regno);
5613 }
5614 else
5615 abort ();
5616 }
5617
5618 /* If we found a spill reg, reject it unless it is free
5619 and of the desired class. */
5620 if (equiv != 0
5621 && ((spill_reg_order[regno] >= 0
5622 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5623 reload_when_needed[r]))
5624 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5625 regno)))
5626 equiv = 0;
5627
5628 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5629 equiv = 0;
5630
5631 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5632 equiv = 0;
5633
5634 /* We found a register that contains the value we need.
5635 If this register is the same as an `earlyclobber' operand
5636 of the current insn, just mark it as a place to reload from
5637 since we can't use it as the reload register itself. */
5638
5639 if (equiv != 0)
5640 for (i = 0; i < n_earlyclobbers; i++)
5641 if (reg_overlap_mentioned_for_reload_p (equiv,
5642 reload_earlyclobbers[i]))
5643 {
5644 reload_override_in[r] = equiv;
5645 equiv = 0;
5646 break;
5647 }
5648
5649 /* JRV: If the equiv register we have found is
5650 explicitly clobbered in the current insn, mark but
5651 don't use, as above. */
5652
5653 if (equiv != 0 && regno_clobbered_p (regno, insn))
5654 {
5655 reload_override_in[r] = equiv;
5656 equiv = 0;
5657 }
5658
5659 /* If we found an equivalent reg, say no code need be generated
5660 to load it, and use it as our reload reg. */
5661 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5662 {
5663 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5664 int k;
5665 reload_reg_rtx[r] = equiv;
5666 reload_inherited[r] = 1;
5667
5668 /* If any of the hard registers in EQUIV are spill
5669 registers, mark them as in use for this insn. */
5670 for (k = 0; k < nr; k++)
5671 {
5672 i = spill_reg_order[regno + k];
5673 if (i >= 0)
5674 {
5675 mark_reload_reg_in_use (regno, reload_opnum[r],
5676 reload_when_needed[r],
5677 reload_mode[r]);
5678 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5679 regno + k);
5680 }
5681 }
5682 }
5683 }
5684
5685 /* If we found a register to use already, or if this is an optional
5686 reload, we are done. */
5687 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5688 continue;
5689
5690 #if 0 /* No longer needed for correct operation. Might or might not
5691 give better code on the average. Want to experiment? */
5692
5693 /* See if there is a later reload that has a class different from our
5694 class that intersects our class or that requires less register
5695 than our reload. If so, we must allocate a register to this
5696 reload now, since that reload might inherit a previous reload
5697 and take the only available register in our class. Don't do this
5698 for optional reloads since they will force all previous reloads
5699 to be allocated. Also don't do this for reloads that have been
5700 turned off. */
5701
5702 for (i = j + 1; i < n_reloads; i++)
5703 {
5704 int s = reload_order[i];
5705
5706 if ((reload_in[s] == 0 && reload_out[s] == 0
5707 && ! reload_secondary_p[s])
5708 || reload_optional[s])
5709 continue;
5710
5711 if ((reload_reg_class[s] != reload_reg_class[r]
5712 && reg_classes_intersect_p (reload_reg_class[r],
5713 reload_reg_class[s]))
5714 || reload_nregs[s] < reload_nregs[r])
5715 break;
5716 }
5717
5718 if (i == n_reloads)
5719 continue;
5720
5721 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5722 #endif
5723 }
5724
5725 /* Now allocate reload registers for anything non-optional that
5726 didn't get one yet. */
5727 for (j = 0; j < n_reloads; j++)
5728 {
5729 register int r = reload_order[j];
5730
5731 /* Ignore reloads that got marked inoperative. */
5732 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5733 continue;
5734
5735 /* Skip reloads that already have a register allocated or are
5736 optional. */
5737 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5738 continue;
5739
5740 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5741 break;
5742 }
5743
5744 /* If that loop got all the way, we have won. */
5745 if (j == n_reloads)
5746 break;
5747
5748 fail:
5749 /* Loop around and try without any inheritance. */
5750 /* First undo everything done by the failed attempt
5751 to allocate with inheritance. */
5752 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5753 sizeof reload_reg_rtx);
5754 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5755 sizeof reload_inherited);
5756 bcopy ((char *) save_reload_inheritance_insn,
5757 (char *) reload_inheritance_insn,
5758 sizeof reload_inheritance_insn);
5759 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5760 sizeof reload_override_in);
5761 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5762 sizeof reload_spill_index);
5763 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5764 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5765 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5766 save_reload_reg_used_in_op_addr);
5767 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5768 save_reload_reg_used_in_op_addr_reload);
5769 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5770 save_reload_reg_used_in_insn);
5771 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5772 save_reload_reg_used_in_other_addr);
5773
5774 for (i = 0; i < reload_n_operands; i++)
5775 {
5776 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5777 save_reload_reg_used_in_input[i]);
5778 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5779 save_reload_reg_used_in_output[i]);
5780 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5781 save_reload_reg_used_in_input_addr[i]);
5782 COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
5783 save_reload_reg_used_in_inpaddr_addr[i]);
5784 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5785 save_reload_reg_used_in_output_addr[i]);
5786 COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
5787 save_reload_reg_used_in_outaddr_addr[i]);
5788 }
5789 }
5790
5791 /* If we thought we could inherit a reload, because it seemed that
5792 nothing else wanted the same reload register earlier in the insn,
5793 verify that assumption, now that all reloads have been assigned. */
5794
5795 for (j = 0; j < n_reloads; j++)
5796 {
5797 register int r = reload_order[j];
5798
5799 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5800 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5801 reload_opnum[r],
5802 reload_when_needed[r]))
5803 reload_inherited[r] = 0;
5804
5805 /* If we found a better place to reload from,
5806 validate it in the same fashion, if it is a reload reg. */
5807 if (reload_override_in[r]
5808 && (GET_CODE (reload_override_in[r]) == REG
5809 || GET_CODE (reload_override_in[r]) == SUBREG))
5810 {
5811 int regno = true_regnum (reload_override_in[r]);
5812 if (spill_reg_order[regno] >= 0
5813 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5814 reload_when_needed[r]))
5815 reload_override_in[r] = 0;
5816 }
5817 }
5818
5819 /* Now that reload_override_in is known valid,
5820 actually override reload_in. */
5821 for (j = 0; j < n_reloads; j++)
5822 if (reload_override_in[j])
5823 reload_in[j] = reload_override_in[j];
5824
5825 /* If this reload won't be done because it has been cancelled or is
5826 optional and not inherited, clear reload_reg_rtx so other
5827 routines (such as subst_reloads) don't get confused. */
5828 for (j = 0; j < n_reloads; j++)
5829 if (reload_reg_rtx[j] != 0
5830 && ((reload_optional[j] && ! reload_inherited[j])
5831 || (reload_in[j] == 0 && reload_out[j] == 0
5832 && ! reload_secondary_p[j])))
5833 {
5834 int regno = true_regnum (reload_reg_rtx[j]);
5835
5836 if (spill_reg_order[regno] >= 0)
5837 clear_reload_reg_in_use (regno, reload_opnum[j],
5838 reload_when_needed[j], reload_mode[j]);
5839 reload_reg_rtx[j] = 0;
5840 }
5841
5842 /* Record which pseudos and which spill regs have output reloads. */
5843 for (j = 0; j < n_reloads; j++)
5844 {
5845 register int r = reload_order[j];
5846
5847 i = reload_spill_index[r];
5848
5849 /* I is nonneg if this reload used one of the spill regs.
5850 If reload_reg_rtx[r] is 0, this is an optional reload
5851 that we opted to ignore. */
5852 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5853 && reload_reg_rtx[r] != 0)
5854 {
5855 register int nregno = REGNO (reload_out[r]);
5856 int nr = 1;
5857
5858 if (nregno < FIRST_PSEUDO_REGISTER)
5859 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5860
5861 while (--nr >= 0)
5862 reg_has_output_reload[nregno + nr] = 1;
5863
5864 if (i >= 0)
5865 {
5866 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5867 while (--nr >= 0)
5868 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5869 }
5870
5871 if (reload_when_needed[r] != RELOAD_OTHER
5872 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5873 && reload_when_needed[r] != RELOAD_FOR_INSN)
5874 abort ();
5875 }
5876 }
5877 }
5878 \f
5879 /* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5880 reloads of the same item for fear that we might not have enough reload
5881 registers. However, normally they will get the same reload register
5882 and hence actually need not be loaded twice.
5883
5884 Here we check for the most common case of this phenomenon: when we have
5885 a number of reloads for the same object, each of which were allocated
5886 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5887 reload, and is not modified in the insn itself. If we find such,
5888 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5889 This will not increase the number of spill registers needed and will
5890 prevent redundant code. */
5891
5892 #ifdef SMALL_REGISTER_CLASSES
5893
5894 static void
5895 merge_assigned_reloads (insn)
5896 rtx insn;
5897 {
5898 int i, j;
5899
5900 /* Scan all the reloads looking for ones that only load values and
5901 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5902 assigned and not modified by INSN. */
5903
5904 for (i = 0; i < n_reloads; i++)
5905 {
5906 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5907 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5908 || reg_set_p (reload_reg_rtx[i], insn))
5909 continue;
5910
5911 /* Look at all other reloads. Ensure that the only use of this
5912 reload_reg_rtx is in a reload that just loads the same value
5913 as we do. Note that any secondary reloads must be of the identical
5914 class since the values, modes, and result registers are the
5915 same, so we need not do anything with any secondary reloads. */
5916
5917 for (j = 0; j < n_reloads; j++)
5918 {
5919 if (i == j || reload_reg_rtx[j] == 0
5920 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5921 reload_reg_rtx[i]))
5922 continue;
5923
5924 /* If the reload regs aren't exactly the same (e.g, different modes)
5925 or if the values are different, we can't merge anything with this
5926 reload register. */
5927
5928 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5929 || reload_out[j] != 0 || reload_in[j] == 0
5930 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5931 break;
5932 }
5933
5934 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5935 we, in fact, found any matching reloads. */
5936
5937 if (j == n_reloads)
5938 {
5939 for (j = 0; j < n_reloads; j++)
5940 if (i != j && reload_reg_rtx[j] != 0
5941 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5942 {
5943 reload_when_needed[i] = RELOAD_OTHER;
5944 reload_in[j] = 0;
5945 transfer_replacements (i, j);
5946 }
5947
5948 /* If this is now RELOAD_OTHER, look for any reloads that load
5949 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5950 if they were for inputs, RELOAD_OTHER for outputs. Note that
5951 this test is equivalent to looking for reloads for this operand
5952 number. */
5953
5954 if (reload_when_needed[i] == RELOAD_OTHER)
5955 for (j = 0; j < n_reloads; j++)
5956 if (reload_in[j] != 0
5957 && reload_when_needed[i] != RELOAD_OTHER
5958 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5959 reload_in[i]))
5960 reload_when_needed[j]
5961 = ((reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5962 || reload_when_needed[i] == RELOAD_FOR_INPADDR_ADDRESS)
5963 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
5964 }
5965 }
5966 }
#endif /* SMALL_REGISTER_CLASSES */
5968 \f
5969 /* Output insns to reload values in and out of the chosen reload regs. */
5970
5971 static void
5972 emit_reload_insns (insn)
5973 rtx insn;
5974 {
5975 register int j;
5976 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5977 rtx other_input_address_reload_insns = 0;
5978 rtx other_input_reload_insns = 0;
5979 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5980 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
5981 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5982 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5983 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
5984 rtx operand_reload_insns = 0;
5985 rtx other_operand_reload_insns = 0;
5986 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
5987 rtx following_insn = NEXT_INSN (insn);
5988 rtx before_insn = insn;
5989 int special;
5990 /* Values to be put in spill_reg_store are put here first. */
5991 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5992
5993 for (j = 0; j < reload_n_operands; j++)
5994 input_reload_insns[j] = input_address_reload_insns[j]
5995 = inpaddr_address_reload_insns[j]
5996 = output_reload_insns[j] = output_address_reload_insns[j]
5997 = outaddr_address_reload_insns[j]
5998 = other_output_reload_insns[j] = 0;
5999
6000 /* Now output the instructions to copy the data into and out of the
6001 reload registers. Do these in the order that the reloads were reported,
6002 since reloads of base and index registers precede reloads of operands
6003 and the operands may need the base and index registers reloaded. */
6004
6005 for (j = 0; j < n_reloads; j++)
6006 {
6007 register rtx old;
6008 rtx oldequiv_reg = 0;
6009 rtx this_reload_insn = 0;
6010
6011 if (reload_spill_index[j] >= 0)
6012 new_spill_reg_store[reload_spill_index[j]] = 0;
6013
6014 old = reload_in[j];
6015 if (old != 0 && ! reload_inherited[j]
6016 && ! rtx_equal_p (reload_reg_rtx[j], old)
6017 && reload_reg_rtx[j] != 0)
6018 {
6019 register rtx reloadreg = reload_reg_rtx[j];
6020 rtx oldequiv = 0;
6021 enum machine_mode mode;
6022 rtx *where;
6023
6024 /* Determine the mode to reload in.
6025 This is very tricky because we have three to choose from.
6026 There is the mode the insn operand wants (reload_inmode[J]).
6027 There is the mode of the reload register RELOADREG.
6028 There is the intrinsic mode of the operand, which we could find
6029 by stripping some SUBREGs.
6030 It turns out that RELOADREG's mode is irrelevant:
6031 we can change that arbitrarily.
6032
6033 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6034 then the reload reg may not support QImode moves, so use SImode.
6035 If foo is in memory due to spilling a pseudo reg, this is safe,
6036 because the QImode value is in the least significant part of a
6037 slot big enough for a SImode. If foo is some other sort of
6038 memory reference, then it is impossible to reload this case,
6039 so previous passes had better make sure this never happens.
6040
6041 Then consider a one-word union which has SImode and one of its
6042 members is a float, being fetched as (SUBREG:SF union:SI).
6043 We must fetch that as SFmode because we could be loading into
6044 a float-only register. In this case OLD's mode is correct.
6045
6046 Consider an immediate integer: it has VOIDmode. Here we need
6047 to get a mode from something else.
6048
6049 In some cases, there is a fourth mode, the operand's
6050 containing mode. If the insn specifies a containing mode for
6051 this operand, it overrides all others.
6052
6053 I am not sure whether the algorithm here is always right,
6054 but it does the right things in those cases. */
6055
6056 mode = GET_MODE (old);
6057 if (mode == VOIDmode)
6058 mode = reload_inmode[j];
6059
6060 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6061 /* If we need a secondary register for this operation, see if
6062 the value is already in a register in that class. Don't
6063 do this if the secondary register will be used as a scratch
6064 register. */
6065
6066 if (reload_secondary_in_reload[j] >= 0
6067 && reload_secondary_in_icode[j] == CODE_FOR_nothing
6068 && optimize)
6069 oldequiv
6070 = find_equiv_reg (old, insn,
6071 reload_reg_class[reload_secondary_in_reload[j]],
6072 -1, NULL_PTR, 0, mode);
6073 #endif
6074
6075 /* If reloading from memory, see if there is a register
6076 that already holds the same value. If so, reload from there.
6077 We can pass 0 as the reload_reg_p argument because
6078 any other reload has either already been emitted,
6079 in which case find_equiv_reg will see the reload-insn,
6080 or has yet to be emitted, in which case it doesn't matter
6081 because we will use this equiv reg right away. */
6082
6083 if (oldequiv == 0 && optimize
6084 && (GET_CODE (old) == MEM
6085 || (GET_CODE (old) == REG
6086 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6087 && reg_renumber[REGNO (old)] < 0)))
6088 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
6089 -1, NULL_PTR, 0, mode);
6090
6091 if (oldequiv)
6092 {
6093 int regno = true_regnum (oldequiv);
6094
6095 /* If OLDEQUIV is a spill register, don't use it for this
6096 if any other reload needs it at an earlier stage of this insn
6097 or at this stage. */
6098 if (spill_reg_order[regno] >= 0
6099 && (! reload_reg_free_p (regno, reload_opnum[j],
6100 reload_when_needed[j])
6101 || ! reload_reg_free_before_p (regno, reload_opnum[j],
6102 reload_when_needed[j])))
6103 oldequiv = 0;
6104
6105 /* If OLDEQUIV is not a spill register,
6106 don't use it if any other reload wants it. */
6107 if (spill_reg_order[regno] < 0)
6108 {
6109 int k;
6110 for (k = 0; k < n_reloads; k++)
6111 if (reload_reg_rtx[k] != 0 && k != j
6112 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
6113 oldequiv))
6114 {
6115 oldequiv = 0;
6116 break;
6117 }
6118 }
6119
6120 /* If it is no cheaper to copy from OLDEQUIV into the
6121 reload register than it would be to move from memory,
6122 don't use it. Likewise, if we need a secondary register
6123 or memory. */
6124
6125 if (oldequiv != 0
6126 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
6127 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
6128 reload_reg_class[j])
6129 >= MEMORY_MOVE_COST (mode)))
6130 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6131 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6132 mode, oldequiv)
6133 != NO_REGS)
6134 #endif
6135 #ifdef SECONDARY_MEMORY_NEEDED
6136 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
6137 REGNO_REG_CLASS (regno),
6138 mode)
6139 #endif
6140 ))
6141 oldequiv = 0;
6142 }
6143
6144 if (oldequiv == 0)
6145 oldequiv = old;
6146 else if (GET_CODE (oldequiv) == REG)
6147 oldequiv_reg = oldequiv;
6148 else if (GET_CODE (oldequiv) == SUBREG)
6149 oldequiv_reg = SUBREG_REG (oldequiv);
6150
6151 /* If we are reloading from a register that was recently stored in
6152 with an output-reload, see if we can prove there was
6153 actually no need to store the old value in it. */
6154
6155 if (optimize && GET_CODE (oldequiv) == REG
6156 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6157 && spill_reg_order[REGNO (oldequiv)] >= 0
6158 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
6159 && find_reg_note (insn, REG_DEAD, reload_in[j])
6160 /* This is unsafe if operand occurs more than once in current
6161 insn. Perhaps some occurrences weren't reloaded. */
6162 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6163 delete_output_reload
6164 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
6165
6166 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6167 then load RELOADREG from OLDEQUIV. Note that we cannot use
6168 gen_lowpart_common since it can do the wrong thing when
6169 RELOADREG has a multi-word mode. Note that RELOADREG
6170 must always be a REG here. */
6171
6172 if (GET_MODE (reloadreg) != mode)
6173 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6174 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6175 oldequiv = SUBREG_REG (oldequiv);
6176 if (GET_MODE (oldequiv) != VOIDmode
6177 && mode != GET_MODE (oldequiv))
6178 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
6179
6180 /* Switch to the right place to emit the reload insns. */
6181 switch (reload_when_needed[j])
6182 {
6183 case RELOAD_OTHER:
6184 where = &other_input_reload_insns;
6185 break;
6186 case RELOAD_FOR_INPUT:
6187 where = &input_reload_insns[reload_opnum[j]];
6188 break;
6189 case RELOAD_FOR_INPUT_ADDRESS:
6190 where = &input_address_reload_insns[reload_opnum[j]];
6191 break;
6192 case RELOAD_FOR_INPADDR_ADDRESS:
6193 where = &inpaddr_address_reload_insns[reload_opnum[j]];
6194 break;
6195 case RELOAD_FOR_OUTPUT_ADDRESS:
6196 where = &output_address_reload_insns[reload_opnum[j]];
6197 break;
6198 case RELOAD_FOR_OUTADDR_ADDRESS:
6199 where = &outaddr_address_reload_insns[reload_opnum[j]];
6200 break;
6201 case RELOAD_FOR_OPERAND_ADDRESS:
6202 where = &operand_reload_insns;
6203 break;
6204 case RELOAD_FOR_OPADDR_ADDR:
6205 where = &other_operand_reload_insns;
6206 break;
6207 case RELOAD_FOR_OTHER_ADDRESS:
6208 where = &other_input_address_reload_insns;
6209 break;
6210 default:
6211 abort ();
6212 }
6213
6214 push_to_sequence (*where);
6215 special = 0;
6216
6217 /* Auto-increment addresses must be reloaded in a special way. */
6218 if (GET_CODE (oldequiv) == POST_INC
6219 || GET_CODE (oldequiv) == POST_DEC
6220 || GET_CODE (oldequiv) == PRE_INC
6221 || GET_CODE (oldequiv) == PRE_DEC)
6222 {
6223 /* We are not going to bother supporting the case where a
6224 incremented register can't be copied directly from
6225 OLDEQUIV since this seems highly unlikely. */
6226 if (reload_secondary_in_reload[j] >= 0)
6227 abort ();
6228 /* Prevent normal processing of this reload. */
6229 special = 1;
6230 /* Output a special code sequence for this case. */
6231 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
6232 }
6233
6234 /* If we are reloading a pseudo-register that was set by the previous
6235 insn, see if we can get rid of that pseudo-register entirely
6236 by redirecting the previous insn into our reload register. */
6237
6238 else if (optimize && GET_CODE (old) == REG
6239 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6240 && dead_or_set_p (insn, old)
6241 /* This is unsafe if some other reload
6242 uses the same reg first. */
6243 && reload_reg_free_before_p (REGNO (reloadreg),
6244 reload_opnum[j],
6245 reload_when_needed[j]))
6246 {
6247 rtx temp = PREV_INSN (insn);
6248 while (temp && GET_CODE (temp) == NOTE)
6249 temp = PREV_INSN (temp);
6250 if (temp
6251 && GET_CODE (temp) == INSN
6252 && GET_CODE (PATTERN (temp)) == SET
6253 && SET_DEST (PATTERN (temp)) == old
6254 /* Make sure we can access insn_operand_constraint. */
6255 && asm_noperands (PATTERN (temp)) < 0
6256 /* This is unsafe if prev insn rejects our reload reg. */
6257 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
6258 reloadreg)
6259 /* This is unsafe if operand occurs more than once in current
6260 insn. Perhaps some occurrences aren't reloaded. */
6261 && count_occurrences (PATTERN (insn), old) == 1
6262 /* Don't risk splitting a matching pair of operands. */
6263 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6264 {
6265 /* Store into the reload register instead of the pseudo. */
6266 SET_DEST (PATTERN (temp)) = reloadreg;
6267 /* If these are the only uses of the pseudo reg,
6268 pretend for GDB it lives in the reload reg we used. */
6269 if (REG_N_DEATHS (REGNO (old)) == 1
6270 && REG_N_SETS (REGNO (old)) == 1)
6271 {
6272 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
6273 alter_reg (REGNO (old), -1);
6274 }
6275 special = 1;
6276 }
6277 }
6278
6279 /* We can't do that, so output an insn to load RELOADREG. */
6280
6281 if (! special)
6282 {
6283 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6284 rtx second_reload_reg = 0;
6285 enum insn_code icode;
6286
6287 /* If we have a secondary reload, pick up the secondary register
6288 and icode, if any. If OLDEQUIV and OLD are different or
6289 if this is an in-out reload, recompute whether or not we
6290 still need a secondary register and what the icode should
6291 be. If we still need a secondary register and the class or
6292 icode is different, go back to reloading from OLD if using
6293 OLDEQUIV means that we got the wrong type of register. We
6294 cannot have different class or icode due to an in-out reload
6295 because we don't make such reloads when both the input and
6296 output need secondary reload registers. */
6297
6298 if (reload_secondary_in_reload[j] >= 0)
6299 {
6300 int secondary_reload = reload_secondary_in_reload[j];
6301 rtx real_oldequiv = oldequiv;
6302 rtx real_old = old;
6303
6304 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6305 and similarly for OLD.
6306 See comments in get_secondary_reload in reload.c. */
6307 if (GET_CODE (oldequiv) == REG
6308 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6309 && reg_equiv_mem[REGNO (oldequiv)] != 0)
6310 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6311
6312 if (GET_CODE (old) == REG
6313 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6314 && reg_equiv_mem[REGNO (old)] != 0)
6315 real_old = reg_equiv_mem[REGNO (old)];
6316
6317 second_reload_reg = reload_reg_rtx[secondary_reload];
6318 icode = reload_secondary_in_icode[j];
6319
6320 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6321 || (reload_in[j] != 0 && reload_out[j] != 0))
6322 {
6323 enum reg_class new_class
6324 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6325 mode, real_oldequiv);
6326
6327 if (new_class == NO_REGS)
6328 second_reload_reg = 0;
6329 else
6330 {
6331 enum insn_code new_icode;
6332 enum machine_mode new_mode;
6333
6334 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6335 REGNO (second_reload_reg)))
6336 oldequiv = old, real_oldequiv = real_old;
6337 else
6338 {
6339 new_icode = reload_in_optab[(int) mode];
6340 if (new_icode != CODE_FOR_nothing
6341 && ((insn_operand_predicate[(int) new_icode][0]
6342 && ! ((*insn_operand_predicate[(int) new_icode][0])
6343 (reloadreg, mode)))
6344 || (insn_operand_predicate[(int) new_icode][1]
6345 && ! ((*insn_operand_predicate[(int) new_icode][1])
6346 (real_oldequiv, mode)))))
6347 new_icode = CODE_FOR_nothing;
6348
6349 if (new_icode == CODE_FOR_nothing)
6350 new_mode = mode;
6351 else
6352 new_mode = insn_operand_mode[(int) new_icode][2];
6353
6354 if (GET_MODE (second_reload_reg) != new_mode)
6355 {
6356 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6357 new_mode))
6358 oldequiv = old, real_oldequiv = real_old;
6359 else
6360 second_reload_reg
6361 = gen_rtx (REG, new_mode,
6362 REGNO (second_reload_reg));
6363 }
6364 }
6365 }
6366 }
6367
6368 /* If we still need a secondary reload register, check
6369 to see if it is being used as a scratch or intermediate
6370 register and generate code appropriately. If we need
6371 a scratch register, use REAL_OLDEQUIV since the form of
6372 the insn may depend on the actual address if it is
6373 a MEM. */
6374
6375 if (second_reload_reg)
6376 {
6377 if (icode != CODE_FOR_nothing)
6378 {
6379 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6380 second_reload_reg));
6381 special = 1;
6382 }
6383 else
6384 {
6385 /* See if we need a scratch register to load the
6386 intermediate register (a tertiary reload). */
6387 enum insn_code tertiary_icode
6388 = reload_secondary_in_icode[secondary_reload];
6389
6390 if (tertiary_icode != CODE_FOR_nothing)
6391 {
6392 rtx third_reload_reg
6393 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6394
6395 emit_insn ((GEN_FCN (tertiary_icode)
6396 (second_reload_reg, real_oldequiv,
6397 third_reload_reg)));
6398 }
6399 else
6400 gen_reload (second_reload_reg, oldequiv,
6401 reload_opnum[j],
6402 reload_when_needed[j]);
6403
6404 oldequiv = second_reload_reg;
6405 }
6406 }
6407 }
6408 #endif
6409
6410 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6411 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6412 reload_when_needed[j]);
6413
6414 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6415 /* We may have to make a REG_DEAD note for the secondary reload
6416 register in the insns we just made. Find the last insn that
6417 mentioned the register. */
6418 if (! special && second_reload_reg
6419 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6420 {
6421 rtx prev;
6422
6423 for (prev = get_last_insn (); prev;
6424 prev = PREV_INSN (prev))
6425 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
6426 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6427 PATTERN (prev)))
6428 {
6429 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6430 second_reload_reg,
6431 REG_NOTES (prev));
6432 break;
6433 }
6434 }
6435 #endif
6436 }
6437
6438 this_reload_insn = get_last_insn ();
6439 /* End this sequence. */
6440 *where = get_insns ();
6441 end_sequence ();
6442 }
6443
6444 /* Add a note saying the input reload reg
6445 dies in this insn, if anyone cares. */
6446 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6447 if (old != 0
6448 && reload_reg_rtx[j] != old
6449 && reload_reg_rtx[j] != 0
6450 && reload_out[j] == 0
6451 && ! reload_inherited[j]
6452 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6453 {
6454 register rtx reloadreg = reload_reg_rtx[j];
6455
6456 #if 0
6457 /* We can't abort here because we need to support this for sched.c.
6458 It's not terrible to miss a REG_DEAD note, but we should try
6459 to figure out how to do this correctly. */
6460 /* The code below is incorrect for address-only reloads. */
6461 if (reload_when_needed[j] != RELOAD_OTHER
6462 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6463 abort ();
6464 #endif
6465
6466 /* Add a death note to this insn, for an input reload. */
6467
6468 if ((reload_when_needed[j] == RELOAD_OTHER
6469 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6470 && ! dead_or_set_p (insn, reloadreg))
6471 REG_NOTES (insn)
6472 = gen_rtx (EXPR_LIST, REG_DEAD,
6473 reloadreg, REG_NOTES (insn));
6474 }
6475
6476 /* When we inherit a reload, the last marked death of the reload reg
6477 may no longer really be a death. */
6478 if (reload_reg_rtx[j] != 0
6479 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6480 && reload_inherited[j])
6481 {
6482 /* Handle inheriting an output reload.
6483 Remove the death note from the output reload insn. */
6484 if (reload_spill_index[j] >= 0
6485 && GET_CODE (reload_in[j]) == REG
6486 && spill_reg_store[reload_spill_index[j]] != 0
6487 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6488 REG_DEAD, REGNO (reload_reg_rtx[j])))
6489 remove_death (REGNO (reload_reg_rtx[j]),
6490 spill_reg_store[reload_spill_index[j]]);
6491 /* Likewise for input reloads that were inherited. */
6492 else if (reload_spill_index[j] >= 0
6493 && GET_CODE (reload_in[j]) == REG
6494 && spill_reg_store[reload_spill_index[j]] == 0
6495 && reload_inheritance_insn[j] != 0
6496 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6497 REGNO (reload_reg_rtx[j])))
6498 remove_death (REGNO (reload_reg_rtx[j]),
6499 reload_inheritance_insn[j]);
6500 else
6501 {
6502 rtx prev;
6503
6504 /* We got this register from find_equiv_reg.
6505 Search back for its last death note and get rid of it.
6506 But don't search back too far.
6507 Don't go past a place where this reg is set,
6508 since a death note before that remains valid. */
6509 for (prev = PREV_INSN (insn);
6510 prev && GET_CODE (prev) != CODE_LABEL;
6511 prev = PREV_INSN (prev))
6512 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6513 && dead_or_set_p (prev, reload_reg_rtx[j]))
6514 {
6515 if (find_regno_note (prev, REG_DEAD,
6516 REGNO (reload_reg_rtx[j])))
6517 remove_death (REGNO (reload_reg_rtx[j]), prev);
6518 break;
6519 }
6520 }
6521 }
6522
6523 /* We might have used find_equiv_reg above to choose an alternate
6524 place from which to reload. If so, and it died, we need to remove
6525 that death and move it to one of the insns we just made. */
6526
6527 if (oldequiv_reg != 0
6528 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6529 {
6530 rtx prev, prev1;
6531
6532 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6533 prev = PREV_INSN (prev))
6534 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6535 && dead_or_set_p (prev, oldequiv_reg))
6536 {
6537 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6538 {
6539 for (prev1 = this_reload_insn;
6540 prev1; prev1 = PREV_INSN (prev1))
6541 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
6542 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6543 PATTERN (prev1)))
6544 {
6545 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6546 oldequiv_reg,
6547 REG_NOTES (prev1));
6548 break;
6549 }
6550 remove_death (REGNO (oldequiv_reg), prev);
6551 }
6552 break;
6553 }
6554 }
6555 #endif
6556
6557 /* If we are reloading a register that was recently stored in with an
6558 output-reload, see if we can prove there was
6559 actually no need to store the old value in it. */
6560
6561 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6562 && reload_in[j] != 0
6563 && GET_CODE (reload_in[j]) == REG
6564 #if 0
6565 /* There doesn't seem to be any reason to restrict this to pseudos
6566 and doing so loses in the case where we are copying from a
6567 register of the wrong class. */
6568 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6569 #endif
6570 && spill_reg_store[reload_spill_index[j]] != 0
6571 /* This is unsafe if some other reload uses the same reg first. */
6572 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6573 reload_opnum[j], reload_when_needed[j])
6574 && dead_or_set_p (insn, reload_in[j])
6575 /* This is unsafe if operand occurs more than once in current
6576 insn. Perhaps some occurrences weren't reloaded. */
6577 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6578 delete_output_reload (insn, j,
6579 spill_reg_store[reload_spill_index[j]]);
6580
6581 /* Input-reloading is done. Now do output-reloading,
6582 storing the value from the reload-register after the main insn
6583 if reload_out[j] is nonzero.
6584
6585 ??? At some point we need to support handling output reloads of
6586 JUMP_INSNs or insns that set cc0. */
6587 old = reload_out[j];
6588 if (old != 0
6589 && reload_reg_rtx[j] != old
6590 && reload_reg_rtx[j] != 0)
6591 {
6592 register rtx reloadreg = reload_reg_rtx[j];
6593 register rtx second_reloadreg = 0;
6594 rtx note, p;
6595 enum machine_mode mode;
6596 int special = 0;
6597
6598 /* An output operand that dies right away does need a reload,
6599 but need not be copied from it. Show the new location in the
6600 REG_UNUSED note. */
6601 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6602 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6603 {
6604 XEXP (note, 0) = reload_reg_rtx[j];
6605 continue;
6606 }
6607 /* Likewise for a SUBREG of an operand that dies. */
6608 else if (GET_CODE (old) == SUBREG
6609 && GET_CODE (SUBREG_REG (old)) == REG
6610 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6611 SUBREG_REG (old))))
6612 {
6613 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6614 reload_reg_rtx[j]);
6615 continue;
6616 }
6617 else if (GET_CODE (old) == SCRATCH)
6618 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6619 but we don't want to make an output reload. */
6620 continue;
6621
6622 #if 0
6623 /* Strip off of OLD any size-increasing SUBREGs such as
6624 (SUBREG:SI foo:QI 0). */
6625
6626 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6627 && (GET_MODE_SIZE (GET_MODE (old))
6628 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6629 old = SUBREG_REG (old);
6630 #endif
6631
6632 	  /* If this is a JUMP_INSN, we can't support output reloads yet.  */
6633 if (GET_CODE (insn) == JUMP_INSN)
6634 abort ();
6635
6636 if (reload_when_needed[j] == RELOAD_OTHER)
6637 start_sequence ();
6638 else
6639 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6640
6641 /* Determine the mode to reload in.
6642 See comments above (for input reloading). */
6643
6644 mode = GET_MODE (old);
6645 if (mode == VOIDmode)
6646 {
6647 /* VOIDmode should never happen for an output. */
6648 if (asm_noperands (PATTERN (insn)) < 0)
6649 /* It's the compiler's fault. */
6650 fatal_insn ("VOIDmode on an output", insn);
6651 error_for_asm (insn, "output operand is constant in `asm'");
6652 /* Prevent crash--use something we know is valid. */
6653 mode = word_mode;
6654 old = gen_rtx (REG, mode, REGNO (reloadreg));
6655 }
6656
6657 if (GET_MODE (reloadreg) != mode)
6658 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6659
6660 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6661
6662 /* If we need two reload regs, set RELOADREG to the intermediate
6663 one, since it will be stored into OLD. We might need a secondary
6664 register only for an input reload, so check again here. */
6665
6666 if (reload_secondary_out_reload[j] >= 0)
6667 {
6668 rtx real_old = old;
6669
6670 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6671 && reg_equiv_mem[REGNO (old)] != 0)
6672 real_old = reg_equiv_mem[REGNO (old)];
6673
6674 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6675 mode, real_old)
6676 != NO_REGS))
6677 {
6678 second_reloadreg = reloadreg;
6679 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6680
6681 /* See if RELOADREG is to be used as a scratch register
6682 or as an intermediate register. */
6683 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6684 {
6685 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6686 (real_old, second_reloadreg, reloadreg)));
6687 special = 1;
6688 }
6689 else
6690 {
6691 /* See if we need both a scratch and intermediate reload
6692 register. */
6693
6694 int secondary_reload = reload_secondary_out_reload[j];
6695 enum insn_code tertiary_icode
6696 = reload_secondary_out_icode[secondary_reload];
6697
6698 if (GET_MODE (reloadreg) != mode)
6699 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6700
6701 if (tertiary_icode != CODE_FOR_nothing)
6702 {
6703 rtx third_reloadreg
6704 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6705 rtx tem;
6706
6707 		      /* Copy primary reload reg to secondary reload reg
6708 		      (note that these have been swapped above), then copy
6709 		      secondary reload reg to OLD using our insn.  */
6710
6711 /* If REAL_OLD is a paradoxical SUBREG, remove it
6712 and try to put the opposite SUBREG on
6713 RELOADREG. */
6714 if (GET_CODE (real_old) == SUBREG
6715 && (GET_MODE_SIZE (GET_MODE (real_old))
6716 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6717 && 0 != (tem = gen_lowpart_common
6718 (GET_MODE (SUBREG_REG (real_old)),
6719 reloadreg)))
6720 real_old = SUBREG_REG (real_old), reloadreg = tem;
6721
6722 gen_reload (reloadreg, second_reloadreg,
6723 reload_opnum[j], reload_when_needed[j]);
6724 emit_insn ((GEN_FCN (tertiary_icode)
6725 (real_old, reloadreg, third_reloadreg)));
6726 special = 1;
6727 }
6728
6729 else
6730 /* Copy between the reload regs here and then to
6731 OUT later. */
6732
6733 gen_reload (reloadreg, second_reloadreg,
6734 reload_opnum[j], reload_when_needed[j]);
6735 }
6736 }
6737 }
6738 #endif
6739
6740 /* Output the last reload insn. */
6741 if (! special)
6742 gen_reload (old, reloadreg, reload_opnum[j],
6743 reload_when_needed[j]);
6744
6745 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6746 /* If final will look at death notes for this reg,
6747 put one on the last output-reload insn to use it. Similarly
6748 for any secondary register. */
6749 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6750 for (p = get_last_insn (); p; p = PREV_INSN (p))
6751 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6752 && reg_overlap_mentioned_for_reload_p (reloadreg,
6753 PATTERN (p)))
6754 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6755 reloadreg, REG_NOTES (p));
6756
6757 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6758 if (! special && second_reloadreg
6759 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6760 for (p = get_last_insn (); p; p = PREV_INSN (p))
6761 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6762 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6763 PATTERN (p)))
6764 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6765 second_reloadreg, REG_NOTES (p));
6766 #endif
6767 #endif
6768 /* Look at all insns we emitted, just to be safe. */
6769 for (p = get_insns (); p; p = NEXT_INSN (p))
6770 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6771 {
6772 /* If this output reload doesn't come from a spill reg,
6773 clear any memory of reloaded copies of the pseudo reg.
6774 If this output reload comes from a spill reg,
6775 reg_has_output_reload will make this do nothing. */
6776 note_stores (PATTERN (p), forget_old_reloads_1);
6777
6778 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6779 && reload_spill_index[j] >= 0)
6780 new_spill_reg_store[reload_spill_index[j]] = p;
6781 }
6782
6783 if (reload_when_needed[j] == RELOAD_OTHER)
6784 {
6785 emit_insns (other_output_reload_insns[reload_opnum[j]]);
6786 other_output_reload_insns[reload_opnum[j]] = get_insns ();
6787 }
6788 else
6789 output_reload_insns[reload_opnum[j]] = get_insns ();
6790
6791 end_sequence ();
6792 }
6793 }
6794
6795 /* Now write all the insns we made for reloads in the order expected by
6796 the allocation functions. Prior to the insn being reloaded, we write
6797 the following reloads:
6798
6799 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6800
6801 RELOAD_OTHER reloads.
6802
6803 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
6804 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
6805 RELOAD_FOR_INPUT reload for the operand.
6806
6807 RELOAD_FOR_OPADDR_ADDRS reloads.
6808
6809 RELOAD_FOR_OPERAND_ADDRESS reloads.
6810
6811 After the insn being reloaded, we write the following:
6812
6813 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
6814 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
6815 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
6816 reloads for the operand. The RELOAD_OTHER output reloads are
6817 output in descending order by reload number. */
6818
6819 emit_insns_before (other_input_address_reload_insns, before_insn);
6820 emit_insns_before (other_input_reload_insns, before_insn);
6821
6822 for (j = 0; j < reload_n_operands; j++)
6823 {
6824 emit_insns_before (inpaddr_address_reload_insns[j], before_insn);
6825 emit_insns_before (input_address_reload_insns[j], before_insn);
6826 emit_insns_before (input_reload_insns[j], before_insn);
6827 }
6828
6829 emit_insns_before (other_operand_reload_insns, before_insn);
6830 emit_insns_before (operand_reload_insns, before_insn);
6831
6832 for (j = 0; j < reload_n_operands; j++)
6833 {
6834 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
6835 emit_insns_before (output_address_reload_insns[j], following_insn);
6836 emit_insns_before (output_reload_insns[j], following_insn);
6837 emit_insns_before (other_output_reload_insns[j], following_insn);
6838 }
6839
6840 /* Move death notes from INSN
6841 to output-operand-address and output reload insns. */
6842 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6843 {
6844 rtx insn1;
6845 /* Loop over those insns, last ones first. */
6846 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6847 insn1 = PREV_INSN (insn1))
6848 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6849 {
6850 rtx source = SET_SRC (PATTERN (insn1));
6851 rtx dest = SET_DEST (PATTERN (insn1));
6852
6853 /* The note we will examine next. */
6854 rtx reg_notes = REG_NOTES (insn);
6855 /* The place that pointed to this note. */
6856 rtx *prev_reg_note = &REG_NOTES (insn);
6857
6858 /* If the note is for something used in the source of this
6859 reload insn, or in the output address, move the note. */
6860 while (reg_notes)
6861 {
6862 rtx next_reg_notes = XEXP (reg_notes, 1);
6863 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6864 && GET_CODE (XEXP (reg_notes, 0)) == REG
6865 && ((GET_CODE (dest) != REG
6866 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6867 dest))
6868 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6869 source)))
6870 {
6871 *prev_reg_note = next_reg_notes;
6872 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6873 REG_NOTES (insn1) = reg_notes;
6874 }
6875 else
6876 prev_reg_note = &XEXP (reg_notes, 1);
6877
6878 reg_notes = next_reg_notes;
6879 }
6880 }
6881 }
6882 #endif
6883
6884 /* For all the spill regs newly reloaded in this instruction,
6885 record what they were reloaded from, so subsequent instructions
6886 can inherit the reloads.
6887
6888 Update spill_reg_store for the reloads of this insn.
6889 Copy the elements that were updated in the loop above. */
6890
6891 for (j = 0; j < n_reloads; j++)
6892 {
6893 register int r = reload_order[j];
6894 register int i = reload_spill_index[r];
6895
6896 /* I is nonneg if this reload used one of the spill regs.
6897 If reload_reg_rtx[r] is 0, this is an optional reload
6898 that we opted to ignore. */
6899
6900 if (i >= 0 && reload_reg_rtx[r] != 0)
6901 {
6902 int nr
6903 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6904 int k;
6905 int part_reaches_end = 0;
6906 int all_reaches_end = 1;
6907
6908 /* For a multi register reload, we need to check if all or part
6909 of the value lives to the end. */
6910 for (k = 0; k < nr; k++)
6911 {
6912 if (reload_reg_reaches_end_p (spill_regs[i] + k, reload_opnum[r],
6913 reload_when_needed[r]))
6914 part_reaches_end = 1;
6915 else
6916 all_reaches_end = 0;
6917 }
6918
6919 /* Ignore reloads that don't reach the end of the insn in
6920 entirety. */
6921 if (all_reaches_end)
6922 {
6923 /* First, clear out memory of what used to be in this spill reg.
6924 If consecutive registers are used, clear them all. */
6925
6926 for (k = 0; k < nr; k++)
6927 {
6928 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6929 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6930 }
6931
6932 /* Maybe the spill reg contains a copy of reload_out. */
6933 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6934 {
6935 register int nregno = REGNO (reload_out[r]);
6936 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6937 : HARD_REGNO_NREGS (nregno,
6938 GET_MODE (reload_reg_rtx[r])));
6939
6940 spill_reg_store[i] = new_spill_reg_store[i];
6941 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6942
6943 /* If NREGNO is a hard register, it may occupy more than
6944 one register. If it does, say what is in the
6945 rest of the registers assuming that both registers
6946 agree on how many words the object takes. If not,
6947 invalidate the subsequent registers. */
6948
6949 if (nregno < FIRST_PSEUDO_REGISTER)
6950 for (k = 1; k < nnr; k++)
6951 reg_last_reload_reg[nregno + k]
6952 = (nr == nnr
6953 ? gen_rtx (REG,
6954 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6955 REGNO (reload_reg_rtx[r]) + k)
6956 : 0);
6957
6958 /* Now do the inverse operation. */
6959 for (k = 0; k < nr; k++)
6960 {
6961 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6962 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6963 ? nregno
6964 : nregno + k);
6965 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6966 }
6967 }
6968
6969 /* Maybe the spill reg contains a copy of reload_in. Only do
6970 something if there will not be an output reload for
6971 the register being reloaded. */
6972 else if (reload_out[r] == 0
6973 && reload_in[r] != 0
6974 && ((GET_CODE (reload_in[r]) == REG
6975 && ! reg_has_output_reload[REGNO (reload_in[r])])
6976 || (GET_CODE (reload_in_reg[r]) == REG
6977 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
6978 {
6979 register int nregno;
6980 int nnr;
6981
6982 if (GET_CODE (reload_in[r]) == REG)
6983 nregno = REGNO (reload_in[r]);
6984 else
6985 nregno = REGNO (reload_in_reg[r]);
6986
6987 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6988 : HARD_REGNO_NREGS (nregno,
6989 GET_MODE (reload_reg_rtx[r])));
6990
6991 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6992
6993 if (nregno < FIRST_PSEUDO_REGISTER)
6994 for (k = 1; k < nnr; k++)
6995 reg_last_reload_reg[nregno + k]
6996 = (nr == nnr
6997 ? gen_rtx (REG,
6998 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6999 REGNO (reload_reg_rtx[r]) + k)
7000 : 0);
7001
7002 /* Unless we inherited this reload, show we haven't
7003 recently done a store. */
7004 if (! reload_inherited[r])
7005 spill_reg_store[i] = 0;
7006
7007 for (k = 0; k < nr; k++)
7008 {
7009 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
7010 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7011 ? nregno
7012 : nregno + k);
7013 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
7014 = insn;
7015 }
7016 }
7017 }
7018
7019 /* However, if part of the reload reaches the end, then we must
7020 invalidate the old info for the part that survives to the end. */
7021 else if (part_reaches_end)
7022 {
7023 for (k = 0; k < nr; k++)
7024 if (reload_reg_reaches_end_p (spill_regs[i] + k,
7025 reload_opnum[r],
7026 reload_when_needed[r]))
7027 {
7028 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
7029 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
7030 }
7031 }
7032 }
7033
7034 /* The following if-statement was #if 0'd in 1.34 (or before...).
7035 It's reenabled in 1.35 because supposedly nothing else
7036 deals with this problem. */
7037
7038 /* If a register gets output-reloaded from a non-spill register,
7039 that invalidates any previous reloaded copy of it.
7040 But forget_old_reloads_1 won't get to see it, because
7041 it thinks only about the original insn. So invalidate it here. */
7042 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
7043 {
7044 register int nregno = REGNO (reload_out[r]);
7045 if (nregno >= FIRST_PSEUDO_REGISTER)
7046 reg_last_reload_reg[nregno] = 0;
7047 else
7048 {
7049 int num_regs = HARD_REGNO_NREGS (nregno,GET_MODE (reload_out[r]));
7050
7051 while (num_regs-- > 0)
7052 reg_last_reload_reg[nregno + num_regs] = 0;
7053 }
7054 }
7055 }
7056 }
7057 \f
7058 /* Emit code to perform a reload from IN (which may be a reload register) to
7059 OUT (which may also be a reload register). IN or OUT is from operand
7060 OPNUM with reload type TYPE.
7061
7062 Returns first insn emitted. */
7063
7064 rtx
7065 gen_reload (out, in, opnum, type)
7066 rtx out;
7067 rtx in;
7068 int opnum;
7069 enum reload_type type;
7070 {
7071 rtx last = get_last_insn ();
7072 rtx tem;
7073
7074 /* If IN is a paradoxical SUBREG, remove it and try to put the
7075 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7076 if (GET_CODE (in) == SUBREG
7077 && (GET_MODE_SIZE (GET_MODE (in))
7078 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7079 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7080 in = SUBREG_REG (in), out = tem;
7081 else if (GET_CODE (out) == SUBREG
7082 && (GET_MODE_SIZE (GET_MODE (out))
7083 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7084 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7085 out = SUBREG_REG (out), in = tem;
7086
7087 /* How to do this reload can get quite tricky. Normally, we are being
7088 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7089 register that didn't get a hard register. In that case we can just
7090 call emit_move_insn.
7091
7092 We can also be asked to reload a PLUS that adds a register or a MEM to
7093 another register, constant or MEM. This can occur during frame pointer
7094 elimination and while reloading addresses. This case is handled by
7095 trying to emit a single insn to perform the add. If it is not valid,
7096 we use a two insn sequence.
7097
7098 Finally, we could be called to handle an 'o' constraint by putting
7099 an address into a register. In that case, we first try to do this
7100 with a named pattern of "reload_load_address". If no such pattern
7101 exists, we just emit a SET insn and hope for the best (it will normally
7102 be valid on machines that use 'o').
7103
7104 This entire process is made complex because reload will never
7105 process the insns we generate here and so we must ensure that
7106 they will fit their constraints and also by the fact that parts of
7107 IN might be being reloaded separately and replaced with spill registers.
7108 Because of this, we are, in some sense, just guessing the right approach
7109 here. The one listed above seems to work.
7110
7111 ??? At some point, this whole thing needs to be rethought. */
7112
7113 if (GET_CODE (in) == PLUS
7114 && (GET_CODE (XEXP (in, 0)) == REG
7115 || GET_CODE (XEXP (in, 0)) == SUBREG
7116 || GET_CODE (XEXP (in, 0)) == MEM)
7117 && (GET_CODE (XEXP (in, 1)) == REG
7118 || GET_CODE (XEXP (in, 1)) == SUBREG
7119 || CONSTANT_P (XEXP (in, 1))
7120 || GET_CODE (XEXP (in, 1)) == MEM))
7121 {
7122 /* We need to compute the sum of a register or a MEM and another
7123 register, constant, or MEM, and put it into the reload
7124 register. The best possible way of doing this is if the machine
7125 has a three-operand ADD insn that accepts the required operands.
7126
7127 The simplest approach is to try to generate such an insn and see if it
7128 is recognized and matches its constraints. If so, it can be used.
7129
7130 It might be better not to actually emit the insn unless it is valid,
7131 but we need to pass the insn as an operand to `recog' and
7132 `insn_extract' and it is simpler to emit and then delete the insn if
7133 not valid than to dummy things up. */
7134
7135 rtx op0, op1, tem, insn;
7136 int code;
7137
7138 op0 = find_replacement (&XEXP (in, 0));
7139 op1 = find_replacement (&XEXP (in, 1));
7140
7141 /* Since constraint checking is strict, commutativity won't be
7142 checked, so we need to do that here to avoid spurious failure
7143 if the add instruction is two-address and the second operand
7144 of the add is the same as the reload reg, which is frequently
7145 the case. If the insn would be A = B + A, rearrange it so
7146 it will be A = A + B as constrain_operands expects. */
7147
7148 if (GET_CODE (XEXP (in, 1)) == REG
7149 && REGNO (out) == REGNO (XEXP (in, 1)))
7150 tem = op0, op0 = op1, op1 = tem;
7151
7152 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7153 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
7154
7155 insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
7156 code = recog_memoized (insn);
7157
7158 if (code >= 0)
7159 {
7160 insn_extract (insn);
7161 /* We want constrain operands to treat this insn strictly in
7162 its validity determination, i.e., the way it would after reload
7163 has completed. */
7164 if (constrain_operands (code, 1))
7165 return insn;
7166 }
7167
7168 delete_insns_since (last);
7169
7170 /* If that failed, we must use a conservative two-insn sequence.
7171 use move to copy constant, MEM, or pseudo register to the reload
7172 register since "move" will be able to handle an arbitrary operand,
7173 unlike add which can't, in general. Then add the registers.
7174
7175 If there is another way to do this for a specific machine, a
7176 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7177 we emit below. */
7178
7179 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
7180 || (GET_CODE (op1) == REG
7181 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
7182 tem = op0, op0 = op1, op1 = tem;
7183
7184 gen_reload (out, op0, opnum, type);
7185
7186 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7187 This fixes a problem on the 32K where the stack pointer cannot
7188 be used as an operand of an add insn. */
7189
7190 if (rtx_equal_p (op0, op1))
7191 op1 = out;
7192
7193 insn = emit_insn (gen_add2_insn (out, op1));
7194
7195 /* If that failed, copy the address register to the reload register.
7196 Then add the constant to the reload register. */
7197
7198 code = recog_memoized (insn);
7199
7200 if (code >= 0)
7201 {
7202 insn_extract (insn);
7203 /* We want constrain operands to treat this insn strictly in
7204 its validity determination, i.e., the way it would after reload
7205 has completed. */
7206 if (constrain_operands (code, 1))
7207 return insn;
7208 }
7209
7210 delete_insns_since (last);
7211
7212 gen_reload (out, op1, opnum, type);
7213 emit_insn (gen_add2_insn (out, op0));
7214 }
7215
7216 #ifdef SECONDARY_MEMORY_NEEDED
7217 /* If we need a memory location to do the move, do it that way. */
7218 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
7219 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
7220 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
7221 REGNO_REG_CLASS (REGNO (out)),
7222 GET_MODE (out)))
7223 {
7224 /* Get the memory to use and rewrite both registers to its mode. */
7225 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7226
7227 if (GET_MODE (loc) != GET_MODE (out))
7228 out = gen_rtx (REG, GET_MODE (loc), REGNO (out));
7229
7230 if (GET_MODE (loc) != GET_MODE (in))
7231 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
7232
7233 gen_reload (loc, in, opnum, type);
7234 gen_reload (out, loc, opnum, type);
7235 }
7236 #endif
7237
7238 /* If IN is a simple operand, use gen_move_insn. */
7239 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
7240 emit_insn (gen_move_insn (out, in));
7241
7242 #ifdef HAVE_reload_load_address
7243 else if (HAVE_reload_load_address)
7244 emit_insn (gen_reload_load_address (out, in));
7245 #endif
7246
7247 /* Otherwise, just write (set OUT IN) and hope for the best. */
7248 else
7249 emit_insn (gen_rtx (SET, VOIDmode, out, in));
7250
7251 /* Return the first insn emitted.
7252 We can not just return get_last_insn, because there may have
7253 been multiple instructions emitted. Also note that gen_move_insn may
7254 emit more than one insn itself, so we can not assume that there is one
7255 insn emitted per emit_insn_before call. */
7256
7257 return last ? NEXT_INSN (last) : get_insns ();
7258 }
7259 \f
7260 /* Delete a previously made output-reload
7261 whose result we now believe is not needed.
7262 First we double-check.
7263
7264 INSN is the insn now being processed.
7265 OUTPUT_RELOAD_INSN is the insn of the output reload.
7266 J is the reload-number for this insn. */
7267
static void
delete_output_reload (insn, j, output_reload_insn)
     rtx insn;
     int j;
     rtx output_reload_insn;
{
  register rtx i1;

  /* Get the raw pseudo-register referred to.  */

  rtx reg = reload_in[j];
  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and no jumps or labels intervene, then the value can get
     here through the reload reg alone.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
	return;
      if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
	  && reg_mentioned_p (reg, PATTERN (i1)))
	return;
    }

  /* Give up if stores into this pseudo must be preserved for some
     reason recorded elsewhere (in cannot_omit_stores).  */
  if (cannot_omit_stores[REGNO (reg)])
    return;

  /* If this insn will store in the pseudo again,
     the previous store can be removed.  */
  if (reload_out[j] == reload_in[j])
    delete_insn (output_reload_insn);

  /* See if the pseudo reg has been completely replaced
     with reload regs.  If so, delete the store insn
     and forget we had a stack slot for the pseudo.  */
  else if (REG_N_DEATHS (REGNO (reg)) == 1
	   && REG_BASIC_BLOCK (REGNO (reg)) >= 0
	   && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here
	 and the beginning of the current basic block.
	 (We also know that the last use before INSN was
	 the output reload we are thinking of deleting, but never mind that.)
	 Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	  if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    /* Some other ref remains;
	       we can't do anything.  */
	    return;
	}

      /* Delete the now-dead stores into this pseudo.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    {
	      /* This might be a basic block head,
		 thus don't use delete_insn.  */
	      PUT_CODE (i2, NOTE);
	      NOTE_SOURCE_FILE (i2) = 0;
	      NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
	    }
	  /* Stop at the start of the basic block, matching the scan
	     above.  */
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	}

      /* For the debugging info,
	 say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
      alter_reg (REGNO (reg), -1);
    }
}
7361 \f
7362 /* Output reload-insns to reload VALUE into RELOADREG.
7363 VALUE is an autoincrement or autodecrement RTX whose operand
7364 is a register or memory location;
7365 so reloading involves incrementing that location.
7366
7367 INC_AMOUNT is the number to increment or decrement by (always positive).
7368 This cannot be deduced from VALUE. */
7369
static void
inc_for_reload (reloadreg, value, inc_amount)
     rtx reloadreg;
     rtx value;
     int inc_amount;
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were non-zero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (GET_CODE (incloc) == REG)
    reg_last_reload_reg[REGNO (incloc)] = 0;

  /* A decrement is just an increment by the negated amount.  */
  if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
    inc_amount = - inc_amount;

  inc = GEN_INT (inc_amount);

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post)
    emit_insn (gen_move_insn (reloadreg, incloc));

  /* See if we can directly increment INCLOC.  Use a method similar to that
     in gen_reload.  */

  last = get_last_insn ();
  add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
				 gen_rtx (PLUS, GET_MODE (incloc),
					  incloc, inc)));

  code = recog_memoized (add_insn);
  if (code >= 0)
    {
      insn_extract (add_insn);
      /* Require the insn to be valid in the strict sense, i.e., the
	 way it will be after reload has completed.  */
      if (constrain_operands (code, 1))
	{
	  /* If this is a pre-increment and we have incremented the value
	     where it lives, copy the incremented value to RELOADREG to
	     be used as an address.  */

	  if (! post)
	    emit_insn (gen_move_insn (reloadreg, incloc));

	  return;
	}
    }

  /* The direct increment was not recognized; discard it.  */
  delete_insns_since (last);

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      emit_insn (gen_move_insn (reloadreg, incloc));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied INCLOC to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
    }

  return;
}
7457 \f
7458 /* Return 1 if we are certain that the constraint-string STRING allows
7459 the hard register REG. Return 0 if we can't be sure of this. */
7460
7461 static int
7462 constraint_accepts_reg_p (string, reg)
7463 char *string;
7464 rtx reg;
7465 {
7466 int value = 0;
7467 int regno = true_regnum (reg);
7468 int c;
7469
7470 /* Initialize for first alternative. */
7471 value = 0;
7472 /* Check that each alternative contains `g' or `r'. */
7473 while (1)
7474 switch (c = *string++)
7475 {
7476 case 0:
7477 /* If an alternative lacks `g' or `r', we lose. */
7478 return value;
7479 case ',':
7480 /* If an alternative lacks `g' or `r', we lose. */
7481 if (value == 0)
7482 return 0;
7483 /* Initialize for next alternative. */
7484 value = 0;
7485 break;
7486 case 'g':
7487 case 'r':
7488 /* Any general reg wins for this alternative. */
7489 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7490 value = 1;
7491 break;
7492 default:
7493 /* Any reg in specified class wins for this alternative. */
7494 {
7495 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7496
7497 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7498 value = 1;
7499 }
7500 }
7501 }
7502 \f
7503 /* Return the number of places FIND appears within X, but don't count
7504 an occurrence if some SET_DEST is FIND. */
7505
7506 static int
7507 count_occurrences (x, find)
7508 register rtx x, find;
7509 {
7510 register int i, j;
7511 register enum rtx_code code;
7512 register char *format_ptr;
7513 int count;
7514
7515 if (x == find)
7516 return 1;
7517 if (x == 0)
7518 return 0;
7519
7520 code = GET_CODE (x);
7521
7522 switch (code)
7523 {
7524 case REG:
7525 case QUEUED:
7526 case CONST_INT:
7527 case CONST_DOUBLE:
7528 case SYMBOL_REF:
7529 case CODE_LABEL:
7530 case PC:
7531 case CC0:
7532 return 0;
7533
7534 case SET:
7535 if (SET_DEST (x) == find)
7536 return count_occurrences (SET_SRC (x), find);
7537 break;
7538 }
7539
7540 format_ptr = GET_RTX_FORMAT (code);
7541 count = 0;
7542
7543 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7544 {
7545 switch (*format_ptr++)
7546 {
7547 case 'e':
7548 count += count_occurrences (XEXP (x, i), find);
7549 break;
7550
7551 case 'E':
7552 if (XVEC (x, i) != NULL)
7553 {
7554 for (j = 0; j < XVECLEN (x, i); j++)
7555 count += count_occurrences (XVECEXP (x, i, j), find);
7556 }
7557 break;
7558 }
7559 }
7560 return count;
7561 }
7562 \f
/* This array holds values which are equivalent to a hard register
   during reload_cse_regs.  Each array element is an EXPR_LIST of
   values.  Each time a hard register is set, we set the corresponding
   array element to the value.  Each time a hard register is copied
   into memory, we add the memory location to the corresponding array
   element.  We don't store values or memory addresses with side
   effects in this array.

   If the value is a CONST_INT, then the mode of the containing
   EXPR_LIST is the mode in which that CONST_INT was referenced.

   We sometimes clobber a specific entry in a list.  In that case, we
   just set XEXP (list-entry, 0) to 0.  */

/* Indexed by hard register number; allocated in reload_cse_regs with
   FIRST_PSEUDO_REGISTER entries.  */
static rtx *reg_values;

/* This is a preallocated REG rtx which we use as a temporary in
   reload_cse_invalidate_regno, so that we don't need to allocate a
   new one each time through a loop in that function.  */

static rtx invalidate_regno_rtx;
7584
7585 /* Invalidate any entries in reg_values which depend on REGNO,
7586 including those for REGNO itself. This is called if REGNO is
7587 changing. If CLOBBER is true, then always forget anything we
7588 currently know about REGNO. MODE is the mode of the assignment to
7589 REGNO, which is used to determine how many hard registers are being
7590 changed. If MODE is VOIDmode, then only REGNO is being changed;
7591 this is used when invalidating call clobbered registers across a
7592 call. */
7593
static void
reload_cse_invalidate_regno (regno, mode, clobber)
     int regno;
     enum machine_mode mode;
     int clobber;
{
  int endregno;
  register int i;

  /* Our callers don't always go through true_regnum; we may see a
     pseudo-register here from a CLOBBER or the like.  We probably
     won't ever see a pseudo-register that has a real register number,
     but we check anyhow for safety.  */
  if (regno >= FIRST_PSEUDO_REGISTER)
    regno = reg_renumber[regno];
  if (regno < 0)
    return;

  /* VOIDmode means only REGNO itself changes; otherwise the
     assignment covers HARD_REGNO_NREGS consecutive hard regs.  */
  if (mode == VOIDmode)
    endregno = regno + 1;
  else
    endregno = regno + HARD_REGNO_NREGS (regno, mode);

  /* When clobbering, forget everything recorded for the changed
     registers themselves.  */
  if (clobber)
    for (i = regno; i < endregno; i++)
      reg_values[i] = 0;

  /* Drop any recorded value, for any register, which refers to one of
     the registers being changed.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      rtx x;

      for (x = reg_values[i]; x; x = XEXP (x, 1))
	{
	  if (XEXP (x, 0) != 0
	      && refers_to_regno_p (regno, endregno, XEXP (x, 0), NULL_PTR))
	    {
	      /* If this is the only entry on the list, clear
		 reg_values[i].  Otherwise, just clear this entry on
		 the list.  */
	      if (XEXP (x, 1) == 0 && x == reg_values[i])
		{
		  reg_values[i] = 0;
		  break;
		}
	      XEXP (x, 0) = 0;
	    }
	}
    }

  /* We must look at earlier registers, in case REGNO is part of a
     multi word value but is not the first register.  If an earlier
     register has a value in a mode which overlaps REGNO, then we must
     invalidate that earlier register.  Note that we do not need to
     check REGNO or later registers (we must not check REGNO itself,
     because we would incorrectly conclude that there was a conflict).  */

  for (i = 0; i < regno; i++)
    {
      rtx x;

      for (x = reg_values[i]; x; x = XEXP (x, 1))
	{
	  if (XEXP (x, 0) != 0)
	    {
	      /* Reuse the preallocated REG to ask whether a value held
		 in register I, in this entry's mode, would overlap
		 the registers being changed.  */
	      PUT_MODE (invalidate_regno_rtx, GET_MODE (x));
	      REGNO (invalidate_regno_rtx) = i;
	      if (refers_to_regno_p (regno, endregno, invalidate_regno_rtx,
				     NULL_PTR))
		{
		  reload_cse_invalidate_regno (i, VOIDmode, 1);
		  break;
		}
	    }
	}
    }
}
7670
7671 /* The memory at address MEM_BASE is being changed.
7672 Return whether this change will invalidate VAL. */
7673
7674 static int
7675 reload_cse_mem_conflict_p (mem_base, val)
7676 rtx mem_base;
7677 rtx val;
7678 {
7679 enum rtx_code code;
7680 char *fmt;
7681 int i;
7682
7683 code = GET_CODE (val);
7684 switch (code)
7685 {
7686 /* Get rid of a few simple cases quickly. */
7687 case REG:
7688 case PC:
7689 case CC0:
7690 case SCRATCH:
7691 case CONST:
7692 case CONST_INT:
7693 case CONST_DOUBLE:
7694 case SYMBOL_REF:
7695 case LABEL_REF:
7696 return 0;
7697
7698 case MEM:
7699 if (GET_MODE (mem_base) == BLKmode
7700 || GET_MODE (val) == BLKmode)
7701 return 1;
7702 return anti_dependence (val, mem_base);
7703
7704 default:
7705 break;
7706 }
7707
7708 fmt = GET_RTX_FORMAT (code);
7709
7710 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7711 {
7712 if (fmt[i] == 'e')
7713 {
7714 if (reload_cse_mem_conflict_p (mem_base, XEXP (val, i)))
7715 return 1;
7716 }
7717 else if (fmt[i] == 'E')
7718 {
7719 int j;
7720
7721 for (j = 0; j < XVECLEN (val, i); j++)
7722 if (reload_cse_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
7723 return 1;
7724 }
7725 }
7726
7727 return 0;
7728 }
7729
7730 /* Invalidate any entries in reg_values which are changed because of a
7731 store to MEM_RTX. If this is called because of a non-const call
7732 instruction, MEM_RTX is (mem:BLK const0_rtx). */
7733
7734 static void
7735 reload_cse_invalidate_mem (mem_rtx)
7736 rtx mem_rtx;
7737 {
7738 register int i;
7739
7740 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7741 {
7742 rtx x;
7743
7744 for (x = reg_values[i]; x; x = XEXP (x, 1))
7745 {
7746 if (XEXP (x, 0) != 0
7747 && reload_cse_mem_conflict_p (mem_rtx, XEXP (x, 0)))
7748 {
7749 /* If this is the only entry on the list, clear
7750 reg_values[i]. Otherwise, just clear this entry on
7751 the list. */
7752 if (XEXP (x, 1) == 0 && x == reg_values[i])
7753 {
7754 reg_values[i] = 0;
7755 break;
7756 }
7757 XEXP (x, 0) = 0;
7758 }
7759 }
7760 }
7761 }
7762
7763 /* Invalidate DEST, which is being assigned to or clobbered. The
7764 second parameter exists so that this function can be passed to
7765 note_stores; it is ignored. */
7766
7767 static void
7768 reload_cse_invalidate_rtx (dest, ignore)
7769 rtx dest;
7770 rtx ignore;
7771 {
7772 while (GET_CODE (dest) == STRICT_LOW_PART
7773 || GET_CODE (dest) == SIGN_EXTRACT
7774 || GET_CODE (dest) == ZERO_EXTRACT
7775 || GET_CODE (dest) == SUBREG)
7776 dest = XEXP (dest, 0);
7777
7778 if (GET_CODE (dest) == REG)
7779 reload_cse_invalidate_regno (REGNO (dest), GET_MODE (dest), 1);
7780 else if (GET_CODE (dest) == MEM)
7781 reload_cse_invalidate_mem (dest);
7782 }
7783
7784 /* Do a very simple CSE pass over the hard registers.
7785
7786 This function detects no-op moves where we happened to assign two
7787 different pseudo-registers to the same hard register, and then
7788 copied one to the other. Reload will generate a useless
7789 instruction copying a register to itself.
7790
7791 This function also detects cases where we load a value from memory
7792 into two different registers, and (if memory is more expensive than
7793 registers) changes it to simply copy the first register into the
7794 second register. */
7795
7796 void
7797 reload_cse_regs (first)
7798 rtx first;
7799 {
7800 char *firstobj;
7801 rtx callmem;
7802 register int i;
7803 rtx insn;
7804
7805 init_alias_analysis ();
7806
7807 reg_values = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7808 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7809 reg_values[i] = 0;
7810
7811 /* Create our EXPR_LIST structures on reload_obstack, so that we can
7812 free them when we are done. */
7813 push_obstacks (&reload_obstack, &reload_obstack);
7814 firstobj = (char *) obstack_alloc (&reload_obstack, 0);
7815
7816 /* We pass this to reload_cse_invalidate_mem to invalidate all of
7817 memory for a non-const call instruction. */
7818 callmem = gen_rtx (MEM, BLKmode, const0_rtx);
7819
7820 /* This is used in reload_cse_invalidate_regno to avoid consing a
7821 new REG in a loop in that function. */
7822 invalidate_regno_rtx = gen_rtx (REG, VOIDmode, 0);
7823
7824 for (insn = first; insn; insn = NEXT_INSN (insn))
7825 {
7826 rtx body;
7827
7828 if (GET_CODE (insn) == CODE_LABEL)
7829 {
7830 /* Forget all the register values at a code label. We don't
7831 try to do anything clever around jumps. */
7832 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7833 reg_values[i] = 0;
7834
7835 continue;
7836 }
7837
7838 #ifdef NON_SAVING_SETJMP
7839 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
7840 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
7841 {
7842 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7843 reg_values[i] = 0;
7844
7845 continue;
7846 }
7847 #endif
7848
7849 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7850 continue;
7851
7852 /* If this is a call instruction, forget anything stored in a
7853 call clobbered register, or, if this is not a const call, in
7854 memory. */
7855 if (GET_CODE (insn) == CALL_INSN)
7856 {
7857 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7858 if (call_used_regs[i])
7859 reload_cse_invalidate_regno (i, VOIDmode, 1);
7860
7861 if (! CONST_CALL_P (insn))
7862 reload_cse_invalidate_mem (callmem);
7863 }
7864
7865 body = PATTERN (insn);
7866 if (GET_CODE (body) == SET)
7867 {
7868 if (reload_cse_noop_set_p (body, insn))
7869 {
7870 PUT_CODE (insn, NOTE);
7871 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7872 NOTE_SOURCE_FILE (insn) = 0;
7873
7874 /* We're done with this insn. */
7875 continue;
7876 }
7877
7878 reload_cse_simplify_set (body, insn);
7879 reload_cse_record_set (body, body);
7880 }
7881 else if (GET_CODE (body) == PARALLEL)
7882 {
7883 int delete;
7884
7885 /* If every action in a PARALLEL is a noop, we can delete
7886 the entire PARALLEL. */
7887 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
7888 if ((GET_CODE (XVECEXP (body, 0, i)) != SET
7889 || ! reload_cse_noop_set_p (XVECEXP (body, 0, i), insn))
7890 && GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
7891 break;
7892 if (i < 0)
7893 {
7894 PUT_CODE (insn, NOTE);
7895 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7896 NOTE_SOURCE_FILE (insn) = 0;
7897
7898 /* We're done with this insn. */
7899 continue;
7900 }
7901
7902 /* Look through the PARALLEL and record the values being
7903 set, if possible. Also handle any CLOBBERs. */
7904 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
7905 {
7906 rtx x = XVECEXP (body, 0, i);
7907
7908 if (GET_CODE (x) == SET)
7909 reload_cse_record_set (x, body);
7910 else
7911 note_stores (x, reload_cse_invalidate_rtx);
7912 }
7913 }
7914 else
7915 note_stores (body, reload_cse_invalidate_rtx);
7916
7917 #ifdef AUTO_INC_DEC
7918 /* Clobber any registers which appear in REG_INC notes. We
7919 could keep track of the changes to their values, but it is
7920 unlikely to help. */
7921 {
7922 rtx x;
7923
7924 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
7925 if (REG_NOTE_KIND (x) == REG_INC)
7926 reload_cse_invalidate_rtx (XEXP (x, 0), NULL_RTX);
7927 }
7928 #endif
7929
7930 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
7931 after we have processed the insn. */
7932 if (GET_CODE (insn) == CALL_INSN)
7933 {
7934 rtx x;
7935
7936 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
7937 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
7938 reload_cse_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX);
7939 }
7940 }
7941
7942 /* Free all the temporary structures we created, and go back to the
7943 regular obstacks. */
7944 obstack_free (&reload_obstack, firstobj);
7945 pop_obstacks ();
7946 }
7947
7948 /* Return whether the values known for REGNO are equal to VAL. MODE
7949 is the mode of the object that VAL is being copied to; this matters
7950 if VAL is a CONST_INT. */
7951
7952 static int
7953 reload_cse_regno_equal_p (regno, val, mode)
7954 int regno;
7955 rtx val;
7956 enum machine_mode mode;
7957 {
7958 rtx x;
7959
7960 if (val == 0)
7961 return 0;
7962
7963 for (x = reg_values[regno]; x; x = XEXP (x, 1))
7964 if (XEXP (x, 0) != 0
7965 && rtx_equal_p (XEXP (x, 0), val)
7966 && (GET_CODE (val) != CONST_INT
7967 || mode == GET_MODE (x)
7968 || (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
7969 /* On a big endian machine if the value spans more than
7970 one register then this register holds the high part of
7971 it and we can't use it.
7972
7973 ??? We should also compare with the high part of the
7974 value. */
7975 && !(WORDS_BIG_ENDIAN
7976 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
7977 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
7978 GET_MODE_BITSIZE (GET_MODE (x))))))
7979 return 1;
7980
7981 return 0;
7982 }
7983
/* See whether a single set is a noop.  SET is the set instruction we
   should check, and INSN is the instruction from which it came.  */
7986
7987 static int
7988 reload_cse_noop_set_p (set, insn)
7989 rtx set;
7990 rtx insn;
7991 {
7992 rtx src, dest;
7993 enum machine_mode dest_mode;
7994 int dreg, sreg;
7995 int ret;
7996
7997 src = SET_SRC (set);
7998 dest = SET_DEST (set);
7999 dest_mode = GET_MODE (dest);
8000
8001 if (side_effects_p (src))
8002 return 0;
8003
8004 dreg = true_regnum (dest);
8005 sreg = true_regnum (src);
8006
8007 /* Check for setting a register to itself. In this case, we don't
8008 have to worry about REG_DEAD notes. */
8009 if (dreg >= 0 && dreg == sreg)
8010 return 1;
8011
8012 ret = 0;
8013 if (dreg >= 0)
8014 {
8015 /* Check for setting a register to itself. */
8016 if (dreg == sreg)
8017 ret = 1;
8018
8019 /* Check for setting a register to a value which we already know
8020 is in the register. */
8021 else if (reload_cse_regno_equal_p (dreg, src, dest_mode))
8022 ret = 1;
8023
8024 /* Check for setting a register DREG to another register SREG
8025 where SREG is equal to a value which is already in DREG. */
8026 else if (sreg >= 0)
8027 {
8028 rtx x;
8029
8030 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
8031 {
8032 rtx tmp;
8033
8034 if (XEXP (x, 0) == 0)
8035 continue;
8036
8037 if (dest_mode == GET_MODE (x))
8038 tmp = XEXP (x, 0);
8039 else if (GET_MODE_BITSIZE (dest_mode)
8040 < GET_MODE_BITSIZE (GET_MODE (x)))
8041 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8042 else
8043 continue;
8044
8045 if (tmp
8046 && reload_cse_regno_equal_p (dreg, tmp, dest_mode))
8047 {
8048 ret = 1;
8049 break;
8050 }
8051 }
8052 }
8053 }
8054 else if (GET_CODE (dest) == MEM)
8055 {
8056 /* Check for storing a register to memory when we know that the
8057 register is equivalent to the memory location. */
8058 if (sreg >= 0
8059 && reload_cse_regno_equal_p (sreg, dest, dest_mode)
8060 && ! side_effects_p (dest))
8061 ret = 1;
8062 }
8063
8064 /* If we can delete this SET, then we need to look for an earlier
8065 REG_DEAD note on DREG, and remove it if it exists. */
8066 if (ret)
8067 {
8068 if (! find_regno_note (insn, REG_UNUSED, dreg))
8069 {
8070 rtx trial;
8071
8072 for (trial = prev_nonnote_insn (insn);
8073 (trial
8074 && GET_CODE (trial) != CODE_LABEL
8075 && GET_CODE (trial) != BARRIER);
8076 trial = prev_nonnote_insn (trial))
8077 {
8078 if (find_regno_note (trial, REG_DEAD, dreg))
8079 {
8080 remove_death (dreg, trial);
8081 break;
8082 }
8083 }
8084 }
8085 }
8086
8087 return ret;
8088 }
8089
8090 /* Try to simplify a single SET instruction. SET is the set pattern.
8091 INSN is the instruction it came from. */
8092
static void
reload_cse_simplify_set (set, insn)
     rtx set;
     rtx insn;
{
  int dreg;
  rtx src;
  enum machine_mode dest_mode;
  enum reg_class dclass;
  register int i;

  /* We only handle one case: if we set a register to a value which is
     not a register, we try to find that value in some other register
     and change the set into a register copy.  */

  dreg = true_regnum (SET_DEST (set));
  if (dreg < 0)
    return;

  src = SET_SRC (set);
  if (side_effects_p (src) || true_regnum (src) >= 0)
    return;

  /* If memory loads are cheaper than register copies, don't change
     them.  */
  if (GET_CODE (src) == MEM && MEMORY_MOVE_COST (GET_MODE (src)) < 2)
    return;

  dest_mode = GET_MODE (SET_DEST (set));
  dclass = REGNO_REG_CLASS (dreg);
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      /* Register I must hold a value equal to SRC, and copying from
	 its class must be no more expensive than an ordinary
	 register-register move.  */
      if (i != dreg
	  && REGISTER_MOVE_COST (REGNO_REG_CLASS (i), dclass) == 2
	  && reload_cse_regno_equal_p (i, src, dest_mode))
	{
	  int validated;

	  /* Pop back to the real obstacks while changing the insn.  */
	  pop_obstacks ();

	  validated = validate_change (insn, &SET_SRC (set),
				       gen_rtx (REG, dest_mode, i), 0);

	  /* Go back to the obstack we are using for temporary
	     storage.  */
	  push_obstacks (&reload_obstack, &reload_obstack);

	  if (validated)
	    {
	      /* We need to look for an earlier REG_DEAD note on I,
		 and remove it if it exists.  */
	      if (! find_regno_note (insn, REG_UNUSED, i))
		{
		  rtx trial;

		  for (trial = prev_nonnote_insn (insn);
		       (trial
			&& GET_CODE (trial) != CODE_LABEL
			&& GET_CODE (trial) != BARRIER);
		       trial = prev_nonnote_insn (trial))
		    {
		      if (find_regno_note (trial, REG_DEAD, i))
			{
			  remove_death (i, trial);
			  break;
			}
		    }
		}

	      return;
	    }
	}
    }
}
8168
/* These two variables are used to pass information from
   reload_cse_record_set to reload_cse_check_clobber.  */

/* Set nonzero by reload_cse_check_clobber when a store overlaps
   reload_cse_check_src.  */
static int reload_cse_check_clobbered;
/* The SET_SRC being watched for clobbers.  */
static rtx reload_cse_check_src;
8174
8175 /* See if DEST overlaps with RELOAD_CSE_CHECK_SRC. If it does, set
8176 RELOAD_CSE_CHECK_CLOBBERED. This is called via note_stores. The
8177 second argument, which is passed by note_stores, is ignored. */
8178
8179 static void
8180 reload_cse_check_clobber (dest, ignore)
8181 rtx dest;
8182 rtx ignore;
8183 {
8184 if (reg_overlap_mentioned_p (dest, reload_cse_check_src))
8185 reload_cse_check_clobbered = 1;
8186 }
8187
8188 /* Record the result of a SET instruction. SET is the set pattern.
8189 BODY is the pattern of the insn that it came from. */
8190
static void
reload_cse_record_set (set, body)
     rtx set;
     rtx body;
{
  rtx dest, src, x;
  int dreg, sreg;
  enum machine_mode dest_mode;

  dest = SET_DEST (set);
  src = SET_SRC (set);
  dreg = true_regnum (dest);
  sreg = true_regnum (src);
  dest_mode = GET_MODE (dest);

  /* Some machines don't define AUTO_INC_DEC, but they still use push
     instructions.  We need to catch that case here in order to
     invalidate the stack pointer correctly.  Note that invalidating
     the stack pointer is different from invalidating DEST.  */
  x = dest;
  while (GET_CODE (x) == SUBREG
	 || GET_CODE (x) == ZERO_EXTRACT
	 || GET_CODE (x) == SIGN_EXTRACT
	 || GET_CODE (x) == STRICT_LOW_PART)
    x = XEXP (x, 0);
  if (push_operand (x, GET_MODE (x)))
    {
      reload_cse_invalidate_rtx (stack_pointer_rtx, NULL_RTX);
      reload_cse_invalidate_rtx (dest, NULL_RTX);
      return;
    }

  /* We can only handle an assignment to a register, or a store of a
     register to a memory location.  For other cases, we just clobber
     the destination.  We also have to just clobber if there are side
     effects in SRC or DEST.  */
  if ((dreg < 0 && GET_CODE (dest) != MEM)
      || side_effects_p (src)
      || side_effects_p (dest))
    {
      reload_cse_invalidate_rtx (dest, NULL_RTX);
      return;
    }

#ifdef HAVE_cc0
  /* We don't try to handle values involving CC, because it's a pain
     to keep track of when they have to be invalidated.  */
  if (reg_mentioned_p (cc0_rtx, src)
      || reg_mentioned_p (cc0_rtx, dest))
    {
      reload_cse_invalidate_rtx (dest, NULL_RTX);
      return;
    }
#endif

  /* If BODY is a PARALLEL, then we need to see whether the source of
     SET is clobbered by some other instruction in the PARALLEL.  */
  if (GET_CODE (body) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  rtx x;

	  x = XVECEXP (body, 0, i);
	  if (x == set)
	    continue;

	  /* Communicate with reload_cse_check_clobber through the
	     two file-scope variables it reads and sets.  */
	  reload_cse_check_clobbered = 0;
	  reload_cse_check_src = src;
	  note_stores (x, reload_cse_check_clobber);
	  if (reload_cse_check_clobbered)
	    {
	      reload_cse_invalidate_rtx (dest, NULL_RTX);
	      return;
	    }
	}
    }

  if (dreg >= 0)
    {
      int i;

      /* This is an assignment to a register.  Update the value we
	 have stored for the register.  */
      if (sreg >= 0)
	{
	  rtx x;

	  /* This is a copy from one register to another.  Any values
	     which were valid for SREG are now valid for DREG.  If the
	     mode changes, we use gen_lowpart_common to extract only
	     the part of the value that is copied.  */
	  reg_values[dreg] = 0;
	  for (x = reg_values[sreg]; x; x = XEXP (x, 1))
	    {
	      rtx tmp;

	      if (XEXP (x, 0) == 0)
		continue;
	      if (dest_mode == GET_MODE (XEXP (x, 0)))
		tmp = XEXP (x, 0);
	      else if (GET_MODE_BITSIZE (dest_mode)
		       > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
		/* Can't widen a recorded value; skip it.  */
		continue;
	      else
		tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
	      if (tmp)
		reg_values[dreg] = gen_rtx (EXPR_LIST, dest_mode, tmp,
					    reg_values[dreg]);
	    }
	}
      else
	reg_values[dreg] = gen_rtx (EXPR_LIST, dest_mode, src, NULL_RTX);

      /* We've changed DREG, so invalidate any values held by other
	 registers that depend upon it.  */
      reload_cse_invalidate_regno (dreg, dest_mode, 0);

      /* If this assignment changes more than one hard register,
	 forget anything we know about the others.  */
      for (i = 1; i < HARD_REGNO_NREGS (dreg, dest_mode); i++)
	reg_values[dreg + i] = 0;
    }
  else if (GET_CODE (dest) == MEM)
    {
      /* Invalidate conflicting memory locations.  */
      reload_cse_invalidate_mem (dest);

      /* If we're storing a register to memory, add DEST to the list
	 in REG_VALUES.  */
      if (sreg >= 0 && ! side_effects_p (dest))
	reg_values[sreg] = gen_rtx (EXPR_LIST, dest_mode, dest,
				    reg_values[sreg]);
    }
  else
    {
      /* We should have bailed out earlier.  */
      abort ();
    }
}